CombinedText stringlengths 4 3.42M |
|---|
require "filerary"
require "fileutils"
class FileraryTest < Test::Unit::TestCase
def setup
@test_base_dir = File.join(File.dirname(__FILE__), "tmp")
FileUtils.mkdir_p(@test_base_dir)
@librarian = Filerary::Librarian.new(@test_base_dir)
end
def teardown
FileUtils.rm_rf(@test_base_dir)
end
def test_default_db_dir
home_dir = File.expand_path("~")
librarian = Filerary::Librarian.new
default_db_dir = File.join(home_dir, ".filerary", "db")
assert_equal(default_db_dir, librarian.db_dir)
end
class CollectTest < self
def test_string
assert_equal([__FILE__], @librarian.collect(__FILE__))
end
def test_array
assert_equal([__FILE__], @librarian.collect([__FILE__]))
end
end
class SearchTest < self
def setup
super
@librarian.collect(__FILE__)
end
def test_found
assert_equal([__FILE__], @librarian.search("Librarian"))
end
def test_not_found
assert_equal([], @librarian.search("AAA" * 5))
end
end
class CleanupTest < self
def setup
super
@temp_file = File.join(@test_base_dir, "cleanup.txt")
FileUtils.cp(__FILE__, @temp_file)
@librarian.collect(@temp_file)
end
def teardown
super
FileUtils.rm_f(@temp_file)
end
def test_removed
FileUtils.rm(@temp_file)
assert_equal([@temp_file], @librarian.search("Librarian"))
@librarian.cleanup
assert_equal([], @librarian.search("Librarian"))
end
def test_not_removed
assert_equal([@temp_file], @librarian.search("Librarian"))
@librarian.cleanup
assert_equal([@temp_file], @librarian.search("Librarian"))
end
end
end
test: use meaningful name
require "filerary"
require "fileutils"
class FileraryTest < Test::Unit::TestCase
def setup
@test_dir = File.expand_path(File.dirname(__FILE__))
@test_tmp_dir = File.join(@test_dir, "tmp")
FileUtils.mkdir_p(@test_tmp_dir)
@librarian = Filerary::Librarian.new(@test_tmp_dir)
end
def teardown
FileUtils.rm_rf(@test_tmp_dir)
end
def test_default_db_dir
home_dir = File.expand_path("~")
librarian = Filerary::Librarian.new
default_db_dir = File.join(home_dir, ".filerary", "db")
assert_equal(default_db_dir, librarian.db_dir)
end
class CollectTest < self
def test_argument_is_string
assert_equal([__FILE__], @librarian.collect(__FILE__))
end
def test_argument_is_array
assert_equal([__FILE__], @librarian.collect([__FILE__]))
end
end
class SearchTest < self
def setup
super
@librarian.collect(__FILE__)
end
def test_found
assert_equal([__FILE__], @librarian.search("Librarian"))
end
def test_not_found
assert_equal([], @librarian.search("AAA" * 5))
end
end
class CleanupTest < self
def setup
super
@temp_file = File.join(@test_tmp_dir, "cleanup.txt")
FileUtils.cp(__FILE__, @temp_file)
@librarian.collect(@temp_file)
end
def teardown
super
FileUtils.rm_f(@temp_file)
end
def test_removed
FileUtils.rm(@temp_file)
assert_equal([@temp_file], @librarian.search("Librarian"))
@librarian.cleanup
assert_equal([], @librarian.search("Librarian"))
end
def test_not_removed
assert_equal([@temp_file], @librarian.search("Librarian"))
@librarian.cleanup
assert_equal([@temp_file], @librarian.search("Librarian"))
end
end
end
|
module EncryptedSearchAttributes
VERSION = "1.1.0"
end
Bumps version
module EncryptedSearchAttributes
VERSION = "1.2.0"
end
|
require "faraday/for_test/response"
class Faraday::ForTest::Response
module Formatter
# TODO: actual http version
def request_line
[ env[:method].upcase, env[:url], "HTTP/1.1" ].join(" ") + "\n"
end
def request_headers
env[:request_headers].map {|k, v| "#{k}: #{v}" }.join("\n")
end
# TODO: configuraable
def request_body(pretty = false)
if request_params && !request_params.empty?
if formatter = Faraday::ForTest.configuration.body_formatters.find {|k, v| env[:request_headers]["Content-Type"] =~ /#{k}/ }
formatter.call(request_params)
else
request_params
end
else
""
end + "\n"
end
# TODO: reason-phase
def status_line
[ "HTTP/1.1", status ].join(" ") + "\n"
end
def response_headers
headers.map {|k, v| "#{k}: #{v}" }.join("\n")
end
def response_body(prettyr = false)
if body && !body.empty?
if formatter = Faraday::ForTest.configuration.body_formatters.find {|k, v| headers["content-type"] =~ /#{k}/ }
formatter.last.call(body)
else
body
end
else
""
end + "\n"
end
end
include Formatter
end
same here
require "faraday/for_test/response"
class Faraday::ForTest::Response
module Formatter
# TODO: actual http version
def request_line
[ env[:method].upcase, env[:url], "HTTP/1.1" ].join(" ") + "\n"
end
def request_headers
env[:request_headers].map {|k, v| "#{k}: #{v}" }.join("\n")
end
# TODO: configuraable
def request_body(pretty = false)
if request_params && !request_params.empty?
if formatter = Faraday::ForTest.configuration.body_formatters.find {|k, v| env[:request_headers]["Content-Type"] =~ /#{k}/ }
formatter.last.call(request_params)
else
request_params
end
else
""
end + "\n"
end
# TODO: reason-phase
def status_line
[ "HTTP/1.1", status ].join(" ") + "\n"
end
def response_headers
headers.map {|k, v| "#{k}: #{v}" }.join("\n")
end
def response_body(prettyr = false)
if body && !body.empty?
if formatter = Faraday::ForTest.configuration.body_formatters.find {|k, v| headers["content-type"] =~ /#{k}/ }
formatter.last.call(body)
else
body
end
else
""
end + "\n"
end
end
include Formatter
end
|
require 'plist'
module Fastlane
module Actions
class UpdateUrlSchemesAction < Action
def self.run(params)
path = params[:path]
url_schemes = params[:url_schemes]
hash = Plist.parse_xml(path)
hash['CFBundleURLTypes'].first['CFBundleURLSchemes'] = url_schemes
File.write(path, hash.to_plist)
end
def self.description
'Updates the URL schemes in the given Info.plist'
end
def self.available_options
[
FastlaneCore::ConfigItem.new(
key: :path,
env_name: 'FL_UPDATE_URL_SCHEMES_PATH',
description: 'The Plist file\'s path',
is_string: true,
verify_block: verify_path_block
),
FastlaneCore::ConfigItem.new(
key: :url_schemes,
env_name: "FL_UPDATE_URL_SCHEMES_URL_SCHEMES",
description: 'The new URL schemes',
is_string: false,
verify_block: verify_url_schemes_block
)
]
end
def self.output
[]
end
def self.authors
['kmikael']
end
def self.is_supported?(platform)
[:ios, :mac].include? platform
end
def self.verify_path_block
lambda do |path|
raise "Could not find plist at path '#{path}'".red unless File.exist?(path)
end
end
def self.verify_url_schemes_block
lambda do |url_schemes|
string = "The URL schemes must be an array of strings, got '#{url_schemes}'.".red
raise string unless url_schemes.kind_of?(Array)
url_schemes.each do |url_scheme|
raise string unless url_scheme.kind_of?(String)
end
end
end
end
end
end
Improved options for update_url_schemes
require 'plist'
module Fastlane
module Actions
class UpdateUrlSchemesAction < Action
def self.run(params)
path = params[:path]
url_schemes = params[:url_schemes]
hash = Plist.parse_xml(path)
hash['CFBundleURLTypes'].first['CFBundleURLSchemes'] = url_schemes
File.write(path, hash.to_plist)
end
def self.description
'Updates the URL schemes in the given Info.plist'
end
def self.available_options
[
FastlaneCore::ConfigItem.new(
key: :path,
env_name: 'FL_UPDATE_URL_SCHEMES_PATH',
description: 'The Plist file\'s path',
is_string: true,
optional: false,
verify_block: proc do |path|
raise "Could not find plist at path '#{path}'".red unless File.exist?(path)
end
),
FastlaneCore::ConfigItem.new(
key: :url_schemes,
env_name: "FL_UPDATE_URL_SCHEMES_SCHEMES",
description: 'The new URL schemes',
is_string: false,
optional: false,
verify_block: proc do |url_schemes|
string = "The URL schemes must be an array of strings, got '#{url_schemes}'.".red
raise string unless url_schemes.kind_of?(Array)
url_schemes.each do |url_scheme|
raise string unless url_scheme.kind_of?(String)
end
end
)
]
end
def self.output
[]
end
def self.authors
['kmikael']
end
def self.is_supported?(platform)
[:ios, :mac].include? platform
end
end
end
end
|
require 'fog/compute/models/server'
require 'fog/openstack/models/compute/metadata'
module Fog
module Compute
class OpenStack
class Server < Fog::Compute::Server
identity :id
attribute :instance_name, :aliases => 'OS-EXT-SRV-ATTR:instance_name'
attribute :addresses
attribute :flavor
attribute :host_id, :aliases => 'hostId'
attribute :image
attribute :metadata
attribute :links
attribute :name
attribute :personality
attribute :progress
attribute :accessIPv4
attribute :accessIPv6
attribute :availability_zone
attribute :user_data_encoded
attribute :state, :aliases => 'status'
attribute :created, :type => :time
attribute :updated, :type => :time
attribute :tenant_id
attribute :user_id
attribute :key_name
attribute :fault
attribute :os_dcf_disk_config, :aliases => 'OS-DCF:diskConfig'
attribute :os_ext_srv_attr_host, :aliases => 'OS-EXT-SRV-ATTR:host'
attribute :os_ext_srv_attr_hypervisor_hostname, :aliases => 'OS-EXT-SRV-ATTR:hypervisor_hostname'
attribute :os_ext_srv_attr_instance_name, :aliases => 'OS-EXT-SRV-ATTR:instance_name'
attribute :os_ext_sts_power_state, :aliases => 'OS-EXT-STS:power_state'
attribute :os_ext_sts_task_state, :aliases => 'OS-EXT-STS:task_state'
attribute :os_ext_sts_vm_state, :aliases => 'OS-EXT-STS:vm_state'
attr_reader :password
attr_writer :image_ref, :flavor_ref, :nics, :os_scheduler_hints
attr_accessor :block_device_mapping
def initialize(attributes={})
# Old 'connection' is renamed as service and should be used instead
prepare_service_value(attributes)
self.security_groups = attributes.delete(:security_groups)
self.min_count = attributes.delete(:min_count)
self.max_count = attributes.delete(:max_count)
self.nics = attributes.delete(:nics)
self.os_scheduler_hints = attributes.delete(:os_scheduler_hints)
self.block_device_mapping = attributes.delete(:block_device_mapping)
super
end
def metadata
@metadata ||= begin
Fog::Compute::OpenStack::Metadata.new({
:service => service,
:parent => self
})
end
end
def metadata=(new_metadata={})
return unless new_metadata
metas = []
new_metadata.each_pair {|k,v| metas << {"key" => k, "value" => v} }
@metadata = metadata.load(metas)
end
def user_data=(ascii_userdata)
self.user_data_encoded = [ascii_userdata].pack('m')
end
def destroy
requires :id
service.delete_server(id)
true
end
def images
requires :id
service.images(:server => self)
end
def all_addresses
# currently openstack API does not tell us what is a floating ip vs a fixed ip for the vm listing,
# we fall back to get all addresses and filter sadly.
@all_addresses ||= service.list_all_addresses.body["floating_ips"].select{|data| data['instance_id'] == id}
end
def reload
@all_addresses = nil
super
end
# returns all ip_addresses for a given instance
# this includes both the fixed ip(s) and the floating ip(s)
def ip_addresses
addresses.values.flatten.map{|x| x['addr']}
end
def floating_ip_addresses
all_addresses.map{|addr| addr["ip"]}
end
alias_method :public_ip_addresses, :floating_ip_addresses
def floating_ip_address
floating_ip_addresses.first
end
alias_method :public_ip_address, :floating_ip_address
def private_ip_addresses
ip_addresses - floating_ip_addresses
end
def private_ip_address
private_ip_addresses.first
end
def image_ref
@image_ref
end
def image_ref=(new_image_ref)
@image_ref = new_image_ref
end
def flavor_ref
@flavor_ref
end
def flavor_ref=(new_flavor_ref)
@flavor_ref = new_flavor_ref
end
def ready?
self.state == 'ACTIVE'
end
def change_password(admin_password)
requires :id
service.change_server_password(id, admin_password)
true
end
def rebuild(image_ref, name, admin_pass=nil, metadata=nil, personality=nil)
requires :id
service.rebuild_server(id, image_ref, name, admin_pass, metadata, personality)
true
end
def resize(flavor_ref)
requires :id
service.resize_server(id, flavor_ref)
true
end
def revert_resize
requires :id
service.revert_resize_server(id)
true
end
def confirm_resize
requires :id
service.confirm_resize_server(id)
true
end
def security_groups
requires :id
groups = service.list_security_groups(id).body['security_groups']
groups.map do |group|
sg = Fog::Compute::OpenStack::SecurityGroup.new group
sg.connection = service
sg
end
end
def security_groups=(new_security_groups)
@security_groups = new_security_groups
end
def reboot(type = 'SOFT')
requires :id
service.reboot_server(id, type)
true
end
def create_image(name, metadata={})
requires :id
service.create_image(id, name, metadata)
end
def console(log_length = nil)
requires :id
service.get_console_output(id, log_length)
end
def migrate
requires :id
service.migrate_server(id)
end
def live_migrate(host, block_migration, disk_over_commit)
requires :id
service.live_migrate_server(id, host, block_migration, disk_over_commit)
end
def associate_address(floating_ip)
requires :id
service.associate_address id, floating_ip
end
def disassociate_address(floating_ip)
requires :id
service.disassociate_address id, floating_ip
end
def reset_vm_state(vm_state)
requires :id
service.reset_server_state id, vm_state
end
def min_count=(new_min_count)
@min_count = new_min_count
end
def max_count=(new_max_count)
@max_count = new_max_count
end
def networks
service.networks(:server => self)
end
def volumes
requires :id
service.volumes.find_all do |vol|
vol.attachments.find { |attachment| attachment["serverId"] == id }
end
end
def volume_attachments
requires :id
service.get_server_volumes(id).body['volumeAttachments']
end
def attach_volume(volume_id, device_name)
requires :id
service.attach_volume(volume_id, id, device_name)
true
end
def detach_volume(volume_id)
requires :id
service.detach_volume(id, volume_id)
true
end
def save
raise Fog::Errors::Error.new('Resaving an existing object may create a duplicate') if persisted?
requires :flavor_ref, :name
requires_one :image_ref, :block_device_mapping
options = {
'personality' => personality,
'accessIPv4' => accessIPv4,
'accessIPv6' => accessIPv6,
'availability_zone' => availability_zone,
'user_data' => user_data_encoded,
'key_name' => key_name,
'security_groups' => @security_groups,
'min_count' => @min_count,
'max_count' => @max_count,
'nics' => @nics,
'os:scheduler_hints' => @os_scheduler_hints,
'block_device_mapping' => @block_device_mapping
}
options['metadata'] = metadata.to_hash unless @metadata.nil?
options = options.reject {|key, value| value.nil?}
data = service.create_server(name, image_ref, flavor_ref, options)
merge_attributes(data.body['server'])
true
end
def setup(credentials = {})
requires :public_ip_address, :identity, :public_key, :username
Fog::SSH.new(public_ip_address, username, credentials).run([
%{mkdir .ssh},
%{echo "#{public_key}" >> ~/.ssh/authorized_keys},
%{passwd -l #{username}},
%{echo "#{Fog::JSON.encode(attributes)}" >> ~/attributes.json},
%{echo "#{Fog::JSON.encode(metadata)}" >> ~/metadata.json}
])
rescue Errno::ECONNREFUSED
sleep(1)
retry
end
private
def adminPass=(new_admin_pass)
@password = new_admin_pass
end
end
end
end
end
Add alias for openstack availablilty zone server attribute
The availability zone attribute in server API messages is actually
OS-EXT-AZ:availability_zone. Adding the alias will correctly map the
availability zone values into fog responses.
require 'fog/compute/models/server'
require 'fog/openstack/models/compute/metadata'
module Fog
module Compute
class OpenStack
class Server < Fog::Compute::Server
identity :id
attribute :instance_name, :aliases => 'OS-EXT-SRV-ATTR:instance_name'
attribute :addresses
attribute :flavor
attribute :host_id, :aliases => 'hostId'
attribute :image
attribute :metadata
attribute :links
attribute :name
attribute :personality
attribute :progress
attribute :accessIPv4
attribute :accessIPv6
attribute :availability_zone, :aliases => 'OS-EXT-AZ:availability_zone'
attribute :user_data_encoded
attribute :state, :aliases => 'status'
attribute :created, :type => :time
attribute :updated, :type => :time
attribute :tenant_id
attribute :user_id
attribute :key_name
attribute :fault
attribute :os_dcf_disk_config, :aliases => 'OS-DCF:diskConfig'
attribute :os_ext_srv_attr_host, :aliases => 'OS-EXT-SRV-ATTR:host'
attribute :os_ext_srv_attr_hypervisor_hostname, :aliases => 'OS-EXT-SRV-ATTR:hypervisor_hostname'
attribute :os_ext_srv_attr_instance_name, :aliases => 'OS-EXT-SRV-ATTR:instance_name'
attribute :os_ext_sts_power_state, :aliases => 'OS-EXT-STS:power_state'
attribute :os_ext_sts_task_state, :aliases => 'OS-EXT-STS:task_state'
attribute :os_ext_sts_vm_state, :aliases => 'OS-EXT-STS:vm_state'
attr_reader :password
attr_writer :image_ref, :flavor_ref, :nics, :os_scheduler_hints
attr_accessor :block_device_mapping
def initialize(attributes={})
# Old 'connection' is renamed as service and should be used instead
prepare_service_value(attributes)
self.security_groups = attributes.delete(:security_groups)
self.min_count = attributes.delete(:min_count)
self.max_count = attributes.delete(:max_count)
self.nics = attributes.delete(:nics)
self.os_scheduler_hints = attributes.delete(:os_scheduler_hints)
self.block_device_mapping = attributes.delete(:block_device_mapping)
super
end
def metadata
@metadata ||= begin
Fog::Compute::OpenStack::Metadata.new({
:service => service,
:parent => self
})
end
end
def metadata=(new_metadata={})
return unless new_metadata
metas = []
new_metadata.each_pair {|k,v| metas << {"key" => k, "value" => v} }
@metadata = metadata.load(metas)
end
def user_data=(ascii_userdata)
self.user_data_encoded = [ascii_userdata].pack('m')
end
def destroy
requires :id
service.delete_server(id)
true
end
def images
requires :id
service.images(:server => self)
end
def all_addresses
# currently openstack API does not tell us what is a floating ip vs a fixed ip for the vm listing,
# we fall back to get all addresses and filter sadly.
@all_addresses ||= service.list_all_addresses.body["floating_ips"].select{|data| data['instance_id'] == id}
end
def reload
@all_addresses = nil
super
end
# returns all ip_addresses for a given instance
# this includes both the fixed ip(s) and the floating ip(s)
def ip_addresses
addresses.values.flatten.map{|x| x['addr']}
end
def floating_ip_addresses
all_addresses.map{|addr| addr["ip"]}
end
alias_method :public_ip_addresses, :floating_ip_addresses
def floating_ip_address
floating_ip_addresses.first
end
alias_method :public_ip_address, :floating_ip_address
def private_ip_addresses
ip_addresses - floating_ip_addresses
end
def private_ip_address
private_ip_addresses.first
end
def image_ref
@image_ref
end
def image_ref=(new_image_ref)
@image_ref = new_image_ref
end
def flavor_ref
@flavor_ref
end
def flavor_ref=(new_flavor_ref)
@flavor_ref = new_flavor_ref
end
def ready?
self.state == 'ACTIVE'
end
def change_password(admin_password)
requires :id
service.change_server_password(id, admin_password)
true
end
def rebuild(image_ref, name, admin_pass=nil, metadata=nil, personality=nil)
requires :id
service.rebuild_server(id, image_ref, name, admin_pass, metadata, personality)
true
end
def resize(flavor_ref)
requires :id
service.resize_server(id, flavor_ref)
true
end
def revert_resize
requires :id
service.revert_resize_server(id)
true
end
def confirm_resize
requires :id
service.confirm_resize_server(id)
true
end
def security_groups
requires :id
groups = service.list_security_groups(id).body['security_groups']
groups.map do |group|
sg = Fog::Compute::OpenStack::SecurityGroup.new group
sg.connection = service
sg
end
end
def security_groups=(new_security_groups)
@security_groups = new_security_groups
end
def reboot(type = 'SOFT')
requires :id
service.reboot_server(id, type)
true
end
def create_image(name, metadata={})
requires :id
service.create_image(id, name, metadata)
end
def console(log_length = nil)
requires :id
service.get_console_output(id, log_length)
end
def migrate
requires :id
service.migrate_server(id)
end
def live_migrate(host, block_migration, disk_over_commit)
requires :id
service.live_migrate_server(id, host, block_migration, disk_over_commit)
end
def associate_address(floating_ip)
requires :id
service.associate_address id, floating_ip
end
def disassociate_address(floating_ip)
requires :id
service.disassociate_address id, floating_ip
end
def reset_vm_state(vm_state)
requires :id
service.reset_server_state id, vm_state
end
def min_count=(new_min_count)
@min_count = new_min_count
end
def max_count=(new_max_count)
@max_count = new_max_count
end
def networks
service.networks(:server => self)
end
def volumes
requires :id
service.volumes.find_all do |vol|
vol.attachments.find { |attachment| attachment["serverId"] == id }
end
end
def volume_attachments
requires :id
service.get_server_volumes(id).body['volumeAttachments']
end
def attach_volume(volume_id, device_name)
requires :id
service.attach_volume(volume_id, id, device_name)
true
end
def detach_volume(volume_id)
requires :id
service.detach_volume(id, volume_id)
true
end
def save
raise Fog::Errors::Error.new('Resaving an existing object may create a duplicate') if persisted?
requires :flavor_ref, :name
requires_one :image_ref, :block_device_mapping
options = {
'personality' => personality,
'accessIPv4' => accessIPv4,
'accessIPv6' => accessIPv6,
'availability_zone' => availability_zone,
'user_data' => user_data_encoded,
'key_name' => key_name,
'security_groups' => @security_groups,
'min_count' => @min_count,
'max_count' => @max_count,
'nics' => @nics,
'os:scheduler_hints' => @os_scheduler_hints,
'block_device_mapping' => @block_device_mapping
}
options['metadata'] = metadata.to_hash unless @metadata.nil?
options = options.reject {|key, value| value.nil?}
data = service.create_server(name, image_ref, flavor_ref, options)
merge_attributes(data.body['server'])
true
end
def setup(credentials = {})
requires :public_ip_address, :identity, :public_key, :username
Fog::SSH.new(public_ip_address, username, credentials).run([
%{mkdir .ssh},
%{echo "#{public_key}" >> ~/.ssh/authorized_keys},
%{passwd -l #{username}},
%{echo "#{Fog::JSON.encode(attributes)}" >> ~/attributes.json},
%{echo "#{Fog::JSON.encode(metadata)}" >> ~/metadata.json}
])
rescue Errno::ECONNREFUSED
sleep(1)
retry
end
private
def adminPass=(new_admin_pass)
@password = new_admin_pass
end
end
end
end
end
|
#
# Author:: Matt Eldridge (<matt.eldridge@us.ibm.com>)
# © Copyright IBM Corporation 2014.
#
# LICENSE: MIT (http://opensource.org/licenses/MIT)
#
require 'fog/compute/models/server'
module Fog
module Compute
class Softlayer
class Server < Fog::Compute::Server
identity :id, :type => :integer
attribute :name, :aliases => 'hostname'
attribute :domain
attribute :fqdn, :aliases => 'fullyQualifiedDomainName'
attribute :cpu, :aliases => ['startCpus', 'processorCoreAmount']
attribute :ram, :aliases => ['maxMemory', 'memory']
attribute :disk, :aliases => ['blockDevices','hardDrives']
attribute :private_ip_address, :aliases => 'primaryBackendIpAddress'
attribute :public_ip_address, :aliases => 'primaryIpAddress'
attribute :flavor_id
attribute :bare_metal, :type => :boolean
attribute :os_code
attribute :image_id
attribute :ephemeral_storage, :aliases => 'localDiskFlag'
attribute :key_pairs, :aliases => 'sshKeys'
attribute :network_components
# Times
attribute :created_at, :aliases => ['createDate', 'provisionDate'], :type => :time
attribute :last_verified_date, :aliases => 'lastVerifiedDate', :type => :time
attribute :metric_poll_date, :aliases => 'metricPollDate', :type => :time
attribute :modify_date, :aliases => 'modifyDate', :type => :time
# Metadata
attribute :account_id, :aliases => 'accountId', :type => :integer
attribute :datacenter, :aliases => 'datacenter'
attribute :single_tenant, :aliases => 'dedicatedAccountHostOnlyFlag'
attribute :global_identifier, :aliases => 'globalIdentifier'
attribute :hourly_billing_flag, :aliases => 'hourlyBillingFlag'
attribute :tags, :aliases => 'tagReferences'
attribute :private_network_only, :aliases => 'privateNetworkOnlyFlag'
attribute :user_data, :aliases => 'userData'
attribute :uid, :aliases => 'globalIdentifier'
attribute :provision_script, :aliases => 'postInstallScriptUri'
def initialize(attributes = {})
# Forces every request inject bare_metal parameter
raise Exception if attributes[:collection].nil? and attributes['bare_metal'].nil?
super(attributes)
set_defaults
end
def add_tags(tags)
requires :id
raise ArgumentError, "Tags argument for #{self.class.name}##{__method__} must be Array." unless tags.is_a?(Array)
tags.each do |tag|
service.tags.new(:resource_id => self.id, :name => tag).save
end
self.reload
true
end
def bare_metal?
bare_metal
end
def bare_metal
@bare_metal
end
def datacenter=(name)
name = name['name'] if name.is_a?(Hash)
attributes[:datacenter] = { :name => name }
end
def datacenter
attributes[:datacenter][:name] unless attributes[:datacenter].nil?
end
def delete_tags(tags)
requires :id
raise ArgumentError, "Tags argument for #{self.class.name}##{__method__} must be Array." unless tags.is_a?(Array)
tags.each do |tag|
service.tags.new(:resource_id => self.id, :name => tag).destroy
end
self.reload
true
end
def destroy
requires :id
request = bare_metal? ? :delete_bare_metal_server : :delete_vm
response = service.send(request, self.id)
response.body
end
def dns_name
fqdn
end
def image_id=(uuid)
attributes[:image_id] = {:globalIdentifier => uuid}
end
def image_id
attributes[:image_id][:globalIdentifier] unless attributes[:image_id].nil?
end
def name=(set)
attributes[:hostname] = set
end
def name
attributes[:hostname]
end
def pre_save
extract_flavor
validate_attributes
if self.vlan
attributes[:vlan] = { :networkVlan => { :id => self.vlan.id } }
end
if self.private_vlan
attributes[:private_vlan] = { :networkVlan => { :id => self.private_vlan.id } }
end
if self.key_pairs
attributes[:key_pairs].map! { |key| { :id => key.id } }
end
if self.network_components
self.network_components = self.network_components.map do |component|
component[:maxSpeed] = component.delete(:speed) if component[:speed]
component[:maxSpeed] = component.delete(:max_speed) if component[:max_speed]
component
end
end
remap_attributes(attributes, attributes_mapping)
clean_attributes
end
def private_ip # maintain backward compatibility with <0.3.13
private_ip_address
end
def public_ip # maintain backward compatibility with <0.3.13
public_ip_address
end
def os_code
attributes['operatingSystem']['softwareLicense']['softwareDescription']['referenceCode'] if attributes['operatingSystem']
end
def private_vlan
attributes[:private_vlan] ||= _get_private_vlan
end
def private_vlan=(value)
unless value.is_a?(Integer) or value.is_a?(Fog::Network::Softlayer::Network)
raise ArgumentError, "vlan argument for #{self.class.name}##{__method__} must be Integer or Fog::Network::Softlayer::Network."
end
value = network_connection.networks.get(value) if value.is_a?(Integer)
attributes[:private_vlan] = value
end
# reload the OS on a server (method name reload was already taken)
def relaunch!
requires :id
body = [ "FORCE", {}]
body[1][:sshKeyIds] = key_pairs.map {|kp| kp.id} unless key_pairs.empty?
type = bare_metal? ? :hardware_server : :virtual_guest
status = service.request(type, "#{id}/reloadOperatingSystem", :body => body, :http_method => :post).status
wait_for { not ready? } # block until the relaunch has begun
[200, 201].include?(status)
end
def key_pairs
attributes[:key_pairs]
end
def key_pairs=(keys)
raise ArgumentError, "Argument #{local_variables.first.to_s} for #{self.class.name}##{__method__} must be Array." unless keys.is_a?(Array)
attributes[:key_pairs] = []
keys.map do |key|
## This was nice but causing an intolerable number of requests on an account with lots of keys.
## ToDo: something better...
#key = self.symbolize_keys(key) if key.is_a?(Hash)
#unless key.is_a?(Fog::Compute::Softlayer::KeyPair) or (key.is_a?(Hash) and key[:id])
# raise ArgumentError, "Elements of keys array for #{self.class.name}##{__method__} must be a Hash with key 'id', or Fog::Compute::Softlayer::KeyPair"
#end
#key = service.key_pairs.get(key[:id]) unless key.is_a?(Fog::Compute::Softlayer::KeyPair)
attributes[:key_pairs] << key
end
end
def vlan
attributes[:vlan] ||= _get_vlan
end
def vlan=(value)
unless value.is_a?(Integer) or value.is_a?(Fog::Network::Softlayer::Network)
raise ArgumentError, "vlan argument for #{self.class.name}##{__method__} must be Integer or Fog::Network::Softlayer::Network."
end
value = network_connection.networks.get(value) if value.is_a?(Integer)
attributes[:vlan] = value
end
def ram=(set)
if set.is_a?(Array) and set.first['hardwareComponentModel']
set = 1024 * set.first['hardwareComponentModel']['capacity'].to_i
end
attributes[:ram] = set
end
# @params value [String]
def user_data=(value)
attributes[:user_data] = [{'value' => value}]
end
def user_data
attributes[:user_data]
end
def provision_script=(value)
attributes[:provision_script] = value
end
def provision_script
attributes[:provision_script]
end
def network_components
if id
(public_network_components << private_network_components).flatten
else
attributes[:network_components]
end
end
def public_network_components
if attributes['frontendNetworkComponents']
attributes['frontendNetworkComponents'].map { |n| Fog::Compute::Softlayer::NetworkComponent.new(n) }
else
[]
end
end
def private_network_components
if attributes['backendNetworkComponents']
attributes['backendNetworkComponents'].map { |n| Fog::Compute::Softlayer::NetworkComponent.new(n) }
else
[]
end
end
def ready?
begin
if bare_metal?
state == "on"
else
state == "Running"
end
rescue Excon::Errors::InternalServerError => e
false
end
end
def reboot(use_hard_reboot = true)
# requires :id # TODO: debug why this breaks the tests on bare metal and uncomment this
if bare_metal?
service.reboot_bare_metal_server(id, use_hard_reboot)
else
service.reboot_vm(id, use_hard_reboot)
end
true
end
def ssh_password
requires :id
service_path = bare_metal? ? :hardware_server : :virtual_guest
@sshpass ||= service.request(service_path, id, :query => 'objectMask=mask[id,operatingSystem.passwords[password]]').body
@sshpass['operatingSystem']['passwords'][0]['password'] unless @sshpass['operatingSystem'].nil? or @sshpass['operatingSystem']['passwords'].empty?
end
def snapshot
# TODO: implement
end
def start
# requires :id # TODO: debug why this breaks the tests on bare metal and uncomment this
if bare_metal?
service.power_on_bare_metal_server(id)
else
service.power_on_vm(id)
end
true
end
# Hard power off
def stop
# requires :id # TODO: debug why this breaks the tests on bare metal and uncomment this
if bare_metal?
service.power_off_bare_metal_server(id)
else
service.power_off_vm(id, true)
end
true
end
# Soft power off
def shutdown
# requires :id # TODO: debug why this breaks the tests on bare metal and uncomment this
if bare_metal?
raise Fog::Errors::Error.new('Shutdown not supported on baremetal servers. Use #stop.')
else
service.power_off_vm(id, false)
end
true
end
def state
if bare_metal?
service.request(:hardware_server, "#{id}/getServerPowerState").body
else
service.request(:virtual_guest, "#{id}/getPowerState").body['name']
end
end
# Creates server
# * requires attributes: :name, :domain, and :flavor_id OR (:cpu_count && :ram && :disks)
#
# @note You should use servers.create to create servers instead calling this method directly
#
# * State Transitions
# * BUILD -> ACTIVE
# * BUILD -> ERROR (on error)
def save
raise Fog::Errors::Error.new('Resaving an existing object may create a duplicate') if persisted?
copy = self.dup
copy.pre_save
data = if bare_metal?
service.create_bare_metal_server(copy.attributes).body
else
service.create_vm(copy.attributes).body.first
end
data.delete("bare_metal")
merge_attributes(data)
true
end
def tags
attributes[:tags].map { |i| i['tag']['name'] if i['tag'] }.compact if attributes[:tags]
end
def get_active_tickets
return service.get_bare_metal_active_tickets(id).body if bare_metal?
service.get_virtual_guest_active_tickets(id).body
end
def get_users
return service.get_bare_metal_users(id).body if bare_metal?
service.get_virtual_guest_users(id).body
end
def get_upgrade_options
return service.get_bare_metal_upgrade_item_prices(id).body if bare_metal?
service.get_virtual_guest_upgrade_item_prices(id).body
end
# Place an upgrade order (e.g. :cpu, :ram) for this server.
#
# @param update_attributes [Hash] upgrade values keyed by price category,
#   plus an optional :time / :maintenance_window for scheduling.
# @return [Hash] the placed order, as returned by the product API.
# @raise [ArgumentError] when update_attributes is nil.
def update(update_attributes)
  raise ArgumentError if update_attributes.nil?
  product_connection
  # BUG FIX: capture the maintenance window BEFORE the price lookup --
  # get_item_prices_id deletes :time and :maintenance_window from the
  # hash, so reading them afterwards (as the old code did) was always nil.
  window = update_attributes[:time] || update_attributes[:maintenance_window]
  prices = get_item_prices_id(update_attributes)
  order = generate_upgrade_order(prices, window)
  @product_conn.place_order(order).body
end
# Build (but do not place) the product order for this server's attributes.
# Works on a dup because pre_save destructively remaps attributes.
def generate_order_template
copy = self.dup
copy.pre_save
# NOTE(review): branches on self's bare_metal?; the later revision of this
# method uses copy.bare_metal? instead -- confirm which is intended here.
return service.generate_bare_metal_order_template(copy.attributes).body if bare_metal?
service.generate_virtual_guest_order_template(copy.attributes).body
end
private
# Lazily build a Fog::Network connection sharing this service's credentials.
# NOTE(review): reaches into the service's internals with
# instance_variable_get; a public accessor would be less brittle.
def network_connection
@network_conn ||= Fog::Network.new(
:provider => :softlayer,
:softlayer_username => service.instance_variable_get(:@softlayer_username),
:softlayer_api_key => service.instance_variable_get(:@softlayer_api_key)
)
end
# Lazily build a Fog::Softlayer::Product connection for placing orders.
# In mock mode the credentials live under @credentials on the service, so a
# mock connection is assigned on EVERY call; the memoized ||= branch below
# is then a no-op because @product_conn is already set.
def product_connection
if Fog.mock?
@product_conn = Fog::Softlayer::Product.new(
:provider => :softlayer,
:softlayer_username => service.instance_variable_get(:@credentials)[:username],
:softlayer_api_key => service.instance_variable_get(:@credentials)[:api_key]
)
end
@product_conn ||= Fog::Softlayer::Product.new(
:provider => :softlayer,
:softlayer_username => service.instance_variable_get(:@softlayer_username),
:softlayer_api_key => service.instance_variable_get(:@softlayer_api_key)
)
end
# Fetch the network (VLAN) behind this server's private interface.
# Returns nil when the server has no id yet.
def _get_private_vlan
if self.id
# NOTE(review): the hardware path uses a snake_case endpoint
# ("get_private_vlan") while other requests in this model use camelCase
# (e.g. "getPowerState") -- confirm against the SoftLayer API.
vlan_id = if bare_metal?
service.request(:hardware_server, "#{self.id}/get_private_vlan").body['id']
else
service.request(:virtual_guest, self.id, :query => 'objectMask=primaryBackendNetworkComponent.networkVlan').body['primaryBackendNetworkComponent']['networkVlan']['id']
end
network_connection.networks.get(vlan_id)
end
end
# Fetch the network (VLAN) behind this server's public interface.
# Returns nil when the server has no id yet.
def _get_vlan
if self.id
# NOTE(review): snake_case endpoint ("get_public_vlan") vs camelCase used
# elsewhere -- confirm against the SoftLayer API.
vlan_id = if bare_metal?
service.request(:hardware_server, "#{self.id}/get_public_vlan").body['id']
else
service.request(:virtual_guest, self.id, :query => 'objectMask=primaryNetworkComponent.networkVlan').body['primaryNetworkComponent']['networkVlan']['id']
end
network_connection.networks.get(vlan_id)
end
end
##
# Generate mapping for use with remap_attributes.
# Maps fog attribute names to the names the SoftLayer API expects; the
# CPU/RAM/disk keys differ between bare metal and virtual guests.
def attributes_mapping
  mapping = {
    :hourly_billing_flag => :hourlyBillingFlag,
    :os_code => :operatingSystemReferenceCode,
    :vlan => :primaryNetworkComponent,
    :private_vlan => :primaryBackendNetworkComponent,
    :key_pairs => :sshKeys,
    :private_network_only => :privateNetworkOnlyFlag,
    :user_data => :userData,
    :provision_script => :postInstallScriptUri,
    :network_components => :networkComponents
  }
  if bare_metal?
    mapping.merge(
      :cpu => :processorCoreAmount,
      :ram => :memoryCapacity,
      :disk => :hardDrives,
      :bare_metal => :bareMetalInstanceFlag
    )
  else
    mapping.merge(
      :cpu => :startCpus,
      :ram => :maxMemory,
      :disk => :blockDevices,
      :image_id => :blockDeviceTemplateGroup,
      :ephemeral_storage => :localDiskFlag
    )
  end
end
# Set the bare metal flag exactly once; any later write raises.
# Accepts assorted falsy spellings (false, 'false', 0, nil, '').
# NOTE(review): raising bare Exception is unconventional -- a StandardError
# subclass would be friendlier to callers; kept as-is to preserve behavior.
def bare_metal=(set)
  raise Exception, "Bare metal flag has already been set" unless @bare_metal.nil?
  @bare_metal = ![false, 'false', 0, nil, ''].include?(set)
end
##
# Remove model attributes that aren't expected by the SoftLayer API.
def clean_attributes
  [:bare_metal, :flavor_id, :ephemeral_storage].each do |key|
    attributes.delete(key)
  end
end
##
# Expand a "flavor" into cpu, ram, and disk attributes.
# No-op when no :flavor_id is set.
#
# @raise [Fog::Errors::Error] when the flavor id is not recognized.
def extract_flavor
  if attributes[:flavor_id]
    flavor = @service.flavors.get(attributes[:flavor_id])
    # BUG FIX: the old code built the error without raising it
    # ("flavor.nil? and Fog::Errors::Error.new(...)"), so an unknown
    # flavor fell through and crashed below with NoMethodError on nil.
    raise Fog::Errors::Error.new("Unrecognized flavor in #{self.class}##{__method__}") if flavor.nil?
    attributes[:cpu] = flavor.cpu
    attributes[:ram] = flavor.ram
    attributes[:disk] = flavor.disk unless attributes[:image_id]
    if bare_metal?
      # Bare metal only offers 250 or 500 capacity primary disks, and RAM
      # is specified in GB rather than MB.
      value = flavor.disk.first['diskImage']['capacity'] < 500 ? 250 : 500
      attributes[:disk] = [{'capacity'=>value}]
      attributes[:ram] = attributes[:ram] / 1024 if attributes[:ram] > 64
    end
  end
end
# Ensure the minimum attribute set for provisioning is present, using
# fog's requires/requires_one validators. Bare metal cannot boot images.
def validate_attributes
  requires :name, :domain, :cpu, :ram, :datacenter
  requires_one :os_code, :image_id
  requires_one :disk, :image_id
  if bare_metal? && image_id
    raise ArgumentError, "Bare Metal Cloud does not support booting from Image"
  end
end
# Fill in defaults: hourly billing on, ephemeral storage off, and the
# service-configured default domain/datacenter when none was given.
def set_defaults
attributes[:hourly_billing_flag] = true if attributes[:hourly_billing_flag].nil?
attributes[:ephemeral_storage] = false if attributes[:ephemeral_storage].nil?
attributes[:domain] = service.softlayer_default_domain if service.softlayer_default_domain and attributes[:domain].nil?
self.datacenter = service.softlayer_default_datacenter if service.softlayer_default_datacenter and attributes[:datacenter].nil?
end
# Find the item price id for +value+ within the given price +category+.
#
# @return [Object, String] the matching price's "id", or "" when no match.
def get_item_prices_id_by_value(item_price_array, category, value)
  in_category = item_price_array.select do |price|
    price["categories"].any? { |cat| cat["categoryCode"] == category }
  end
  match = in_category.find { |price| price['item']['capacity'] == value.to_s }
  match ? match["id"] : ""
end
# Translate upgrade attributes (e.g. :ram => 8) into SoftLayer price ids.
# Scheduling keys (:time, :maintenance_window) are not price categories
# and are skipped.
#
# NOTE: unlike the previous version this no longer mutates the caller's
# hash (it used Hash#delete), which silently discarded the maintenance
# window before #update could read it.
#
# @return [Array<Hash>] one {:id => price_id} hash per upgrade category.
def get_item_prices_id(update_attributes)
  item_price_array = get_upgrade_options
  update_attributes.reject { |key, _| key == :time || key == :maintenance_window }
                   .map { |key, value| { :id => get_item_prices_id_by_value(item_price_array, key.to_s, value) } }
end
# Order container for a bare metal server upgrade.
#
# @param value maintenance window id forwarded into the order properties.
def bm_upgrade_order_template(value)
  {
    :complexType => 'SoftLayer_Container_Product_Order_Hardware_Server_Upgrade',
    :hardware => [{ :id => id }],
    :properties => [{ :name => 'MAINTENANCE_WINDOW_ID', :value => value }]
  }
end
# Order container for a virtual guest upgrade.
#
# @param time [Time, String, nil] requested maintenance window; defaults
#   to "now" when nil or blank.
def vm_upgrade_order_template(time)
  # BUG FIX: the old code called time.empty? and then time.iso8601 on the
  # same object -- no single type supports both (String has no #iso8601,
  # Time has no #empty?), so any non-nil argument crashed. Accept either:
  # a Time is formatted, a String is passed through.
  window = if time.nil? || (time.respond_to?(:empty?) && time.empty?)
             Time.now.iso8601
           elsif time.respond_to?(:iso8601)
             time.iso8601
           else
             time
           end
  {
    :complexType => 'SoftLayer_Container_Product_Order_Virtual_Guest_Upgrade',
    :virtualGuests => [{ :id => id }],
    :properties => [{ :name => 'MAINTENANCE_WINDOW', :value => window }]
  }
end
# Build the complete upgrade order: the type-appropriate template plus
# the selected item prices.
def generate_upgrade_order(prices, value)
  template = bare_metal? ? bm_upgrade_order_template(value) : vm_upgrade_order_template(value)
  template.merge(:prices => prices)
end
end
end
end
end
wait_for_id and fixed_configuration_preset
Adds fixed_configuration_preset support for fast-provision bare metal servers.
Adds wait_for_id to wait until a bare metal server's id becomes available.
#
# Author:: Matt Eldridge (<matt.eldridge@us.ibm.com>)
# © Copyright IBM Corporation 2014.
#
# LICENSE: MIT (http://opensource.org/licenses/MIT)
#
require 'fog/compute/models/server'
module Fog
module Compute
class Softlayer
class Server < Fog::Compute::Server
identity :id, :type => :integer
attribute :name, :aliases => 'hostname'
attribute :domain
attribute :fqdn, :aliases => 'fullyQualifiedDomainName'
attribute :cpu, :aliases => ['startCpus', 'processorCoreAmount']
attribute :ram, :aliases => ['maxMemory', 'memory']
attribute :disk, :aliases => ['blockDevices','hardDrives']
attribute :private_ip_address, :aliases => 'primaryBackendIpAddress'
attribute :public_ip_address, :aliases => 'primaryIpAddress'
attribute :flavor_id
attribute :bare_metal, :type => :boolean
attribute :os_code
attribute :image_id
attribute :ephemeral_storage, :aliases => 'localDiskFlag'
attribute :key_pairs, :aliases => 'sshKeys'
attribute :network_components
attribute :fixed_configuration_preset, :aliases => 'fixedConfigurationPreset'
# Times
attribute :created_at, :aliases => ['createDate', 'provisionDate'], :type => :time
attribute :last_verified_date, :aliases => 'lastVerifiedDate', :type => :time
attribute :metric_poll_date, :aliases => 'metricPollDate', :type => :time
attribute :modify_date, :aliases => 'modifyDate', :type => :time
# Metadata
attribute :account_id, :aliases => 'accountId', :type => :integer
attribute :datacenter, :aliases => 'datacenter'
attribute :single_tenant, :aliases => 'dedicatedAccountHostOnlyFlag'
attribute :global_identifier, :aliases => 'globalIdentifier'
attribute :hourly_billing_flag, :aliases => 'hourlyBillingFlag'
attribute :tags, :aliases => 'tagReferences'
attribute :private_network_only, :aliases => 'privateNetworkOnlyFlag'
attribute :user_data, :aliases => 'userData'
attribute :uid, :aliases => 'globalIdentifier'
attribute :provision_script, :aliases => 'postInstallScriptUri'
def initialize(attributes = {})
# Forces every request inject bare_metal parameter
raise Exception if attributes[:collection].nil? and attributes['bare_metal'].nil?
super(attributes)
set_defaults
end
def add_tags(tags)
requires :id
raise ArgumentError, "Tags argument for #{self.class.name}##{__method__} must be Array." unless tags.is_a?(Array)
tags.each do |tag|
service.tags.new(:resource_id => self.id, :name => tag).save
end
self.reload
true
end
def bare_metal?
bare_metal
end
def bare_metal
@bare_metal
end
def datacenter=(name)
name = name['name'] if name.is_a?(Hash)
attributes[:datacenter] = { :name => name }
end
def datacenter
attributes[:datacenter][:name] unless attributes[:datacenter].nil?
end
def delete_tags(tags)
requires :id
raise ArgumentError, "Tags argument for #{self.class.name}##{__method__} must be Array." unless tags.is_a?(Array)
tags.each do |tag|
service.tags.new(:resource_id => self.id, :name => tag).destroy
end
self.reload
true
end
def destroy
requires :id
request = bare_metal? ? :delete_bare_metal_server : :delete_vm
response = service.send(request, self.id)
response.body
end
def dns_name
fqdn
end
def image_id=(uuid)
attributes[:image_id] = {:globalIdentifier => uuid}
end
def image_id
attributes[:image_id][:globalIdentifier] unless attributes[:image_id].nil?
end
def name=(set)
attributes[:hostname] = set
end
def name
attributes[:hostname]
end
def pre_save
extract_flavor
self.bare_metal = true if attributes[:fixed_configuration_preset] and not bare_metal?
validate_attributes
if self.vlan
attributes[:vlan] = { :networkVlan => { :id => self.vlan.id } }
end
if self.private_vlan
attributes[:private_vlan] = { :networkVlan => { :id => self.private_vlan.id } }
end
if self.key_pairs
attributes[:key_pairs].map! { |key| { :id => key.id } }
end
if self.network_components
self.network_components = self.network_components.map do |component|
component[:maxSpeed] = component.delete(:speed) if component[:speed]
component[:maxSpeed] = component.delete(:max_speed) if component[:max_speed]
component
end
end
if attributes[:fixed_configuration_preset].is_a? String
attributes[:fixedConfigurationPreset] = {:keyName => attributes.delete(:fixed_configuration_preset)}
end
remap_attributes(attributes, attributes_mapping)
clean_attributes
end
def private_ip # maintain backward compatibility with <0.3.13
private_ip_address
end
def public_ip # maintain backward compatibility with <0.3.13
public_ip_address
end
def os_code
attributes['operatingSystem']['softwareLicense']['softwareDescription']['referenceCode'] if attributes['operatingSystem']
end
def private_vlan
attributes[:private_vlan] ||= _get_private_vlan
end
def private_vlan=(value)
unless value.is_a?(Integer) or value.is_a?(Fog::Network::Softlayer::Network)
raise ArgumentError, "vlan argument for #{self.class.name}##{__method__} must be Integer or Fog::Network::Softlayer::Network."
end
value = network_connection.networks.get(value) if value.is_a?(Integer)
attributes[:private_vlan] = value
end
# reload the OS on a server (method name reload was already taken)
def relaunch!
requires :id
body = [ "FORCE", {}]
body[1][:sshKeyIds] = key_pairs.map {|kp| kp.id} unless key_pairs.empty?
type = bare_metal? ? :hardware_server : :virtual_guest
status = service.request(type, "#{id}/reloadOperatingSystem", :body => body, :http_method => :post).status
wait_for { not ready? } # block until the relaunch has begun
[200, 201].include?(status)
end
def key_pairs
attributes[:key_pairs]
end
def key_pairs=(keys)
raise ArgumentError, "Argument #{local_variables.first.to_s} for #{self.class.name}##{__method__} must be Array." unless keys.is_a?(Array)
attributes[:key_pairs] = []
keys.map do |key|
## This was nice but causing an intolerable number of requests on an account with lots of keys.
## ToDo: something better...
#key = self.symbolize_keys(key) if key.is_a?(Hash)
#unless key.is_a?(Fog::Compute::Softlayer::KeyPair) or (key.is_a?(Hash) and key[:id])
# raise ArgumentError, "Elements of keys array for #{self.class.name}##{__method__} must be a Hash with key 'id', or Fog::Compute::Softlayer::KeyPair"
#end
#key = service.key_pairs.get(key[:id]) unless key.is_a?(Fog::Compute::Softlayer::KeyPair)
attributes[:key_pairs] << key
end
end
def vlan
attributes[:vlan] ||= _get_vlan
end
def vlan=(value)
unless value.is_a?(Integer) or value.is_a?(Fog::Network::Softlayer::Network)
raise ArgumentError, "vlan argument for #{self.class.name}##{__method__} must be Integer or Fog::Network::Softlayer::Network."
end
value = network_connection.networks.get(value) if value.is_a?(Integer)
attributes[:vlan] = value
end
def ram=(set)
if set.is_a?(Array) and set.first['hardwareComponentModel']
set = 1024 * set.first['hardwareComponentModel']['capacity'].to_i
end
attributes[:ram] = set
end
# @params value [String]
def user_data=(value)
attributes[:user_data] = [{'value' => value}]
end
def user_data
attributes[:user_data]
end
def provision_script=(value)
attributes[:provision_script] = value
end
def provision_script
attributes[:provision_script]
end
def network_components
if id
(public_network_components << private_network_components).flatten
else
attributes[:network_components]
end
end
def public_network_components
if attributes['frontendNetworkComponents']
attributes['frontendNetworkComponents'].map { |n| Fog::Compute::Softlayer::NetworkComponent.new(n) }
else
[]
end
end
def private_network_components
if attributes['backendNetworkComponents']
attributes['backendNetworkComponents'].map { |n| Fog::Compute::Softlayer::NetworkComponent.new(n) }
else
[]
end
end
def ready?
begin
if bare_metal?
state == "on"
else
state == "Running"
end
rescue Excon::Errors::InternalServerError => e
false
end
end
def reboot(use_hard_reboot = true)
# requires :id # TODO: debug why this breaks the tests on bare metal and uncomment this
if bare_metal?
service.reboot_bare_metal_server(id, use_hard_reboot)
else
service.reboot_vm(id, use_hard_reboot)
end
true
end
def ssh_password
requires :id
service_path = bare_metal? ? :hardware_server : :virtual_guest
@sshpass ||= service.request(service_path, id, :query => 'objectMask=mask[id,operatingSystem.passwords[password]]').body
@sshpass['operatingSystem']['passwords'][0]['password'] unless @sshpass['operatingSystem'].nil? or @sshpass['operatingSystem']['passwords'].empty?
end
def snapshot
# TODO: implement
end
def start
# requires :id # TODO: debug why this breaks the tests on bare metal and uncomment this
if bare_metal?
service.power_on_bare_metal_server(id)
else
service.power_on_vm(id)
end
true
end
# Hard power off
def stop
# requires :id # TODO: debug why this breaks the tests on bare metal and uncomment this
if bare_metal?
service.power_off_bare_metal_server(id)
else
service.power_off_vm(id, true)
end
true
end
# Soft power off
def shutdown
# requires :id # TODO: debug why this breaks the tests on bare metal and uncomment this
if bare_metal?
raise Fog::Errors::Error.new('Shutdown not supported on baremetal servers. Use #stop.')
else
service.power_off_vm(id, false)
end
true
end
def state
if bare_metal?
service.request(:hardware_server, "#{id}/getServerPowerState").body
else
service.request(:virtual_guest, "#{id}/getPowerState").body['name']
end
end
# Creates server
# * requires attributes: :name, :domain, and :flavor_id OR (:cpu_count && :ram && :disks)
#
# @note You should use servers.create to create servers instead calling this method directly
#
# * State Transitions
# * BUILD -> ACTIVE
# * BUILD -> ERROR (on error)
def save
raise Fog::Errors::Error.new('Resaving an existing object may create a duplicate') if persisted?
copy = self.dup
copy.pre_save
data = if copy.bare_metal?
service.create_bare_metal_server(copy.attributes).body
else
service.create_vm(copy.attributes).body.first
end
data.delete("bare_metal")
merge_attributes(data)
true
end
def tags
attributes[:tags].map { |i| i['tag']['name'] if i['tag'] }.compact if attributes[:tags]
end
def get_active_tickets
return service.get_bare_metal_active_tickets(id).body if bare_metal?
service.get_virtual_guest_active_tickets(id).body
end
def get_users
return service.get_bare_metal_users(id).body if bare_metal?
service.get_virtual_guest_users(id).body
end
def get_upgrade_options
return service.get_bare_metal_upgrade_item_prices(id).body if bare_metal?
service.get_virtual_guest_upgrade_item_prices(id).body
end
# Place an upgrade order (e.g. :cpu, :ram) for this server.
#
# @param update_attributes [Hash] upgrade values keyed by price category,
#   plus an optional :time / :maintenance_window for scheduling.
# @return [Hash] the placed order, as returned by the product API.
# @raise [ArgumentError] when update_attributes is nil.
def update(update_attributes)
  raise ArgumentError if update_attributes.nil?
  product_connection
  # BUG FIX: capture the maintenance window BEFORE the price lookup --
  # get_item_prices_id deletes :time and :maintenance_window from the
  # hash, so reading them afterwards (as the old code did) was always nil.
  window = update_attributes[:time] || update_attributes[:maintenance_window]
  prices = get_item_prices_id(update_attributes)
  order = generate_upgrade_order(prices, window)
  @product_conn.place_order(order).body
end
def generate_order_template
copy = self.dup
copy.pre_save
return service.generate_bare_metal_order_template(copy.attributes).body if copy.bare_metal?
service.generate_virtual_guest_order_template(copy.attributes).body
end
# Poll the account's hardware list until this server's id appears (bare
# metal servers have no id until provisioning assigns one), then reload.
#
# @param timeout [Integer] seconds to keep polling (default 4 hours)
# @param delay [Integer] seconds between polls
# @yield [server] the raw API hash for the matched server on each poll
# @return [true] once the id has been captured and the model reloaded
def wait_for_id(timeout=14400, delay=30)
# Cannot use self.wait_for because it calls reload which requires
# self.id which is not initially available for bare metal.
# Match this server by hostname + domain + globalIdentifier.
filterStr = Fog::JSON.encode({
"hardware" => {
"hostname" => {
"operation" => self.name,
},
"domain" => {
"operation" => self.domain,
},
"globalIdentifier" => {
"operation" => self.uid,
},
}
})
Fog.wait_for(timeout, delay) do
res = service.request(:account, 'getHardware', :query => {
:objectMask => 'mask[id,fullyQualifiedDomainName,provisionDate,hardwareStatus,lastTransaction[elapsedSeconds,transactionStatus[friendlyName]],operatingSystem[id,passwords[password,username]]]',
:objectFilter => filterStr,
})
server = res.body.first
yield server if block_given?
# Only accept the match once provisioning completed (provisionDate set).
if server and server["provisionDate"]
attributes[:id] = server['id']
true
else
false
end
end
self.reload
true
end
private
def network_connection
@network_conn ||= Fog::Network.new(
:provider => :softlayer,
:softlayer_username => service.instance_variable_get(:@softlayer_username),
:softlayer_api_key => service.instance_variable_get(:@softlayer_api_key)
)
end
def product_connection
if Fog.mock?
@product_conn = Fog::Softlayer::Product.new(
:provider => :softlayer,
:softlayer_username => service.instance_variable_get(:@credentials)[:username],
:softlayer_api_key => service.instance_variable_get(:@credentials)[:api_key]
)
end
@product_conn ||= Fog::Softlayer::Product.new(
:provider => :softlayer,
:softlayer_username => service.instance_variable_get(:@softlayer_username),
:softlayer_api_key => service.instance_variable_get(:@softlayer_api_key)
)
end
def _get_private_vlan
if self.id
vlan_id = if bare_metal?
service.request(:hardware_server, "#{self.id}/get_private_vlan").body['id']
else
service.request(:virtual_guest, self.id, :query => 'objectMask=primaryBackendNetworkComponent.networkVlan').body['primaryBackendNetworkComponent']['networkVlan']['id']
end
network_connection.networks.get(vlan_id)
end
end
def _get_vlan
if self.id
vlan_id = if bare_metal?
service.request(:hardware_server, "#{self.id}/get_public_vlan").body['id']
else
service.request(:virtual_guest, self.id, :query => 'objectMask=primaryNetworkComponent.networkVlan').body['primaryNetworkComponent']['networkVlan']['id']
end
network_connection.networks.get(vlan_id)
end
end
##
# Generate mapping for use with remap_attributes
def attributes_mapping
common = {
:hourly_billing_flag => :hourlyBillingFlag,
:os_code => :operatingSystemReferenceCode,
:vlan => :primaryNetworkComponent,
:private_vlan => :primaryBackendNetworkComponent,
:key_pairs => :sshKeys,
:private_network_only => :privateNetworkOnlyFlag,
:user_data => :userData,
:provision_script => :postInstallScriptUri,
:network_components => :networkComponents,
}
conditional = if bare_metal?
{
:cpu => :processorCoreAmount,
:ram => :memoryCapacity,
:disk => :hardDrives,
:bare_metal => :bareMetalInstanceFlag,
:fixed_configuration_preset => :fixedConfigurationPreset,
}
else
{
:cpu => :startCpus,
:ram => :maxMemory,
:disk => :blockDevices,
:image_id => :blockDeviceTemplateGroup,
:ephemeral_storage => :localDiskFlag,
}
end
common.merge(conditional)
end
def bare_metal=(set)
return @bare_metal if set == @bare_metal
raise Exception, "Bare metal flag has already been set" unless @bare_metal.nil?
@bare_metal = case set
when false, 'false', 0, nil, ''
attributes[:bare_metal] = false
else
attributes[:bare_metal] = true
end
end
##
# Remove model attributes that aren't expected by the SoftLayer API
def clean_attributes
attributes.delete(:bare_metal)
attributes.delete(:flavor_id)
attributes.delete(:ephemeral_storage)
attributes.delete(:tags) if bare_metal?
end
##
# Expand a "flavor" into cpu, ram, and disk attributes.
# No-op when no :flavor_id is set.
#
# @raise [Fog::Errors::Error] when the flavor id is not recognized.
def extract_flavor
  if attributes[:flavor_id]
    flavor = @service.flavors.get(attributes[:flavor_id])
    # BUG FIX: the old code built the error without raising it
    # ("flavor.nil? and Fog::Errors::Error.new(...)"), so an unknown
    # flavor fell through and crashed below with NoMethodError on nil.
    raise Fog::Errors::Error.new("Unrecognized flavor in #{self.class}##{__method__}") if flavor.nil?
    attributes[:cpu] = flavor.cpu
    attributes[:ram] = flavor.ram
    attributes[:disk] = flavor.disk unless attributes[:image_id]
    if bare_metal?
      # Bare metal only offers 250 or 500 capacity primary disks, and RAM
      # is specified in GB rather than MB.
      value = flavor.disk.first['diskImage']['capacity'] < 500 ? 250 : 500
      attributes[:disk] = [{'capacity'=>value}]
      attributes[:ram] = attributes[:ram] / 1024 if attributes[:ram] > 64
    end
  end
end
def validate_attributes
requires :name, :domain, :datacenter
if attributes[:fixed_configuration_preset]
requires :os_code
else
requires :cpu, :ram
requires_one :os_code, :image_id
requires_one :disk, :image_id
end
bare_metal? and image_id and raise ArgumentError, "Bare Metal Cloud does not support booting from Image"
end
def set_defaults
attributes[:hourly_billing_flag] = true if attributes[:hourly_billing_flag].nil?
attributes[:ephemeral_storage] = false if attributes[:ephemeral_storage].nil?
attributes[:domain] = service.softlayer_default_domain if service.softlayer_default_domain and attributes[:domain].nil?
self.datacenter = service.softlayer_default_datacenter if service.softlayer_default_datacenter and attributes[:datacenter].nil?
end
def get_item_prices_id_by_value(item_price_array, category, value)
item_prices = item_price_array.select { |item_price| item_price["categories"].find { |category_hash| category_hash["categoryCode"] == category } }
item_price = item_prices.find { |item_price| item_price['item']['capacity'] == value.to_s }
item_price.nil? ? "" : item_price["id"]
end
# Translate upgrade attributes (e.g. :ram => 8) into SoftLayer price ids.
# Scheduling keys (:time, :maintenance_window) are not price categories
# and are skipped.
#
# NOTE: unlike the previous version this no longer mutates the caller's
# hash (it used Hash#delete), which silently discarded the maintenance
# window before #update could read it.
#
# @return [Array<Hash>] one {:id => price_id} hash per upgrade category.
def get_item_prices_id(update_attributes)
  item_price_array = get_upgrade_options
  update_attributes.reject { |key, _| key == :time || key == :maintenance_window }
                   .map { |key, value| { :id => get_item_prices_id_by_value(item_price_array, key.to_s, value) } }
end
def bm_upgrade_order_template(value)
{
:complexType => 'SoftLayer_Container_Product_Order_Hardware_Server_Upgrade',
:hardware => [
{
:id => id
}
],
:properties => [
{
:name => 'MAINTENANCE_WINDOW_ID',
:value => value
}
]
}
end
# Order container for a virtual guest upgrade.
#
# @param time [Time, String, nil] requested maintenance window; defaults
#   to "now" when nil or blank.
def vm_upgrade_order_template(time)
  # BUG FIX: the old code called time.empty? and then time.iso8601 on the
  # same object -- no single type supports both (String has no #iso8601,
  # Time has no #empty?), so any non-nil argument crashed. Accept either:
  # a Time is formatted, a String is passed through.
  window = if time.nil? || (time.respond_to?(:empty?) && time.empty?)
             Time.now.iso8601
           elsif time.respond_to?(:iso8601)
             time.iso8601
           else
             time
           end
  {
    :complexType => 'SoftLayer_Container_Product_Order_Virtual_Guest_Upgrade',
    :virtualGuests => [{ :id => id }],
    :properties => [{ :name => 'MAINTENANCE_WINDOW', :value => window }]
  }
end
def generate_upgrade_order(prices, value)
return bm_upgrade_order_template(value).merge({ :prices => prices }) if bare_metal?
vm_upgrade_order_template(value).merge({ :prices => prices })
end
end
end
end
end
|
# Gem version constant for the sentry-delayed_job integration.
module Sentry
module DelayedJob
VERSION = "0.1.0"
end
end
release: 4.3.0
# Gem version constant for the sentry-delayed_job integration.
module Sentry
module DelayedJob
VERSION = "4.3.0"
end
end
|
# frozen_string_literal: true
# Rails generator that writes Bullet configuration into the app's
# environment files (development always, test optionally).
module Bullet
module Generators
class InstallGenerator < ::Rails::Generators::Base
desc <<~DESC
Description:
Enable bullet in development/test for your application.
DESC
# Inject a Bullet after_initialize block into
# config/environments/development.rb.
def enable_in_development
environment(nil, env: 'development') do
<<-"FILE".strip
config.after_initialize do
Bullet.enable = true
Bullet.alert = true
Bullet.bullet_logger = true
Bullet.console = true
# Bullet.growl = true
Bullet.rails_logger = true
Bullet.add_footer = true
end
FILE
end
say 'Enabled bullet in config/environments/development.rb'
end
# After prompting, optionally inject a Bullet block into
# config/environments/test.rb; Bullet.raise makes N+1 queries fail tests.
def enable_in_test
if yes?('Would you like to enable bullet in test environment? (y/n)')
environment(nil, env: 'test') do
<<-"FILE".strip
config.after_initialize do
Bullet.enable = true
Bullet.bullet_logger = true
Bullet.raise = true # raise an error if n+1 query occurs
end
FILE
end
say 'Enabled bullet in config/environments/test.rb'
end
end
end
end
end
Auto-corrected by running the Ruby code formatter.
# frozen_string_literal: true
# Rails generator that writes Bullet configuration into the app's
# environment files (development always, test optionally).
module Bullet
module Generators
class InstallGenerator < ::Rails::Generators::Base
desc <<~DESC
Description:
Enable bullet in development/test for your application.
DESC
# Inject a Bullet after_initialize block into
# config/environments/development.rb. The lone ".strip" after the heredoc
# terminator continues the heredoc expression (leading-dot method chain).
def enable_in_development
environment(nil, env: 'development') do
<<-"FILE"
config.after_initialize do
Bullet.enable = true
Bullet.alert = true
Bullet.bullet_logger = true
Bullet.console = true
# Bullet.growl = true
Bullet.rails_logger = true
Bullet.add_footer = true
end
FILE
.strip
end
say 'Enabled bullet in config/environments/development.rb'
end
# After prompting, optionally inject a Bullet block into
# config/environments/test.rb; Bullet.raise makes N+1 queries fail tests.
def enable_in_test
if yes?('Would you like to enable bullet in test environment? (y/n)')
environment(nil, env: 'test') do
<<-"FILE"
config.after_initialize do
Bullet.enable = true
Bullet.bullet_logger = true
Bullet.raise = true # raise an error if n+1 query occurs
end
FILE
.strip
end
say 'Enabled bullet in config/environments/test.rb'
end
end
end
end
end
|
#
# rack_app.rb
# github_post_commit_server
#
# Example Rack app for http://github.com/guides/post-receive-hooks
#
# Created by James Tucker on 2008-05-11.
# Copyright 2008 James Tucker
#
require 'rubygems'
require 'rack'
require 'json'
require 'lighthouse-api'
module GithubPostReceiveServer
# Rack endpoint for GitHub post-receive hooks: scans pushed commits for
# bug-branch merges and marks the matching Lighthouse tickets fixed.
class RackApp
GO_AWAY_COMMENT = "These are not the droids you are looking for."
THANK_YOU_COMMENT = "You can go about your business. Move along."
# This is what you get if you make a request that isn't a POST with a
# payload parameter.
def rude_comment
@res.write GO_AWAY_COMMENT
end
# Does what it says on the tin. By default, not much, it just prints the
# received payload.
def handle_request
payload = @req.POST["payload"]
return rude_comment if payload.nil?
payload = JSON.parse(payload)
# SECURITY(review): API token hard-coded in source -- rotate it and load
# credentials from the environment instead of committing them.
# TODO: Put parameters in to ENV or something
Lighthouse.token = '78c6ca25c0c3d631d099c8e64a73822a6a5865d3'
Lighthouse.account = 'gameclay'
# Iterate the commits and check for search paths
# TODO: This is not the most expandable thing in the world
payload['commits'].each do |commit|
# $1 below refers to the bug number captured by this regex.
if commit['message'] =~ /Merge branch '.*\/bug-(\d*)'/
begin
# TODO: Put project ID into ENV or something
ticket = Lighthouse::Ticket.find($1, :params => { :project_id => 47141 })
ticket.state = 'fixed'
puts "Marking ticket #{$1} fixed (#{commit['message']})" if ticket.save
# NOTE(review): bare rescue swallows every StandardError (network,
# parsing, auth) and only logs -- deliberate best-effort behavior.
rescue
puts "Error updating ticket #{$1} (#{commit['message']})"
end
end
end
@res.write THANK_YOU_COMMENT
end
# Call is the entry point for all rack apps.
def call(env)
@req = Rack::Request.new(env)
@res = Rack::Response.new
handle_request
@res.finish
end
end
end
Ticket resolution now includes a message with a link to the GitHub commit URL.
#
# rack_app.rb
# github_post_commit_server
#
# Example Rack app for http://github.com/guides/post-receive-hooks
#
# Created by James Tucker on 2008-05-11.
# Copyright 2008 James Tucker
#
require 'rubygems'
require 'rack'
require 'json'
require 'lighthouse-api'
module GithubPostReceiveServer
# Rack endpoint for GitHub post-receive hooks: scans pushed commits for
# bug-branch merges and resolves the matching Lighthouse tickets with a
# comment linking back to the commit.
class RackApp
GO_AWAY_COMMENT = "These are not the droids you are looking for."
THANK_YOU_COMMENT = "You can go about your business. Move along."
# This is what you get if you make a request that isn't a POST with a
# payload parameter.
def rude_comment
@res.write GO_AWAY_COMMENT
end
# Does what it says on the tin. By default, not much, it just prints the
# received payload.
def handle_request
payload = @req.POST["payload"]
return rude_comment if payload.nil?
payload = JSON.parse(payload)
# Authenticate with the Lighthouse project
begin
# SECURITY(review): API token hard-coded in source -- rotate it and load
# credentials from the environment instead of committing them.
# TODO: Put parameters in to ENV or something
Lighthouse.account = 'gameclay'
Lighthouse.token = '69b8ab518cdf61624b41efe429d796e08e0a288d'
rescue
# NOTE(review): returning a String here only aborts handle_request;
# nothing is written to @res, so the client still receives the default
# empty response -- confirm this is intended.
return "Error authenticating Lighthouse"
end
# Iterate the commits and check for workflow events
# TODO: This is not the most expandable thing in the world
payload['commits'].each do |commit|
# Look for bug fixes
if commit['message'] =~ /Merge branch '.*\/bug-(\d*)'/
begin
# TODO: Put project ID, "resolve state", and message into ENV or something
ticket = Lighthouse::Ticket.find($1, :params => { :project_id => 47141 })
ticket.state = 'resolved'
ticket.body = "Fixed by #{commit['author']['name']}.\n#{commit['url']}"
puts "Marking ticket #{$1} fixed (#{commit['message']})" if ticket.save
# NOTE(review): bare rescue swallows every StandardError and only logs
# -- deliberate best-effort behavior.
rescue
puts "Error updating ticket #{$1} (#{commit['message']})"
end
end
end
@res.write THANK_YOU_COMMENT
end
# Call is the entry point for all rack apps.
def call(env)
@req = Rack::Request.new(env)
@res = Rack::Response.new
handle_request
@res.finish
end
end
end
require_dependency 'async'
# Mixin for models whose monthly statistics totals are pushed to the
# Global Registry as measurements.
module GlobalRegistryMeasurementMethods
  include Async
  extend ActiveSupport::Concern

  included do
    #after_commit :push_to_global_registry
    #after_destroy :delete_from_global_registry
  end

  # Queue removal of this record's Global Registry entity.
  def delete_from_global_registry
    # BUG FIX: this previously enqueued :async_push_to_global_registry,
    # so a destroyed record was re-pushed instead of deleted.
    # NOTE(review): assumes Async#async forwards extra arguments to the
    # job method -- confirm against the Async mixin.
    async(:async_delete_from_global_registry, global_registry_id)
  end

  def async_delete_from_global_registry(registry_id)
    begin
      GlobalRegistry::Entity.delete(registry_id)
    rescue RestClient::ResourceNotFound
      # If the record doesn't exist, we don't care
    end
  end

  # Define default push method
  def push_to_global_registry
    async(:async_push_to_global_registry)
  end

  # Push one measurement per mapped column: each is the sum over the
  # activity's statistics within this record's month. Zero totals are
  # skipped, and nothing is posted when no totals are positive.
  def async_push_to_global_registry
    return unless activity
    activity.async_push_to_global_registry unless activity.global_registry_id.present?
    detailed_mappings = self.class.gr_measurement_types
    measurements = []
    detailed_mappings.each do |column_name, measurement_type|
      total = activity.statistics.where("periodBegin >= ? AND periodBegin <= ?", periodBegin.beginning_of_month, periodBegin.end_of_month)
                      .sum(column_name)
      if total > 0
        month = periodBegin.beginning_of_month.strftime("%Y-%m")
        measurements << {
          measurement_type_id: measurement_type['id'],
          related_entity_id: activity.global_registry_id,
          period: month,
          value: total
        }
      end
    end
    # Don't try to push a blank measurements batch.
    GlobalRegistry::Measurement.post(measurements: measurements) if measurements.present?
  end

  def update_in_global_registry
    GlobalRegistry::Entity.put(global_registry_id, {entity: attributes_to_push})
  end

  # Create the entity remotely and remember its registry id locally.
  def create_in_global_registry(parent_id = nil)
    entity = GlobalRegistry::Entity.post(entity: {self.class.global_registry_entity_type_name => attributes_to_push.merge({client_integration_id: id}), parent_id: parent_id})
    entity = entity['entity']
    update_column(:global_registry_id, entity[self.class.global_registry_entity_type_name]['id'])
  end

  module ClassMethods
    # Resolve (or lazily create) the Global Registry measurement type for
    # each mapped column; returns {column_name => measurement_type_hash}.
    def gr_measurement_types(measurement_type_mappings = gr_measurement_type_mappings, related_entity_type_id = gr_related_entity_type_id, category = gr_category, unit = gr_unit, description = '', frequency = 'monthly')
      detailed_mappings = {}
      measurement_type_mappings.each do |column_name, type_name|
        gr_type = GlobalRegistry::MeasurementType.get({'filters[name]' => type_name})['measurement_types'].first
        unless gr_type
          gr_type = GlobalRegistry::MeasurementType.post(measurement_type: {
            name: type_name,
            related_entity_type_id: related_entity_type_id,
            category: category,
            unit: unit,
            description: description,
            frequency: frequency
          })
        end
        detailed_mappings[column_name] = gr_type
      end
      detailed_mappings
    end
  end
end
Don't try to push blank measurements
require_dependency 'async'
# Mixin for models whose monthly statistics totals are pushed to the
# Global Registry as measurements.
module GlobalRegistryMeasurementMethods
  include Async
  extend ActiveSupport::Concern

  included do
    #after_commit :push_to_global_registry
    #after_destroy :delete_from_global_registry
  end

  # Queue removal of this record's Global Registry entity.
  def delete_from_global_registry
    # BUG FIX: this previously enqueued :async_push_to_global_registry,
    # so a destroyed record was re-pushed instead of deleted.
    # NOTE(review): assumes Async#async forwards extra arguments to the
    # job method -- confirm against the Async mixin.
    async(:async_delete_from_global_registry, global_registry_id)
  end

  def async_delete_from_global_registry(registry_id)
    begin
      GlobalRegistry::Entity.delete(registry_id)
    rescue RestClient::ResourceNotFound
      # If the record doesn't exist, we don't care
    end
  end

  # Define default push method
  def push_to_global_registry
    async(:async_push_to_global_registry)
  end

  # Push one measurement per mapped column: each is the sum over the
  # activity's statistics within this record's month. Zero totals are
  # skipped, and nothing is posted when no totals are positive.
  def async_push_to_global_registry
    return unless activity
    activity.async_push_to_global_registry unless activity.global_registry_id.present?
    detailed_mappings = self.class.gr_measurement_types
    measurements = []
    detailed_mappings.each do |column_name, measurement_type|
      total = activity.statistics.where("periodBegin >= ? AND periodBegin <= ?", periodBegin.beginning_of_month, periodBegin.end_of_month)
                      .sum(column_name)
      if total > 0
        month = periodBegin.beginning_of_month.strftime("%Y-%m")
        measurements << {
          measurement_type_id: measurement_type['id'],
          related_entity_id: activity.global_registry_id,
          period: month,
          value: total
        }
      end
    end
    GlobalRegistry::Measurement.post(measurements: measurements) if measurements.present?
  end

  def update_in_global_registry
    GlobalRegistry::Entity.put(global_registry_id, {entity: attributes_to_push})
  end

  # Create the entity remotely and remember its registry id locally.
  def create_in_global_registry(parent_id = nil)
    entity = GlobalRegistry::Entity.post(entity: {self.class.global_registry_entity_type_name => attributes_to_push.merge({client_integration_id: id}), parent_id: parent_id})
    entity = entity['entity']
    update_column(:global_registry_id, entity[self.class.global_registry_entity_type_name]['id'])
  end

  module ClassMethods
    # Resolve (or lazily create) the Global Registry measurement type for
    # each mapped column; returns {column_name => measurement_type_hash}.
    def gr_measurement_types(measurement_type_mappings = gr_measurement_type_mappings, related_entity_type_id = gr_related_entity_type_id, category = gr_category, unit = gr_unit, description = '', frequency = 'monthly')
      detailed_mappings = {}
      measurement_type_mappings.each do |column_name, type_name|
        gr_type = GlobalRegistry::MeasurementType.get({'filters[name]' => type_name})['measurement_types'].first
        unless gr_type
          gr_type = GlobalRegistry::MeasurementType.post(measurement_type: {
            name: type_name,
            related_entity_type_id: related_entity_type_id,
            category: category,
            unit: unit,
            description: description,
            frequency: frequency
          })
        end
        detailed_mappings[column_name] = gr_type
      end
      detailed_mappings
    end
  end
end
|
require 'json'
module Google
  module Visualization
    module Formatter
      ##
      # = JSON Formatter
      #
      # == Description
      #
      # Serializes a DataTable to the JavaScript Object Notation (JSON).
      #
      class JSON
        private_class_method :new

        ##
        # Generates a JSON string representation of a data table.
        #
        def self.render(data_table)
          {
            'cols' => render_columns(data_table),
            'rows' => render_rows(data_table) #,
            # 'p' => render_custom_properties(data_table)
          }.to_json
        end

        private

        ##
        # Generates the JSON representation of all columns.
        #
        def self.render_columns(data_table)
          data_table.columns.map{|c| render_column(c)}
        end

        ##
        # Generates the JSON representation of a single column; only
        # attributes with a value are emitted.
        #
        def self.render_column(column)
          result = {}
          %w(id label type pattern).each do |field|
            result[field] = column.send(field).to_s if column.send(field)
          end
          # if column.custom_properties_count > 0
          #   result["p"] = render_custom_properties(column)
          # end
          result
        end

        ##
        # Generates the JSON representation of all rows.
        #
        def self.render_rows(data_table)
          data_table.rows.map{|r| render_row(r, data_table)}
        end

        ##
        # Generates the JSON representation of a single row, pairing each
        # cell with the type of its column.
        #
        def self.render_row(row, data_table)
          i = 0
          result = {'c' => row.cells.map { |c|
              c = render_cell(c, data_table.column(i).type)
              i += 1
              c
            }
          }
          result
        end

        ##
        # Generates the JSON representation of a cell.
        #
        # BUG FIX: JavaScript Date months are zero-based, so Ruby's 1-based
        # Date#month / DateTime#month must be shifted down by one when
        # building the "Date(...)" literal; previously May rendered as June.
        # NOTE(review): a date-typed cell whose value is neither Date nor
        # DateTime renders as nil — presumably never produced upstream.
        #
        def self.render_cell(cell, type=nil)
          if (type == DataType::DATETIME || type == DataType::DATE)
            d = cell.value
            if d.instance_of?(Date)
              result = { 'v' => "Date(#{d.year}, #{d.month - 1}, #{d.day})"}
            elsif d.instance_of?(DateTime)
              result = { 'v' => "Date(#{d.year}, #{d.month - 1}, #{d.day}, #{d.hour}, #{d.minute}, #{d.second})"}
            end
          else
            result = {'v' => cell.value}
            result['f'] = cell.formatted_value if cell.formatted_value
            # result['p'] = render_custom_properties(row) if row.custom_properties_count > 0
          end
          result
        end

        ##
        # Generates a JSON string representation of the custom properties.
        #
        #def self.render_custom_properties(data_element)
        #  result = data_element.custom_properties.collect { |name,value|
        #    "#{name.to_s}:#{rb_to_js(value, DataType::STRING)}"
        #  }
        #  "p:{#{result.join(',')}}"
        #end
      end
    end
  end
end
Fix wrong month in date conversion
require 'json'
module Google
  module Visualization
    module Formatter
      ##
      # = JSON Formatter
      #
      # == Description
      #
      # Serializes a DataTable to the JavaScript Object Notation (JSON).
      #
      class JSON
        private_class_method :new

        ##
        # Renders a whole data table as a JSON string.
        #
        def self.render(data_table)
          {
            'cols' => render_columns(data_table),
            'rows' => render_rows(data_table) #,
            # 'p' => render_custom_properties(data_table)
          }.to_json
        end

        private

        ##
        # Renders every column description of the table.
        #
        def self.render_columns(data_table)
          data_table.columns.map { |column| render_column(column) }
        end

        ##
        # Renders one column description, keeping only the attributes that
        # are present.
        #
        def self.render_column(column)
          %w(id label type pattern).each_with_object({}) do |attribute, rendered|
            value = column.send(attribute)
            rendered[attribute] = value.to_s if value
          end
        end

        ##
        # Renders every row of the table.
        #
        def self.render_rows(data_table)
          data_table.rows.map { |row| render_row(row, data_table) }
        end

        ##
        # Renders one row, pairing each cell with the type of its column.
        #
        def self.render_row(row, data_table)
          cells = row.cells.each_with_index.map do |cell, index|
            render_cell(cell, data_table.column(index).type)
          end
          { 'c' => cells }
        end

        ##
        # Renders one cell. Date and date-time values use the "Date(...)"
        # literal form, with the month shifted to JavaScript's zero-based
        # numbering.
        #
        def self.render_cell(cell, type = nil)
          if type == DataType::DATE || type == DataType::DATETIME
            value = cell.value
            if value.instance_of?(Date)
              { 'v' => "Date(#{value.year}, #{value.month-1}, #{value.day})" }
            elsif value.instance_of?(DateTime)
              { 'v' => "Date(#{value.year}, #{value.month-1}, #{value.day}, #{value.hour}, #{value.minute}, #{value.second})" }
            end
          else
            rendered = { 'v' => cell.value }
            rendered['f'] = cell.formatted_value if cell.formatted_value
            rendered
          end
        end

        ##
        # Generates a JSON string representation of the custom properties.
        #
        #def self.render_custom_properties(data_element)
        #  result = data_element.custom_properties.collect { |name,value|
        #    "#{name.to_s}:#{rb_to_js(value, DataType::STRING)}"
        #  }
        #  "p:{#{result.join(',')}}"
        #end
      end
    end
  end
end
|
# frozen_string_literal: true
module Hanami
  class Application
    module Routing
      # Hanami application router endpoint resolver
      #
      # @since 2.0.0
      class Resolver
        require_relative "resolver/trie"

        # Raised when a resolved endpoint does not respond to #call.
        #
        # @since 2.0.0
        class NotCallableEndpointError < StandardError
          def initialize(endpoint)
            super("#{endpoint.inspect} is not compatible with Rack. Please make sure it implements #call.")
          end
        end

        # @api private
        # @since 2.0.0
        def initialize(slices:, inflector:)
          @slices = slices
          @inflector = inflector
          @slices_registry = Trie.new
        end

        # Resolves an identifier (String key, Class, or callable object) to
        # a Rack endpoint, raising when the result is not callable.
        #
        # @api private
        # @since 2.0.0
        def call(path, identifier)
          endpoint =
            if identifier.is_a?(String)
              resolve_string_identifier(path, identifier)
            elsif identifier.is_a?(Class)
              identifier.respond_to?(:call) ? identifier : identifier.new
            else
              identifier
            end

          raise NotCallableEndpointError.new(endpoint) unless endpoint.respond_to?(:call)

          endpoint
        end

        # Registers a slice name under a mount path for later lookup.
        #
        # @api private
        # @since 2.0.0
        def register_slice_at_path(name, path)
          slices_registry.add(path, name)
        end

        private

        # @api private
        # @since 2.0.0
        attr_reader :slices, :inflector, :slices_registry

        # Finds the slice registered for +path+ and fetches the action
        # registered under "actions.#{identifier}" from its container.
        #
        # @api private
        # @since 2.0.0
        def resolve_string_identifier(path, identifier)
          slice_name = slices_registry.find(path)
          raise "missing slice for #{path.inspect} (#{identifier.inspect})" unless slice_name

          slice = slices[slice_name]
          slice["actions.#{identifier}"]
        end
      end
    end
  end
end
Give visibility to the endpoint namespace key (#1112)
Instead of having it buried within the implementation of a private
method, we give it the prominence it deserves by having it as a class
constant.
We took the occasion to rename `slices_registry` to `slice_registry` to
make the naming (singular) consistent with the new constant name.
# frozen_string_literal: true
module Hanami
  class Application
    module Routing
      # Hanami application router endpoint resolver
      #
      # @since 2.0.0
      class Resolver
        # Container key namespace under which routable endpoints (actions)
        # are registered in a slice.
        ENDPOINT_KEY_NAMESPACE = "actions"

        require_relative "resolver/trie"

        # Raised when a resolved endpoint does not implement Rack's #call.
        #
        # @since 2.0.0
        class NotCallableEndpointError < StandardError
          def initialize(endpoint)
            super("#{endpoint.inspect} is not compatible with Rack. Please make sure it implements #call.")
          end
        end

        # @api private
        # @since 2.0.0
        def initialize(slices:, inflector:)
          @slices = slices
          @inflector = inflector
          @slice_registry = Trie.new
        end

        # Resolves an identifier to a Rack endpoint: Strings are looked up
        # in the slice registered for +path+, Classes are instantiated
        # unless already callable, anything else is used as-is. Raises
        # NotCallableEndpointError when the result does not respond to #call.
        #
        # @api private
        # @since 2.0.0
        #
        # rubocop:disable Metrics/MethodLength
        def call(path, identifier)
          endpoint =
            case identifier
            when String
              resolve_string_identifier(path, identifier)
            when Class
              identifier.respond_to?(:call) ? identifier : identifier.new
            else
              identifier
            end
          unless endpoint.respond_to?(:call) # rubocop:disable Style/IfUnlessModifier
            raise NotCallableEndpointError.new(endpoint)
          end
          endpoint
        end
        # rubocop:enable Metrics/MethodLength

        # Registers a slice name under a mount path for later lookup.
        #
        # @api private
        # @since 2.0.0
        def register_slice_at_path(name, path)
          slice_registry.add(path, name)
        end

        private

        # @api private
        # @since 2.0.0
        attr_reader :slices

        # @api private
        # @since 2.0.0
        attr_reader :inflector

        # @api private
        # @since 2.0.0
        attr_reader :slice_registry

        # Finds the slice for +path+ and fetches the endpoint registered
        # under "#{ENDPOINT_KEY_NAMESPACE}.#{identifier}" from its container.
        #
        # @api private
        # @since 2.0.0
        def resolve_string_identifier(path, identifier)
          slice_name = slice_registry.find(path) or raise "missing slice for #{path.inspect} (#{identifier.inspect})"
          slice = slices[slice_name]
          endpoint_key = "#{ENDPOINT_KEY_NAMESPACE}.#{identifier}"
          slice[endpoint_key]
        end
      end
    end
  end
end
|
module KnapsackPro
  module Adapters
    # RSpec adapter: resolves spec file paths for example groups and binds
    # KnapsackPro time tracking and reporting into the RSpec lifecycle.
    class RSpecAdapter < BaseAdapter
      # Glob used to discover spec files.
      TEST_DIR_PATTERN = 'spec/**{,/*/**}/*_spec.rb'

      # Returns the spec file path of the root example group.
      #
      # BUG FIX: reference ::Turnip at the top level. Inside this nested
      # module a bare `Turnip` lookup could be satisfied by a constant under
      # KnapsackPro's namespace instead of the Turnip gem itself.
      def self.test_path(example_group)
        if defined?(::Turnip) && ::Turnip::VERSION.to_i < 2
          # Turnip < 2: only non-turnip groups need the parent-group walk;
          # turnip groups already carry the feature file path.
          unless example_group[:turnip]
            until example_group[:parent_example_group].nil?
              example_group = example_group[:parent_example_group]
            end
          end
        else
          until example_group[:parent_example_group].nil?
            example_group = example_group[:parent_example_group]
          end
        end
        example_group[:file_path]
      end

      # Wraps every example with the KnapsackPro timer and logs the global
      # time once the suite finishes.
      def bind_time_tracker
        ::RSpec.configure do |config|
          config.around(:each) do |example|
            current_example_group =
              if ::RSpec.respond_to?(:current_example)
                ::RSpec.current_example.metadata[:example_group]
              else
                # Fallback when RSpec.current_example is unavailable.
                example.metadata
              end
            current_test_path = KnapsackPro::Adapters::RSpecAdapter.test_path(current_example_group)
            KnapsackPro.tracker.current_test_path =
              if KnapsackPro::Config::Env.rspec_split_by_test_examples? && KnapsackPro::Adapters::RSpecAdapter.slow_test_file?(RSpecAdapter, current_test_path)
                # Track per-example timing for slow files split by example.
                example.id
              else
                current_test_path
              end
            KnapsackPro.tracker.start_timer
            example.run
            KnapsackPro.tracker.stop_timer
          end
          config.after(:suite) do
            KnapsackPro.logger.debug(KnapsackPro::Presenter.global_time)
          end
        end
      end

      # Saves the KnapsackPro report after the suite finishes.
      def bind_save_report
        ::RSpec.configure do |config|
          config.after(:suite) do
            KnapsackPro::Report.save
          end
        end
      end

      # Runs the before-queue hook exactly once per process, guarded by an
      # environment flag.
      def bind_before_queue_hook
        ::RSpec.configure do |config|
          config.before(:suite) do
            unless ENV['KNAPSACK_PRO_BEFORE_QUEUE_HOOK_CALLED']
              ENV['KNAPSACK_PRO_BEFORE_QUEUE_HOOK_CALLED'] = 'true'
              KnapsackPro::Hooks::Queue.call_before_queue
            end
          end
        end
      end
    end

    # This is added to provide backwards compatibility
    # In case someone is doing switch from knapsack gem to the knapsack_pro gem
    # and didn't notice the class name changed
    class RspecAdapter < RSpecAdapter
    end
  end
end
Use ::Turnip instead of Turnip
module KnapsackPro
  module Adapters
    # RSpec adapter: resolves spec file paths for example groups and binds
    # KnapsackPro time tracking and reporting into the RSpec lifecycle.
    class RSpecAdapter < BaseAdapter
      # Glob used to discover spec files.
      TEST_DIR_PATTERN = 'spec/**{,/*/**}/*_spec.rb'

      # Returns the spec file path of the root example group.
      # ::Turnip is fully qualified so a similarly named constant nested
      # under this namespace cannot shadow the gem. For Turnip < 2 only
      # non-turnip groups need the parent-group walk; turnip groups already
      # carry the feature file path.
      def self.test_path(example_group)
        if defined?(::Turnip) && ::Turnip::VERSION.to_i < 2
          unless example_group[:turnip]
            until example_group[:parent_example_group].nil?
              example_group = example_group[:parent_example_group]
            end
          end
        else
          until example_group[:parent_example_group].nil?
            example_group = example_group[:parent_example_group]
          end
        end
        example_group[:file_path]
      end

      # Wraps every example with the KnapsackPro timer and logs the global
      # time once the suite finishes.
      def bind_time_tracker
        ::RSpec.configure do |config|
          config.around(:each) do |example|
            current_example_group =
              if ::RSpec.respond_to?(:current_example)
                ::RSpec.current_example.metadata[:example_group]
              else
                # Fallback when RSpec.current_example is unavailable.
                example.metadata
              end
            current_test_path = KnapsackPro::Adapters::RSpecAdapter.test_path(current_example_group)
            KnapsackPro.tracker.current_test_path =
              if KnapsackPro::Config::Env.rspec_split_by_test_examples? && KnapsackPro::Adapters::RSpecAdapter.slow_test_file?(RSpecAdapter, current_test_path)
                # Track per-example timing for slow files split by example.
                example.id
              else
                current_test_path
              end
            KnapsackPro.tracker.start_timer
            example.run
            KnapsackPro.tracker.stop_timer
          end
          config.after(:suite) do
            KnapsackPro.logger.debug(KnapsackPro::Presenter.global_time)
          end
        end
      end

      # Saves the KnapsackPro report after the suite finishes.
      def bind_save_report
        ::RSpec.configure do |config|
          config.after(:suite) do
            KnapsackPro::Report.save
          end
        end
      end

      # Runs the before-queue hook exactly once per process, guarded by an
      # environment flag.
      def bind_before_queue_hook
        ::RSpec.configure do |config|
          config.before(:suite) do
            unless ENV['KNAPSACK_PRO_BEFORE_QUEUE_HOOK_CALLED']
              ENV['KNAPSACK_PRO_BEFORE_QUEUE_HOOK_CALLED'] = 'true'
              KnapsackPro::Hooks::Queue.call_before_queue
            end
          end
        end
      end
    end

    # This is added to provide backwards compatibility
    # In case someone is doing switch from knapsack gem to the knapsack_pro gem
    # and didn't notice the class name changed
    class RspecAdapter < RSpecAdapter
    end
  end
end
|
require "logstash/namespace"
require "logstash/outputs/base"
require "stud/buffer"
# This output lets you store logs in elasticsearch.
#
# This plugin uses the HTTP/REST interface to ElasticSearch, which usually
# lets you use any version of elasticsearch server. It is known to work
# with elasticsearch %ELASTICSEARCH_VERSION%
#
# You can learn more about elasticsearch at <http://elasticsearch.org>
# Buffered HTTP bulk output: events are queued through Stud::Buffer and sent
# to the elasticsearch _bulk endpoint in batches.
class LogStash::Outputs::ElasticSearchHTTP < LogStash::Outputs::Base
  include Stud::Buffer
  config_name "elasticsearch_http"
  milestone 2

  # The index to write events to. This can be dynamic using the %{foo} syntax.
  # The default value will partition your indices by day so you can more easily
  # delete old data or only search specific date ranges.
  config :index, :validate => :string, :default => "logstash-%{+YYYY.MM.dd}"

  # The index type to write events to. Generally you should try to write only
  # similar events to the same 'type'. String expansion '%{foo}' works here.
  config :index_type, :validate => :string

  # The hostname or ip address to reach your elasticsearch server.
  config :host, :validate => :string, :required => true

  # The port for ElasticSearch HTTP interface to use.
  config :port, :validate => :number, :default => 9200

  # Set the number of events to queue up before writing to elasticsearch.
  config :flush_size, :validate => :number, :default => 100

  # The document ID for the index. Useful for overwriting existing entries in
  # elasticsearch with the same ID.
  config :document_id, :validate => :string, :default => nil

  # The amount of time since last flush before a flush is forced.
  config :idle_flush_time, :validate => :number, :default => 1

  public
  def register
    require "ftw" # gem ftw
    @agent = FTW::Agent.new
    # NOTE(review): @queue is only read in the error log inside #post and is
    # never appended to — looks vestigial; confirm before removing.
    @queue = []
    buffer_initialize(
      :max_items => @flush_size,
      :max_interval => @idle_flush_time,
      :logger => @logger
    )
  end # def register

  public
  # Queue an event for the next bulk flush.
  def receive(event)
    return unless output?(event)
    # NOTE(review): `index` and `type` are bare method calls whose receivers
    # are not visible here, and #flush recomputes both values anyway — verify
    # they resolve on the base class.
    buffer_receive([event, index, type])
  end # def receive

  # Build the bulk request body (one action-metadata line plus one event line
  # per event) and POST it. Invoked by Stud::Buffer when the buffer fills or
  # goes idle.
  def flush(events, teardown=false)
    # Avoid creating a new string for newline every time
    newline = "\n".freeze
    body = events.collect do |event, index, type|
      index = event.sprintf(@index)
      # Set the 'type' value for the index.
      if @index_type.nil?
        type = event["type"] || "logs"
      else
        type = event.sprintf(@index_type)
      end
      header = { "index" => { "_index" => index, "_type" => type } }
      header["index"]["_id"] = event.sprintf(@document_id) if !@document_id.nil?
      [ header.to_json, newline, event.to_json, newline ]
    end.flatten
    post(body.join(""))
  end # def flush

  # POST the bulk body; EOF errors abort the flush with a warning. The
  # response body is drained so the connection can be reused.
  def post(body)
    begin
      response = @agent.post!("http://#{@host}:#{@port}/_bulk", :body => body)
    rescue EOFError
      @logger.warn("EOF while writing request or reading response header from elasticsearch",
                   :host => @host, :port => @port)
      return # abort this flush
    end
    # Consume the body for error checking
    # This will also free up the connection for reuse.
    # (Reuses the `body` local: the request body is no longer needed here.)
    body = ""
    begin
      response.read_body { |chunk| body += chunk }
    rescue EOFError
      @logger.warn("EOF while reading response body from elasticsearch",
                   :host => @host, :port => @port)
      return # abort this flush
    end
    if response.status != 200
      @logger.error("Error writing (bulk) to elasticsearch",
                    :response => response, :response_body => body,
                    :request_body => @queue.join("\n"))
      return
    end
  end # def post

  # Flush any buffered events on shutdown.
  def teardown
    buffer_flush(:final => true)
  end # def teardown
end # class LogStash::Outputs::ElasticSearchHTTP
- allow using asynchronous replication setting in index requests
require "logstash/namespace"
require "logstash/outputs/base"
require "stud/buffer"
# This output lets you store logs in elasticsearch.
#
# This plugin uses the HTTP/REST interface to ElasticSearch, which usually
# lets you use any version of elasticsearch server. It is known to work
# with elasticsearch %ELASTICSEARCH_VERSION%
#
# You can learn more about elasticsearch at <http://elasticsearch.org>
# Buffered HTTP bulk output: events are queued through Stud::Buffer and sent
# to the elasticsearch _bulk endpoint in batches.
class LogStash::Outputs::ElasticSearchHTTP < LogStash::Outputs::Base
  include Stud::Buffer
  config_name "elasticsearch_http"
  milestone 2

  # The index to write events to. This can be dynamic using the %{foo} syntax.
  # The default value will partition your indices by day so you can more easily
  # delete old data or only search specific date ranges.
  config :index, :validate => :string, :default => "logstash-%{+YYYY.MM.dd}"

  # The index type to write events to. Generally you should try to write only
  # similar events to the same 'type'. String expansion '%{foo}' works here.
  config :index_type, :validate => :string

  # The hostname or ip address to reach your elasticsearch server.
  config :host, :validate => :string, :required => true

  # The port for ElasticSearch HTTP interface to use.
  config :port, :validate => :number, :default => 9200

  # Set the number of events to queue up before writing to elasticsearch.
  config :flush_size, :validate => :number, :default => 100

  # The document ID for the index. Useful for overwriting existing entries in
  # elasticsearch with the same ID.
  config :document_id, :validate => :string, :default => nil

  # The amount of time since last flush before a flush is forced.
  config :idle_flush_time, :validate => :number, :default => 1

  # Make replicated writes asynchronous. If true, this will cause
  # the index request to elasticsearch to return after the primary
  # shards have been written. If false (default), index requests
  # will wait until the primary and the replica shards have been
  # written.
  config :asynchronous_replication, :validate => :boolean, :default => false

  public
  def register
    require "ftw" # gem ftw
    @agent = FTW::Agent.new
    @queue = []
    # Pre-compute the bulk endpoint; async replication is expressed as a
    # query-string parameter.
    query = @asynchronous_replication ? "?replication=async" : ""
    @bulk_url = "http://#{@host}:#{@port}/_bulk#{query}"
    buffer_initialize(
      :max_items => @flush_size,
      :max_interval => @idle_flush_time,
      :logger => @logger
    )
  end # def register

  public
  # Queue an event for the next bulk flush.
  def receive(event)
    return unless output?(event)
    buffer_receive([event, index, type])
  end # def receive

  # Serialize the buffered events into a bulk request body (action-metadata
  # line plus event line per event) and POST it.
  def flush(events, teardown=false)
    newline = "\n".freeze
    payload = events.flat_map do |event, _index, _type|
      index_name = event.sprintf(@index)
      # Pick the document type: explicit config wins, else the event's own
      # type, else "logs".
      event_type =
        if @index_type.nil?
          event["type"] || "logs"
        else
          event.sprintf(@index_type)
        end
      action = { "index" => { "_index" => index_name, "_type" => event_type } }
      action["index"]["_id"] = event.sprintf(@document_id) unless @document_id.nil?
      [action.to_json, newline, event.to_json, newline]
    end
    post(payload.join(""))
  end # def flush

  # POST the bulk body; EOF errors abort the flush with a warning. The
  # response body is drained so the connection can be reused.
  def post(body)
    begin
      response = @agent.post!(@bulk_url, :body => body)
    rescue EOFError
      @logger.warn("EOF while writing request or reading response header from elasticsearch",
                   :host => @host, :port => @port)
      return # abort this flush
    end

    response_body = ""
    begin
      response.read_body { |chunk| response_body << chunk }
    rescue EOFError
      @logger.warn("EOF while reading response body from elasticsearch",
                   :host => @host, :port => @port)
      return # abort this flush
    end

    return if response.status == 200
    @logger.error("Error writing (bulk) to elasticsearch",
                  :response => response, :response_body => response_body,
                  :request_body => @queue.join("\n"))
  end # def post

  # Flush any buffered events on shutdown.
  def teardown
    buffer_flush(:final => true)
  end # def teardown
end # class LogStash::Outputs::ElasticSearchHTTP
|
require 'helper'
# Tests for the attribute builder's rendering: default/disabled/custom
# formatting, custom labels and values, and block forms.
class TestAttribute < TestCase
  context "attribute" do
    setup do
      setup_fixtures
    end

    should "generate output" do
      expected = html <<-EOHTML
        <li class="attribute">
          <span class="label">Full name</span>
          <span class="value">Doe, John</span>
        </li>
      EOHTML
      actual = @user_builder.attribute(:full_name)
      assert_equal expected, actual
    end

    should "not show attribute when value is blank" do
      actual = @user_builder.attribute(:title)
      assert_nil actual
    end

    should "show attribute with :display_empty => true" do
      expected = html <<-EOHTML
        <li class="attribute">
          <span class="label">Title</span>
          <span class="value"></span>
        </li>
      EOHTML
      actual = @user_builder.attribute(:title, :display_empty => true)
      assert_equal expected, actual
    end

    context "with default formating" do
      should "properly format a String" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Author</span>
            <span class="value">Doe, John</span>
          </li>
        EOHTML
        actual = @blog_builder.attribute(:author_full_name, :label => "Author")
        assert_equal expected, actual
      end

      should "properly format a Date" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Birthday</span>
            <span class="value">1953-06-03</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:birthday)
        assert_equal expected, actual
      end

      should "properly format a DateTime" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Created at</span>
            <span class="value">Thu, 02 Jun 2011 12:06:42 +0000</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:created_at)
        assert_equal expected, actual
      end

      should "properly format a Time" do
        # BUG FIX: the expectation was hard-coded with a "+0100" offset, so
        # this test only passed in a UTC+1 local timezone. Build it from the
        # fixture's own time instead.
        time = @user.time.strftime("%a, %d %b %Y %H:%M:%S %z")
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Time</span>
            <span class="value">#{time}</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:time)
        assert_equal expected, actual
      end

      should "properly format a Float" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Float</span>
            <span class="value">54424.220</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:float)
        assert_equal expected, actual
      end

      should "properly format a Decimal" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Decimal</span>
            <span class="value">4454.344</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:decimal)
        assert_equal expected, actual
      end

      should "properly format a Integer" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Integer</span>
            <span class="value">45,453</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:integer)
        assert_equal expected, actual
      end
    end

    context "with default formating disabled" do
      should "properly format a String" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Author</span>
            <span class="value">Doe, John</span>
          </li>
        EOHTML
        actual = @blog_builder.attribute(:author_full_name, :label => "Author", :format => false)
        assert_equal expected, actual
      end

      should "properly format a Date" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Birthday</span>
            <span class="value">1953-06-03</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:birthday, :format => false)
        assert_equal expected, actual
      end

      should "properly format a DateTime" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Created at</span>
            <span class="value">2011-06-02T12:06:42+00:00</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:created_at, :format => false)
        assert_equal expected, actual
      end

      should "properly format a Time" do
        # BUG FIX: same timezone issue as above — Time#to_s renders in the
        # local zone, so interpolate the fixture value directly instead of
        # hard-coding a "+0100" string.
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Time</span>
            <span class="value">#{@user.time}</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:time, :format => false)
        assert_equal expected, actual
      end

      should "properly format a Float" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Float</span>
            <span class="value">54424.22</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:float, :format => false)
        assert_equal expected, actual
      end

      should "properly format a Decimal" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Decimal</span>
            <span class="value">4454.3435</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:decimal, :format => false)
        assert_equal expected, actual
      end

      should "properly format a Integer" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Integer</span>
            <span class="value">45453</span>
          </li>
        EOHTML
        actual = @user_builder.attribute(:integer, :format => false)
        assert_equal expected, actual
      end
    end

    context "with custom formating" do
      should "output the return value of called template's method" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Author</span>
            <span class="value">Hello, my name is Doe, John</span>
          </li>
        EOHTML
        def @template.hello(name)
          "Hello, my name is #{name}"
        end
        actual = @blog_builder.attribute(:author_full_name, :label => "Author", :format => :hello)
        assert_equal expected, actual
      end
    end

    should "show custom label" do
      expected = html <<-EOHTML
        <li class="attribute">
          <span class="label">Name</span>
          <span class="value">Doe, John</span>
        </li>
      EOHTML
      actual = @user_builder.attribute(:full_name, :label => "Name")
      assert_equal expected, actual
    end

    should "show custom value" do
      expected = html <<-EOHTML
        <li class="attribute">
          <span class="label">Full name</span>
          <span class="value">Sir Doe, John</span>
        </li>
      EOHTML
      actual = @user_builder.attribute(:full_name, :value => "Sir #{@user.full_name}")
      assert_equal expected, actual
    end

    should "use th custome value as hash key if it's a symbol and the attribute is a hash" do
      expected = html <<-EOHTML
        <li class="attribute">
          <span class="label">Address</span>
          <span class="value">Hellway 13</span>
        </li>
      EOHTML
      actual = @user_builder.attribute(:address, :value => :street)
      assert_equal expected, actual
    end

    should "use th custome value as a method it's a symbol and the attribute is not a hash" do
      expected = html <<-EOHTML
        <li class="attribute">
          <span class="label">Blog</span>
          <span class="value">IT Pro Blog</span>
        </li>
      EOHTML
      actual = @user_builder.attribute(:blog, :value => :name)
      assert_equal expected, actual
    end

    should "work with custom value blank" do
      assert_nil @user_builder.attribute(:full_name, :value => nil)
      assert_nil @user_builder.attribute(:full_name, :value => "")
      expected = html <<-EOHTML
        <li class="attribute">
          <span class="label">Full name</span>
          <span class="value"></span>
        </li>
      EOHTML
      actual = @user_builder.attribute(:full_name, :value => nil, :display_empty => true)
      assert_equal expected, actual
      actual = @user_builder.attribute(:full_name, :value => "", :display_empty => true)
      assert_equal expected, actual
    end

    context "with block" do
      should "yield block" do
        block_run = false
        @user_builder.attribute :full_name do
          block_run = true
        end
        assert block_run
      end

      should "generate output" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Full name</span>
            <span class="value">John Doe!!!</span>
          </li>
        EOHTML
        actual = @user_builder.attribute :full_name do
          @user_builder.template.output_buffer << "John Doe"
          3.times do
            @user_builder.template.output_buffer << "!"
          end
        end
        assert_equal expected, actual
      end

      should "show custom label" do
        expected = html <<-EOHTML
          <li class="attribute">
            <span class="label">Full name</span>
            <span class="value">John Doe!!!</span>
          </li>
        EOHTML
        actual = @user_builder.attribute :label => "Full name" do
          @user_builder.template.output_buffer << "John Doe"
          3.times do
            @user_builder.template.output_buffer << "!"
          end
        end
        assert_equal expected, actual
      end
    end
  end
end
Allow tests to run in different timezones
require 'helper'
class TestAttribute < TestCase
context "attribute" do
setup do
setup_fixtures
end
should "generate output" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Full name</span>
<span class="value">Doe, John</span>
</li>
EOHTML
actual = @user_builder.attribute(:full_name)
assert_equal expected, actual
end
should "not show attribute when value is blank" do
actual = @user_builder.attribute(:title)
assert_nil actual
end
should "show attribute with :display_empty => true" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Title</span>
<span class="value"></span>
</li>
EOHTML
actual = @user_builder.attribute(:title, :display_empty => true)
assert_equal expected, actual
end
context "with default formating" do
should "properly format a String" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Author</span>
<span class="value">Doe, John</span>
</li>
EOHTML
actual = @blog_builder.attribute(:author_full_name, :label => "Author")
assert_equal expected, actual
end
should "properly format a Date" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Birthday</span>
<span class="value">1953-06-03</span>
</li>
EOHTML
actual = @user_builder.attribute(:birthday)
assert_equal expected, actual
end
should "properly format a DateTime" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Created at</span>
<span class="value">Thu, 02 Jun 2011 12:06:42 +0000</span>
</li>
EOHTML
actual = @user_builder.attribute(:created_at)
assert_equal expected, actual
end
should "properly format a Time" do
time = @user.time.strftime("%a, %d %b %Y %H:%M:%S %z")
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Time</span>
<span class="value">#{time}</span>
</li>
EOHTML
actual = @user_builder.attribute(:time)
assert_equal expected, actual
end
should "properly format a Float" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Float</span>
<span class="value">54424.220</span>
</li>
EOHTML
actual = @user_builder.attribute(:float)
assert_equal expected, actual
end
should "properly format a Decimal" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Decimal</span>
<span class="value">4454.344</span>
</li>
EOHTML
actual = @user_builder.attribute(:decimal)
assert_equal expected, actual
end
should "properly format a Integer" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Integer</span>
<span class="value">45,453</span>
</li>
EOHTML
actual = @user_builder.attribute(:integer)
assert_equal expected, actual
end
end
context "with default formating disabled" do
should "properly format a String" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Author</span>
<span class="value">Doe, John</span>
</li>
EOHTML
actual = @blog_builder.attribute(:author_full_name, :label => "Author", :format => false)
assert_equal expected, actual
end
should "properly format a Date" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Birthday</span>
<span class="value">1953-06-03</span>
</li>
EOHTML
actual = @user_builder.attribute(:birthday, :format => false)
assert_equal expected, actual
end
should "properly format a DateTime" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Created at</span>
<span class="value">2011-06-02T12:06:42+00:00</span>
</li>
EOHTML
actual = @user_builder.attribute(:created_at, :format => false)
assert_equal expected, actual
end
should "properly format a Time" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Time</span>
<span class="value">#{@user.time}</span>
</li>
EOHTML
actual = @user_builder.attribute(:time, :format => false)
assert_equal expected, actual
end
should "properly format a Float" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Float</span>
<span class="value">54424.22</span>
</li>
EOHTML
actual = @user_builder.attribute(:float, :format => false)
assert_equal expected, actual
end
should "properly format a Decimal" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Decimal</span>
<span class="value">4454.3435</span>
</li>
EOHTML
actual = @user_builder.attribute(:decimal, :format => false)
assert_equal expected, actual
end
should "properly format a Integer" do
expected = html <<-EOHTML
<li class="attribute">
<span class="label">Integer</span>
<span class="value">45453</span>
</li>
EOHTML
actual = @user_builder.attribute(:integer, :format => false)
assert_equal expected, actual
end
end
# NOTE(review): "formating" is a typo for "formatting"; left unchanged
# because the description string is part of the generated test name.
context "with custom formating" do
  should "output the return value of called template's method" do
    expected = html <<-EOHTML
<li class="attribute">
<span class="label">Author</span>
<span class="value">Hello, my name is Doe, John</span>
</li>
EOHTML
    # Define a formatter method on the template's singleton; passing
    # :format => :hello makes the builder route the raw value through it.
    def @template.hello(name)
      "Hello, my name is #{name}"
    end
    actual = @blog_builder.attribute(:author_full_name, :label => "Author", :format => :hello)
    assert_equal expected, actual
  end
end
# Overriding the rendered label/value via the :label and :value options.
should "show custom label" do
  expected = html <<-EOHTML
<li class="attribute">
<span class="label">Name</span>
<span class="value">Doe, John</span>
</li>
EOHTML
  actual = @user_builder.attribute(:full_name, :label => "Name")
  assert_equal expected, actual
end

should "show custom value" do
  expected = html <<-EOHTML
<li class="attribute">
<span class="label">Full name</span>
<span class="value">Sir Doe, John</span>
</li>
EOHTML
  actual = @user_builder.attribute(:full_name, :value => "Sir #{@user.full_name}")
  assert_equal expected, actual
end

# NOTE(review): "th custome" below is a typo for "the custom"; left
# unchanged in both descriptions because they are the tests' names.
should "use th custome value as hash key if it's a symbol and the attribute is a hash" do
  expected = html <<-EOHTML
<li class="attribute">
<span class="label">Address</span>
<span class="value">Hellway 13</span>
</li>
EOHTML
  # :address is a Hash, so a symbolic :value is used as a key lookup.
  actual = @user_builder.attribute(:address, :value => :street)
  assert_equal expected, actual
end

should "use th custome value as a method it's a symbol and the attribute is not a hash" do
  # :blog is not a Hash, so a symbolic :value is sent as a method.
  expected = html <<-EOHTML
<li class="attribute">
<span class="label">Blog</span>
<span class="value">IT Pro Blog</span>
</li>
EOHTML
  actual = @user_builder.attribute(:blog, :value => :name)
  assert_equal expected, actual
end

should "work with custom value blank" do
  # A blank :value suppresses the row (nil) unless :display_empty is
  # set, in which case an empty value span is still rendered.
  assert_nil @user_builder.attribute(:full_name, :value => nil)
  assert_nil @user_builder.attribute(:full_name, :value => "")
  expected = html <<-EOHTML
<li class="attribute">
<span class="label">Full name</span>
<span class="value"></span>
</li>
EOHTML
  actual = @user_builder.attribute(:full_name, :value => nil, :display_empty => true)
  assert_equal expected, actual
  actual = @user_builder.attribute(:full_name, :value => "", :display_empty => true)
  assert_equal expected, actual
end
# Block form: the attribute's value is whatever the block writes into
# the template's output buffer.
context "with block" do
  should "yield block" do
    block_run = false
    @user_builder.attribute :full_name do
      block_run = true
    end
    assert block_run
  end

  should "generate output" do
    expected = html <<-EOHTML
<li class="attribute">
<span class="label">Full name</span>
<span class="value">John Doe!!!</span>
</li>
EOHTML
    # The block appends directly to the output buffer; the builder
    # captures that content as the value span.
    actual = @user_builder.attribute :full_name do
      @user_builder.template.output_buffer << "John Doe"
      3.times do
        @user_builder.template.output_buffer << "!"
      end
    end
    assert_equal expected, actual
  end

  should "show custom label" do
    # With only a :label option (no attribute name), the label comes
    # straight from the option while the block supplies the value.
    expected = html <<-EOHTML
<li class="attribute">
<span class="label">Full name</span>
<span class="value">John Doe!!!</span>
</li>
EOHTML
    actual = @user_builder.attribute :label => "Full name" do
      @user_builder.template.output_buffer << "John Doe"
      3.times do
        @user_builder.template.output_buffer << "!"
      end
    end
    assert_equal expected, actual
  end
end
end
end
|
require './test/test_helper'
require './lib/middleman-gemoji/converter'
# Tests for Middleman::Gemoji::Converter: :emoji: substitution, <img>
# tag generation, and base-path resolution against on-disk fixtures.
class TestConverter < Minitest::Test
  def setup
    # Point MM_ROOT at a fixture shipping the emoji images; restored in
    # #teardown so other test files see the original environment.
    @root = ENV['MM_ROOT']
    ENV['MM_ROOT'] = File.join(Dir.pwd, 'fixtures', 'gemoji-file-exist')
    @converter = converter
  end

  def teardown
    ENV['MM_ROOT'] = @root
  end

  # Builds a converter; keyword defaults mirror the extension defaults.
  def converter(app: Middleman::Application.new, size: nil, style: nil, emoji_dir: 'images/emoji')
    Middleman::Gemoji::Converter.new(app, {size: size, style: style, emoji_dir: emoji_dir})
  end

  def test_initialize
    assert_instance_of(Middleman::Application, @converter.app)
  end

  def test_initialize_raise_runtime_error
    assert_raises RuntimeError do
      converter(app: nil)
    end
  end

  def test_convert_received_blank
    assert_equal('', @converter.convert(''))
    # assert_nil: assert_equal(nil, ...) is deprecated in Minitest.
    assert_nil(@converter.convert(nil))
  end

  def test_emojify
    assert_equal(
      '<img class="gemoji" alt="+1" src="/images/emoji/unicode/1f44d.png" />',
      @converter.emojify(':+1:')
    )
  end

  def test_emojify_received_normal_string
    html = '<p>hoge</p>'
    # FIX: previously asserted against the undefined local `str`, which
    # raised a NameError; an emoji-free string must round-trip unchanged.
    assert_equal(html, @converter.emojify(html))
  end

  def test_emojify_inner_body
    # Only the <body> content is emojified; <head> text is untouched.
    html = "<html><head><title>something title :+1:</title></head><body>\n<p>somethig emoji :+1:</p>\n</body></html>"
    result = @converter.emojify_inner_body(html)
    assert_match(/1f44d.png/, result)
    assert_match(/something title :\+1:/, result)
  end

  def test_emojify_inner_body_received_normal_string
    html = '<p>hoge</p>'
    assert_equal(html, @converter.emojify_inner_body(html))
  end

  def test_has_body_return_true
    html = "<html><head><title>something title :+1:</title></head><body>\n<p>somethig emoji :+1:</p>\n</body></html>"
    assert_equal(true, @converter.has_body?(html))
  end

  def test_has_body_return_false
    html = '<p>somethig emoji :+1:</p>'
    assert_equal(false, @converter.has_body?(html))
  end

  def test_src
    path = @converter.src('unicode/1f44d.png')
    assert_equal('src="/images/emoji/unicode/1f44d.png"', path)
  end

  def test_src_with_cdn
    # Without local emoji files the src falls back to the GitHub CDN.
    ENV['MM_ROOT'] = File.join(@converter.app.root, 'fixtures', 'gemoji-file-not-exist')
    @converter.set_base_path
    path = @converter.src('unicode/1f44d.png')
    assert_equal(
      'src="https://assets-cdn.github.com/images/icons/emoji/unicode/1f44d.png"',
      path
    )
  end

  def test_size_rerutn_nil
    @converter.options[:size] = nil
    assert_nil(@converter.size)
  end

  def test_size_return_string
    @converter.options[:size] = 40
    assert_equal('width="40" height="40"', @converter.size)
  end

  def test_style_return_nil
    @converter.options[:style] = nil
    assert_nil(@converter.style)
  end

  def test_style_return_string
    @converter.options[:style] = 'padding-right: .2em'
    assert_equal('style="padding-right: .2em"', @converter.style)
  end

  def test_emoji_file_exist_return_true
    assert_equal(true, @converter.emoji_file_exist?)
  end

  def test_emoji_file_exist_return_false
    ENV['MM_ROOT'] = File.join(@converter.app.root, 'fixtures', 'gemoji-file-not-exist')
    assert_equal(false, @converter.emoji_file_exist?)
  end

  def test_set_base_path__cdn
    ENV['MM_ROOT'] = File.join(@converter.app.root, 'fixtures', 'gemoji-file-not-exist')
    @converter.set_base_path
    assert_equal(
      'https://assets-cdn.github.com/images/icons/emoji/',
      @converter.base_path
    )
  end

  def test_set_base_path__relative_path
    assert_equal('/images/emoji', @converter.base_path)
  end

  def test_set_base_path__full_path
    @converter.app.config[:http_prefix] = 'http://example.com/'
    @converter.set_base_path
    assert_equal('http://example.com/images/emoji', @converter.base_path)
  end
end
fix typo
require './test/test_helper'
require './lib/middleman-gemoji/converter'
# Tests for Middleman::Gemoji::Converter: :emoji: substitution, <img>
# tag generation, and base-path resolution against on-disk fixtures.
class TestConverter < Minitest::Test
  def setup
    # Point MM_ROOT at a fixture shipping the emoji images; restored in
    # #teardown so other test files see the original environment.
    @root = ENV['MM_ROOT']
    ENV['MM_ROOT'] = File.join(Dir.pwd, 'fixtures', 'gemoji-file-exist')
    @converter = converter
  end

  def teardown
    ENV['MM_ROOT'] = @root
  end

  # Builds a converter; keyword defaults mirror the extension defaults.
  def converter(app: Middleman::Application.new, size: nil, style: nil, emoji_dir: 'images/emoji')
    Middleman::Gemoji::Converter.new(app, {size: size, style: style, emoji_dir: emoji_dir})
  end

  def test_initialize
    assert_instance_of(Middleman::Application, @converter.app)
  end

  def test_initialize_raise_runtime_error
    assert_raises RuntimeError do
      converter(app: nil)
    end
  end

  def test_convert_received_blank
    assert_equal('', @converter.convert(''))
    # assert_nil: assert_equal(nil, ...) is deprecated in Minitest.
    assert_nil(@converter.convert(nil))
  end

  def test_emojify
    assert_equal(
      '<img class="gemoji" alt="+1" src="/images/emoji/unicode/1f44d.png" />',
      @converter.emojify(':+1:')
    )
  end

  def test_emojify_received_normal_string
    # A string without emoji codes must round-trip unchanged.
    html = '<p>hoge</p>'
    assert_equal(html, @converter.emojify(html))
  end

  def test_emojify_inner_body
    # Only the <body> content is emojified; <head> text is untouched.
    html = "<html><head><title>something title :+1:</title></head><body>\n<p>somethig emoji :+1:</p>\n</body></html>"
    result = @converter.emojify_inner_body(html)
    assert_match(/1f44d.png/, result)
    assert_match(/something title :\+1:/, result)
  end

  def test_emojify_inner_body_received_normal_string
    html = '<p>hoge</p>'
    assert_equal(html, @converter.emojify_inner_body(html))
  end

  def test_has_body_return_true
    html = "<html><head><title>something title :+1:</title></head><body>\n<p>somethig emoji :+1:</p>\n</body></html>"
    assert_equal(true, @converter.has_body?(html))
  end

  def test_has_body_return_false
    html = '<p>somethig emoji :+1:</p>'
    assert_equal(false, @converter.has_body?(html))
  end

  def test_src
    path = @converter.src('unicode/1f44d.png')
    assert_equal('src="/images/emoji/unicode/1f44d.png"', path)
  end

  def test_src_with_cdn
    # Without local emoji files the src falls back to the GitHub CDN.
    ENV['MM_ROOT'] = File.join(@converter.app.root, 'fixtures', 'gemoji-file-not-exist')
    @converter.set_base_path
    path = @converter.src('unicode/1f44d.png')
    assert_equal(
      'src="https://assets-cdn.github.com/images/icons/emoji/unicode/1f44d.png"',
      path
    )
  end

  def test_size_rerutn_nil
    @converter.options[:size] = nil
    assert_nil(@converter.size)
  end

  def test_size_return_string
    @converter.options[:size] = 40
    assert_equal('width="40" height="40"', @converter.size)
  end

  def test_style_return_nil
    @converter.options[:style] = nil
    assert_nil(@converter.style)
  end

  def test_style_return_string
    @converter.options[:style] = 'padding-right: .2em'
    assert_equal('style="padding-right: .2em"', @converter.style)
  end

  def test_emoji_file_exist_return_true
    assert_equal(true, @converter.emoji_file_exist?)
  end

  def test_emoji_file_exist_return_false
    ENV['MM_ROOT'] = File.join(@converter.app.root, 'fixtures', 'gemoji-file-not-exist')
    assert_equal(false, @converter.emoji_file_exist?)
  end

  def test_set_base_path__cdn
    ENV['MM_ROOT'] = File.join(@converter.app.root, 'fixtures', 'gemoji-file-not-exist')
    @converter.set_base_path
    assert_equal(
      'https://assets-cdn.github.com/images/icons/emoji/',
      @converter.base_path
    )
  end

  def test_set_base_path__relative_path
    assert_equal('/images/emoji', @converter.base_path)
  end

  def test_set_base_path__full_path
    @converter.app.config[:http_prefix] = 'http://example.com/'
    @converter.set_base_path
    assert_equal('http://example.com/images/emoji', @converter.base_path)
  end
end
|
# Copyright (C) 2007 Jan Dvorak <jan.dvorak@kraxnet.cz>
#
# This program is distributed under the terms of the MIT license.
# See the included MIT-LICENSE file for the terms of this license.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
require 'opengl/test_case'
# Exercises the NVIDIA OpenGL extensions exposed by the ruby-opengl
# bindings. Each test first gates on runtime extension support via the
# TestCase#supported? helper.
#
# Review fixes in this revision:
# * the second `test_gl_nv_gpu_program4` silently SHADOWED the first
#   (Ruby keeps only the last definition of a method name), so the
#   program-parameter assertions never ran — it is renamed to
#   `test_gl_nv_gpu_program4_geometry`;
# * removed no-op setup/teardown that only called super;
# * texture_cube is now deleted alongside the other test textures.
class TestGlExtNv < OpenGL::TestCase

  # GL_NV_vertex_program: program objects, vertex attrib setters in all
  # scalar/vector/typed variants, tracked matrices and residency.
  def test_gl_nv_vertex_program
    supported?("GL_NV_vertex_program")
    assert_equal(glIsProgramNV(0),false)
    programs = glGenProgramsNV(2)
    assert_equal(programs.size,2)

    program = "!!VSP1.0\nEND"
    glBindProgramNV(GL_VERTEX_PROGRAM_NV, programs[1])
    glLoadProgramNV(GL_VERTEX_STATE_PROGRAM_NV, programs[0], program)
    assert_equal(glGetProgramStringNV(programs[0], GL_PROGRAM_STRING_NV), program)
    assert_equal(glIsProgramNV(programs[0]),true)
    assert_equal(glGetProgramivNV(programs[0],GL_PROGRAM_LENGTH_NV),program.size)

    glVertexAttribPointerNV(1,2,GL_FLOAT,0,[1,1].pack("f*"))
    assert_equal(glGetVertexAttribPointervNV(1),[1,1].pack("f*"))
    glExecuteProgramNV(GL_VERTEX_STATE_PROGRAM_NV,programs[0],[1,1,1,1])

    # Program parameter setters: d/f scalar, dv/fv vector, and the
    # plural forms that write consecutive parameter slots.
    glProgramParameter4dNV(GL_VERTEX_PROGRAM_NV,1, 1,2,3,4)
    assert_equal(glGetProgramParameterdvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[1,2,3,4])
    glProgramParameter4fNV(GL_VERTEX_PROGRAM_NV,1, 5,6,7,8)
    assert_equal(glGetProgramParameterfvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[5,6,7,8])
    glProgramParameter4dvNV(GL_VERTEX_PROGRAM_NV,1, [1,2,3,4])
    assert_equal(glGetProgramParameterdvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[1,2,3,4])
    glProgramParameter4fvNV(GL_VERTEX_PROGRAM_NV,1, [5,6,7,8])
    assert_equal(glGetProgramParameterfvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[5,6,7,8])
    glProgramParameters4fvNV(GL_VERTEX_PROGRAM_NV,1, [1,2,3,4,5,6,7,8])
    assert_equal(glGetProgramParameterfvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[1,2,3,4])
    assert_equal(glGetProgramParameterfvNV(GL_VERTEX_PROGRAM_NV,2,GL_PROGRAM_PARAMETER_NV),[5,6,7,8])
    glProgramParameters4dvNV(GL_VERTEX_PROGRAM_NV,1, [8,7,6,5,4,3,2,1])
    assert_equal(glGetProgramParameterdvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[8,7,6,5])
    assert_equal(glGetProgramParameterdvNV(GL_VERTEX_PROGRAM_NV,2,GL_PROGRAM_PARAMETER_NV),[4,3,2,1])

    # Single-attrib setters: 1..4 components, d/f/s scalar and vector.
    glVertexAttrib1dNV(1,1)
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0],1)
    glVertexAttrib1fNV(1,2)
    assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0],2)
    glVertexAttrib1sNV(1,3)
    assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0],3)
    glVertexAttrib1dvNV(1,[1])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0],1)
    glVertexAttrib1fvNV(1,[2])
    assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0],2)
    glVertexAttrib1svNV(1,[3])
    assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0],3)
    glVertexAttrib2dNV(1,1,2)
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[1,2])
    glVertexAttrib2fNV(1,2,3)
    assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[2,3])
    glVertexAttrib2sNV(1,3,4)
    assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[3,4])
    glVertexAttrib2dvNV(1,[1,2])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[1,2])
    glVertexAttrib2fvNV(1,[2,3])
    assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[2,3])
    glVertexAttrib2svNV(1,[3,4])
    assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[3,4])
    glVertexAttrib3dNV(1,1,2,3)
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[1,2,3])
    glVertexAttrib3fNV(1,2,3,4)
    assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[2,3,4])
    glVertexAttrib3sNV(1,3,4,5)
    assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[3,4,5])
    glVertexAttrib3dvNV(1,[1,2,3])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[1,2,3])
    glVertexAttrib3fvNV(1,[2,3,4])
    assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[2,3,4])
    glVertexAttrib3svNV(1,[3,4,5])
    assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[3,4,5])
    glVertexAttrib4dNV(1,1,2,3,4)
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[1,2,3,4])
    glVertexAttrib4fNV(1,2,3,4,5)
    assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[2,3,4,5])
    glVertexAttrib4sNV(1,3,4,5,6)
    assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[3,4,5,6])
    glVertexAttrib4dvNV(1,[1,2,3,4])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[1,2,3,4])
    glVertexAttrib4fvNV(1,[2,3,4,5])
    assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[2,3,4,5])
    glVertexAttrib4svNV(1,[3,4,5,6])
    assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[3,4,5,6])
    # Unsigned-byte variants are normalized: 255 reads back as 1.0.
    glVertexAttrib4ubNV(1,2**8-1,0,2**8-1,0)
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[1,0,1,0])
    glVertexAttrib4ubvNV(1,[0,2**8-1,0,2**8-1])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[0,1,0,1])

    # Plural setters write consecutive attribute slots (1 and 2 here).
    glVertexAttribs1dvNV(1,[1,2])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0],1)
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0],2)
    glVertexAttribs1fvNV(1,[3,4])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0],3)
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0],4)
    glVertexAttribs1svNV(1,[5,6])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0],5)
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0],6)
    glVertexAttribs2dvNV(1,[1,2,3,4])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[1,2])
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,2],[3,4])
    glVertexAttribs2fvNV(1,[3,4,5,6])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[3,4])
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,2],[5,6])
    glVertexAttribs2svNV(1,[5,6,7,8])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[5,6])
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,2],[7,8])
    glVertexAttribs3dvNV(1,[1,2,3,4,5,6])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[1,2,3])
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,3],[4,5,6])
    glVertexAttribs3fvNV(1,[3,4,5,6,7,8])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[3,4,5])
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,3],[6,7,8])
    glVertexAttribs3svNV(1,[5,6,7,8,9,10])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[5,6,7])
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,3],[8,9,10])
    glVertexAttribs4dvNV(1,[1,2,3,4,5,6,7,8])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[1,2,3,4])
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,4],[5,6,7,8])
    glVertexAttribs4fvNV(1,[3,4,5,6,7,8,9,10])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[3,4,5,6])
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,4],[7,8,9,10])
    glVertexAttribs4svNV(1,[5,6,7,8,9,10,11,12])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[5,6,7,8])
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,4],[9,10,11,12])
    glVertexAttribs4ubvNV(1,[2**8-1,0,2**8-1,0,2**8-1,0,2**8-1,0])
    assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[1,0,1,0])
    assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,4],[1,0,1,0])

    glTrackMatrixNV(GL_VERTEX_PROGRAM_NV, 4, GL_MATRIX0_NV, GL_IDENTITY_NV)
    assert_equal(glGetTrackMatrixivNV(GL_VERTEX_PROGRAM_NV, 4, GL_TRACK_MATRIX_NV),GL_MATRIX0_NV)

    glRequestResidentProgramsNV(programs[0])
    res = glAreProgramsResidentNV(programs[0])
    assert_equal(res.size,1)

    glDeleteProgramsNV(programs)
  end

  # GL_NV_fragment_program: named program parameters (f/d, scalar/vector).
  def test_gl_nv_fragment_program
    supported?("GL_NV_fragment_program")
    programs = glGenProgramsNV(1)
    program = "!!FP1.0\nDECLARE test = {0,0,0,0};\nEND"
    glBindProgramNV(GL_FRAGMENT_PROGRAM_NV, programs[0])
    glLoadProgramNV(GL_FRAGMENT_PROGRAM_NV, programs[0], program)
    glProgramNamedParameter4fNV(programs[0],"test",1,2,3,4)
    assert_equal(glGetProgramNamedParameterfvNV(programs[0],"test"),[1,2,3,4])
    glProgramNamedParameter4dNV(programs[0],"test",5,6,7,8)
    assert_equal(glGetProgramNamedParameterdvNV(programs[0],"test"),[5,6,7,8])
    glProgramNamedParameter4fvNV(programs[0],"test",[1,2,3,4])
    assert_equal(glGetProgramNamedParameterfvNV(programs[0],"test"),[1,2,3,4])
    glProgramNamedParameter4dvNV(programs[0],"test",[5,6,7,8])
    assert_equal(glGetProgramNamedParameterdvNV(programs[0],"test"),[5,6,7,8])
    glDeleteProgramsNV(programs)
  end

  # GL_NV_framebuffer_multisample_coverage: coverage/color sample counts.
  def test_gl_nv_framebuffer_multisample_coverage
    supported?("GL_NV_framebuffer_multisample_coverage")
    rb = glGenRenderbuffersEXT(1)[0]
    glBindRenderbufferEXT(GL_RENDERBUFFER_EXT,rb)
    glRenderbufferStorageMultisampleCoverageNV(GL_RENDERBUFFER_EXT,4,4, GL_RGBA,2,2)
    samples = glGetRenderbufferParameterivEXT(GL_RENDERBUFFER_EXT,GL_RENDERBUFFER_COVERAGE_SAMPLES_NV)
    assert_equal(samples,4)
    samples = glGetRenderbufferParameterivEXT(GL_RENDERBUFFER_EXT,GL_RENDERBUFFER_COLOR_SAMPLES_NV)
    assert_equal(samples,4)
    glDeleteRenderbuffersEXT(rb)
  end

  # GL_NV_fence: fence creation, condition query, finish/test.
  def test_gl_nv_fence
    supported?("GL_NV_fence")
    fences = glGenFencesNV(2)
    assert_equal(fences.size,2)
    glSetFenceNV(fences[0],GL_ALL_COMPLETED_NV)
    assert_equal(glGetFenceivNV(fences[0],GL_FENCE_CONDITION_NV),GL_ALL_COMPLETED_NV)
    assert_equal(glIsFenceNV(fences[0]),true)
    glFinishFenceNV(fences[0])
    assert_equal(glTestFenceNV(fences[0]),true)
    glDeleteFencesNV(fences)
  end

  # GL_NV_depth_buffer_float: double-precision depth range/bounds/clear.
  def test_gl_nv_depth_buffer_float
    supported?("GL_NV_depth_buffer_float")
    # NOTE(review): assert(assert_in_delta(...)) with array arguments
    # relies on the custom helper in OpenGL::TestCase — verify it accepts
    # arrays before porting to stock Minitest.
    glDepthRangedNV(0.1,0.2)
    assert(assert_in_delta(glGetFloatv(GL_DEPTH_RANGE),[0.1,0.2]))
    glDepthBoundsdNV(0.1,0.2)
    assert(assert_in_delta(glGetFloatv(GL_DEPTH_BOUNDS_EXT),[0.1,0.2]))
    glClearDepthdNV(0.3)
    assert(assert_in_delta([glGetDoublev(GL_DEPTH_CLEAR_VALUE)],[0.3]))
  end

  # GL_NV_occlusion_query: begin/end, availability and count queries.
  def test_gl_nv_occlusion_query
    supported?("GL_NV_occlusion_query")
    queries = glGenOcclusionQueriesNV(2)
    assert_equal(queries.size,2)
    glBeginOcclusionQueryNV(queries[0])
    assert_equal(glIsOcclusionQueryNV(queries[0]),true)
    glEndOcclusionQueryNV()
    r = glGetOcclusionQueryivNV(queries[0],GL_PIXEL_COUNT_AVAILABLE_NV)
    assert(r==GL_TRUE || r==GL_FALSE)
    assert(glGetOcclusionQueryuivNV(queries[0],GL_PIXEL_COUNT_NV)>=0)
    glDeleteOcclusionQueriesNV(queries)
    assert_equal(glIsOcclusionQueryNV(queries[1]),false)
  end

  # GL_NV_gpu_program4: integer local/env program parameters on an ARB
  # vertex program (scalar, vector and plural setter forms).
  def test_gl_nv_gpu_program4
    supported?(["GL_NV_gpu_program4","GL_ARB_vertex_program"])
    programs = glGenProgramsARB(1)
    program = "!!ARBvp1.0\nTEMP vv;\nEND"
    glBindProgramARB(GL_VERTEX_PROGRAM_ARB, programs[0])
    glProgramStringARB(GL_VERTEX_PROGRAM_ARB, GL_PROGRAM_FORMAT_ASCII_ARB, program)
    glProgramLocalParameterI4iNV(GL_VERTEX_PROGRAM_ARB,1, 1,2,3,4)
    assert_equal(glGetProgramLocalParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
    glProgramLocalParameterI4uiNV(GL_VERTEX_PROGRAM_ARB,1, 5,6,7,8)
    assert_equal(glGetProgramLocalParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
    glProgramLocalParameterI4ivNV(GL_VERTEX_PROGRAM_ARB,1, [1,2,3,4])
    assert_equal(glGetProgramLocalParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
    glProgramLocalParameterI4uivNV(GL_VERTEX_PROGRAM_ARB,1, [5,6,7,8])
    assert_equal(glGetProgramLocalParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
    glProgramLocalParametersI4ivNV(GL_VERTEX_PROGRAM_ARB,1, [1,2,3,4])
    assert_equal(glGetProgramLocalParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
    glProgramLocalParametersI4uivNV(GL_VERTEX_PROGRAM_ARB,1, [5,6,7,8])
    assert_equal(glGetProgramLocalParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
    glProgramEnvParameterI4iNV(GL_VERTEX_PROGRAM_ARB,1, 1,2,3,4)
    assert_equal(glGetProgramEnvParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
    glProgramEnvParameterI4uiNV(GL_VERTEX_PROGRAM_ARB,1, 5,6,7,8)
    assert_equal(glGetProgramEnvParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
    glProgramEnvParameterI4ivNV(GL_VERTEX_PROGRAM_ARB,1, [1,2,3,4])
    assert_equal(glGetProgramEnvParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
    glProgramEnvParameterI4uivNV(GL_VERTEX_PROGRAM_ARB,1, [5,6,7,8])
    assert_equal(glGetProgramEnvParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
    glProgramEnvParametersI4ivNV(GL_VERTEX_PROGRAM_ARB,1, [1,2,3,4])
    assert_equal(glGetProgramEnvParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
    glProgramEnvParametersI4uivNV(GL_VERTEX_PROGRAM_ARB,1, [5,6,7,8])
    assert_equal(glGetProgramEnvParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
    glDeleteProgramsARB(programs)
  end

  # GL_NV_gpu_program4 geometry-program half. FIX: this method used to
  # also be named test_gl_nv_gpu_program4, which redefined (and thereby
  # disabled) the parameter tests above.
  def test_gl_nv_gpu_program4_geometry
    supported?(["GL_NV_gpu_program4","GL_EXT_framebuffer_object"])
    geometry_program = <<-EOP
!!NVgp4.0
PRIMITIVE_IN TRIANGLES;
PRIMITIVE_OUT TRIANGLE_STRIP;
VERTICES_OUT 1;
END
    EOP
    program_id = glGenProgramsARB(1)[0]
    glBindProgramARB(GL_GEOMETRY_PROGRAM_NV, program_id)
    glProgramStringARB(GL_GEOMETRY_PROGRAM_NV, GL_PROGRAM_FORMAT_ASCII_ARB, geometry_program)
    assert_equal(glGetProgramivARB(GL_GEOMETRY_PROGRAM_NV,GL_GEOMETRY_VERTICES_OUT_EXT),1)
    glProgramVertexLimitNV(GL_GEOMETRY_PROGRAM_NV, 2)
    assert_equal(glGetProgramivARB(GL_GEOMETRY_PROGRAM_NV,GL_GEOMETRY_VERTICES_OUT_EXT),2)

    # FBO attachment variants: whole texture, 3D layer, cube-map face.
    fbo = glGenFramebuffersEXT(1)[0]
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT,fbo)
    texture2d,texture3d,texture_cube = glGenTextures(3)
    glBindTexture(GL_TEXTURE_2D, texture2d)
    glBindTexture(GL_TEXTURE_3D, texture3d)
    glBindTexture(GL_TEXTURE_CUBE_MAP, texture_cube)
    glFramebufferTextureEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,texture2d,0)
    assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT, GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT),texture2d)
    assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT),0)
    glFramebufferTextureLayerEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,texture3d,0,1)
    assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT),texture3d)
    assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT),0)
    assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER_EXT),1)
    glFramebufferTextureFaceEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,texture_cube,0,GL_TEXTURE_CUBE_MAP_POSITIVE_X)
    assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT),texture_cube)
    assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT),0)
    assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_EXT),GL_TEXTURE_CUBE_MAP_POSITIVE_X)
    # FIX: texture_cube was previously leaked here.
    glDeleteTextures([texture2d,texture3d,texture_cube])
    glDeleteFramebuffersEXT(fbo)
    glDeleteProgramsARB(program_id)
  end

  # GL_NV_primitive_restart: restart call inside a primitive and the
  # restart-index state.
  def test_gl_nv_primitive_restart
    supported?("GL_NV_primitive_restart")
    glBegin(GL_TRIANGLES)
    glPrimitiveRestartNV()
    glEnd
    glPrimitiveRestartIndexNV(2)
    assert_equal(glGetIntegerv(GL_PRIMITIVE_RESTART_INDEX_NV),2)
  end

  # GL_NV_point_sprite: R-coordinate mode set via scalar and vector forms.
  def test_gl_nv_point_sprite
    supported?("GL_NV_point_sprite")
    glPointParameteriNV(GL_POINT_SPRITE_R_MODE_NV, GL_ZERO)
    assert_equal(glGetIntegerv(GL_POINT_SPRITE_R_MODE_NV),GL_ZERO)
    glPointParameteriNV(GL_POINT_SPRITE_R_MODE_NV, GL_S)
    assert_equal(glGetIntegerv(GL_POINT_SPRITE_R_MODE_NV),GL_S)
    glPointParameterivNV(GL_POINT_SPRITE_R_MODE_NV, [GL_ZERO])
    assert_equal(glGetIntegerv(GL_POINT_SPRITE_R_MODE_NV),GL_ZERO)
    glPointParameterivNV(GL_POINT_SPRITE_R_MODE_NV, [GL_S])
    assert_equal(glGetIntegerv(GL_POINT_SPRITE_R_MODE_NV),GL_S)
  end
end
Reindent gl-ext-nv tests
# Copyright (C) 2007 Jan Dvorak <jan.dvorak@kraxnet.cz>
#
# This program is distributed under the terms of the MIT license.
# See the included MIT-LICENSE file for the terms of this license.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
require 'opengl/test_case'
class TestGlExtNv < OpenGL::TestCase
# GL_NV_vertex_program: exercises program object lifecycle, every
# vertex-attribute setter variant (1..4 components; d/f/s/ub, scalar,
# vector and plural forms), tracked matrices and program residency.
# Statement order matters — each setter is immediately read back.
def test_gl_nv_vertex_program
  supported?("GL_NV_vertex_program")
  assert_equal(glIsProgramNV(0),false)
  programs = glGenProgramsNV(2)
  assert_equal(programs.size,2)
  program = "!!VSP1.0\nEND"
  glBindProgramNV(GL_VERTEX_PROGRAM_NV, programs[1])
  glLoadProgramNV(GL_VERTEX_STATE_PROGRAM_NV, programs[0], program)
  assert_equal(glGetProgramStringNV(programs[0], GL_PROGRAM_STRING_NV), program)
  assert_equal(glIsProgramNV(programs[0]),true)
  assert_equal(glGetProgramivNV(programs[0],GL_PROGRAM_LENGTH_NV),program.size)
  glVertexAttribPointerNV(1,2,GL_FLOAT,0,[1,1].pack("f*"))
  assert_equal(glGetVertexAttribPointervNV(1),[1,1].pack("f*"))
  glExecuteProgramNV(GL_VERTEX_STATE_PROGRAM_NV,programs[0],[1,1,1,1])
  # Program parameters: scalar, vector, and plural (consecutive-slot) forms.
  glProgramParameter4dNV(GL_VERTEX_PROGRAM_NV,1, 1,2,3,4)
  assert_equal(glGetProgramParameterdvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[1,2,3,4])
  glProgramParameter4fNV(GL_VERTEX_PROGRAM_NV,1, 5,6,7,8)
  assert_equal(glGetProgramParameterfvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[5,6,7,8])
  glProgramParameter4dvNV(GL_VERTEX_PROGRAM_NV,1, [1,2,3,4])
  assert_equal(glGetProgramParameterdvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[1,2,3,4])
  glProgramParameter4fvNV(GL_VERTEX_PROGRAM_NV,1, [5,6,7,8])
  assert_equal(glGetProgramParameterfvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[5,6,7,8])
  glProgramParameters4fvNV(GL_VERTEX_PROGRAM_NV,1, [1,2,3,4,5,6,7,8])
  assert_equal(glGetProgramParameterfvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[1,2,3,4])
  assert_equal(glGetProgramParameterfvNV(GL_VERTEX_PROGRAM_NV,2,GL_PROGRAM_PARAMETER_NV),[5,6,7,8])
  glProgramParameters4dvNV(GL_VERTEX_PROGRAM_NV,1, [8,7,6,5,4,3,2,1])
  assert_equal(glGetProgramParameterdvNV(GL_VERTEX_PROGRAM_NV,1,GL_PROGRAM_PARAMETER_NV),[8,7,6,5])
  assert_equal(glGetProgramParameterdvNV(GL_VERTEX_PROGRAM_NV,2,GL_PROGRAM_PARAMETER_NV),[4,3,2,1])
  # Single-attribute setters, 1..4 components.
  glVertexAttrib1dNV(1,1)
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0],1)
  glVertexAttrib1fNV(1,2)
  assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0],2)
  glVertexAttrib1sNV(1,3)
  assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0],3)
  glVertexAttrib1dvNV(1,[1])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0],1)
  glVertexAttrib1fvNV(1,[2])
  assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0],2)
  glVertexAttrib1svNV(1,[3])
  assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0],3)
  glVertexAttrib2dNV(1,1,2)
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[1,2])
  glVertexAttrib2fNV(1,2,3)
  assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[2,3])
  glVertexAttrib2sNV(1,3,4)
  assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[3,4])
  glVertexAttrib2dvNV(1,[1,2])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[1,2])
  glVertexAttrib2fvNV(1,[2,3])
  assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[2,3])
  glVertexAttrib2svNV(1,[3,4])
  assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[3,4])
  glVertexAttrib3dNV(1,1,2,3)
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[1,2,3])
  glVertexAttrib3fNV(1,2,3,4)
  assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[2,3,4])
  glVertexAttrib3sNV(1,3,4,5)
  assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[3,4,5])
  glVertexAttrib3dvNV(1,[1,2,3])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[1,2,3])
  glVertexAttrib3fvNV(1,[2,3,4])
  assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[2,3,4])
  glVertexAttrib3svNV(1,[3,4,5])
  assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[3,4,5])
  glVertexAttrib4dNV(1,1,2,3,4)
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[1,2,3,4])
  glVertexAttrib4fNV(1,2,3,4,5)
  assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[2,3,4,5])
  glVertexAttrib4sNV(1,3,4,5,6)
  assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[3,4,5,6])
  glVertexAttrib4dvNV(1,[1,2,3,4])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[1,2,3,4])
  glVertexAttrib4fvNV(1,[2,3,4,5])
  assert_equal(glGetVertexAttribfvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[2,3,4,5])
  glVertexAttrib4svNV(1,[3,4,5,6])
  assert_equal(glGetVertexAttribivNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[3,4,5,6])
  # Unsigned-byte variants are normalized: 255 reads back as 1.0.
  glVertexAttrib4ubNV(1,2**8-1,0,2**8-1,0)
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[1,0,1,0])
  glVertexAttrib4ubvNV(1,[0,2**8-1,0,2**8-1])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[0,1,0,1])
  # Plural setters write consecutive attribute slots (1 and 2 here).
  glVertexAttribs1dvNV(1,[1,2])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0],1)
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0],2)
  glVertexAttribs1fvNV(1,[3,4])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0],3)
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0],4)
  glVertexAttribs1svNV(1,[5,6])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0],5)
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0],6)
  glVertexAttribs2dvNV(1,[1,2,3,4])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[1,2])
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,2],[3,4])
  glVertexAttribs2fvNV(1,[3,4,5,6])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[3,4])
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,2],[5,6])
  glVertexAttribs2svNV(1,[5,6,7,8])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,2],[5,6])
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,2],[7,8])
  glVertexAttribs3dvNV(1,[1,2,3,4,5,6])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[1,2,3])
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,3],[4,5,6])
  glVertexAttribs3fvNV(1,[3,4,5,6,7,8])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[3,4,5])
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,3],[6,7,8])
  glVertexAttribs3svNV(1,[5,6,7,8,9,10])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,3],[5,6,7])
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,3],[8,9,10])
  glVertexAttribs4dvNV(1,[1,2,3,4,5,6,7,8])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[1,2,3,4])
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,4],[5,6,7,8])
  glVertexAttribs4fvNV(1,[3,4,5,6,7,8,9,10])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[3,4,5,6])
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,4],[7,8,9,10])
  glVertexAttribs4svNV(1,[5,6,7,8,9,10,11,12])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[5,6,7,8])
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,4],[9,10,11,12])
  glVertexAttribs4ubvNV(1,[2**8-1,0,2**8-1,0,2**8-1,0,2**8-1,0])
  assert_equal(glGetVertexAttribdvNV(1,GL_CURRENT_ATTRIB_NV)[0,4],[1,0,1,0])
  assert_equal(glGetVertexAttribdvNV(2,GL_CURRENT_ATTRIB_NV)[0,4],[1,0,1,0])
  glTrackMatrixNV(GL_VERTEX_PROGRAM_NV, 4, GL_MATRIX0_NV, GL_IDENTITY_NV);
  assert_equal(glGetTrackMatrixivNV(GL_VERTEX_PROGRAM_NV, 4, GL_TRACK_MATRIX_NV),GL_MATRIX0_NV)
  glRequestResidentProgramsNV(programs[0])
  res = glAreProgramsResidentNV(programs[0])
  assert_equal(res.size,1)
  glDeleteProgramsNV(programs)
end
# GL_NV_fragment_program: writes the named program parameter "test" via the
# f/d scalar and fv/dv vector setters, reading each value back with the
# matching glGetProgramNamedParameter{f,d}vNV call.
def test_gl_nv_fragment_program
supported?("GL_NV_fragment_program")
programs = glGenProgramsNV(1)
# Minimal fragment program that declares the named parameter "test".
program = "!!FP1.0\nDECLARE test = {0,0,0,0};\nEND"
glBindProgramNV(GL_FRAGMENT_PROGRAM_NV, programs[0])
glLoadProgramNV(GL_FRAGMENT_PROGRAM_NV, programs[0], program)
glProgramNamedParameter4fNV(programs[0],"test",1,2,3,4)
assert_equal(glGetProgramNamedParameterfvNV(programs[0],"test"),[1,2,3,4])
glProgramNamedParameter4dNV(programs[0],"test",5,6,7,8)
assert_equal(glGetProgramNamedParameterdvNV(programs[0],"test"),[5,6,7,8])
glProgramNamedParameter4fvNV(programs[0],"test",[1,2,3,4])
assert_equal(glGetProgramNamedParameterfvNV(programs[0],"test"),[1,2,3,4])
glProgramNamedParameter4dvNV(programs[0],"test",[5,6,7,8])
assert_equal(glGetProgramNamedParameterdvNV(programs[0],"test"),[5,6,7,8])
glDeleteProgramsNV(programs)
end
# GL_NV_framebuffer_multisample_coverage: allocates a 2x2 RGBA renderbuffer
# with 4 coverage samples and 4 color samples, then reads both counts back.
# NOTE(review): drivers are allowed to round sample counts up; these equality
# asserts assume the driver returns exactly 4 — confirm on target hardware.
def test_gl_nv_framebuffer_multisample_coverage
supported?("GL_NV_framebuffer_multisample_coverage")
rb = glGenRenderbuffersEXT(1)[0]
glBindRenderbufferEXT(GL_RENDERBUFFER_EXT,rb)
# (target, coverageSamples, colorSamples, internalformat, width, height)
glRenderbufferStorageMultisampleCoverageNV(GL_RENDERBUFFER_EXT,4,4, GL_RGBA,2,2)
samples = glGetRenderbufferParameterivEXT(GL_RENDERBUFFER_EXT,GL_RENDERBUFFER_COVERAGE_SAMPLES_NV)
assert_equal(samples,4)
samples = glGetRenderbufferParameterivEXT(GL_RENDERBUFFER_EXT,GL_RENDERBUFFER_COLOR_SAMPLES_NV)
assert_equal(samples,4)
glDeleteRenderbuffersEXT(rb)
end
# GL_NV_fence: generates two fences, sets and queries a fence condition,
# waits for completion, then deletes them.
def test_gl_nv_fence
supported?("GL_NV_fence")
fences = glGenFencesNV(2)
assert_equal(fences.size,2)
glSetFenceNV(fences[0],GL_ALL_COMPLETED_NV)
assert_equal(glGetFenceivNV(fences[0],GL_FENCE_CONDITION_NV),GL_ALL_COMPLETED_NV)
assert_equal(glIsFenceNV(fences[0]),true)
# glFinishFenceNV blocks until the fence is signalled, so the test below
# must observe a completed fence.
glFinishFenceNV(fences[0])
assert_equal(glTestFenceNV(fences[0]),true)
glDeleteFencesNV(fences)
end
# GL_NV_depth_buffer_float: double-precision depth range / bounds / clear
# setters, each read back with glGetFloatv/glGetDoublev.
# NOTE(review): assert_in_delta is called here as (actual, expected-array)
# and wrapped in assert — presumably a local helper defined elsewhere in
# this file, since stock test-unit's assert_in_delta takes
# (expected, actual, delta) scalars; verify before relying on it.
def test_gl_nv_depth_buffer_float
supported?("GL_NV_depth_buffer_float")
glDepthRangedNV(0.1,0.2)
assert(assert_in_delta(glGetFloatv(GL_DEPTH_RANGE),[0.1,0.2]))
glDepthBoundsdNV(0.1,0.2)
assert(assert_in_delta(glGetFloatv(GL_DEPTH_BOUNDS_EXT),[0.1,0.2]))
glClearDepthdNV(0.3)
assert(assert_in_delta([glGetDoublev(GL_DEPTH_CLEAR_VALUE)],[0.3]))
end
# GL_NV_occlusion_query: begins/ends a query, polls result availability and
# pixel count, and checks that deleted queries are no longer valid.
def test_gl_nv_occlusion_query
supported?("GL_NV_occlusion_query")
queries = glGenOcclusionQueriesNV(2)
assert_equal(queries.size,2)
glBeginOcclusionQueryNV(queries[0])
assert_equal(glIsOcclusionQueryNV(queries[0]),true)
glEndOcclusionQueryNV()
# Result availability is asynchronous, so either value is acceptable here.
r = glGetOcclusionQueryivNV(queries[0],GL_PIXEL_COUNT_AVAILABLE_NV)
assert(r==GL_TRUE || r==GL_FALSE)
assert(glGetOcclusionQueryuivNV(queries[0],GL_PIXEL_COUNT_NV)>=0)
glDeleteOcclusionQueriesNV(queries)
assert_equal(glIsOcclusionQueryNV(queries[1]),false)
end
# GL_NV_gpu_program4: signed/unsigned integer local and env parameter
# setters (scalar i/ui, vector iv/uiv, and array sI4iv/sI4uiv forms), each
# read back with the matching getter.
# NOTE(review): check that no other test method in this file reuses this
# name — Ruby silently replaces an earlier method definition of the same
# name, which would prevent this test from ever running.
def test_gl_nv_gpu_program4
supported?(["GL_NV_gpu_program4","GL_ARB_vertex_program"])
programs = glGenProgramsARB(1)
# Minimal vertex program so the parameter calls have a bound target.
program = "!!ARBvp1.0\nTEMP vv;\nEND"
glBindProgramARB(GL_VERTEX_PROGRAM_ARB, programs[0])
glProgramStringARB(GL_VERTEX_PROGRAM_ARB, GL_PROGRAM_FORMAT_ASCII_ARB, program)
glProgramLocalParameterI4iNV(GL_VERTEX_PROGRAM_ARB,1, 1,2,3,4)
assert_equal(glGetProgramLocalParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
glProgramLocalParameterI4uiNV(GL_VERTEX_PROGRAM_ARB,1, 5,6,7,8)
assert_equal(glGetProgramLocalParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
glProgramLocalParameterI4ivNV(GL_VERTEX_PROGRAM_ARB,1, [1,2,3,4])
assert_equal(glGetProgramLocalParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
glProgramLocalParameterI4uivNV(GL_VERTEX_PROGRAM_ARB,1, [5,6,7,8])
assert_equal(glGetProgramLocalParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
glProgramLocalParametersI4ivNV(GL_VERTEX_PROGRAM_ARB,1, [1,2,3,4])
assert_equal(glGetProgramLocalParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
glProgramLocalParametersI4uivNV(GL_VERTEX_PROGRAM_ARB,1, [5,6,7,8])
assert_equal(glGetProgramLocalParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
glProgramEnvParameterI4iNV(GL_VERTEX_PROGRAM_ARB,1, 1,2,3,4)
assert_equal(glGetProgramEnvParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
glProgramEnvParameterI4uiNV(GL_VERTEX_PROGRAM_ARB,1, 5,6,7,8)
assert_equal(glGetProgramEnvParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
glProgramEnvParameterI4ivNV(GL_VERTEX_PROGRAM_ARB,1, [1,2,3,4])
assert_equal(glGetProgramEnvParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
glProgramEnvParameterI4uivNV(GL_VERTEX_PROGRAM_ARB,1, [5,6,7,8])
assert_equal(glGetProgramEnvParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
glProgramEnvParametersI4ivNV(GL_VERTEX_PROGRAM_ARB,1, [1,2,3,4])
assert_equal(glGetProgramEnvParameterIivNV(GL_VERTEX_PROGRAM_ARB,1),[1,2,3,4])
glProgramEnvParametersI4uivNV(GL_VERTEX_PROGRAM_ARB,1, [5,6,7,8])
assert_equal(glGetProgramEnvParameterIuivNV(GL_VERTEX_PROGRAM_ARB,1),[5,6,7,8])
glDeleteProgramsARB(programs)
end
# GL_NV_gpu_program4 geometry-program path plus layered/face framebuffer
# attachments.
# Fix 1: renamed from test_gl_nv_gpu_program4 — a method with that exact
# name is defined earlier in this file, and Ruby silently replaces the
# earlier definition, so one of the two tests never ran.
# Fix 2: texture_cube was generated but never deleted (resource leak).
def test_gl_nv_gpu_program4_geometry
  supported?(["GL_NV_gpu_program4","GL_EXT_framebuffer_object"])
  # Minimal geometry program: triangles in, triangle strip out, 1 vertex.
  geometry_program = <<-EOP
!!NVgp4.0
PRIMITIVE_IN TRIANGLES;
PRIMITIVE_OUT TRIANGLE_STRIP;
VERTICES_OUT 1;
END
EOP
  program_id = glGenProgramsARB(1)[0]
  glBindProgramARB(GL_GEOMETRY_PROGRAM_NV, program_id)
  glProgramStringARB(GL_GEOMETRY_PROGRAM_NV, GL_PROGRAM_FORMAT_ASCII_ARB, geometry_program)
  assert_equal(glGetProgramivARB(GL_GEOMETRY_PROGRAM_NV,GL_GEOMETRY_VERTICES_OUT_EXT),1)
  # glProgramVertexLimitNV overrides the program's declared VERTICES_OUT.
  glProgramVertexLimitNV(GL_GEOMETRY_PROGRAM_NV, 2)
  assert_equal(glGetProgramivARB(GL_GEOMETRY_PROGRAM_NV,GL_GEOMETRY_VERTICES_OUT_EXT),2)
  # Attach a 2D texture, a 3D texture layer, and a cube-map face to the FBO,
  # reading back the attachment parameters after each call.
  fbo = glGenFramebuffersEXT(1)[0]
  glBindFramebufferEXT(GL_FRAMEBUFFER_EXT,fbo)
  texture2d,texture3d,texture_cube = glGenTextures(3)
  glBindTexture(GL_TEXTURE_2D, texture2d)
  glBindTexture(GL_TEXTURE_3D, texture3d)
  glBindTexture(GL_TEXTURE_CUBE_MAP, texture_cube)
  glFramebufferTextureEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,texture2d,0)
  assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT, GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT),texture2d)
  assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT),0)
  glFramebufferTextureLayerEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,texture3d,0,1)
  assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT),texture3d)
  assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT),0)
  assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER_EXT),1)
  glFramebufferTextureFaceEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,texture_cube,0,GL_TEXTURE_CUBE_MAP_POSITIVE_X)
  assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT),texture_cube)
  assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT),0)
  assert_equal(glGetFramebufferAttachmentParameterivEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_EXT),GL_TEXTURE_CUBE_MAP_POSITIVE_X)
  glDeleteTextures([texture2d,texture3d,texture_cube])
  glDeleteFramebuffersEXT(fbo)
  glDeleteProgramsARB(program_id)
end
# GL_NV_primitive_restart: glPrimitiveRestartNV is only legal inside a
# glBegin/glEnd pair; the restart index is set and read back outside.
def test_gl_nv_primitive_restart
supported?("GL_NV_primitive_restart")
glBegin(GL_TRIANGLES)
glPrimitiveRestartNV()
glEnd
glPrimitiveRestartIndexNV(2)
assert_equal(glGetIntegerv(GL_PRIMITIVE_RESTART_INDEX_NV),2)
end
# GL_NV_point_sprite: sets the point-sprite R mode via the scalar and
# vector setters, reading each value back with glGetIntegerv.
def test_gl_nv_point_sprite
supported?("GL_NV_point_sprite")
glPointParameteriNV(GL_POINT_SPRITE_R_MODE_NV, GL_ZERO)
assert_equal(glGetIntegerv(GL_POINT_SPRITE_R_MODE_NV),GL_ZERO)
glPointParameteriNV(GL_POINT_SPRITE_R_MODE_NV, GL_S)
assert_equal(glGetIntegerv(GL_POINT_SPRITE_R_MODE_NV),GL_S)
glPointParameterivNV(GL_POINT_SPRITE_R_MODE_NV, [GL_ZERO])
assert_equal(glGetIntegerv(GL_POINT_SPRITE_R_MODE_NV),GL_ZERO)
glPointParameterivNV(GL_POINT_SPRITE_R_MODE_NV, [GL_S])
assert_equal(glGetIntegerv(GL_POINT_SPRITE_R_MODE_NV),GL_S)
end
end
|
require 'omniauth/strategies/oauth2'
module OmniAuth
module Strategies
# OmniAuth strategy for Seacon Logistics' CAS server (OAuth 2.0).
class SeaconLogistics < OmniAuth::Strategies::OAuth2
option :name, :seaconlogistics
# CAS OAuth2 endpoints; the access token is fetched with GET and the
# token response is parsed as a query string rather than JSON.
option :client_options, {
site: 'https://cas.seaconlogistics.com',
authorize_url: 'https://cas.seaconlogistics.com/oauth2.0/authorize',
token_url: 'https://cas.seaconlogistics.com/oauth2.0/accessToken',
token_method: :get
}
option :token_params, {
parse: :query
}
# Profile payload from the CAS server, memoized after the first fetch.
def raw_info
@raw_info ||= access_token.get('/oauth2.0/profile').parsed
end
end
end
end
Changed the way the profile is requested
require 'omniauth/strategies/oauth2'
module OmniAuth
  module Strategies
    # OmniAuth strategy for Seacon Logistics' CAS server (OAuth 2.0).
    class SeaconLogistics < OmniAuth::Strategies::OAuth2
      option :name, :seaconlogistics

      # CAS OAuth2 endpoints; the access token is fetched with GET and the
      # token response is parsed as a query string rather than JSON.
      option :client_options, {
        site: 'https://cas.seaconlogistics.com',
        authorize_url: 'https://cas.seaconlogistics.com/oauth2.0/authorize',
        token_url: 'https://cas.seaconlogistics.com/oauth2.0/accessToken',
        token_method: :get
      }

      option :token_params, {
        parse: :query
      }

      # Profile payload from the CAS server, memoized. The token is sent as
      # an access_token query parameter (:mode => :query) instead of the
      # default Authorization header.
      # Fix: configure the token options once, inside the memoization, rather
      # than mutating access_token.options on every raw_info call.
      def raw_info
        @raw_info ||= begin
          access_token.options[:mode] = :query
          access_token.options[:param_name] = :access_token
          access_token.get('/oauth2.0/profile').parsed
        end
      end
    end
  end
end
|
# This looks at a YAML manifest of gems and JS to
# a) make sure that the manifest matches what we're using
# b) make sure that the manifest knows their licenses
# c) make sure those licenses aren't dangerous
module Papers
  # Validates config/dependency_manifest.yml: every gem/JS file in the app
  # must be listed, every listed entry must exist in the app, and every
  # license must be on the approved list.
  class DependencyLicenseValidator
    # Validation messages accumulated by the most recent #valid? run.
    attr_reader :errors

    def initialize
      @errors = []
    end

    # Runs all checks; returns true when the manifest is complete, consistent
    # with the app, and every license is acceptable.
    def valid?
      @errors = []
      validate_gems
      validate_js
      @errors.empty?
    end

    # Parsed manifest hash, memoized after the first read.
    def manifest
      @manifest ||= YAML.load_file(manifest_file)
    end

    # One pretty hash per gem in the manifest, sorted by name.
    def pretty_gem_list
      GemSpec.all_from_manifest(manifest).map(&:pretty_hash)
    end

    # One pretty hash per JS file in the manifest, sorted by name.
    def pretty_js_list
      JsSpec.all_from_manifest(manifest).map(&:pretty_hash)
    end

    private

    # Shared validation for one spec class (GemSpec or JsSpec): reports
    # entries missing from the manifest, entries unknown to the app, and
    # unacceptable licenses.
    def validate_spec_type(spec_type)
      spec_type.missing_from_manifest(manifest).each do |name|
        errors << "#{name} is in the app, but not in the manifest"
      end
      spec_type.unknown_in_manifest(manifest).each do |name|
        errors << "#{name} is in the manifest, but not in the app"
      end
      spec_type.all_from_manifest(manifest).each do |spec|
        errors << "#{spec.name} is licensed under #{spec.license}, which is not acceptable" unless spec.acceptable_license?
      end
    end

    def validate_gems
      validate_spec_type GemSpec
    end

    def validate_js
      validate_spec_type JsSpec
    end

    def manifest_file
      File.join Rails.root, "config", "dependency_manifest.yml"
    end

    # One dependency (gem or JS file) as described by the manifest.
    class DependencySpec
      attr_accessor :name, :license, :license_url, :project_url

      def initialize(options)
        @name = options[:name]
        @license = options[:license]
        @license_url = options[:license_url]
        @project_url = options[:project_url]
      end

      # Do not modify this without talking to Bjorn or Jade!
      GOOD_LICENSES = ["MIT", "BSD", "LGPLv2.1", "LGPLv3", "Ruby", "Apache 2.0", "Perl Artistic", "Artistic 2.0", "ISC", "New Relic", "None", "Manually reviewed"].freeze

      def acceptable_license?
        GOOD_LICENSES.include?(license)
      end

      protected

      # License name shown publicly; manual reviews are reported as "Other".
      def public_license_name
        license == 'Manually reviewed' ? 'Other' : license
      end

      # Builds one spec per manifest entry, sorted case-insensitively by name.
      def self.all_from_manifest(manifest)
        (manifest[manifest_key] || []).map do |name, info|
          new(name: name,
              license: info['license'],
              license_url: info['license_url'],
              project_url: info['project_url'])
        end.sort_by { |spec| spec.name.downcase }
      end

      # Names present in the app but absent from the manifest.
      def self.missing_from_manifest(manifest)
        introspected.to_set - all_from_manifest(manifest).map(&:name).to_set
      end

      # Names present in the manifest but absent from the app.
      def self.unknown_in_manifest(manifest)
        all_from_manifest(manifest).map(&:name).to_set - introspected.to_set
      end
    end

    class GemSpec < DependencySpec
      def pretty_hash
        { name: name_without_version, license: public_license_name, license_url: @license_url, project_url: @project_url }
      end

      # "rails-4.0.0" -> "rails"; names without a dash are returned as-is.
      def name_without_version
        return @name unless @name.include?('-')
        @name.split('-')[0..-2].join('-')
      end

      # "name-version" for every gem in the bundle; bundler itself is listed
      # bare because its version isn't controlled by the Gemfile.
      def self.introspected
        Bundler.load.specs.map do |spec|
          spec.name == "bundler" ? spec.name : "#{spec.name}-#{spec.version}"
        end
      end

      def self.manifest_key
        "gems"
      end
    end

    class JsSpec < DependencySpec
      def pretty_hash
        { name: @name, license: public_license_name, license_url: @license_url, project_url: @project_url }
      end

      # Every .js file under app/assets/javascripts, relative to Rails.root.
      def self.introspected
        dir = File.join(Rails.root, "app", "assets", "javascripts")
        root_regexp = /^#{Regexp.escape Rails.root.to_s}\//
        Dir["#{dir}/**/*.js"].map { |name| name.sub(root_regexp, '') }
      end

      def self.manifest_key
        "javascripts"
      end
    end
  end
end
Hybrid license on the whitelist for Mobile Activity Traces JS
File: app/javascripts/mobile_applications/activity_traces.js
Original source: https://gist.github.com/bunkat/1962173
License: MIT
Heavily modified by New Relic's awesome engineers.
# This looks at a YAML manifest of gems and JS to
# a) make sure that the manifest matches what we're using
# b) make sure that the manifest knows their licenses
# c) make sure those licenses aren't dangerous
module Papers
  # Validates config/dependency_manifest.yml: every gem/JS file in the app
  # must be listed, every listed entry must exist in the app, and every
  # license must be on the approved list.
  class DependencyLicenseValidator
    # Validation messages accumulated by the most recent #valid? run.
    attr_reader :errors

    def initialize
      @errors = []
    end

    # Runs all checks; returns true when the manifest is complete, consistent
    # with the app, and every license is acceptable.
    def valid?
      @errors = []
      validate_gems
      validate_js
      @errors.empty?
    end

    # Parsed manifest hash, memoized after the first read.
    def manifest
      @manifest ||= YAML.load_file(manifest_file)
    end

    # One pretty hash per gem in the manifest, sorted by name.
    def pretty_gem_list
      GemSpec.all_from_manifest(manifest).map(&:pretty_hash)
    end

    # One pretty hash per JS file in the manifest, sorted by name.
    def pretty_js_list
      JsSpec.all_from_manifest(manifest).map(&:pretty_hash)
    end

    private

    # Shared validation for one spec class (GemSpec or JsSpec): reports
    # entries missing from the manifest, entries unknown to the app, and
    # unacceptable licenses.
    def validate_spec_type(spec_type)
      spec_type.missing_from_manifest(manifest).each do |name|
        errors << "#{name} is in the app, but not in the manifest"
      end
      spec_type.unknown_in_manifest(manifest).each do |name|
        errors << "#{name} is in the manifest, but not in the app"
      end
      spec_type.all_from_manifest(manifest).each do |spec|
        errors << "#{spec.name} is licensed under #{spec.license}, which is not acceptable" unless spec.acceptable_license?
      end
    end

    def validate_gems
      validate_spec_type GemSpec
    end

    def validate_js
      validate_spec_type JsSpec
    end

    def manifest_file
      File.join Rails.root, "config", "dependency_manifest.yml"
    end

    # One dependency (gem or JS file) as described by the manifest.
    class DependencySpec
      attr_accessor :name, :license, :license_url, :project_url

      def initialize(options)
        @name = options[:name]
        @license = options[:license]
        @license_url = options[:license_url]
        @project_url = options[:project_url]
      end

      # Do not modify this without talking to Bjorn or Jade!
      GOOD_LICENSES = ["MIT", "MIT + New Relic", "BSD", "LGPLv2.1", "LGPLv3", "Ruby", "Apache 2.0", "Perl Artistic", "Artistic 2.0", "ISC", "New Relic", "None", "Manually reviewed"].freeze

      def acceptable_license?
        GOOD_LICENSES.include?(license)
      end

      protected

      # License name shown publicly; manual reviews are reported as "Other".
      def public_license_name
        license == 'Manually reviewed' ? 'Other' : license
      end

      # Builds one spec per manifest entry, sorted case-insensitively by name.
      def self.all_from_manifest(manifest)
        (manifest[manifest_key] || []).map do |name, info|
          new(name: name,
              license: info['license'],
              license_url: info['license_url'],
              project_url: info['project_url'])
        end.sort_by { |spec| spec.name.downcase }
      end

      # Names present in the app but absent from the manifest.
      def self.missing_from_manifest(manifest)
        introspected.to_set - all_from_manifest(manifest).map(&:name).to_set
      end

      # Names present in the manifest but absent from the app.
      def self.unknown_in_manifest(manifest)
        all_from_manifest(manifest).map(&:name).to_set - introspected.to_set
      end
    end

    class GemSpec < DependencySpec
      def pretty_hash
        { name: name_without_version, license: public_license_name, license_url: @license_url, project_url: @project_url }
      end

      # "rails-4.0.0" -> "rails"; names without a dash are returned as-is.
      def name_without_version
        return @name unless @name.include?('-')
        @name.split('-')[0..-2].join('-')
      end

      # "name-version" for every gem in the bundle; bundler itself is listed
      # bare because its version isn't controlled by the Gemfile.
      def self.introspected
        Bundler.load.specs.map do |spec|
          spec.name == "bundler" ? spec.name : "#{spec.name}-#{spec.version}"
        end
      end

      def self.manifest_key
        "gems"
      end
    end

    class JsSpec < DependencySpec
      def pretty_hash
        { name: @name, license: public_license_name, license_url: @license_url, project_url: @project_url }
      end

      # Every .js file under app/assets/javascripts, relative to Rails.root.
      def self.introspected
        dir = File.join(Rails.root, "app", "assets", "javascripts")
        root_regexp = /^#{Regexp.escape Rails.root.to_s}\//
        Dir["#{dir}/**/*.js"].map { |name| name.sub(root_regexp, '') }
      end

      def self.manifest_key
        "javascripts"
      end
    end
  end
end
|
gem "minitest"
require "minitest/autorun"
load "#{File.dirname($0)}/../later"
# Tests for LaterDate date-spec parsing. Test methods are generated at
# class-definition time with define_method from the hashes below, so the
# reference dates are class instance variables.
#
# Fixes in this revision:
# - `valids` contained a duplicate "day_of_month" key (Ruby keeps only the
#   last value for a repeated literal hash key), removed.
# - `nondoable` repeated "day_of_week_interval1"/"day_of_week_interval2";
#   the second pair used day-of-month (%d) selectors and silently overwrote
#   the day-of-week entries. They are renamed day_of_month_interval1/2 so
#   all four cases actually run.
# - Removed unused @@intervals and @@format class variables.
class LaterDateTest < Minitest::Test
  # Reference dates relative to the day the suite runs.
  @today = Date.today()
  @tomorrow = @today.next_day()
  @yesterday = @today.prev_day()
  @last_month = @today.prev_month()
  @next_month = @today.next_month()
  @last_year = @today.prev_year()

  # "YYYY-MM-DD..YYYY-MM-DD" range strings: entirely past, spanning today,
  # and entirely future.
  @range_past = @last_month.strftime("%F") + ".." + @yesterday.strftime("%F")
  @range_current = @last_month.strftime("%F") + ".." + @next_month.strftime("%F")
  @range_future = @tomorrow.strftime("%F") + ".." + @next_month.strftime("%F")

  def setup
  end

  # NOTE(review): not referenced in this file; confirm before removing.
  @@ranges = { "past" => @range_past, "current" => @range_current, "future" => @range_future }

  # Strings LaterDate must reject as invalid.
  invalids = {
    "weird_string" => "bad date",
    "without_date" => "Mon,Fri/3",
    "start_date_only" => "2014-20-01",
    "start_date_of_range" => "2014-20-01..2014-01-01",
    "end_date_of_range" => "2014-01-01..2014-20-01",
    "start_date_after_end_date" => "2014-08-01..2014-01-01;Mon",
    "date_range_without_selector" => "2014-01-01..2014-08-01",
    "start_and_end_date_of_range" => "2014-20-01..2014-21-01",
    "selector_mix_1" => "1,2,3,Mon",
    "selector_mix_2" => "Nov23,1,2,3,Mon",
    "selector_mix_1_with_date" => "2014-01-01;1,2,3,Mon",
    "selector_mix_2_with_date" => "2014-01-01;Nov23,1,2,3,Mon",
    "selector_mix_1_with_date_range" => "2014-01-01..2014-02-02;1,2,3,Mon",
    "selector_mix_2_with_date_range" => "2014-01-01..2014-02-02;Nov23,1,2,3,Mon"
  }
  invalids.each_pair do |invalid_name, invalid_param|
    define_method("test_invalid_#{invalid_name}") do
      @laterdate = LaterDate.new(invalid_param)
      assert_equal false, @laterdate.valid?
    end
  end

  # Strings LaterDate must accept as valid.
  valids = {
    "date_only" => "2014-08-01",
    "day_of_week" => "Mon,Thu,Fri",
    "day_of_month" => "1,14,20",
    "date_and_day_of_week" => "2013-08-04;Mon,Thu,Fri"
  }
  valids.each_pair do |valid_name, valid_param|
    define_method("test_valid_#{valid_name}") do
      @laterdate = LaterDate.new(valid_param)
      assert_equal true, @laterdate.valid?
    end
  end

  # Specs for which do_today? must be true today.
  doable = {
    "date_only" => @today.strftime("%F"),
    "day_of_week" => @today.strftime("%a"),
    "day_of_week_interval1" => @today.strftime("%F") + ";" + @today.strftime("%a") + "/2",
    "day_of_week_interval2" => @yesterday.strftime("%F") + ";" + @today.strftime("%a") + "/2",
    "day_of_week_interval3" => @last_month.strftime("%F") + ";" + @today.strftime("%a") + "/4",
    "days_of_week" => [ @yesterday.strftime("%a"), @today.strftime("%a"), @tomorrow.strftime("%a") ].join(','),
    "day_of_month" => @today.strftime("%d"),
    "day_of_month_interval1" => @today.strftime("%F") + ";" + @today.strftime("%d") + "/2",
    "day_of_month_interval2" => @last_month.prev_month().strftime("%F") + ";" + @today.strftime("%d") + "/2",
    "days_of_month" => [ @yesterday.strftime("%d"), @today.strftime("%d"), @tomorrow.strftime("%d") ].join(','),
    "month_day" => @today.strftime("%b%d"),
    "month_day_interval1" => @last_year.prev_year().strftime("%F") + ";" + @today.strftime("%b%d") + "/2",
    "month_days" => [ @yesterday.strftime("%b%d"), @today.strftime("%b%d"), @tomorrow.strftime("%b%d") ].join(','),
    "day_of_week_range" => "#{@range_current};" + @today.strftime("%a"),
    "days_of_week_range" => "#{@range_current};" + [ @yesterday.strftime("%a"), @today.strftime("%a"), @tomorrow.strftime("%a") ].join(','),
    "day_of_month_range" => "#{@range_current};" + @today.strftime("%d"),
    "days_of_month_range" => "#{@range_current};" + [ @yesterday.strftime("%d"), @today.strftime("%d"), @tomorrow.strftime("%d") ].join(','),
    "month_day_range" => "#{@range_current};" + @today.strftime("%b%d"),
    "month_days_range" => "#{@range_current};" + [ @yesterday.strftime("%b%d"), @today.strftime("%b%d"), @tomorrow.strftime("%b%d") ].join(',')
  }
  doable.each_pair do |do_name, do_param|
    define_method("test_do_#{do_name}") do
      @laterdate = LaterDate.new(do_param)
      assert_equal true, @laterdate.do_today?
    end
  end

  # Specs for which do_today? must be false today.
  nondoable = {
    "date_only" => @tomorrow.strftime("%F"),
    "day_of_week" => @tomorrow.strftime("%a"),
    "day_of_week_interval1" => @last_month.strftime("%F") + ";" + @today.strftime("%a") + "/3",
    "day_of_week_interval2" => @today.strftime("%F") + ";" + @tomorrow.strftime("%a") + "/2",
    "day_of_week_interval3" => @yesterday.strftime("%F") + ";" + @tomorrow.strftime("%a") + "/2",
    "day_of_week_interval4" => @last_month.strftime("%F") + ";" + @tomorrow.strftime("%a") + "/4",
    "days_of_week" => [ @yesterday.strftime("%a"), @tomorrow.strftime("%a") ].join(','),
    "day_of_month" => @tomorrow.strftime("%d"),
    "day_of_month_interval1" => @last_month.strftime("%F") + ";" + @today.strftime("%d") + "/3",
    "day_of_month_interval2" => @today.strftime("%F") + ";" + @tomorrow.strftime("%d") + "/2",
    "days_of_month" => [ @yesterday.strftime("%d"), @tomorrow.strftime("%d") ].join(','),
    "month_day" => @tomorrow.strftime("%b%d"),
    "month_day_interval1" => @last_year.strftime("%F") + ";" + @today.strftime("%b%d") + "/2",
    "month_days" => [ @yesterday.strftime("%b%d"), @tomorrow.strftime("%b%d") ].join(','),
    "day_of_week_range_past" => "#{@range_past};" + @today.strftime("%a"),
    "days_of_week_range_past" => "#{@range_past};" + [ @yesterday.strftime("%a"), @today.strftime("%a"), @tomorrow.strftime("%a") ].join(','),
    "day_of_month_range_past" => "#{@range_past};" + @today.strftime("%d"),
    "days_of_month_range_past" => "#{@range_past};" + [ @yesterday.strftime("%d"), @today.strftime("%d"), @tomorrow.strftime("%d") ].join(','),
    "month_day_range_past" => "#{@range_past};" + @today.strftime("%b%d"),
    "month_days_range_past" => "#{@range_past};" + [ @yesterday.strftime("%b%d"), @today.strftime("%b%d"), @tomorrow.strftime("%b%d") ].join(','),
    "day_of_week_range_future" => "#{@range_future};" + @today.strftime("%a"),
    "days_of_week_range_future" => "#{@range_future};" + [ @yesterday.strftime("%a"), @today.strftime("%a"), @tomorrow.strftime("%a") ].join(','),
    "day_of_month_range_future" => "#{@range_future};" + @today.strftime("%d"),
    "days_of_month_range_future" => "#{@range_future};" + [ @yesterday.strftime("%d"), @today.strftime("%d"), @tomorrow.strftime("%d") ].join(','),
    "month_day_range_future" => "#{@range_future};" + @today.strftime("%b%d"),
    "month_days_range_future" => "#{@range_future};" + [ @yesterday.strftime("%b%d"), @today.strftime("%b%d"), @tomorrow.strftime("%b%d") ].join(',')
  }
  nondoable.each_pair do |nodo_name, nodo_param|
    define_method("test_no_do_#{nodo_name}") do
      @laterdate = LaterDate.new(nodo_param)
      assert_equal false, @laterdate.do_today?
    end
  end
end
added tests for past_end?()
gem "minitest"
require "minitest/autorun"
load "#{File.dirname($0)}/../later"
# Tests for LaterDate date-spec parsing, including past_end?. Test methods
# are generated at class-definition time with define_method from the hashes
# below, so the reference dates are class instance variables.
#
# Fixes in this revision:
# - `valids` contained a duplicate "day_of_month" key (Ruby keeps only the
#   last value for a repeated literal hash key), removed.
# - `nondoable` repeated "day_of_week_interval1"/"day_of_week_interval2";
#   the second pair used day-of-month (%d) selectors and silently overwrote
#   the day-of-week entries. They are renamed day_of_month_interval1/2 so
#   all four cases actually run.
class LaterDateTest < Minitest::Test
  # Reference dates relative to the day the suite runs.
  @today = Date.today()
  @tomorrow = @today.next_day()
  @yesterday = @today.prev_day()
  @last_month = @today.prev_month()
  @next_month = @today.next_month()
  @last_year = @today.prev_year()

  def setup
  end

  # "YYYY-MM-DD..YYYY-MM-DD" range strings: entirely past, spanning today,
  # and entirely future.
  @range_past = @last_month.strftime("%F") + ".." + @yesterday.strftime("%F")
  @range_current = @last_month.strftime("%F") + ".." + @next_month.strftime("%F")
  @range_future = @tomorrow.strftime("%F") + ".." + @next_month.strftime("%F")

  # NOTE(review): not referenced in this file; confirm before removing.
  @@ranges = { "past" => @range_past, "current" => @range_current, "future" => @range_future }

  # Strings LaterDate must reject as invalid.
  invalids = {
    "weird_string" => "bad date",
    "without_date" => "Mon,Fri/3",
    "start_date_only" => "2014-20-01",
    "start_date_of_range" => "2014-20-01..2014-01-01",
    "end_date_of_range" => "2014-01-01..2014-20-01",
    "start_date_after_end_date" => "2014-08-01..2014-01-01;Mon",
    "date_range_without_selector" => "2014-01-01..2014-08-01",
    "start_and_end_date_of_range" => "2014-20-01..2014-21-01",
    "selector_mix_1" => "1,2,3,Mon",
    "selector_mix_2" => "Nov23,1,2,3,Mon",
    "selector_mix_1_with_date" => "2014-01-01;1,2,3,Mon",
    "selector_mix_2_with_date" => "2014-01-01;Nov23,1,2,3,Mon",
    "selector_mix_1_with_date_range" => "2014-01-01..2014-02-02;1,2,3,Mon",
    "selector_mix_2_with_date_range" => "2014-01-01..2014-02-02;Nov23,1,2,3,Mon"
  }
  invalids.each_pair do |invalid_name, invalid_param|
    define_method("test_invalid_#{invalid_name}") do
      @laterdate = LaterDate.new(invalid_param)
      assert_equal false, @laterdate.valid?
    end
  end

  # Strings LaterDate must accept as valid.
  valids = {
    "date_only" => "2014-08-01",
    "day_of_week" => "Mon,Thu,Fri",
    "day_of_month" => "1,14,20",
    "date_and_day_of_week" => "2013-08-04;Mon,Thu,Fri"
  }
  valids.each_pair do |valid_name, valid_param|
    define_method("test_valid_#{valid_name}") do
      @laterdate = LaterDate.new(valid_param)
      assert_equal true, @laterdate.valid?
    end
  end

  # Specs for which do_today? must be true today.
  doable = {
    "date_only" => @today.strftime("%F"),
    "day_of_week" => @today.strftime("%a"),
    "day_of_week_interval1" => @today.strftime("%F") + ";" + @today.strftime("%a") + "/2",
    "day_of_week_interval2" => @yesterday.strftime("%F") + ";" + @today.strftime("%a") + "/2",
    "day_of_week_interval3" => @last_month.strftime("%F") + ";" + @today.strftime("%a") + "/4",
    "days_of_week" => [ @yesterday.strftime("%a"), @today.strftime("%a"), @tomorrow.strftime("%a") ].join(','),
    "day_of_month" => @today.strftime("%d"),
    "day_of_month_interval1" => @today.strftime("%F") + ";" + @today.strftime("%d") + "/2",
    "day_of_month_interval2" => @last_month.prev_month().strftime("%F") + ";" + @today.strftime("%d") + "/2",
    "days_of_month" => [ @yesterday.strftime("%d"), @today.strftime("%d"), @tomorrow.strftime("%d") ].join(','),
    "month_day" => @today.strftime("%b%d"),
    "month_day_interval1" => @last_year.prev_year().strftime("%F") + ";" + @today.strftime("%b%d") + "/2",
    "month_days" => [ @yesterday.strftime("%b%d"), @today.strftime("%b%d"), @tomorrow.strftime("%b%d") ].join(','),
    "day_of_week_range" => "#{@range_current};" + @today.strftime("%a"),
    "days_of_week_range" => "#{@range_current};" + [ @yesterday.strftime("%a"), @today.strftime("%a"), @tomorrow.strftime("%a") ].join(','),
    "day_of_month_range" => "#{@range_current};" + @today.strftime("%d"),
    "days_of_month_range" => "#{@range_current};" + [ @yesterday.strftime("%d"), @today.strftime("%d"), @tomorrow.strftime("%d") ].join(','),
    "month_day_range" => "#{@range_current};" + @today.strftime("%b%d"),
    "month_days_range" => "#{@range_current};" + [ @yesterday.strftime("%b%d"), @today.strftime("%b%d"), @tomorrow.strftime("%b%d") ].join(',')
  }
  doable.each_pair do |do_name, do_param|
    define_method("test_do_#{do_name}") do
      @laterdate = LaterDate.new(do_param)
      assert_equal true, @laterdate.do_today?
    end
  end

  # Specs for which do_today? must be false today.
  nondoable = {
    "date_only" => @tomorrow.strftime("%F"),
    "day_of_week" => @tomorrow.strftime("%a"),
    "day_of_week_interval1" => @last_month.strftime("%F") + ";" + @today.strftime("%a") + "/3",
    "day_of_week_interval2" => @today.strftime("%F") + ";" + @tomorrow.strftime("%a") + "/2",
    "day_of_week_interval3" => @yesterday.strftime("%F") + ";" + @tomorrow.strftime("%a") + "/2",
    "day_of_week_interval4" => @last_month.strftime("%F") + ";" + @tomorrow.strftime("%a") + "/4",
    "days_of_week" => [ @yesterday.strftime("%a"), @tomorrow.strftime("%a") ].join(','),
    "day_of_month" => @tomorrow.strftime("%d"),
    "day_of_month_interval1" => @last_month.strftime("%F") + ";" + @today.strftime("%d") + "/3",
    "day_of_month_interval2" => @today.strftime("%F") + ";" + @tomorrow.strftime("%d") + "/2",
    "days_of_month" => [ @yesterday.strftime("%d"), @tomorrow.strftime("%d") ].join(','),
    "month_day" => @tomorrow.strftime("%b%d"),
    "month_day_interval1" => @last_year.strftime("%F") + ";" + @today.strftime("%b%d") + "/2",
    "month_days" => [ @yesterday.strftime("%b%d"), @tomorrow.strftime("%b%d") ].join(','),
    "day_of_week_range_past" => "#{@range_past};" + @today.strftime("%a"),
    "days_of_week_range_past" => "#{@range_past};" + [ @yesterday.strftime("%a"), @today.strftime("%a"), @tomorrow.strftime("%a") ].join(','),
    "day_of_month_range_past" => "#{@range_past};" + @today.strftime("%d"),
    "days_of_month_range_past" => "#{@range_past};" + [ @yesterday.strftime("%d"), @today.strftime("%d"), @tomorrow.strftime("%d") ].join(','),
    "month_day_range_past" => "#{@range_past};" + @today.strftime("%b%d"),
    "month_days_range_past" => "#{@range_past};" + [ @yesterday.strftime("%b%d"), @today.strftime("%b%d"), @tomorrow.strftime("%b%d") ].join(','),
    "day_of_week_range_future" => "#{@range_future};" + @today.strftime("%a"),
    "days_of_week_range_future" => "#{@range_future};" + [ @yesterday.strftime("%a"), @today.strftime("%a"), @tomorrow.strftime("%a") ].join(','),
    "day_of_month_range_future" => "#{@range_future};" + @today.strftime("%d"),
    "days_of_month_range_future" => "#{@range_future};" + [ @yesterday.strftime("%d"), @today.strftime("%d"), @tomorrow.strftime("%d") ].join(','),
    "month_day_range_future" => "#{@range_future};" + @today.strftime("%b%d"),
    "month_days_range_future" => "#{@range_future};" + [ @yesterday.strftime("%b%d"), @today.strftime("%b%d"), @tomorrow.strftime("%b%d") ].join(',')
  }
  nondoable.each_pair do |nodo_name, nodo_param|
    define_method("test_no_do_#{nodo_name}") do
      @laterdate = LaterDate.new(nodo_param)
      assert_equal false, @laterdate.do_today?
    end
  end

  # past_end? must be true only when the range lies entirely in the past.
  end_ranges = [
    [ "range_past", "#{@range_past};" + @today.strftime("%d"), true ],
    [ "range_current", "#{@range_current};" + @today.strftime("%d"), false ],
    [ "range_future", "#{@range_future};" + @today.strftime("%d"), false ]
  ]
  end_ranges.each do |range_name, range_param, expected|
    define_method("test_#{range_name}") do
      @laterdate = LaterDate.new(range_param)
      assert_equal expected, @laterdate.past_end?
    end
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem metadata for "continuity" (distributed scheduling of jobs).
Gem::Specification.new do |s|
s.name = "continuity"
s.version = "0.0.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Bob Potter"]
s.date = "2013-09-11"
s.description = "Distributed scheduling of jobs"
s.email = "bobby.potter@gmail.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
# Complete list of files shipped in the gem (maintained by jeweler).
s.files = [
".document",
"Gemfile",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"continuity.gemspec",
"examples/worker.rb",
"lib/continuity.rb",
"lib/continuity/cron_entry.rb",
"lib/continuity/periodic_entry.rb",
"lib/continuity/redis_backend.rb",
"lib/continuity/scheduler.rb",
"test/helper.rb",
"test/redis.conf",
"test/test_cron_entry.rb",
"test/test_periodic_entry.rb",
"test/test_race_issues.rb",
"test/test_redis_backend.rb",
"test/test_scheduler.rb"
]
s.homepage = "http://github.com/bpot/continuity"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "2.0.2"
s.summary = "Allows you to distribute job scheduling across a number of processes"
# Jeweler emits three equivalent dependency blocks to stay compatible with
# RubyGems versions that predate specification_version / typed dependencies.
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<redis>, [">= 0"])
s.add_development_dependency(%q<minitest>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.3.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.8.7"])
s.add_development_dependency(%q<simplecov>, [">= 0"])
else
s.add_dependency(%q<redis>, [">= 0"])
s.add_dependency(%q<minitest>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.3.0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
else
s.add_dependency(%q<redis>, [">= 0"])
s.add_dependency(%q<minitest>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.3.0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
end
Update to v0.0.1
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem metadata for "continuity" (distributed scheduling of jobs), v0.0.1.
Gem::Specification.new do |s|
s.name = "continuity"
s.version = "0.0.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Bob Potter"]
s.date = "2013-09-11"
s.description = "Distributed scheduling of jobs"
s.email = "bobby.potter@gmail.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
# Complete list of files shipped in the gem (maintained by jeweler).
s.files = [
".document",
"Gemfile",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"continuity.gemspec",
"examples/worker.rb",
"lib/continuity.rb",
"lib/continuity/cron_entry.rb",
"lib/continuity/periodic_entry.rb",
"lib/continuity/redis_backend.rb",
"lib/continuity/scheduler.rb",
"test/helper.rb",
"test/redis.conf",
"test/test_cron_entry.rb",
"test/test_periodic_entry.rb",
"test/test_race_issues.rb",
"test/test_redis_backend.rb",
"test/test_scheduler.rb"
]
s.homepage = "http://github.com/bpot/continuity"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "2.0.2"
s.summary = "Allows you to distribute job scheduling across a number of processes"
# Jeweler emits three equivalent dependency blocks to stay compatible with
# RubyGems versions that predate specification_version / typed dependencies.
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<redis>, [">= 0"])
s.add_development_dependency(%q<minitest>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.3.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.8.7"])
s.add_development_dependency(%q<simplecov>, [">= 0"])
else
s.add_dependency(%q<redis>, [">= 0"])
s.add_dependency(%q<minitest>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.3.0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
else
s.add_dependency(%q<redis>, [">= 0"])
s.add_dependency(%q<minitest>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.3.0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
end
|
# DO NOT STRUGGLE ON THIS PROBLEM FOR MORE THAN 30 MINUTES!!
# Define a class called User that keeps track of a person's username
# it should receive the username when initialized
# it should have a method add_blog which accepts a date and text
# it should have a method blogs which returns an array of all blogs the user has written
# they should be in reverse chronological order (newest first)
#
# Define a class called Blog that could be used to store an entry for your web log.
# The class should have a getter and setter methods: text , date , user
# its initialize method should receive the date, user , and text
# have a method called summary that returns the first 10 words from the text (or the entire text if it is less than 10 words)
#
# Two blogs should be equal to each other if they have the same user, date, and text
# here is a partially filled out example of how to define the == operator:
# def ==(other)
# return self.date == other.date
# end
# ========== EXAMPLE ==========
#
# lissa = User.new 'QTSort'
# lissa.username # => "QTSort"
# lissa.blogs # => []
#
# lissa.add_blog Date.parse("2010-05-28") , "Sailor Mars is my favourite"
# lissa.blogs # => [ blog1 ]
#
# blog1 = lissa.blogs.first
# blog1.user # => lissa
#
# Blog.new Date.parse("2007-01-02"), lissa, "Going dancing!" # we'll call this blog2
# Blog.new Date.parse("2006-01-02"), lissa, "For the last time, fuck facebook >.<" # we'll call this blog3
# Blog.new Date.parse("2010-01-02"), lissa, "Got a new job, cuz I'm pretty much the best ^_^" # we'll call this blog4
# lissa.blogs # => [ blog1 , blog4 , blog2 , blog3 ]
#
# blog5 = Blog.new Date.today, lissa, <<BLOG_ENTRY
# Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce orci nunc, porta non tristique eu, auctor tincidunt mauris.
# Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Etiam vitae nibh sapien. Curabitur
# eget eros bibendum justo congue auctor non at turpis. Aenean feugiat vestibulum mi ac pulvinar. Fusce ut felis justo, in
# porta lectus.
# BLOG_ENTRY
#
# blog5.get_summary # => "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce orci"
# blog5.entry # => QTSort 2010-05-28
# Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce orci nunc, porta non tristique eu, auctor tincidunt mauris.
# Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Etiam vitae nibh sapien. Curabitur
# eget eros bibendum justo congue auctor non at turpis. Aenean feugiat vestibulum mi ac pulvinar. Fusce ut felis justo, in
# porta lectus.
#
# blog5.date = Date.parse('2009-01-02')
# blog5.user = User.new 'disloyalist.party'
# blog5.text = "From the school of revision, Comes the standard inventor's rule, Books of subtle notation Compositions, all original\n" \
# "I am a pioneer, synthetic engineer, On the brink of discovery, On the eve of historic light, Worked in secret for decades,\n" \
# "All my labor will be lost with time"
#
# blog5.entry # => disloyalist.party 2009-01-02
# From the school of revision, Comes the standard inventor's rule, Books of subtle notation Compositions, all original
# I am a pioneer, synthetic engineer, On the brink of discovery, On the eve of historic light, Worked in secret for decades,
# All my labor will be lost with time
# date docs are at: http://ruby-doc.org/core/classes/Date.html
# don't spend too much time worrying about them :)
require 'date'
13_classes_iterating_sorting.rb
# DO NOT STRUGGLE ON THIS PROBLEM FOR MORE THAN 30 MINUTES!!
# Define a class called User that keeps track of a person's username
# it should receive the username when initialized
# it should have a method add_blog which accepts a date and text
# it should have a method blogs which returns an array of all blogs the user has written
# they should be in reverse chronological order (newest first)
#
# Define a class called Blog that could be used to store an entry for your web log.
# The class should have a getter and setter methods: text , date , user
# its initialize method should receive the date, user , and text
# have a method called summary that returns the first 10 words from the text (or the entire text if it is less than 10 words)
#
# Two blogs should be equal to each other if they have the same user, date, and text
# here is a partially filled out example of how to define the == operator:
# def ==(other)
# return self.date == other.date
# end
# ========== EXAMPLE ==========
#
# lissa = User.new 'QTSort'
# lissa.username # => "QTSort"
# lissa.blogs # => []
#
# lissa.add_blog Date.parse("2010-05-28") , "Sailor Mars is my favourite"
# lissa.blogs # => [ blog1 ]
#
# blog1 = lissa.blogs.first
# blog1.user # => lissa
#
# Blog.new Date.parse("2007-01-02"), lissa, "Going dancing!" # we'll call this blog2
# Blog.new Date.parse("2006-01-02"), lissa, "For the last time, fuck facebook >.<" # we'll call this blog3
# Blog.new Date.parse("2010-01-02"), lissa, "Got a new job, cuz I'm pretty much the best ^_^" # we'll call this blog4
# lissa.blogs # => [ blog1 , blog4 , blog2 , blog3 ]
#
# blog5 = Blog.new Date.today, lissa, <<BLOG_ENTRY
# Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce orci nunc, porta non tristique eu, auctor tincidunt mauris.
# Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Etiam vitae nibh sapien. Curabitur
# eget eros bibendum justo congue auctor non at turpis. Aenean feugiat vestibulum mi ac pulvinar. Fusce ut felis justo, in
# porta lectus.
# BLOG_ENTRY
#
# blog5.get_summary # => "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce orci"
# blog5.entry # => QTSort 2010-05-28
# Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce orci nunc, porta non tristique eu, auctor tincidunt mauris.
# Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Etiam vitae nibh sapien. Curabitur
# eget eros bibendum justo congue auctor non at turpis. Aenean feugiat vestibulum mi ac pulvinar. Fusce ut felis justo, in
# porta lectus.
#
# blog5.date = Date.parse('2009-01-02')
# blog5.user = User.new 'disloyalist.party'
# blog5.text = "From the school of revision, Comes the standard inventor's rule, Books of subtle notation Compositions, all original\n" \
# "I am a pioneer, synthetic engineer, On the brink of discovery, On the eve of historic light, Worked in secret for decades,\n" \
# "All my labor will be lost with time"
#
# blog5.entry # => disloyalist.party 2009-01-02
# From the school of revision, Comes the standard inventor's rule, Books of subtle notation Compositions, all original
# I am a pioneer, synthetic engineer, On the brink of discovery, On the eve of historic light, Worked in secret for decades,
# All my labor will be lost with time
# date docs are at: http://ruby-doc.org/core/classes/Date.html
# don't spend too much time worrying about them :)
require 'date'
# A blog author. Holds a username and every Blog the user has written;
# +blogs+ is always kept in reverse chronological order (newest first).
class User
  attr_accessor :username, :blogs

  def initialize(username)
    self.username = username
    self.blogs = []
  end

  # Creates a Blog authored by this user, stores it (re-sorting newest-first),
  # and returns the new Blog.
  def add_blog(date, text)
    entry = Blog.new(date, self, text)
    blogs.push(entry)
    self.blogs = blogs.sort_by(&:date).reverse
    entry
  end
end
# A single web-log entry: a date, an author (responds to #username), and text.
class Blog
  attr_accessor :date, :user, :text

  def initialize(date, user, text)
    @date = date
    @user = user
    @text = text
  end

  # First ten words of the text (the whole text when it has fewer words).
  def summary
    text.split.first(10).join(' ')
  end

  # "username date" header followed by the full text on the next line(s).
  def entry
    format("%s %s\n%s", user.username, date, text)
  end

  # Two blogs are equal when date, user, and text all match.
  def ==(other)
    [date, user, text] == [other.date, other.user, other.text]
  end
end
|
# Extends ActiveRecord association macros so polymorphic "type" columns can be
# stored as integers (via a class-name <-> integer mapping) instead of strings.
module PolymorphicIntegerType
  module Extensions
    module ClassMethods
      # Overrides AR's belongs_to. When the association is polymorphic and an
      # integer mapping is requested (integer_type: true, or an inline Hash
      # passed as :polymorphic), installs reader/writer/validation glue that
      # translates between integer column values and class-name strings.
      def belongs_to(name, scope = nil, options = {})
        options = scope if scope.kind_of? Hash
        # :integer_type must be stripped before calling super - AR rejects it.
        integer_type = options.delete :integer_type
        super
        if options[:polymorphic] && (integer_type || options[:polymorphic].is_a?(Hash))
          mapping =
            case integer_type
            when true then PolymorphicIntegerType::Mapping[name]
            when nil then options[:polymorphic]
            else
              raise ArgumentError, "Unknown integer_type value: #{integer_type.inspect}"
            end.dup
          foreign_type = reflections[name.to_s].foreign_type
          _polymorphic_foreign_types << foreign_type
          # Required way to dynamically define a class method on the model
          singleton_class.__send__(:define_method, "#{foreign_type}_mapping") do
            mapping
          end
          # Reader: translate the stored integer back into a class-name string.
          define_method foreign_type do
            t = super()
            mapping[t]
          end
          # Writer: accepts a Class, a class-name String, or a raw mapping key.
          define_method "#{foreign_type}=" do |klass|
            enum = mapping.key(klass.to_s)
            enum ||= mapping.key(klass.base_class.to_s) if klass.kind_of?(Class) && klass <= ActiveRecord::Base
            enum ||= klass if klass != NilClass
            super(enum)
          end
          define_method "#{name}=" do |record|
            super(record)
            send("#{foreign_type}=", record.class)
          end
          # Reject type values that have no entry in the mapping.
          validate do
            t = send(foreign_type)
            unless t.nil? || mapping.values.include?(t)
              errors.add(foreign_type, "is not included in the mapping")
            end
          end
        end
      end

      # Shared by has_many/has_one: strips the integer-type options and, when a
      # mapping exists for the :as polymorphic name, rewrites the association
      # to scope on the integer type column instead of AR's string type column.
      def remove_type_and_establish_mapping(name, options, scope)
        integer_type = options.delete :integer_type
        polymorphic_type_mapping = retrieve_polymorphic_type_mapping(
          polymorphic_type: options[:as],
          class_name: options[:class_name] || name.to_s.classify
        )
        if options[:as] && (polymorphic_type_mapping || integer_type)
          poly_type = options.delete(:as)
          polymorphic_type_mapping ||= PolymorphicIntegerType::Mapping[poly_type]
          if polymorphic_type_mapping.nil?
            raise "Polymorphic type mapping missing for #{poly_type.inspect}"
          end
          klass_mapping = (polymorphic_type_mapping || {}).key(sti_name)
          if klass_mapping.nil?
            raise "Class not found for #{sti_name.inspect} in polymorphic type mapping: #{polymorphic_type_mapping}"
          end
          options[:foreign_key] ||= "#{poly_type}_id"
          foreign_type = options.delete(:foreign_type) || "#{poly_type}_type"
          options[:scope] ||= -> {
            condition = where(foreign_type => klass_mapping.to_i)
            condition = instance_exec(&scope).merge(condition) if scope.is_a?(Proc)
            condition
          }
        else
          options[:scope] ||= scope
        end
      end

      # Looks up the "<type>_type_mapping" class method on the target class,
      # when that class exists and defines one. Returns nil otherwise.
      # safe_constantize replaces the old `constantize rescue NameError` dance.
      def retrieve_polymorphic_type_mapping(polymorphic_type:, class_name:)
        return if polymorphic_type.nil?
        belongs_to_class = class_name.safe_constantize
        method_name = "#{polymorphic_type}_type_mapping"
        if belongs_to_class && belongs_to_class.respond_to?(method_name)
          belongs_to_class.public_send(method_name)
        end
      end

      def has_many(name, scope = nil, options = {}, &extension)
        if scope.kind_of? Hash
          options = scope
          scope = nil
        end
        remove_type_and_establish_mapping(name, options, scope)
        super(name, options.delete(:scope), options, &extension)
      end

      def has_one(name, scope = nil, options = {})
        if scope.kind_of? Hash
          options = scope
          scope = nil
        end
        remove_type_and_establish_mapping(name, options, scope)
        super(name, options.delete(:scope), options)
      end
    end

    def self.included(base)
      base.class_eval {
        cattr_accessor :_polymorphic_foreign_types
        self._polymorphic_foreign_types = []
      }
      base.extend(ClassMethods)
    end

    def _polymorphic_foreign_types
      self.class._polymorphic_foreign_types
    end

    # Route attribute-style reads of mapped type columns through the
    # translating reader defined in belongs_to.
    def [](value)
      if _polymorphic_foreign_types.include?(value)
        send(value)
      else
        super(value)
      end
    end

    # Route attribute-style writes of mapped type columns through the
    # translating writer defined in belongs_to.
    def []=(attr_name, value)
      if _polymorphic_foreign_types.include?(attr_name)
        send("#{attr_name}=", value)
      else
        super(attr_name, value)
      end
    end
  end
end
Use safe_constantize instead
Plus, use `nil?` to be idiomatic.
# Extends ActiveRecord association macros so polymorphic "type" columns can be
# stored as integers (via a class-name <-> integer mapping) instead of strings.
module PolymorphicIntegerType
module Extensions
module ClassMethods
# Overrides AR's belongs_to. When the association is polymorphic and an
# integer mapping is requested (integer_type: true, or an inline Hash passed
# as :polymorphic), installs reader/writer/validation glue translating
# between integer column values and class-name strings.
def belongs_to(name, scope = nil, options = {})
options = scope if scope.kind_of? Hash
# :integer_type must be stripped before calling super - AR rejects it.
integer_type = options.delete :integer_type
super
if options[:polymorphic] && (integer_type || options[:polymorphic].is_a?(Hash))
mapping =
case integer_type
when true then PolymorphicIntegerType::Mapping[name]
when nil then options[:polymorphic]
else
raise ArgumentError, "Unknown integer_type value: #{integer_type.inspect}"
end.dup
foreign_type = reflections[name.to_s].foreign_type
_polymorphic_foreign_types << foreign_type
# Required way to dynamically define a class method on the model
singleton_class.__send__(:define_method, "#{foreign_type}_mapping") do
mapping
end
# Reader: translate the stored integer back into a class-name string.
define_method foreign_type do
t = super()
mapping[t]
end
# Writer: accepts a Class, a class-name String, or a raw mapping key.
define_method "#{foreign_type}=" do |klass|
enum = mapping.key(klass.to_s)
enum ||= mapping.key(klass.base_class.to_s) if klass.kind_of?(Class) && klass <= ActiveRecord::Base
enum ||= klass if klass != NilClass
super(enum)
end
define_method "#{name}=" do |record|
super(record)
send("#{foreign_type}=", record.class)
end
# Reject type values that have no entry in the mapping.
validate do
t = send(foreign_type)
unless t.nil? || mapping.values.include?(t)
errors.add(foreign_type, "is not included in the mapping")
end
end
end
end
# Shared by has_many/has_one: strips the integer-type options and, when a
# mapping exists for the :as polymorphic name, rewrites the association to
# scope on the integer type column instead of AR's string type column.
def remove_type_and_establish_mapping(name, options, scope)
integer_type = options.delete :integer_type
polymorphic_type_mapping = retrieve_polymorphic_type_mapping(
polymorphic_type: options[:as],
class_name: options[:class_name] || name.to_s.classify
)
if options[:as] && (polymorphic_type_mapping || integer_type)
poly_type = options.delete(:as)
polymorphic_type_mapping ||= PolymorphicIntegerType::Mapping[poly_type]
if polymorphic_type_mapping == nil
raise "Polymorphic type mapping missing for #{poly_type.inspect}"
end
klass_mapping = (polymorphic_type_mapping || {}).key(sti_name)
if klass_mapping == nil
raise "Class not found for #{sti_name.inspect} in polymorphic type mapping: #{polymorphic_type_mapping}"
end
options[:foreign_key] ||= "#{poly_type}_id"
foreign_type = options.delete(:foreign_type) || "#{poly_type}_type"
options[:scope] ||= -> {
condition = where(foreign_type => klass_mapping.to_i)
condition = instance_exec(&scope).merge(condition) if scope.is_a?(Proc)
condition
}
else
options[:scope] ||= scope
end
end
# Looks up the "<type>_type_mapping" class method on the target class, when
# that class exists (safe_constantize) and defines one. Returns nil otherwise.
def retrieve_polymorphic_type_mapping(polymorphic_type:, class_name:)
return if polymorphic_type.nil?
belongs_to_class = class_name.safe_constantize
method_name = "#{polymorphic_type}_type_mapping"
if belongs_to_class && belongs_to_class.respond_to?(method_name)
belongs_to_class.public_send(method_name)
end
end
def has_many(name, scope = nil, options = {}, &extension)
if scope.kind_of? Hash
options = scope
scope = nil
end
remove_type_and_establish_mapping(name, options, scope)
super(name, options.delete(:scope), options, &extension)
end
def has_one(name, scope = nil, options = {})
if scope.kind_of? Hash
options = scope
scope = nil
end
remove_type_and_establish_mapping(name, options, scope)
super(name, options.delete(:scope), options)
end
end
def self.included(base)
base.class_eval {
cattr_accessor :_polymorphic_foreign_types
self._polymorphic_foreign_types = []
}
base.extend(ClassMethods)
end
def _polymorphic_foreign_types
self.class._polymorphic_foreign_types
end
# Route attribute-style reads of mapped type columns through the reader
# defined in belongs_to.
def [](value)
if _polymorphic_foreign_types.include?(value)
send(value)
else
super(value)
end
end
# Route attribute-style writes of mapped type columns through the writer
# defined in belongs_to.
def []=(attr_name, value)
if _polymorphic_foreign_types.include?(attr_name)
send("#{attr_name}=", value)
else
super(attr_name, value)
end
end
end
end
|
module Prpr
  module Action
    module MentionComment
      # Relays GitHub comment @mentions to the configured chat room,
      # translating GitHub usernames to chat handles via MEMBERS.md.
      class Mention < Base
        # Broadcast the comment when it contains at least one @mention.
        def call
          if mention?
            Publisher::Adapter::Base.broadcast message
          end
        end

        private

        def message
          Prpr::Publisher::Message.new(body: body, from: from, room: room)
        end

        # True(ish) when the comment body contains an @username token.
        def mention?
          comment.body =~ /@[a-zA-Z0-9_]+/
        end

        # Message body: translated comment text followed by its URL.
        def body
          <<-END
#{comment_body}
#{comment.html_url}
          END
        end

        # Comment text with each @mention replaced by the mapped handle.
        def comment_body
          comment.body.gsub(/@[a-zA-Z0-9_]+/) { |old|
            members[old] || old
          }
        end

        def comment
          # FIX: was `event.coment` (typo), which raised NoMethodError.
          event.comment
        end

        def from
          event.sender
        end

        def room
          env[:mention_comment_room]
        end

        # Parses MEMBERS.md lines of the form " * github_name: chat_name".
        # FIX: non-matching lines used to map to nil, making Array#to_h raise
        # TypeError and the rescue collapse the whole mapping to {}. Compact
        # the nils away so unrelated lines are simply skipped.
        def members
          @members ||= config.read(name).lines.map { |line|
            if line =~ / \* (\S+):\s*(\S+)/
              [$1, $2]
            end
          }.compact.to_h
        rescue
          # Best-effort: no translation when MEMBERS.md is missing/unreadable.
          @members ||= {}
        end

        def config
          @config ||= Config::Github.new(repository_name)
        end

        def env
          Config::Env.default
        end

        # Configurable members-file name; defaults to MEMBERS.md.
        def name
          env[:mention_comment_members] || 'MEMBERS.md'
        end

        def repository_name
          event.repository.full_name
        end
      end
    end
  end
end
typo fix
module Prpr
  module Action
    module MentionComment
      # Relays GitHub comment @mentions to the configured chat room,
      # translating GitHub usernames to chat handles via MEMBERS.md.
      class Mention < Base
        # Broadcast the comment when it contains at least one @mention.
        def call
          if mention?
            Publisher::Adapter::Base.broadcast message
          end
        end

        private

        def message
          Prpr::Publisher::Message.new(body: body, from: from, room: room)
        end

        # True(ish) when the comment body contains an @username token.
        def mention?
          comment.body =~ /@[a-zA-Z0-9_]+/
        end

        # Message body: translated comment text followed by its URL.
        def body
          <<-END
#{comment_body}
#{comment.html_url}
          END
        end

        # Comment text with each @mention replaced by the mapped handle.
        def comment_body
          comment.body.gsub(/@[a-zA-Z0-9_]+/) { |old|
            members[old] || old
          }
        end

        def comment
          event.comment
        end

        def from
          event.sender
        end

        def room
          env[:mention_comment_room]
        end

        # Parses MEMBERS.md lines of the form " * github_name: chat_name".
        # FIX: non-matching lines used to map to nil, making Array#to_h raise
        # TypeError and the rescue collapse the whole mapping to {}. Compact
        # the nils away so unrelated lines are simply skipped.
        def members
          @members ||= config.read(name).lines.map { |line|
            if line =~ / \* (\S+):\s*(\S+)/
              [$1, $2]
            end
          }.compact.to_h
        rescue
          # Best-effort: no translation when MEMBERS.md is missing/unreadable.
          @members ||= {}
        end

        def config
          @config ||= Config::Github.new(repository_name)
        end

        def env
          Config::Env.default
        end

        # Configurable members-file name; defaults to MEMBERS.md.
        def name
          env[:mention_comment_members] || 'MEMBERS.md'
        end

        def repository_name
          event.repository.full_name
        end
      end
    end
  end
end
|
require 'puppet/provider/exec/powershell'
require 'json'
require 'pry'
# Puppet provider for the iis_site type: manages IIS websites by driving the
# WebAdministration PowerShell module through powershell.exe.
Puppet::Type.type(:iis_site).provide(:powershell) do
  # Simple site properties that map one-to-one onto IIS item properties.
  def self.iisnames
    {
      :name => 'name',
      :path => 'physicalPath',
      :app_pool => 'applicationPool',
    }
  end

  # Locate powershell.exe, preferring the native binary (sysnative covers a
  # 32-bit Ruby on 64-bit Windows). File.exist? replaces the deprecated
  # File.exists? (removed in Ruby 3.2).
  commands :powershell =>
    if File.exist?("#{ENV['SYSTEMROOT']}\\sysnative\\WindowsPowershell\\v1.0\\powershell.exe")
      "#{ENV['SYSTEMROOT']}\\sysnative\\WindowsPowershell\\v1.0\\powershell.exe"
    elsif File.exist?("#{ENV['SYSTEMROOT']}\\system32\\WindowsPowershell\\v1.0\\powershell.exe")
      "#{ENV['SYSTEMROOT']}\\system32\\WindowsPowershell\\v1.0\\powershell.exe"
    else
      'powershell.exe'
    end

  # Write +command+ to a temp script and pipe it into powershell.exe via cmd,
  # returning stdout. +check+ is kept for interface compatibility (unused).
  def self.run(command, check = false)
    write_script(command) do |script_path|
      psh = "cmd.exe /c \"\"#{native_path(command(:powershell))}\" #{args} -Command - < \"#{script_path}\"\""
      return %x(#{psh})
    end
  end

  # Enumerate existing IIS sites. ConvertTo-JSON returns a Hash for a single
  # site and an Array for several, with different binding formats, so both
  # shapes are handled explicitly.
  def self.instances
    inst_cmd = 'Import-Module WebAdministration; Get-Website | Select Name, PhysicalPath, ApplicationPool, HostHeader, Bindings | ConvertTo-JSON'
    site_json = JSON.parse(run(inst_cmd))
    # The command returns a Hash if there is 1 site
    if site_json.is_a?(Hash)
      [site_json].collect do |site|
        site_hash = {}
        site_hash[:name] = site['name']
        site_hash[:path] = site['physicalPath']
        site_hash[:app_pool] = site['applicationPool']
        # bindingInformation is "ip:port:host_header".
        bindings = site['bindings']['Collection'].first['bindingInformation']
        site_hash[:protocol] = site['bindings']['Collection'].first['protocol']
        site_hash[:ip] = bindings.split(':')[0]
        site_hash[:port] = bindings.split(':')[1]
        site_hash[:host_header] = bindings.split(':')[2]
        # NOTE(review): ssl is reported :true when sslFlags == 0 — confirm the
        # intended flag semantics with the iis_site type.
        if site['bindings']['Collection'].first['sslFlags'] == 0
          site_hash[:ssl] = :true
        else
          site_hash[:ssl] = :false
        end
        site_hash[:ensure] = :present
        new(site_hash)
      end
    # The command returns an Array if there is >1 site. WHY IS THIS DIFFERENT WINDOWS?
    elsif site_json.is_a?(Array)
      site_json.each.collect do |site|
        site_hash = {}
        site_hash[:name] = site['name']
        site_hash[:path] = site['physicalPath']
        site_hash[:app_pool] = site['applicationPool']
        # Also the format of the bindings is different here. WHY WINDOWS?
        bindings = site['bindings']['Collection'].split(':')
        site_hash[:protocol] = bindings[0].split[0]
        site_hash[:ip] = bindings[0].split[1]
        site_hash[:port] = bindings[1]
        site_hash[:host_header] = bindings[2]
        if bindings.last.split('=')[1] == '0'
          site_hash[:ssl] = :true
        else
          site_hash[:ssl] = :false
        end
        site_hash[:ensure] = :present
        new(site_hash)
      end
    end
  end

  # Pair each managed resource with its discovered provider instance.
  def self.prefetch(resources)
    sites = instances
    resources.keys.each do |site|
      if provider = sites.find{ |s| s.name == site }
        resources[site].provider = provider
      end
    end
  end

  def exists?
    @property_hash[:ensure] == :present
  end

  mk_resource_methods

  # Create the site with New-Website and sync the property hash.
  def create
    create_switches = [
      "-Name \"#{@resource[:name]}\"",
      "-Port #{@resource[:port]} -IP #{@resource[:ip]}",
      "-HostHeader \"#{@resource[:host_header]}\"",
      "-PhysicalPath \"#{@resource[:path]}\"",
      "-ApplicationPool \"#{@resource[:app_pool]}\"",
      "-Ssl:$#{@resource[:ssl]}",
      '-Force'
    ]
    inst_cmd = "Import-Module WebAdministration; New-Website #{create_switches.join(' ')}"
    resp = Puppet::Type::Iis_site::ProviderPowershell.run(inst_cmd)
    debug resp if resp.length > 0
    @resource.original_parameters.each_key do |k|
      @property_hash[k] = @resource[k]
    end
    # Creation succeeded when the resource now reports as present.
    exists?
  end

  def destroy
    inst_cmd = "Import-Module WebAdministration; Remove-Website -Name \"#{@property_hash[:name]}\""
    resp = Puppet::Type::Iis_site::ProviderPowershell.run(inst_cmd)
    debug resp if resp.length > 0
    @property_hash.clear
    # Destruction succeeded when the resource no longer reports as present.
    !exists?
  end

  # Generate a simple setter per 1:1 property in iisnames.
  iisnames.each do |property,iisname|
    next if property == :ensure
    define_method "#{property.to_s}=" do |value|
      inst_cmd = "Import-Module WebAdministration; Set-ItemProperty -Path \"IIS:\\\\Sites\\#{@property_hash[:name]}\" -Name \"#{iisname}\" -Value \"#{value}\""
      resp = Puppet::Type::Iis_site::ProviderPowershell.run(inst_cmd)
      debug resp if resp.length > 0
    end
  end

  # These properties make up the IIS binding and have to be submitted together.
  # FIX: host_header was missing from this list, so binding updates dropped it.
  binders = [
    'protocol',
    'ip',
    'port',
    'host_header',
    'ssl'
  ]
  binders.each do |property|
    define_method "#{property}=" do |value|
      bhash = {}
      binders.each do |b|
        if b == property
          bhash[b] = value
        else
          bhash[b] = @property_hash[b.to_sym]
        end
      end
      inst_cmd = "Import-Module WebAdministration; Set-ItemProperty -Path \"IIS:\\\\Sites\\#{@property_hash[:name]}\" -Name Bindings -Value @{protocol=\"#{bhash['protocol']}\";bindingInformation=\"#{bhash['ip']}:#{bhash['port']}"
      inst_cmd += ":#{bhash['host_header']}" if bhash['host_header']
      # FIX: close the bindingInformation quote BEFORE appending sslFlags so
      # the flag becomes a separate hashtable entry, not part of the binding
      # string as in the previous "...; sslFlags=0\"}" concatenation.
      inst_cmd += '"'
      # Append sslFlags to args if enabled
      inst_cmd += '; sslFlags=0' if bhash['ssl']
      inst_cmd += '}'
      resp = Puppet::Type::Iis_site::ProviderPowershell.run(inst_cmd)
      debug resp if resp.length > 0
      @property_hash[property.to_sym] = value
    end
  end

  private

  # NOTE: `private` does not affect `def self.` methods in Ruby; these remain
  # callable on the provider class. Tempfile is assumed to be loaded by the
  # Puppet runtime — TODO confirm, otherwise require 'tempfile' is needed.
  def self.write_script(content, &block)
    Tempfile.open(['puppet-powershell', '.ps1']) do |file|
      file.write(content)
      file.flush
      yield native_path(file.path)
    end
  end

  # Convert forward slashes to Windows backslashes.
  def self.native_path(path)
    path.gsub(File::SEPARATOR, File::ALT_SEPARATOR)
  end

  def self.args
    '-NoProfile -NonInteractive -NoLogo -ExecutionPolicy Bypass'
  end
end
Fixed quotes to separate sslFlag from host_header in string
require 'puppet/provider/exec/powershell'
require 'json'
require 'pry'
# Puppet provider for the iis_site type: manages IIS websites by driving the
# WebAdministration PowerShell module through powershell.exe.
Puppet::Type.type(:iis_site).provide(:powershell) do
  # Simple site properties that map one-to-one onto IIS item properties.
  def self.iisnames
    {
      :name => 'name',
      :path => 'physicalPath',
      :app_pool => 'applicationPool',
    }
  end

  # Locate powershell.exe, preferring the native binary (sysnative covers a
  # 32-bit Ruby on 64-bit Windows). File.exist? replaces the deprecated
  # File.exists? (removed in Ruby 3.2).
  commands :powershell =>
    if File.exist?("#{ENV['SYSTEMROOT']}\\sysnative\\WindowsPowershell\\v1.0\\powershell.exe")
      "#{ENV['SYSTEMROOT']}\\sysnative\\WindowsPowershell\\v1.0\\powershell.exe"
    elsif File.exist?("#{ENV['SYSTEMROOT']}\\system32\\WindowsPowershell\\v1.0\\powershell.exe")
      "#{ENV['SYSTEMROOT']}\\system32\\WindowsPowershell\\v1.0\\powershell.exe"
    else
      'powershell.exe'
    end

  # Write +command+ to a temp script and pipe it into powershell.exe via cmd,
  # returning stdout. +check+ is kept for interface compatibility (unused).
  def self.run(command, check = false)
    write_script(command) do |script_path|
      psh = "cmd.exe /c \"\"#{native_path(command(:powershell))}\" #{args} -Command - < \"#{script_path}\"\""
      return %x(#{psh})
    end
  end

  # Enumerate existing IIS sites. ConvertTo-JSON returns a Hash for a single
  # site and an Array for several, with different binding formats, so both
  # shapes are handled explicitly.
  def self.instances
    inst_cmd = 'Import-Module WebAdministration; Get-Website | Select Name, PhysicalPath, ApplicationPool, HostHeader, Bindings | ConvertTo-JSON'
    site_json = JSON.parse(run(inst_cmd))
    # The command returns a Hash if there is 1 site
    if site_json.is_a?(Hash)
      [site_json].collect do |site|
        site_hash = {}
        site_hash[:name] = site['name']
        site_hash[:path] = site['physicalPath']
        site_hash[:app_pool] = site['applicationPool']
        # bindingInformation is "ip:port:host_header".
        bindings = site['bindings']['Collection'].first['bindingInformation']
        site_hash[:protocol] = site['bindings']['Collection'].first['protocol']
        site_hash[:ip] = bindings.split(':')[0]
        site_hash[:port] = bindings.split(':')[1]
        site_hash[:host_header] = bindings.split(':')[2]
        # NOTE(review): ssl is reported :true when sslFlags == 0 — confirm the
        # intended flag semantics with the iis_site type.
        if site['bindings']['Collection'].first['sslFlags'] == 0
          site_hash[:ssl] = :true
        else
          site_hash[:ssl] = :false
        end
        site_hash[:ensure] = :present
        new(site_hash)
      end
    # The command returns an Array if there is >1 site. WHY IS THIS DIFFERENT WINDOWS?
    elsif site_json.is_a?(Array)
      site_json.each.collect do |site|
        site_hash = {}
        site_hash[:name] = site['name']
        site_hash[:path] = site['physicalPath']
        site_hash[:app_pool] = site['applicationPool']
        # Also the format of the bindings is different here. WHY WINDOWS?
        bindings = site['bindings']['Collection'].split(':')
        site_hash[:protocol] = bindings[0].split[0]
        site_hash[:ip] = bindings[0].split[1]
        site_hash[:port] = bindings[1]
        site_hash[:host_header] = bindings[2]
        if bindings.last.split('=')[1] == '0'
          site_hash[:ssl] = :true
        else
          site_hash[:ssl] = :false
        end
        site_hash[:ensure] = :present
        new(site_hash)
      end
    end
  end

  # Pair each managed resource with its discovered provider instance.
  def self.prefetch(resources)
    sites = instances
    resources.keys.each do |site|
      if provider = sites.find{ |s| s.name == site }
        resources[site].provider = provider
      end
    end
  end

  def exists?
    @property_hash[:ensure] == :present
  end

  mk_resource_methods

  # Create the site with New-Website and sync the property hash.
  def create
    create_switches = [
      "-Name \"#{@resource[:name]}\"",
      "-Port #{@resource[:port]} -IP #{@resource[:ip]}",
      "-HostHeader \"#{@resource[:host_header]}\"",
      "-PhysicalPath \"#{@resource[:path]}\"",
      "-ApplicationPool \"#{@resource[:app_pool]}\"",
      "-Ssl:$#{@resource[:ssl]}",
      '-Force'
    ]
    inst_cmd = "Import-Module WebAdministration; New-Website #{create_switches.join(' ')}"
    resp = Puppet::Type::Iis_site::ProviderPowershell.run(inst_cmd)
    debug resp if resp.length > 0
    @resource.original_parameters.each_key do |k|
      @property_hash[k] = @resource[k]
    end
    # Creation succeeded when the resource now reports as present.
    exists?
  end

  def destroy
    inst_cmd = "Import-Module WebAdministration; Remove-Website -Name \"#{@property_hash[:name]}\""
    resp = Puppet::Type::Iis_site::ProviderPowershell.run(inst_cmd)
    debug resp if resp.length > 0
    @property_hash.clear
    # Destruction succeeded when the resource no longer reports as present.
    !exists?
  end

  # Generate a simple setter per 1:1 property in iisnames.
  iisnames.each do |property,iisname|
    next if property == :ensure
    define_method "#{property.to_s}=" do |value|
      inst_cmd = "Import-Module WebAdministration; Set-ItemProperty -Path \"IIS:\\\\Sites\\#{@property_hash[:name]}\" -Name \"#{iisname}\" -Value \"#{value}\""
      resp = Puppet::Type::Iis_site::ProviderPowershell.run(inst_cmd)
      debug resp if resp.length > 0
    end
  end

  # These properties make up the IIS binding and have to be submitted together.
  binders = [
    'protocol',
    'ip',
    'port',
    'host_header',
    'ssl'
  ]
  binders.each do |property|
    define_method "#{property}=" do |value|
      bhash = {}
      binders.each do |b|
        if b == property
          bhash[b] = value
        else
          bhash[b] = @property_hash[b.to_sym]
        end
      end
      inst_cmd = "Import-Module WebAdministration; Set-ItemProperty -Path \"IIS:\\\\Sites\\#{@property_hash[:name]}\" -Name Bindings -Value @{protocol=\"#{bhash['protocol']}\";bindingInformation=\"#{bhash['ip']}:#{bhash['port']}"
      # FIX: removed a leftover `binding.pry` debugger breakpoint here, which
      # would halt every binding update in production.
      inst_cmd += ":#{bhash['host_header']}" if bhash['host_header']
      inst_cmd += '"'
      # Append sslFlags to args if enabled
      inst_cmd += '; sslFlags=0' if bhash['ssl']
      inst_cmd += '}'
      resp = Puppet::Type::Iis_site::ProviderPowershell.run(inst_cmd)
      debug resp if resp.length > 0
      @property_hash[property.to_sym] = value
    end
  end

  private

  # NOTE: `private` does not affect `def self.` methods in Ruby; these remain
  # callable on the provider class. Tempfile is assumed to be loaded by the
  # Puppet runtime — TODO confirm, otherwise require 'tempfile' is needed.
  def self.write_script(content, &block)
    Tempfile.open(['puppet-powershell', '.ps1']) do |file|
      file.write(content)
      file.flush
      yield native_path(file.path)
    end
  end

  # Convert forward slashes to Windows backslashes.
  def self.native_path(path)
    path.gsub(File::SEPARATOR, File::ALT_SEPARATOR)
  end

  def self.args
    '-NoProfile -NonInteractive -NoLogo -ExecutionPolicy Bypass'
  end
end
|
module Riot
module ActiveRecord
module AssertionMacros
# An ActiveRecord assertion that expects to fail when a given attribute is validated after a nil value
# is provided to it.
#
#   context "a User" do
#     setup { User.new }
#     topic.validates_presence_of(:name)
#   end
def validates_presence_of(attribute)
  failure_message = "expected to validate presence of #{attribute.inspect}"
  error_from_writing_value(actual, attribute, nil) || fail(failure_message)
end
# An ActiveRecord assertion that expects to pass with a given value or set of values for a given
# attribute.
#
#   context "a User" do
#     setup { User.new }
#     topic.allows_values_for :email, "a@b.cd"
#     topic.allows_values_for :email, "a@b.cd", "e@f.gh"
#   end
def allows_values_for(attribute, *values)
  # Collect every value the model rejects; the assertion fails if any exist.
  rejected = values.select { |value| error_from_writing_value(actual, attribute, value) }
  unless rejected.empty?
    fail("expected #{attribute.inspect} to allow value(s) #{rejected.inspect}")
  end
end
# An ActiveRecord assertion that expects to fail with a given value or set of values for a given
# attribute.
#
#   context "a User" do
#     setup { User.new }
#     topic.does_not_allow_values_for :email, "a"
#     topic.does_not_allow_values_for :email, "a@b", "e f@g.h"
#   end
def does_not_allow_values_for(attribute, *values)
  # Collect every value the model accepts; the assertion fails if any exist.
  accepted = values.reject { |value| error_from_writing_value(actual, attribute, value) }
  unless accepted.empty?
    fail("expected #{attribute.inspect} not to allow value(s) #{accepted.inspect}")
  end
end
# An ActiveRecord assertion that expects to fail with an attribute is not valid for record because the
# value of the attribute is not unique. Requires the topic of the context to be a created record; one
# that returns false for a call to +new_record?+.
#
# context "a User" do
# setup { User.create(:email => "a@b.cde", ... ) }
# topic.validates_uniqueness_of :email
# end
def validates_uniqueness_of(attribute)
actual_record = actual
if actual_record.new_record?
fail("topic is not a new record when testing uniqueness of #{attribute}")
else
copied_model = actual_record.class.new
actual_record.attributes.each do |dup_attribute, dup_value|
actual_record.write_attribute(dup_attribute, dup_value)
end
copied_value = actual_record.read_attribute(attribute)
msg = "expected to fail because #{attribute.inspect} is not unique"
error_from_writing_value(copied_model, attribute, copied_value) || fail(msg)
end
end
# An ActiveRecord assertion macro that expects to pass when a given attribute is defined as +has_many+
# association. Will fail if an association is not defined for the attribute and if the association is
# not +has_many.
#
# context "a Room" do
# setup { Room.new }
#
# topic.has_many(:doors)
# topic.has_many(:floors) # should probably fail given our current universe :)
# end
def has_many(attribute)
reflection = actual.class.reflect_on_association(attribute)
static_msg = "expected #{attribute.inspect} to be a has_many association, but was "
if reflection.nil?
fail(static_msg + "not")
elsif "has_many" != reflection.macro.to_s
fail(static_msg + "a #{reflection.macro} instead")
end
end
private
def error_from_writing_value(model, attribute, value)
model.write_attribute(attribute, value)
model.valid?
model.errors.on(attribute)
end
end # AssertionMacros
end # ActiveRecord
end # Riot
Riot::Assertion.instance_eval { include Riot::ActiveRecord::AssertionMacros }
I'm a big dummy
module Riot
  module ActiveRecord
    # Riot assertion macros for ActiveRecord models. Every macro reads the
    # context topic through +actual+ and signals problems via Riot's +fail+.
    module AssertionMacros
      # Expects validation to reject a nil value for +attribute+.
      #
      #   context "a User" do
      #     setup { User.new }
      #     topic.validates_presence_of(:name)
      #   end
      def validates_presence_of(attribute)
        failure_message = "expected to validate presence of #{attribute.inspect}"
        error_from_writing_value(actual, attribute, nil) || fail(failure_message)
      end

      # Expects every one of +values+ to pass validation for +attribute+.
      #
      #   context "a User" do
      #     setup { User.new }
      #     topic.allows_values_for :email, "a@b.cd"
      #     topic.allows_values_for :email, "a@b.cd", "e@f.gh"
      #   end
      def allows_values_for(attribute, *values)
        # Collect every candidate that produced a validation error.
        bad_values = values.select { |candidate| error_from_writing_value(actual, attribute, candidate) }
        msg = "expected #{attribute.inspect} to allow value(s) #{bad_values.inspect}"
        fail(msg) unless bad_values.empty?
      end

      # Expects every one of +values+ to be rejected by validation for +attribute+.
      #
      #   context "a User" do
      #     setup { User.new }
      #     topic.does_not_allow_values_for :email, "a"
      #     topic.does_not_allow_values_for :email, "a@b", "e f@g.h"
      #   end
      def does_not_allow_values_for(attribute, *values)
        # Collect every candidate that slipped through validation.
        good_values = values.reject { |candidate| error_from_writing_value(actual, attribute, candidate) }
        msg = "expected #{attribute.inspect} not to allow value(s) #{good_values.inspect}"
        fail(msg) unless good_values.empty?
      end

      # Expects a duplicate of the (already persisted) topic to fail
      # validation on +attribute+ because the value is taken. The topic must
      # answer false to +new_record?+.
      #
      #   context "a User" do
      #     setup { User.create(:email => "a@b.cde", ... ) }
      #     topic.validates_uniqueness_of :email
      #   end
      def validates_uniqueness_of(attribute)
        record = actual
        # Guard clause: a uniqueness check only makes sense against a saved row.
        return fail("topic is not a new record when testing uniqueness of #{attribute}") if record.new_record?
        duplicate = record.class.new
        record.attributes.each { |name, value| duplicate.write_attribute(name, value) }
        msg = "expected to fail because #{attribute.inspect} is not unique"
        error_from_writing_value(duplicate, attribute, record.read_attribute(attribute)) || fail(msg)
      end

      # Expects +attribute+ to be declared as a +has_many+ association.
      # Fails when no association exists or when it uses a different macro.
      #
      #   context "a Room" do
      #     setup { Room.new }
      #
      #     topic.has_many(:doors)
      #     topic.has_many(:floors) # should probably fail given our current universe :)
      #   end
      def has_many(attribute)
        reflection = actual.class.reflect_on_association(attribute)
        static_msg = "expected #{attribute.inspect} to be a has_many association, but was "
        return fail(static_msg + "not") if reflection.nil?
        fail(static_msg + "a #{reflection.macro} instead") unless reflection.macro.to_s == "has_many"
      end

      private

      # Write +value+ to +attribute+, trigger validation, and return the
      # error(s) recorded for the attribute (nil when the write was valid).
      def error_from_writing_value(model, attribute, value)
        model.write_attribute(attribute, value)
        model.valid?
        model.errors.on(attribute)
      end
    end # AssertionMacros
  end # ActiveRecord
end # Riot

Riot::Assertion.instance_eval { include Riot::ActiveRecord::AssertionMacros }
|
require 'test/unit'
$VERBOSE = true
if defined?(JRUBY_VERSION)
require 'java'
require "#{File.dirname File.dirname(__FILE__)}/lib/timestamp.jar"
else
require "#{File.dirname File.dirname(__FILE__)}/lib/timestamp"
end
# Exercises the Time.timestamp extension (native lib on MRI, timestamp.jar
# on JRuby): monotonicity of raw timestamps and agreement of the unix_*
# helpers with Time.now.
class Test_timestamp < Test::Unit::TestCase
  # Successive timestamps must never decrease.
  def test_timestamp
    stp = 1000.times.map{Time.timestamp}
    stp.inject do |m,t|
      assert( t >= m, "Retrograde timestamp #{m} to #{t}" )
      t
    end
  end

  # Time.unix_time should agree with Time.now.to_i to within one second.
  def test_unix_time
    int = Time.now.to_i
    stp = Time.unix_time
    d = (stp-int).abs
    # message fix: was the ungrammatical "Unexpectedly highly difference"
    assert( d <= 1, "Unexpectedly high difference #{d}" )
  end

  # Time.unix_timestamp should agree with Time.now.to_i to within one second.
  def test_unix_timestamp
    int = Time.now.to_i
    stp = Time.unix_timestamp
    d = (stp-int).abs
    assert( d <= 1, "Unexpectedly high difference #{d}" )
  end

  # Time.unix_microtime should agree with Time.now.to_f to within one second.
  def test_unix_microtime
    flt = Time.now.to_f
    stp = Time.unix_microtime
    d = (stp-flt).abs
    assert( d <= 1, "Unexpectedly high difference #{d}" )
  end
end
Messing with JRuby test
require 'test/unit'
$VERBOSE = true
if defined?(JRUBY_VERSION)
#require 'java'
require "#{File.dirname File.dirname(__FILE__)}/lib/timestamp.jar"
else
require "#{File.dirname File.dirname(__FILE__)}/lib/timestamp"
end
# Exercises the Time.timestamp extension (native lib on MRI, timestamp.jar
# on JRuby): monotonicity of raw timestamps and agreement of the unix_*
# helpers with Time.now.
# NOTE(review): the failure messages read "Unexpectedly highly difference";
# presumably "Unexpectedly high difference" was intended.
class Test_timestamp < Test::Unit::TestCase
  # Successive timestamps must never decrease.
  def test_timestamp
    stp = 1000.times.map{Time.timestamp}
    stp.inject do |m,t|
      assert( t >= m, "Retrograde timestamp #{m} to #{t}" )
      t
    end
  end
  # Time.unix_time should agree with Time.now.to_i to within one second.
  def test_unix_time
    int = Time.now.to_i
    stp = Time.unix_time
    d = (stp-int).abs
    assert( d <= 1, "Unexpectedly highly difference #{d}" )
  end
  # Time.unix_timestamp should agree with Time.now.to_i to within one second.
  def test_unix_timestamp
    int = Time.now.to_i
    stp = Time.unix_timestamp
    d = (stp-int).abs
    assert( d <= 1, "Unexpectedly highly difference #{d}" )
  end
  # Time.unix_microtime should agree with Time.now.to_f to within one second.
  def test_unix_microtime
    flt = Time.now.to_f
    stp = Time.unix_microtime
    d = (stp-flt).abs
    assert( d <= 1, "Unexpectedly highly difference #{d}" )
  end
end
|
require_relative '../loader'
require 'bundler'
Bundler.setup(:development)
require 'cucumber'
require 'minitest/autorun'
require 'minitest/spec'
require_relative 'spec_helper'
require 'lexer'
require 'parser'
# Exploratory specs for Lexer. Relies on spec_helper for the code fixture
# (test_code) and the code_print / paint output helpers.
describe Lexer do
  before do
    # Defines a helper on the example group: each call builds a fresh Lexer
    # over the shared fixture and stores it in @lexer.
    def lexer
      @lexer = Lexer.new(test_code)
    end
  end
  it "should check if the given code is an instance of string" do
    self.lexer.code.must_be_instance_of String
  end
  # Exploratory: prints the token stream rather than asserting on it.
  it "should tokenize and output the given code" do
    puts "Input code : #{ code_print(test_code, :ruby) }"
    tokens = lexer.tokenize
    puts "Parsed Tokens :\n #{ code_print(tokens.pretty_inspect, :ruby) }"
    puts "Number of Tokens #{tokens.length}"
  end
  # Consumes the stream one token at a time; tokenize is called first to
  # learn how many reads are needed.
  it "should read token sequentially using read_token method" do
    lexer_instance = lexer
    total_tokens = lexer_instance.tokenize.size
    total_tokens.times{|count| puts "Count : #{count} :\n #{ code_print(lexer_instance.read_token.pretty_inspect, :java) }" }
  end
  # NOTE(review): the printed label says "Lookbehind Token" but the call is
  # look_ahead — confirm which wording is intended.
  it "should be able to read the current token and look_ahead one token" do
    lexer_instance = lexer
    total_tokens = lexer_instance.tokenize.size
    total_tokens.times do |count|
      print "#{ paint("Count :#{count}", :red)}
      Current Token : #{ code_print(lexer_instance.read_token.pretty_inspect, :java) }
      Lookbehind Token : #{ code_print(lexer_instance.look_ahead.pretty_inspect, :java) }"
    end
  end
end
Skipped current tokenizer tests for the time being
require_relative '../loader'
require 'bundler'
Bundler.setup(:development)
require 'cucumber'
require 'minitest/autorun'
require 'minitest/spec'
require_relative 'spec_helper'
require 'lexer'
require 'parser'
# Exploratory specs for Lexer; most examples are temporarily disabled via
# +skip+ while the tokenizer is in flux. Relies on spec_helper for the
# simple_code fixture and the code_print / paint output helpers.
describe Lexer do
  before do
    # Defines a helper on the example group: each call builds a fresh Lexer
    # over the shared fixture and stores it in @lexer.
    def lexer
      @lexer = Lexer.new(simple_code)
    end
  end
  it "should check if the given code is an instance of string" do
    skip
    self.lexer.code.must_be_instance_of String
  end
  # Exploratory: prints the token stream rather than asserting on it.
  it "should tokenize and output the given code" do
    #puts "Input code : #{ code_print(test_code, :ruby) }"
    tokens = lexer.tokenize
    puts "Parsed Tokens :\n #{ code_print(tokens.pretty_inspect, :ruby) }"
    puts "Number of Tokens #{tokens.length}"
  end
  it "should read token sequentially using read_token method" do
    skip
    lexer_instance = lexer
    total_tokens = lexer_instance.tokenize.size
    total_tokens.times{|count| puts "Count : #{count} :\n #{ code_print(lexer_instance.read_token.pretty_inspect, :java) }" }
  end
  # NOTE(review): the printed label says "Lookbehind Token" but the call is
  # look_ahead — confirm which wording is intended.
  it "should be able to read the current token and look_ahead one token" do
    skip
    lexer_instance = lexer
    total_tokens = lexer_instance.tokenize.size
    total_tokens.times do |count|
      print "#{ paint("Count :#{count}", :red)}
      Current Token : #{ code_print(lexer_instance.read_token.pretty_inspect, :java) }
      Lookbehind Token : #{ code_print(lexer_instance.look_ahead.pretty_inspect, :java) }"
    end
  end
end
|
require_relative '../loader'
require 'bundler'
Bundler.setup(:development)
require 'cucumber'
require 'minitest/autorun'
require 'minitest/spec'
require_relative 'spec_helper'
require 'lexer'
require 'parser'
# Exploratory specs for Lexer; most examples are temporarily disabled via
# +skip+ while the tokenizer is in flux. Relies on spec_helper for the
# simple_code fixture and the code_print / paint output helpers.
describe Lexer do
  before do
    # Defines a helper on the example group: each call builds a fresh Lexer
    # over the shared fixture and stores it in @lexer.
    def lexer
      @lexer = Lexer.new(simple_code)
    end
  end
  it "should check if the given code is an instance of string" do
    skip
    self.lexer.code.must_be_instance_of String
  end
  # Exploratory: prints the token stream rather than asserting on it.
  it "should tokenize and output the given code" do
    #puts "Input code : #{ code_print(test_code, :ruby) }"
    tokens = lexer.tokenize
    puts "Parsed Tokens :\n #{ code_print(tokens.pretty_inspect, :ruby) }"
    puts "Number of Tokens #{tokens.length}"
  end
  it "should read token sequentially using read_token method" do
    skip
    lexer_instance = lexer
    total_tokens = lexer_instance.tokenize.size
    total_tokens.times{|count| puts "Count : #{count} :\n #{ code_print(lexer_instance.read_token.pretty_inspect, :java) }" }
  end
  # NOTE(review): the printed label says "Lookbehind Token" but the call is
  # look_ahead — confirm which wording is intended.
  it "should be able to read the current token and look_ahead one token" do
    skip
    lexer_instance = lexer
    total_tokens = lexer_instance.tokenize.size
    total_tokens.times do |count|
      print "#{ paint("Count :#{count}", :red)}
      Current Token : #{ code_print(lexer_instance.read_token.pretty_inspect, :java) }
      Lookbehind Token : #{ code_print(lexer_instance.look_ahead.pretty_inspect, :java) }"
    end
  end
end
Removed unnecessary commented code
require_relative '../loader'
require 'bundler'
Bundler.setup(:development)
require 'cucumber'
require 'minitest/autorun'
require 'minitest/spec'
require_relative 'spec_helper'
require 'lexer'
require 'parser'
# Exploratory specs for Lexer; the last two examples remain disabled via
# +skip+. Relies on spec_helper for the simple_code fixture and the
# code_print / paint output helpers.
describe Lexer do
  before do
    # Defines a helper on the example group: each call builds a fresh Lexer
    # over the shared fixture and stores it in @lexer.
    def lexer
      @lexer = Lexer.new(simple_code)
    end
  end
  it "should check if the given code is an instance of string" do
    self.lexer.code.must_be_instance_of String
  end
  # Exploratory: prints the token stream rather than asserting on it.
  it "should tokenize and output the given code" do
    tokens = lexer.tokenize
    puts "Parsed Tokens :\n #{code_print(tokens.pretty_inspect, :ruby)}"
    puts "Number of Tokens #{tokens.length}"
  end
  it "should read token sequentially using read_token method" do
    skip
    lexer_instance = lexer
    total_tokens = lexer_instance.tokenize.size
    total_tokens.times{|count| puts "Count : #{count} :\n #{ code_print(lexer_instance.read_token.pretty_inspect, :java) }" }
  end
  # NOTE(review): the printed label says "Lookbehind Token" but the call is
  # look_ahead — confirm which wording is intended.
  it "should be able to read the current token and look_ahead one token" do
    skip
    lexer_instance = lexer
    total_tokens = lexer_instance.tokenize.size
    total_tokens.times do |count|
      print "#{ paint("Count :#{count}", :red)}
      Current Token : #{ code_print(lexer_instance.read_token.pretty_inspect, :java) }
      Lookbehind Token : #{ code_print(lexer_instance.look_ahead.pretty_inspect, :java) }"
    end
  end
end
|
#!/usr/bin/env ruby
$LOAD_PATH << 'lib'
require 'rubygems'
require 'ruby-nessus'
require 'terminal-table'
require 'yaml'
require 'trollop'
report_root_dir = "/data/nessus-analyzer-data/"
# Tally every non-informational event across all hosts in +scan+ and print
# the +event_count+ most frequent ones as YAML, most common first.
def calculate_top_events(scan, event_count = 10)
  # plugin id => detail hash for that event
  unique_events = Hash.new{|h, k| h[k] = {}}
  scan.each_host do |host|
    # hosts without any events contribute nothing
    next if host.total_event_count.zero?
    host.each_event do |event|
      # informational findings are excluded from the top-N report
      next if event.informational?
      unless unique_events.has_key?(event.id)
        # First sighting: record the event details with a zero tally, then
        # let the shared increment below bring it to one.
        unique_events[event.id] = {:count => 0,
                                   :name => event.name,
                                   :severity => event.severity,
                                   :family => event.family,
                                   :synopsis => event.synopsis,
                                   :description=> event.description,
                                   :solution => event.solution,
                                   :cvss_base_score => event.cvss_base_score,
                                   :cve => event.cve,
                                   :cvss_vector => event.cvss_vector }
      end
      unique_events[event.id][:count] += 1
    end # host.each_event
  end # scan.each_host
  # highest count first, limited to event_count entries
  puts unique_events.sort_by { |_id, details| -details[:count] }.take(event_count).to_yaml
end
# Print two Terminal::Table summaries for +scan+: a per-scan severity/port
# table and an aggregate table (kept separate so it can later roll up
# several scans at once).
def calculate_statistics(scan)
  hosts_with_high_severity_count = 0
  # These were running accumulators when multiple scans were processed in
  # one pass; with a single scan they are direct copies of its counts.
  total_hosts = scan.host_count
  aggregate_high_severity_count = scan.high_severity_count
  output_table = Terminal::Table.new :title => scan.title,
    :style => {:width => 60 }
  output_table << ['High severity issues', scan.high_severity_count]
  output_table << ['Medium severity issues', scan.medium_severity_count]
  # typo fix: was 'Low severity isseus'
  output_table << ['Low severity issues', scan.low_severity_count]
  output_table << ['Open ports', scan.open_ports_count]
  output_table.align_column(1, :right)
  puts output_table
  scan.each_host do |host|
    hosts_with_high_severity_count += 1 if host.high_severity_count > 0
  end
  aggregate_statistics = Terminal::Table.new :title => "Aggregate statistics",
    :style => { :width => 60 }
  # typo fix: was 'Aggregate high severity issuse'
  aggregate_statistics << ['Aggregate high severity issues',
    aggregate_high_severity_count]
  aggregate_statistics << ['Hosts with high severity issues',
    hosts_with_high_severity_count]
  aggregate_statistics << ['Total hosts',
    total_hosts]
  # Guard against NaN in the output when the report contains no hosts.
  ratio = total_hosts.zero? ? 0.0 : (100 * hosts_with_high_severity_count.to_f / total_hosts)
  percent_hosts_high_severity = sprintf "%.2f%%", ratio
  aggregate_statistics << ['% hosts with a high severity issue',
    percent_hosts_high_severity]
  aggregate_statistics.align_column(1, :right)
  puts aggregate_statistics
end
# Script entry point: parse every .nessus report under report_root_dir and
# print the ten most common non-informational events per scan.
if __FILE__ == $PROGRAM_NAME
  Dir.glob(report_root_dir+'*.nessus') do |report_file|
    Nessus::Parse.new(report_file) do |scan|
      calculate_top_events(scan, 10)
      # calculate_statistics(scan)
    end
  end
end
Added Trollop option for calculating the top events
#!/usr/bin/env ruby
$LOAD_PATH << 'lib'
require 'rubygems'
require 'ruby-nessus'
require 'terminal-table'
require 'yaml'
require 'trollop'
report_root_dir = "/data/nessus-analyzer-data/"
# Tally every non-informational event across all hosts in +scan+ and print
# the +event_count+ most frequent ones as YAML, most common first.
def calculate_top_events(scan, event_count = 10)
  # We're going to store the event details as a hash of hashes
  unique_events = Hash.new{|h, k| h[k] = {}}
  scan.each_host do |host|
    # hosts without any events contribute nothing
    next if host.total_event_count.zero?
    host.each_event do |event|
      # at this point we don't care about informational
      next if event.informational?
      if unique_events.has_key?(event.id)
        unique_events[event.id][:count] += 1
      else
        # first sighting: record the details with a count of one
        unique_events[event.id] = {:count => 1,
                                   :name => event.name,
                                   :severity => event.severity,
                                   :family => event.family,
                                   :synopsis => event.synopsis,
                                   :description=> event.description,
                                   :solution => event.solution,
                                   :cvss_base_score => event.cvss_base_score,
                                   :cve => event.cve,
                                   :cvss_vector => event.cvss_vector }
      end # if
    end # host.each_event
  end # scan.each_host
  # sort the hash by v[:count] (descending)
  puts unique_events.sort_by{|k, v| -v[:count]}.take(event_count).to_yaml
end
# Print two Terminal::Table summaries for +scan+: a per-scan severity/port
# table and an aggregate table (kept separate so it can later roll up
# several scans at once).
def calculate_statistics(scan)
  hosts_with_high_severity_count = 0
  # These were running accumulators when multiple scans were processed in
  # one pass; with a single scan they are direct copies of its counts.
  total_hosts = scan.host_count
  aggregate_high_severity_count = scan.high_severity_count
  output_table = Terminal::Table.new :title => scan.title,
    :style => {:width => 60 }
  output_table << ['High severity issues', scan.high_severity_count]
  output_table << ['Medium severity issues', scan.medium_severity_count]
  # typo fix: was 'Low severity isseus'
  output_table << ['Low severity issues', scan.low_severity_count]
  output_table << ['Open ports', scan.open_ports_count]
  output_table.align_column(1, :right)
  puts output_table
  scan.each_host do |host|
    hosts_with_high_severity_count += 1 if host.high_severity_count > 0
  end
  aggregate_statistics = Terminal::Table.new :title => "Aggregate statistics",
    :style => { :width => 60 }
  # typo fix: was 'Aggregate high severity issuse'
  aggregate_statistics << ['Aggregate high severity issues',
    aggregate_high_severity_count]
  aggregate_statistics << ['Hosts with high severity issues',
    hosts_with_high_severity_count]
  aggregate_statistics << ['Total hosts',
    total_hosts]
  # Guard against NaN in the output when the report contains no hosts.
  ratio = total_hosts.zero? ? 0.0 : (100 * hosts_with_high_severity_count.to_f / total_hosts)
  percent_hosts_high_severity = sprintf "%.2f%%", ratio
  aggregate_statistics << ['% hosts with a high severity issue',
    percent_hosts_high_severity]
  aggregate_statistics.align_column(1, :right)
  puts aggregate_statistics
end
# Script entry point: parse every .nessus report under report_root_dir and
# print the N most common non-informational events per scan, where N comes
# from the --top-events command-line option (default 10).
if __FILE__ == $PROGRAM_NAME
  opts = Trollop::options do
    opt :top_events, "The <i> most common events", :default => 10
  end
  Dir.glob(report_root_dir+'*.nessus') do |report_file|
    Nessus::Parse.new(report_file) do |scan|
      calculate_top_events(scan, opts[:top_events])
      # calculate_statistics(scan)
    end
  end
end
|
require 'test_helper'
# Unit tests for Form: questioning rank management, version bumping on
# publish, and replication of forms (including questions, option sets, and
# conditions) both within a mission and from standard forms into missions.
class FormTest < ActiveSupport::TestCase
  setup do
    clear_objects(Questioning, Question, Form)
  end

  test "update ranks" do
    f = FactoryGirl.create(:form, :question_types => %w(integer integer))
    # reload form to ensure questions are sorted by rank
    f.reload
    # save ID of first questioning
    first_qing_id = f.questionings[0].id
    # swap ranks and save
    f.update_ranks(f.questionings[0].id.to_s => '2', f.questionings[1].id.to_s => '1')
    f.save!
    # now reload and make sure they're switched
    f.reload
    assert_equal(first_qing_id, f.questionings.last.id)
  end

  test "destroy questionings" do
    f = FactoryGirl.create(:form, :question_types => %w(integer decimal decimal integer))
    # remove the decimal questions
    f.destroy_questionings(f.questionings[1..2])
    f.reload
    # make sure they're gone and ranks are ok
    assert_equal(2, f.questionings.count)
    assert_equal([1,2], f.questionings.map(&:rank))
  end

  test "questionings count should work" do
    f = FactoryGirl.create(:form, :question_types => %w(integer integer))
    f.reload
    assert_equal(2, f.questionings_count)
  end

  test "all required" do
    f = FactoryGirl.create(:form, :question_types => %w(integer integer))
    assert_equal(false, f.all_required?)
    f.questionings.each{|q| q.required = true; q.save}
    assert_equal(true, f.all_required?)
  end

  test "form should create new version for itself when published" do
    f = FactoryGirl.create(:form)
    assert_nil(f.current_version)
    # publish and check again
    f.publish!
    f.reload
    assert_equal(1, f.current_version.sequence)
    # ensure form_id is set properly on version object
    assert_equal(f.id, f.current_version.form_id)
    # unpublish (shouldn't change)
    old = f.current_version.code
    f.unpublish!
    f.reload
    assert_equal(old, f.current_version.code)
    # publish again (shouldn't change)
    old = f.current_version.code
    f.publish!
    f.reload
    assert_equal(old, f.current_version.code)
    # unpublish, set upgrade flag, and publish (should change)
    old = f.current_version.code
    f.unpublish!
    f.flag_for_upgrade!
    f.publish!
    f.reload
    assert_not_equal(old, f.current_version.code)
    # unpublish and publish (shouldn't change)
    old = f.current_version.code
    f.unpublish!
    f.publish!
    f.reload
    assert_equal(old, f.current_version.code)
  end

  test "replicating form within mission should avoid name conflict" do
    f = FactoryGirl.create(:form, :name => "Myform", :question_types => %w(integer select_one))
    f2 = f.replicate
    assert_equal('Myform 2', f2.name)
    f3 = f2.replicate
    assert_equal('Myform 3', f3.name)
    f4 = f3.replicate
    assert_equal('Myform 4', f4.name)
  end

  test "replicating form within mission should produce different questionings but same questions and option set" do
    f = FactoryGirl.create(:form, :question_types => %w(integer select_one))
    f2 = f.replicate
    assert_not_equal(f.questionings.first, f2.questionings.first)
    # questionings should point to proper form
    assert_equal(f.questionings[0].form, f)
    assert_equal(f2.questionings[0].form, f2)
    # questions and option sets should be same
    assert_equal(f.questions, f2.questions)
    assert_not_nil(f2.questions[1].option_set)
    assert_equal(f.questions[1].option_set, f2.questions[1].option_set)
  end

  test "replicating a standard form should do a deep copy" do
    f = FactoryGirl.create(:form, :question_types => %w(select_one integer), :is_standard => true)
    f2 = f.replicate(get_mission)
    # mission should now be set and should not be standard
    assert(!f2.is_standard)
    assert_equal(get_mission, f2.mission)
    # all objects should be distinct
    assert_not_equal(f, f2)
    assert_not_equal(f.questionings[0], f2.questionings[0])
    assert_not_equal(f.questionings[0].question, f2.questionings[0].question)
    assert_not_equal(f.questionings[0].question.option_set, f2.questionings[0].question.option_set)
    assert_not_equal(f.questionings[0].question.option_set.optionings[0], f2.questionings[0].question.option_set.optionings[0])
    assert_not_equal(f.questionings[0].question.option_set.optionings[0].option, f2.questionings[0].question.option_set.optionings[0].option)
    # but properties should be same
    assert_equal(f.questionings[0].rank, f2.questionings[0].rank)
    assert_equal(f.questionings[0].question.code, f2.questionings[0].question.code)
    assert_equal(f.questionings[0].question.option_set.optionings[0].option.name, f2.questionings[0].question.option_set.optionings[0].option.name)
  end

  test "replicating form with conditions should produce correct new conditions" do
    f = FactoryGirl.create(:form, :question_types => %w(integer select_one))
    # create condition
    f.questionings[1].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[0], :op => 'gt', :value => 1)
    # replicate and test
    f2 = f.replicate
    # questionings and conditions should be distinct
    assert_not_equal(f.questionings[1], f2.questionings[1])
    assert_not_equal(f.questionings[1].condition, f2.questionings[1].condition)
    # new condition should point to new questioning
    assert_equal(f2.questionings[1].condition.ref_qing, f2.questionings[0])
  end

  test "replicating a standard form with a condition referencing an option should produce correct new option reference" do
    f = FactoryGirl.create(:form, :question_types => %w(select_one integer), :is_standard => true)
    # create condition with option reference
    f.questionings[1].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[0], :op => 'eq',
      :option => f.questions[0].option_set.options[0])
    # replicate and test
    f2 = f.replicate(get_mission)
    # questionings, conditions, and options should be distinct
    assert_not_equal(f.questionings[1], f2.questionings[1])
    assert_not_equal(f.questionings[1].condition, f2.questionings[1].condition)
    assert_not_equal(f.questionings[0].question.option_set.optionings[0].option, f2.questionings[0].question.option_set.optionings[0].option)
    # new condition should point to new questioning
    assert_equal(f2.questionings[1].condition.ref_qing, f2.questionings[0])
    # new condition should point to new option
    assert_not_nil(f2.questionings[1].condition.option)
    assert_not_nil(f2.questionings[0].question.option_set.optionings[0].option)
    assert_equal(f2.questionings[1].condition.option, f2.questionings[0].question.option_set.optionings[0].option)
  end

  test "replicating a form with multiple conditions should also work" do
    f = FactoryGirl.create(:form, :question_types => %w(integer integer integer integer))
    # create conditions
    f.questionings[1].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[0], :op => 'gt', :value => 1)
    f.questionings[3].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[1], :op => 'gt', :value => 1)
    f2 = f.replicate
    # new conditions should point to new questionings
    assert_equal(f2.questionings[1].condition.ref_qing, f2.questionings[0])
    assert_equal(f2.questionings[3].condition.ref_qing, f2.questionings[1])
  end

  test "adding new question with condition to middle of form should add to copy also" do
    # setup
    f = FactoryGirl.create(:form, :question_types => %w(integer integer), :is_standard => true)
    f2 = f.replicate(get_mission)
    # add question to std
    f.questionings.build(:rank => 2, :question => FactoryGirl.create(:question, :code => 'charley', :is_standard => true),
      :condition => Condition.new(:ref_qing => f.questionings[0], :op => 'gt', :value => '1', :is_standard => true))
    f.questionings[1].rank = 3
    f.save!
    # ensure question and condition got added properly on std
    f.reload
    assert_equal('charley', f.questionings[1].question.code)
    assert_equal(f.questionings[0], f.questionings[1].condition.ref_qing)
    # ensure replication was ok
    f2.reload
    assert_equal('charley', f2.questionings[1].question.code)
    assert_equal(f2.questionings[0], f2.questionings[1].condition.ref_qing)
    assert_not_equal(f.questionings[1].question.id, f2.questionings[1].question.id)
  end

  test "adding new condition to std form should create copy" do
    # setup
    f = FactoryGirl.create(:form, :question_types => %w(integer integer), :is_standard => true)
    f2 = f.replicate(get_mission)
    # add condition to standard
    f.questionings[1].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[0], :op => 'lt', :value => 10)
    f.save!
    f2.reload
    # a similar condition should now exist in copy (typo fix: was "similiar")
    assert_equal("10", f2.questionings[1].condition.value)
    assert_equal(get_mission, f2.questionings[1].condition.mission)
    assert_equal(f2.questionings[0], f2.questionings[1].condition.ref_qing)
    # but conditions should be distinct
    assert_not_equal(f.questionings[1].condition, f2.questionings[1].condition)
  end

  test "changing condition ref_qing should replicate properly" do
    f = FactoryGirl.create(:form, :question_types => %w(integer integer integer), :is_standard => true)
    # create condition
    f.questionings[2].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[0], :op => 'gt', :value => 1, :is_standard => true)
    f.save!
    # replicate first time
    f2 = f.replicate(get_mission)
    # change condition ref_qing
    f.questionings[2].condition.ref_qing = f.questionings[1]
    f.save!
    # ensure change replicated
    f2.reload
    assert_equal(f2.questionings[1], f2.questionings[2].condition.ref_qing)
  end

  test "changes replicated to multiple copies" do
    std = FactoryGirl.create(:form, :question_types => %w(integer integer integer), :is_standard => true)
    c1 = std.replicate(get_mission)
    c2 = std.replicate(FactoryGirl.create(:mission, :name => 'foo'))
    # add option set to first question
    q = std.questions[0]
    q.qtype_name = 'select_one'
    q.option_set = FactoryGirl.create(:option_set, :is_standard => true)
    q.save!
    # ensure change worked on std
    std.reload
    assert_equal('select_one', std.questions[0].qtype_name)
    # ensure two copies get made
    c1.reload
    c2.reload
    assert_equal('select_one', c1.questions[0].qtype_name)
    assert_equal('select_one', c2.questions[0].qtype_name)
  end

  test "question order should remain correct after replication" do
    f = FactoryGirl.create(:form, :question_types => %w(integer integer integer), :is_standard => true)
    copy = f.replicate(get_mission)
    first_std_q_id = f.questions[0].id
    first_copy_q_id = copy.questions[0].id
    # change the first question in std
    q = f.questions[0]
    q.qtype_name = "decimal"
    q.save!
    # ensure question order is still correct
    f.reload
    assert_equal(first_std_q_id, f.questions[0].id)
    copy.reload
    assert_equal(first_copy_q_id, copy.questions[0].id)
  end

  # typo fix: test name previously read "replcated"
  test "removal of question should be replicated to copy" do
    std = FactoryGirl.create(:form, :question_types => %w(integer decimal date), :is_standard => true)
    copy = std.replicate(get_mission)
    # use the special destroy_questionings method
    std.destroy_questionings(std.questionings[1])
    std.save
    copy.reload
    assert_equal(2, copy.questionings.size)
    assert_equal(2, copy.questions.size)
    assert_equal(%w(integer date), copy.questionings.map(&:qtype_name))
    # ranks should also remain correct on copy
    assert_equal([1,2], copy.questionings.map(&:rank))
  end

  # typo fix: test name previously read "replcated"
  test "removal of condition from question should be replicated to copy" do
    std = FactoryGirl.create(:form, :question_types => %w(integer integer integer), :is_standard => true)
    # create condition
    std.questionings[2].condition = FactoryGirl.build(:condition, :ref_qing => std.questionings[0], :op => 'gt', :value => 1, :is_standard => true)
    std.save!
    std.reload
    # replicate initially
    copy = std.replicate(get_mission)
    # save copy condition id
    copy_cond_id = std.questionings[2].condition.id
    assert_not_nil(copy_cond_id)
    # remove condition and save the qing. this is how it will happen in the controller.
    std.questionings[2].destroy_condition
    assert_nil(std.questionings[2].condition)
    std.questionings[2].save!
    copy.reload
    # copy qing should still be linked to std
    assert_equal(std.questionings[2], copy.questionings[2].standard)
    # but questioning should have no condition and copied condition should no longer exist
    assert_nil(copy.questionings[2].condition)
    assert_nil(Condition.where(:id => copy_cond_id).first)
  end

  test "deleting a standard form should delete copies and copy questionings and conditions" do
    std = FactoryGirl.create(:form, :question_types => %w(integer integer), :is_standard => true)
    copy = std.replicate(get_mission)
    # add condition to standard, which will get replicated
    std.questionings[1].condition = FactoryGirl.build(:condition, :ref_qing => std.questionings[0], :op => 'lt', :value => 10)
    std.save!
    assert_not_nil(Questioning.where(:form_id => copy.id).first)
    # get ID of copy condition
    copy.reload
    copy_cond_id = copy.questionings[1].condition.id
    assert_not_nil(copy_cond_id)
    # destroy std
    std.destroy
    # copy and assoc'd questionings and conditions should be gone
    assert(!Form.exists?(copy))
    assert_nil(Questioning.where(:form_id => copy.id).first)
    assert_nil(Condition.where(:id => copy_cond_id).first)
  end
end
Added test checking that ranks are fixed after a question is destroyed
require 'test_helper'
class FormTest < ActiveSupport::TestCase
# Wipe questionings, questions, and forms before each test (project helper).
setup do
clear_objects(Questioning, Question, Form)
end
# Swapping two questioning ranks via update_ranks should persist after save.
test "update ranks" do
  form = FactoryGirl.create(:form, :question_types => %w(integer integer))
  # reload so questionings come back ordered by rank
  form.reload
  original_first_id = form.questionings[0].id
  # exchange the two positions and persist
  qing_a, qing_b = form.questionings[0], form.questionings[1]
  form.update_ranks(qing_a.id.to_s => '2', qing_b.id.to_s => '1')
  form.save!
  # after a reload, the original first questioning should now come last
  form.reload
  assert_equal(original_first_id, form.questionings.last.id)
end
# destroy_questionings should delete the given questionings and renumber
# the survivors' ranks contiguously from 1.
test "destroy questionings" do
f = FactoryGirl.create(:form, :question_types => %w(integer decimal decimal integer))
# remove the decimal questions
f.destroy_questionings(f.questionings[1..2])
f.reload
# make sure they're gone and ranks are ok
assert_equal(2, f.questionings.count)
assert_equal([1,2], f.questionings.map(&:rank))
end
# questionings_count should reflect the number of questionings on the form.
test "questionings count should work" do
f = FactoryGirl.create(:form, :question_types => %w(integer integer))
f.reload
assert_equal(2, f.questionings_count)
end
# all_required? should flip to true only once every questioning is required.
test "all required" do
  f = FactoryGirl.create(:form, :question_types => %w(integer integer))
  assert_equal(false, f.all_required?)
  # use save! so a validation failure surfaces here rather than as a
  # confusing assertion failure on the next line
  f.questionings.each{|q| q.required = true; q.save!}
  assert_equal(true, f.all_required?)
end
# Version lifecycle: publishing creates the first version; publish/unpublish
# cycles keep the same version code unless the form was flagged for upgrade.
test "form should create new version for itself when published" do
f = FactoryGirl.create(:form)
assert_nil(f.current_version)
# publish and check again
f.publish!
f.reload
assert_equal(1, f.current_version.sequence)
# ensure form_id is set properly on version object
assert_equal(f.id, f.current_version.form_id)
# unpublish (shouldn't change)
old = f.current_version.code
f.unpublish!
f.reload
assert_equal(old, f.current_version.code)
# publish again (shouldn't change)
old = f.current_version.code
f.publish!
f.reload
assert_equal(old, f.current_version.code)
# unpublish, set upgrade flag, and publish (should change)
old = f.current_version.code
f.unpublish!
f.flag_for_upgrade!
f.publish!
f.reload
assert_not_equal(old, f.current_version.code)
# unpublish and publish (shouldn't change)
old = f.current_version.code
f.unpublish!
f.publish!
f.reload
assert_equal(old, f.current_version.code)
end
# Replicating within the same mission should append an incrementing suffix
# to avoid name collisions.
test "replicating form within mission should avoid name conflict" do
f = FactoryGirl.create(:form, :name => "Myform", :question_types => %w(integer select_one))
f2 = f.replicate
assert_equal('Myform 2', f2.name)
f3 = f2.replicate
assert_equal('Myform 3', f3.name)
f4 = f3.replicate
assert_equal('Myform 4', f4.name)
end
# Within-mission replication is shallow: new questionings, but the questions
# and option sets are shared with the original.
test "replicating form within mission should produce different questionings but same questions and option set" do
f = FactoryGirl.create(:form, :question_types => %w(integer select_one))
f2 = f.replicate
assert_not_equal(f.questionings.first, f2.questionings.first)
# questionings should point to proper form
assert_equal(f.questionings[0].form, f)
assert_equal(f2.questionings[0].form, f2)
# questions and option sets should be same
assert_equal(f.questions, f2.questions)
assert_not_nil(f2.questions[1].option_set)
assert_equal(f.questions[1].option_set, f2.questions[1].option_set)
end
# Replicating a standard form to a mission is a deep copy: every object down
# to the options is a distinct row, while attribute values match.
test "replicating a standard form should do a deep copy" do
f = FactoryGirl.create(:form, :question_types => %w(select_one integer), :is_standard => true)
f2 = f.replicate(get_mission)
# mission should now be set and should not be standard
assert(!f2.is_standard)
assert_equal(get_mission, f2.mission)
# all objects should be distinct
assert_not_equal(f, f2)
assert_not_equal(f.questionings[0], f2.questionings[0])
assert_not_equal(f.questionings[0].question, f2.questionings[0].question)
assert_not_equal(f.questionings[0].question.option_set, f2.questionings[0].question.option_set)
assert_not_equal(f.questionings[0].question.option_set.optionings[0], f2.questionings[0].question.option_set.optionings[0])
assert_not_equal(f.questionings[0].question.option_set.optionings[0].option, f2.questionings[0].question.option_set.optionings[0].option)
# but properties should be same
assert_equal(f.questionings[0].rank, f2.questionings[0].rank)
assert_equal(f.questionings[0].question.code, f2.questionings[0].question.code)
assert_equal(f.questionings[0].question.option_set.optionings[0].option.name, f2.questionings[0].question.option_set.optionings[0].option.name)
end
# A replicated condition must be a new row whose ref_qing points at the
# replicated questioning, not the original one.
test "replicating form with conditions should produce correct new conditions" do
f = FactoryGirl.create(:form, :question_types => %w(integer select_one))
# create condition
f.questionings[1].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[0], :op => 'gt', :value => 1)
# replicate and test
f2 = f.replicate
# questionings and conditions should be distinct
assert_not_equal(f.questionings[1], f2.questionings[1])
assert_not_equal(f.questionings[1].condition, f2.questionings[1].condition)
# new condition should point to new questioning
assert_equal(f2.questionings[1].condition.ref_qing, f2.questionings[0])
end
# When a standard condition references an option, the deep copy must remap
# that reference onto the copied option, not the standard one.
test "replicating a standard form with a condition referencing an option should produce correct new option reference" do
f = FactoryGirl.create(:form, :question_types => %w(select_one integer), :is_standard => true)
# create condition with option reference
f.questionings[1].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[0], :op => 'eq',
:option => f.questions[0].option_set.options[0])
# replicate and test
f2 = f.replicate(get_mission)
# questionings, conditions, and options should be distinct
assert_not_equal(f.questionings[1], f2.questionings[1])
assert_not_equal(f.questionings[1].condition, f2.questionings[1].condition)
assert_not_equal(f.questionings[0].question.option_set.optionings[0].option, f2.questionings[0].question.option_set.optionings[0].option)
# new condition should point to new questioning
assert_equal(f2.questionings[1].condition.ref_qing, f2.questionings[0])
# new condition should point to new option
assert_not_nil(f2.questionings[1].condition.option)
assert_not_nil(f2.questionings[0].question.option_set.optionings[0].option)
assert_equal(f2.questionings[1].condition.option, f2.questionings[0].question.option_set.optionings[0].option)
end
# Multiple conditions on one form should each be remapped to the
# corresponding replicated questioning.
test "replicating a form with multiple conditions should also work" do
f = FactoryGirl.create(:form, :question_types => %w(integer integer integer integer))
# create conditions
f.questionings[1].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[0], :op => 'gt', :value => 1)
f.questionings[3].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[1], :op => 'gt', :value => 1)
f2 = f.replicate
# new conditions should point to new questionings
assert_equal(f2.questionings[1].condition.ref_qing, f2.questionings[0])
assert_equal(f2.questionings[3].condition.ref_qing, f2.questionings[1])
end
# Inserting a new question (with condition) mid-form on the standard should
# propagate the insertion — with remapped condition — to the existing copy.
test "adding new question with condition to middle of form should add to copy also" do
# setup
f = FactoryGirl.create(:form, :question_types => %w(integer integer), :is_standard => true)
f2 = f.replicate(get_mission)
# add question to std
f.questionings.build(:rank => 2, :question => FactoryGirl.create(:question, :code => 'charley', :is_standard => true),
:condition => Condition.new(:ref_qing => f.questionings[0], :op => 'gt', :value => '1', :is_standard => true))
f.questionings[1].rank = 3
f.save!
# ensure question and condition got added properly on std
f.reload
assert_equal('charley', f.questionings[1].question.code)
assert_equal(f.questionings[0], f.questionings[1].condition.ref_qing)
# ensure replication was ok
f2.reload
assert_equal('charley', f2.questionings[1].question.code)
assert_equal(f2.questionings[0], f2.questionings[1].condition.ref_qing)
assert_not_equal(f.questionings[1].question.id, f2.questionings[1].question.id)
end
# Adding a condition to the standard after replication should create a
# distinct, remapped condition on the copy.
test "adding new condition to std form should create copy" do
# setup
f = FactoryGirl.create(:form, :question_types => %w(integer integer), :is_standard => true)
f2 = f.replicate(get_mission)
# add condition to standard
f.questionings[1].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[0], :op => 'lt', :value => 10)
f.save!
f2.reload
# a similar condition should now exist in copy
assert_equal("10", f2.questionings[1].condition.value)
assert_equal(get_mission, f2.questionings[1].condition.mission)
assert_equal(f2.questionings[0], f2.questionings[1].condition.ref_qing)
# but conditions should be distinct
assert_not_equal(f.questionings[1].condition, f2.questionings[1].condition)
end
# Re-pointing a standard condition at a different questioning should
# propagate the new (remapped) reference to the copy.
test "changing condition ref_qing should replicate properly" do
f = FactoryGirl.create(:form, :question_types => %w(integer integer integer), :is_standard => true)
# create condition
f.questionings[2].condition = FactoryGirl.build(:condition, :ref_qing => f.questionings[0], :op => 'gt', :value => 1, :is_standard => true)
f.save!
# replicate first time
f2 = f.replicate(get_mission)
# change condition ref_qing
f.questionings[2].condition.ref_qing = f.questionings[1]
f.save!
# ensure change replicated
f2.reload
assert_equal(f2.questionings[1], f2.questionings[2].condition.ref_qing)
end
# A change on the standard should fan out to every mission copy.
test "changes replicated to multiple copies" do
std = FactoryGirl.create(:form, :question_types => %w(integer integer integer), :is_standard => true)
c1 = std.replicate(get_mission)
c2 = std.replicate(FactoryGirl.create(:mission, :name => 'foo'))
# add option set to first question
q = std.questions[0]
q.qtype_name = 'select_one'
q.option_set = FactoryGirl.create(:option_set, :is_standard => true)
q.save!
# ensure change worked on std
std.reload
assert_equal('select_one', std.questions[0].qtype_name)
# ensure two copies get made
c1.reload
c2.reload
assert_equal('select_one', c1.questions[0].qtype_name)
assert_equal('select_one', c2.questions[0].qtype_name)
end
# Editing a standard question must not reorder questions on the standard
# or on its copy.
test "question order should remain correct after replication" do
f = FactoryGirl.create(:form, :question_types => %w(integer integer integer), :is_standard => true)
copy = f.replicate(get_mission)
first_std_q_id = f.questions[0].id
first_copy_q_id = copy.questions[0].id
# change the first question in std
q = f.questions[0]
q.qtype_name = "decimal"
q.save!
# ensure question order is still correct
f.reload
assert_equal(first_std_q_id, f.questions[0].id)
copy.reload
assert_equal(first_copy_q_id, copy.questions[0].id)
end
# Destroying a questioning on the standard should remove the matching
# questioning on the copy and renumber the remaining ranks.
# (Fixed typo in the test name: "replcated" -> "replicated".)
test "removal of question should be replicated to copy" do
  std = FactoryGirl.create(:form, :question_types => %w(integer decimal date), :is_standard => true)
  copy = std.replicate(get_mission)
  # use the special destroy_questionings method
  std.destroy_questionings(std.questionings[1])
  std.save
  copy.reload
  assert_equal(2, copy.questionings.size)
  assert_equal(2, copy.questions.size)
  assert_equal(%w(integer date), copy.questionings.map(&:qtype_name))
  # ranks should also remain correct on copy
  assert_equal([1,2], copy.questionings.map(&:rank))
end
# Removing a condition from a standard questioning should unlink the matching
# condition on the mission copy and delete the copied condition row.
test "removal of condition from question should be replicated to copy" do
  std = FactoryGirl.create(:form, :question_types => %w(integer integer integer), :is_standard => true)
  # create condition
  std.questionings[2].condition = FactoryGirl.build(:condition, :ref_qing => std.questionings[0], :op => 'gt', :value => 1, :is_standard => true)
  std.save!
  std.reload
  # replicate initially
  copy = std.replicate(get_mission)
  # save copy condition id
  # BUG FIX: previously this read the std's condition id, so the final
  # Condition.where check never exercised the copied row.
  copy.reload
  copy_cond_id = copy.questionings[2].condition.id
  assert_not_nil(copy_cond_id)
  # remove condition and save the qing. this is how it will happen in the controller.
  std.questionings[2].destroy_condition
  assert_nil(std.questionings[2].condition)
  std.questionings[2].save!
  copy.reload
  # copy qing should still be linked to std
  assert_equal(std.questionings[2], copy.questionings[2].standard)
  # but questioning should have no condition and copied condition should no longer exist
  assert_nil(copy.questionings[2].condition)
  assert_nil(Condition.where(:id => copy_cond_id).first)
end
# Destroying a standard form should cascade-delete its mission copies along
# with the copies' questionings and replicated conditions.
test "deleting a standard form should delete copies and copy questionings and conditions" do
std = FactoryGirl.create(:form, :question_types => %w(integer integer), :is_standard => true)
copy = std.replicate(get_mission)
# add condition to standard, which will get replicated
std.questionings[1].condition = FactoryGirl.build(:condition, :ref_qing => std.questionings[0], :op => 'lt', :value => 10)
std.save!
assert_not_nil(Questioning.where(:form_id => copy.id).first)
# get ID of copy condition
copy.reload
copy_cond_id = copy.questionings[1].condition.id
assert_not_nil(copy_cond_id)
# destroy std
std.destroy
# copy and assoc'd questionings and conditions should be gone
assert(!Form.exists?(copy))
assert_nil(Questioning.where(:form_id => copy.id).first)
assert_nil(Condition.where(:id => copy_cond_id).first)
end
# Deleting a question directly (not via destroy_questionings) should still
# leave the remaining questionings with contiguous ranks.
test "ranks should be fixed after deleting a question" do
f = FactoryGirl.create(:form, :question_types => %w(integer integer integer))
f.questions[1].destroy
assert_equal(2, f.reload.questions.size)
assert_equal(2, f.questionings.last.rank)
end
end
|
require 'set_builder/modifier/adverb'
module SetBuilder
module Modifiers
class DateModifier < Modifier::Adverb
# Declares the supported operators and the list of value types each expects.
def self.operators
  unary = { :ever => [] }
  date_ops = [:before, :after, :on].each_with_object({}) { |op, ops| ops[op] = [:date] }
  unary.merge(date_ops).merge(:in_the_last => [:number, :period])
end
# Builds the SQL fragment comparing +selector+ against the formatted date
# value for the current operator. Returns nil for unknown operators.
def build_conditions_for(selector)
  return "#{selector} IS NOT NULL" if operator == :ever
  comparison = { :before => "<", :after => ">", :on => "=", :in_the_last => ">=" }[operator]
  "#{selector}#{comparison}'#{format_value}'" if comparison
end
protected
# Formats the comparison operand as an ISO date string (YYYY-MM-DD).
# For :in_the_last the operand is computed relative to now from the
# number/period pair in +values+; otherwise values[0] is parsed as a date.
def format_value
  operand =
    case operator
    when :in_the_last
      unit = {
        "years" => :years, "year" => :years,
        "months" => :months, "month" => :months,
        "weeks" => :weeks, "week" => :weeks,
        "days" => :days, "day" => :days
      }[values[1]]
      # unrecognized period words yield nil, matching the original fallthrough
      unit && values[0].to_i.public_send(unit).ago
    else
      values[0].to_date
    end
  operand.strftime('%Y-%m-%d')
end
end
end
end
Refactor DateModifier: extract get_date helper from format_value
require 'set_builder/modifier/adverb'
module SetBuilder
module Modifiers
class DateModifier < Modifier::Adverb
# Declares the supported operators and the list of value types each expects.
def self.operators
{
:ever => [],
:before => [:date],
:after => [:date],
:on => [:date],
:in_the_last => [:number, :period]
}
end
# Builds the SQL fragment comparing +selector+ against the formatted date
# value for the current operator; falls through to nil for unknown operators.
def build_conditions_for(selector)
case operator
when :ever
"#{selector} IS NOT NULL"
when :before
"#{selector}<'#{format_value}'"
when :after
"#{selector}>'#{format_value}'"
when :on
"#{selector}='#{format_value}'"
when :in_the_last
"#{selector}>='#{format_value}'"
end
end
protected
# Computes the date operand: for :in_the_last it counts back from now using
# the number/period pair in values (ActiveSupport's n.years.ago etc.);
# otherwise values[0] is parsed as a date.
# NOTE(review): an unrecognized period word falls through to nil, which will
# raise in format_value's strftime — confirm callers validate the period.
def get_date
case operator
when :in_the_last
case values[1]
when "years", "year"
values[0].to_i.years.ago
when "months", "month"
values[0].to_i.months.ago
when "weeks", "week"
values[0].to_i.weeks.ago
when "days", "day"
values[0].to_i.days.ago
end
else
values[0].to_date
end
end
# Formats the computed date as an ISO date string (YYYY-MM-DD).
def format_value
get_date.strftime('%Y-%m-%d')
end
end
end
end
|
Add test covering ladder item templates (name and path)
require File.expand_path(File.dirname(__FILE__) + '/../test_helper')
require "custom_field/ladder"
require "custom_field/item"
# Verifies that a CustomField ladder item can register a template and expose
# its name and path.
class LadderTest < Test::Unit::TestCase
  def setup
    ladder = CustomField::Ladder.build "TestLadder"
    ladder.item "TestItem"
    @item = ladder.items.first
    @item.template "one template", "path"
  end

  def test_should_have_a_template
    # BUG FIX: the collection accessor is #templates (see the other test);
    # calling #template with no args hit the builder method instead.
    assert_equal 1, @item.templates.size
  end

  def test_should_have_name_and_path_in_template
    template = @item.templates.first
    assert_equal "one template", template.name
    # BUG FIX: the second assertion previously re-checked name; the setup
    # passes "path" as the second (path) argument, so verify template.path.
    assert_equal "path", template.path
  end
end
|
require File.dirname(__FILE__) + '/../test_helper'
class TeamTest < ActiveSupport::TestCase
# Lookup by exact name or alias returns the existing team; unknown names
# create a new team.
def test_find_by_name_or_alias_or_create
assert_equal(teams(:gentle_lovers), Team.find_by_name_or_alias_or_create('Gentle Lovers'), 'Gentle Lovers')
assert_equal(teams(:gentle_lovers), Team.find_by_name_or_alias_or_create('Gentile Lovers'), 'Gentle Lovers alias')
assert_nil(Team.find_by_name_or_alias('Health Net'), 'Health Net should not exist')
team = Team.find_by_name_or_alias_or_create('Health Net')
assert_not_nil(team, 'Health Net')
assert_equal('Health Net', team.name, 'New team')
end
# Merging moves the merged team's results, racers, and aliases onto the kept
# team (plus an alias for the merged team's name), then deletes the merged team.
def test_merge
team_to_keep = teams(:vanilla)
team_to_merge = teams(:gentle_lovers)
assert_not_nil(Team.find_by_name(team_to_keep.name), "#{team_to_keep.name} should be in DB")
assert_equal(2, Result.find_all_by_team_id(team_to_keep.id).size, "Vanilla's results")
assert_equal(1, Racer.find_all_by_team_id(team_to_keep.id).size, "Vanilla's racers")
assert_equal(1, Alias.find_all_by_team_id(team_to_keep.id).size, "Vanilla's aliases")
assert_not_nil(Team.find_by_name(team_to_merge.name), "#{team_to_merge.name} should be in DB")
assert_equal(1, Result.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's results")
assert_equal(2, Racer.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's racers")
assert_equal(1, Alias.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's aliases")
team_to_keep.merge(team_to_merge)
assert_not_nil(Team.find_by_name(team_to_keep.name), "#{team_to_keep.name} should be in DB")
assert_equal(3, Result.find_all_by_team_id(team_to_keep.id).size, "Vanilla's results")
assert_equal(3, Racer.find_all_by_team_id(team_to_keep.id).size, "Vanilla's racers")
aliases = Alias.find_all_by_team_id(team_to_keep.id)
lovers_alias = aliases.detect{|a| a.name == 'Gentle Lovers'}
assert_not_nil(lovers_alias, 'Vanilla should have Gentle Lovers alias')
assert_equal(3, aliases.size, "Vanilla's aliases")
assert_nil(Team.find_by_name(team_to_merge.name), "#{team_to_merge.name} should not be in DB")
assert_equal(0, Result.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's results")
assert_equal(0, Racer.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's racers")
assert_equal(0, Alias.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's aliases")
end
# find_by_name_or_alias returns nil for unknown names (no create), and finds
# teams by either their name or one of their aliases.
def test_find_by_name_or_alias
# new
name = 'Brooklyn Cycling Force'
assert_nil(Team.find_by_name(name), "#{name} should not exist")
team = Team.find_by_name_or_alias(name)
assert_nil(Team.find_by_name(name), "#{name} should not exist")
assert_nil(team, "#{name} should not exist")
# exists
Team.create(:name => name)
team = Team.find_by_name_or_alias(name)
assert_not_nil(team, "#{name} should exist")
assert_equal(name, team.name, 'name')
# alias
Alias.create(:name => 'BCF', :team => team)
team = Team.find_by_name_or_alias('BCF')
assert_not_nil(team, "#{name} should exist")
assert_equal(name, team.name, 'name')
team = Team.find_by_name_or_alias(name)
assert_not_nil(team, "#{name} should exist")
assert_equal(name, team.name, 'name')
end
# A team with a duplicate name should fail validation.
def test_create_dupe
assert_not_nil(Team.find_by_name('Vanilla'), 'Vanilla should exist')
dupe = Team.new(:name => 'Vanilla')
assert(!dupe.valid?, 'Dupe Vanilla should not be valid')
end
# Creating a team whose name matches an existing alias should succeed and
# remove that alias.
def test_create_and_override_alias
assert_not_nil(Team.find_by_name('Vanilla'), 'Vanilla should exist')
assert_not_nil(Alias.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles alias should exist')
assert_nil(Team.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles should not exist')
dupe = Team.create!(:name => 'Vanilla Bicycles')
assert(dupe.valid?, 'Dupe Vanilla should be valid')
assert_not_nil(Team.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles should exist')
assert_not_nil(Team.find_by_name('Vanilla'), 'Vanilla should exist')
assert_nil(Alias.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles alias should not exist')
assert_nil(Alias.find_by_name('Vanilla'), 'Vanilla alias should not exist')
end
# Renaming a team to one of its aliases should swap the alias and the name.
def test_update_to_alias
assert_not_nil(Team.find_by_name('Vanilla'), 'Vanilla should exist')
assert_not_nil(Alias.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles alias should exist')
assert_nil(Team.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles should not exist')
vanilla = teams(:vanilla)
vanilla.name = 'Vanilla Bicycles'
vanilla.save!
assert(vanilla.valid?, 'Renamed Vanilla should be valid')
assert_not_nil(Team.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles should exist')
assert_nil(Team.find_by_name('Vanilla'), 'Vanilla should not exist')
assert_nil(Alias.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles alias should not exist')
assert_not_nil(Alias.find_by_name('Vanilla'), 'Vanilla alias should exist')
end
# A case-only rename should save without validation errors.
def test_update_name_different_case
vanilla = teams(:vanilla)
assert_equal('Vanilla', vanilla.name, 'Name before update')
vanilla.name = 'vanilla'
vanilla.save
assert(vanilla.errors.empty?, 'Should have no errors after save')
vanilla.reload
assert_equal('vanilla', vanilla.name, 'Name after update')
end
# The member flag defaults to false and persists whatever value is assigned.
def test_member
team = Team.new(:name => 'Team Spine')
assert_equal(false, team.member, 'member')
team.save!
team.reload
assert_equal(false, team.member, 'member')
team = Team.new(:name => 'California Road Club')
assert_equal(false, team.member, 'member')
team.member = true
assert_equal(true, team.member, 'member')
team.save!
team.reload
assert_equal(true, team.member, 'member')
team.member = true
team.save!
team.reload
assert_equal(true, team.member, 'member')
end
# Team#name(date) should return the historical name in effect at that date,
# and the current name for today or future dates.
def test_name_with_date
team = Team.create!(:name => "Tecate-Una Mas")
assert_equal(0, team.historical_names(true).size, "historical_names")
team.historical_names.create!(:name => "Team Tecate", :date => 1.years.ago)
assert_equal(1, team.historical_names(true).size, "historical_names")
team.historical_names.create!(:name => "Twin Peaks", :date => 2.years.ago)
assert_equal(2, team.historical_names(true).size, "historical_names")
assert_equal("Tecate-Una Mas", team.name)
assert_equal("Tecate-Una Mas", team.name(Date.today))
assert_equal("Team Tecate", team.name(1.years.ago))
assert_equal("Twin Peaks", team.name(2.years.ago))
assert_equal("Tecate-Una Mas", team.name(Date.today.next_year))
end
# Renaming a team with prior-year results should snapshot the old name as a
# historical name so old results keep it while current results get the new one.
def test_create_new_name_if_there_are_results_from_previous_year
team = Team.create!(:name => "Twin Peaks")
event = SingleDayEvent.create!(:date => 1.years.ago)
old_result = event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
assert_equal("Twin Peaks", old_result.team_name, "Team name on old result")
event = SingleDayEvent.create!(:date => Date.today)
result = event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
assert_equal("Twin Peaks", result.team_name, "Team name on new result")
assert_equal("Twin Peaks", old_result.team_name, "Team name on old result")
team.name = "Tecate-Una Mas"
team.save!
assert_equal(1, team.historical_names(true).size, "historical_names")
assert_equal("Twin Peaks", old_result.team_name, "Team name should stay the same on old result")
assert_equal("Tecate-Una Mas", result.team_name, "Team name should change on this year's result")
end
# results_before_this_year? is true only when the team has results dated in a
# previous year; current-year results alone don't count.
# (Removed three discarded bare calls to the predicate — debugging leftovers
# whose return values were never used.)
def test_results_before_this_year
  team = Team.create!(:name => "Twin Peaks")
  assert(!team.results_before_this_year?, "results_before_this_year? with no results")
  event = SingleDayEvent.create!(:date => Date.today)
  result = event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
  assert(!team.results_before_this_year?, "results_before_this_year? with results in this year")
  result.destroy
  event = SingleDayEvent.create!(:date => 1.years.ago)
  event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
  assert(team.results_before_this_year?, "results_before_this_year? with results only a year ago")
  event = SingleDayEvent.create!(:date => 2.years.ago)
  event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
  assert(team.results_before_this_year?, "results_before_this_year? with several old results")
  event = SingleDayEvent.create!(:date => Date.today)
  event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
  assert(team.results_before_this_year?, "results_before_this_year? with results in many years")
end
# Renaming repeatedly in one year should create only one historical name
# (the pre-rename name) while each intermediate name becomes an alias.
def test_rename_multiple_times
team = Team.create!(:name => "Twin Peaks")
event = SingleDayEvent.create!(:date => 3.years.ago)
result = event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
assert_equal(0, team.historical_names(true).size, "historical_names")
team.name = "Tecate"
team.save!
assert_equal(1, team.historical_names(true).size, "historical_names")
assert_equal(1, team.aliases(true).size, "aliases")
team.name = "Tecate Una Mas"
team.save!
assert_equal(1, team.historical_names(true).size, "historical_names")
assert_equal(2, team.aliases(true).size, "aliases")
team.name = "Tecate-¡Una Mas!"
team.save!
assert_equal(1, team.historical_names(true).size, "historical_names")
assert_equal(3, team.aliases(true).size, "aliases")
assert_equal("Tecate-¡Una Mas!", team.name, "New team name")
assert_equal("Twin Peaks", team.historical_names.first.name, "Old team name")
assert_equal(Date.today.year - 1, team.historical_names.first.year, "Old team name year")
end
# Team#name should accept a Date anywhere in the year or a bare year Integer.
def test_historical_name_date_or_year
team = teams(:vanilla)
HistoricalName.create!(:team_id => team.id, :name => "Sacha's Team", :year => 2001)
assert_equal("Sacha's Team", team.name(Date.new(2001, 12, 31)), "name for 2001-12-31")
assert_equal("Sacha's Team", team.name(Date.new(2001)), "name for 2001-01-01")
assert_equal("Sacha's Team", team.name(2001), "name for 2001")
end
# With several historical names, name(year) should return the name whose year
# is the closest at-or-after match, the current name for this/future years.
def test_multiple_historical_names
  team = teams(:vanilla)
  HistoricalName.create!(:team_id => team.id, :name => "Mapei", :year => 2001)
  HistoricalName.create!(:team_id => team.id, :name => "Mapei-Clas", :year => 2002)
  HistoricalName.create!(:team_id => team.id, :name => "Quick Step", :year => 2003)
  assert_equal(3, team.historical_names.size, "Historical names")
  assert_equal("Mapei", team.name(2000), "Historical name 2000")
  assert_equal("Mapei", team.name(2001), "Historical name 2001")
  assert_equal("Mapei-Clas", team.name(2002), "Historical name 2002")
  assert_equal("Quick Step", team.name(2003), "Historical name 2003")
  # BUG FIX: this assertion duplicated the 2003 lookup although the message
  # said 2004; query 2004 as intended.
  assert_equal("Quick Step", team.name(2004), "Historical name 2004")
  assert_equal("Quick Step", team.name(Date.today.year - 1), "Historical name last year")
  assert_equal("Vanilla", team.name(Date.today.year), "Name this year")
  assert_equal("Vanilla", team.name(Date.today.year + 1), "Name next year")
end
# Renaming a team back to one of its historical names should succeed.
def test_rename_to_old_name
team = teams(:vanilla)
HistoricalName.create!(:team_id => team.id, :name => "Sacha's Team", :year => 2001)
assert_equal(1, team.historical_names.size, "Historical names")
assert_equal("Sacha's Team", team.name(2001), "Historical name 2001")
team.name = "Sacha's Team"
team.save!
assert_equal("Sacha's Team", team.name, "New name")
end
end
Ensure that team aliases are carried over from year to year
require File.dirname(__FILE__) + '/../test_helper'
class TeamTest < ActiveSupport::TestCase
def test_find_by_name_or_alias_or_create
assert_equal(teams(:gentle_lovers), Team.find_by_name_or_alias_or_create('Gentle Lovers'), 'Gentle Lovers')
assert_equal(teams(:gentle_lovers), Team.find_by_name_or_alias_or_create('Gentile Lovers'), 'Gentle Lovers alias')
assert_nil(Team.find_by_name_or_alias('Health Net'), 'Health Net should not exist')
team = Team.find_by_name_or_alias_or_create('Health Net')
assert_not_nil(team, 'Health Net')
assert_equal('Health Net', team.name, 'New team')
end
def test_merge
team_to_keep = teams(:vanilla)
team_to_merge = teams(:gentle_lovers)
assert_not_nil(Team.find_by_name(team_to_keep.name), "#{team_to_keep.name} should be in DB")
assert_equal(2, Result.find_all_by_team_id(team_to_keep.id).size, "Vanilla's results")
assert_equal(1, Racer.find_all_by_team_id(team_to_keep.id).size, "Vanilla's racers")
assert_equal(1, Alias.find_all_by_team_id(team_to_keep.id).size, "Vanilla's aliases")
assert_not_nil(Team.find_by_name(team_to_merge.name), "#{team_to_merge.name} should be in DB")
assert_equal(1, Result.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's results")
assert_equal(2, Racer.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's racers")
assert_equal(1, Alias.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's aliases")
team_to_keep.merge(team_to_merge)
assert_not_nil(Team.find_by_name(team_to_keep.name), "#{team_to_keep.name} should be in DB")
assert_equal(3, Result.find_all_by_team_id(team_to_keep.id).size, "Vanilla's results")
assert_equal(3, Racer.find_all_by_team_id(team_to_keep.id).size, "Vanilla's racers")
aliases = Alias.find_all_by_team_id(team_to_keep.id)
lovers_alias = aliases.detect{|a| a.name == 'Gentle Lovers'}
assert_not_nil(lovers_alias, 'Vanilla should have Gentle Lovers alias')
assert_equal(3, aliases.size, "Vanilla's aliases")
assert_nil(Team.find_by_name(team_to_merge.name), "#{team_to_merge.name} should not be in DB")
assert_equal(0, Result.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's results")
assert_equal(0, Racer.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's racers")
assert_equal(0, Alias.find_all_by_team_id(team_to_merge.id).size, "Gentle Lovers's aliases")
end
def test_find_by_name_or_alias
# new
name = 'Brooklyn Cycling Force'
assert_nil(Team.find_by_name(name), "#{name} should not exist")
team = Team.find_by_name_or_alias(name)
assert_nil(Team.find_by_name(name), "#{name} should not exist")
assert_nil(team, "#{name} should not exist")
# exists
Team.create(:name => name)
team = Team.find_by_name_or_alias(name)
assert_not_nil(team, "#{name} should exist")
assert_equal(name, team.name, 'name')
# alias
Alias.create(:name => 'BCF', :team => team)
team = Team.find_by_name_or_alias('BCF')
assert_not_nil(team, "#{name} should exist")
assert_equal(name, team.name, 'name')
team = Team.find_by_name_or_alias(name)
assert_not_nil(team, "#{name} should exist")
assert_equal(name, team.name, 'name')
end
def test_create_dupe
assert_not_nil(Team.find_by_name('Vanilla'), 'Vanilla should exist')
dupe = Team.new(:name => 'Vanilla')
assert(!dupe.valid?, 'Dupe Vanilla should not be valid')
end
def test_create_and_override_alias
assert_not_nil(Team.find_by_name('Vanilla'), 'Vanilla should exist')
assert_not_nil(Alias.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles alias should exist')
assert_nil(Team.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles should not exist')
dupe = Team.create!(:name => 'Vanilla Bicycles')
assert(dupe.valid?, 'Dupe Vanilla should be valid')
assert_not_nil(Team.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles should exist')
assert_not_nil(Team.find_by_name('Vanilla'), 'Vanilla should exist')
assert_nil(Alias.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles alias should not exist')
assert_nil(Alias.find_by_name('Vanilla'), 'Vanilla alias should not exist')
end
def test_update_to_alias
assert_not_nil(Team.find_by_name('Vanilla'), 'Vanilla should exist')
assert_not_nil(Alias.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles alias should exist')
assert_nil(Team.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles should not exist')
vanilla = teams(:vanilla)
vanilla.name = 'Vanilla Bicycles'
vanilla.save!
assert(vanilla.valid?, 'Renamed Vanilla should be valid')
assert_not_nil(Team.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles should exist')
assert_nil(Team.find_by_name('Vanilla'), 'Vanilla should not exist')
assert_nil(Alias.find_by_name('Vanilla Bicycles'), 'Vanilla Bicycles alias should not exist')
assert_not_nil(Alias.find_by_name('Vanilla'), 'Vanilla alias should exist')
end
def test_update_name_different_case
vanilla = teams(:vanilla)
assert_equal('Vanilla', vanilla.name, 'Name before update')
vanilla.name = 'vanilla'
vanilla.save
assert(vanilla.errors.empty?, 'Should have no errors after save')
vanilla.reload
assert_equal('vanilla', vanilla.name, 'Name after update')
end
def test_member
team = Team.new(:name => 'Team Spine')
assert_equal(false, team.member, 'member')
team.save!
team.reload
assert_equal(false, team.member, 'member')
team = Team.new(:name => 'California Road Club')
assert_equal(false, team.member, 'member')
team.member = true
assert_equal(true, team.member, 'member')
team.save!
team.reload
assert_equal(true, team.member, 'member')
team.member = true
team.save!
team.reload
assert_equal(true, team.member, 'member')
end
# name(date) should return the historical name in effect at that date and
# the current name for today and future dates.
def test_name_with_date
team = Team.create!(:name => "Tecate-Una Mas")
assert_equal(0, team.historical_names(true).size, "historical_names")
team.historical_names.create!(:name => "Team Tecate", :date => 1.years.ago)
assert_equal(1, team.historical_names(true).size, "historical_names")
team.historical_names.create!(:name => "Twin Peaks", :date => 2.years.ago)
assert_equal(2, team.historical_names(true).size, "historical_names")
assert_equal("Tecate-Una Mas", team.name)
assert_equal("Tecate-Una Mas", team.name(Date.today))
assert_equal("Team Tecate", team.name(1.years.ago))
assert_equal("Twin Peaks", team.name(2.years.ago))
assert_equal("Tecate-Una Mas", team.name(Date.today.next_year))
end
# Renaming a team that has results from a previous year should create a
# historical name so old results keep displaying the old team name while
# this year's results show the new one.
def test_create_new_name_if_there_are_results_from_previous_year
team = Team.create!(:name => "Twin Peaks")
event = SingleDayEvent.create!(:date => 1.years.ago)
old_result = event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
assert_equal("Twin Peaks", old_result.team_name, "Team name on old result")
event = SingleDayEvent.create!(:date => Date.today)
result = event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
assert_equal("Twin Peaks", result.team_name, "Team name on new result")
assert_equal("Twin Peaks", old_result.team_name, "Team name on old result")
team.name = "Tecate-Una Mas"
team.save!
assert_equal(1, team.historical_names(true).size, "historical_names")
assert_equal("Twin Peaks", old_result.team_name, "Team name should stay the same on old result")
assert_equal("Tecate-Una Mas", result.team_name, "Team name should change on this year's result")
end
# results_before_this_year? should be false until the team has a result
# dated in a previous year, and remain true once it does.
# (Removed three bare `team.results_before_this_year?` calls whose return
# values were discarded right before asserting the same predicate.)
def test_results_before_this_year
team = Team.create!(:name => "Twin Peaks")
assert(!team.results_before_this_year?, "results_before_this_year? with no results")
event = SingleDayEvent.create!(:date => Date.today)
result = event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
assert(!team.results_before_this_year?, "results_before_this_year? with results in this year")
result.destroy
event = SingleDayEvent.create!(:date => 1.years.ago)
event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
assert(team.results_before_this_year?, "results_before_this_year? with results only a year ago")
event = SingleDayEvent.create!(:date => 2.years.ago)
event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
assert(team.results_before_this_year?, "results_before_this_year? with several old results")
event = SingleDayEvent.create!(:date => Date.today)
event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
assert(team.results_before_this_year?, "results_before_this_year? with results in many years")
end
# Several renames in one session should yield exactly one historical name
# (the original) plus an alias for every former name.
def test_rename_multiple_times
team = Team.create!(:name => "Twin Peaks")
event = SingleDayEvent.create!(:date => 3.years.ago)
result = event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
assert_equal(0, team.historical_names(true).size, "historical_names")
team.name = "Tecate"
team.save!
assert_equal(1, team.historical_names(true).size, "historical_names")
assert_equal(1, team.aliases(true).size, "aliases")
team.name = "Tecate Una Mas"
team.save!
assert_equal(1, team.historical_names(true).size, "historical_names")
assert_equal(2, team.aliases(true).size, "aliases")
team.name = "Tecate-¡Una Mas!"
team.save!
assert_equal(1, team.historical_names(true).size, "historical_names")
assert_equal(3, team.aliases(true).size, "aliases")
assert_equal("Tecate-¡Una Mas!", team.name, "New team name")
assert_equal("Twin Peaks", team.historical_names.first.name, "Old team name")
assert_equal(Date.today.year - 1, team.historical_names.first.year, "Old team name year")
end
# A historical name stored with a year should match lookups by full date,
# by year-start date, and by bare integer year.
def test_historical_name_date_or_year
team = teams(:vanilla)
HistoricalName.create!(:team_id => team.id, :name => "Sacha's Team", :year => 2001)
assert_equal("Sacha's Team", team.name(Date.new(2001, 12, 31)), "name for 2001-12-31")
assert_equal("Sacha's Team", team.name(Date.new(2001)), "name for 2001-01-01")
assert_equal("Sacha's Team", team.name(2001), "name for 2001")
end
# With several historical names, name(year) should return the name in
# effect for that year; years after the last entry keep the last
# historical name until the current year, which uses the current name.
# (Fixed: the "Historical name 2004" assertion queried 2003 — a duplicate
# of the previous line — instead of 2004.)
def test_multiple_historical_names
team = teams(:vanilla)
HistoricalName.create!(:team_id => team.id, :name => "Mapei", :year => 2001)
HistoricalName.create!(:team_id => team.id, :name => "Mapei-Clas", :year => 2002)
HistoricalName.create!(:team_id => team.id, :name => "Quick Step", :year => 2003)
assert_equal(3, team.historical_names.size, "Historical names")
assert_equal("Mapei", team.name(2000), "Historical name 2000")
assert_equal("Mapei", team.name(2001), "Historical name 2001")
assert_equal("Mapei-Clas", team.name(2002), "Historical name 2002")
assert_equal("Quick Step", team.name(2003), "Historical name 2003")
assert_equal("Quick Step", team.name(2004), "Historical name 2004")
assert_equal("Quick Step", team.name(Date.today.year - 1), "Historical name last year")
assert_equal("Vanilla", team.name(Date.today.year), "Name this year")
assert_equal("Vanilla", team.name(Date.today.year + 1), "Name next year")
end
# Renaming a team back to one of its historical names should be allowed.
def test_rename_to_old_name
team = teams(:vanilla)
HistoricalName.create!(:team_id => team.id, :name => "Sacha's Team", :year => 2001)
assert_equal(1, team.historical_names.size, "Historical names")
assert_equal("Sacha's Team", team.name(2001), "Historical name 2001")
team.name = "Sacha's Team"
team.save!
assert_equal("Sacha's Team", team.name, "New name")
end
# Renaming must keep pre-existing aliases and add the old name as a new alias.
def test_renamed_teams_should_keep_aliases
team = Team.create!(:name => "Twin Peaks/The Bike Nook")
event = SingleDayEvent.create!(:date => 3.years.ago)
result = event.standings.create!.races.create!(:category => categories(:senior_men)).results.create!(:team => team)
team.aliases.create!(:name => "Twin Peaks")
assert_equal(0, team.historical_names(true).size, "historical_names")
assert_equal(1, team.aliases(true).size, "Aliases")
team.name = "Tecate"
team.save!
assert_equal(1, team.historical_names(true).size, "historical_names")
assert_equal(2, team.aliases(true).size, "aliases")
assert_equal(["Twin Peaks", "Twin Peaks/The Bike Nook"], team.aliases.map(&:name).sort, "Should retain keep alias from old name")
end
end |
require File.dirname(__FILE__) + '/../test_helper'
# Model tests for User validations: presence of required attributes and
# uniqueness of email and display name.
class UserTest < Test::Unit::TestCase
fixtures :users
# A blank user should fail validation on the required fields but not on
# the optional home location fields.
# (Fixed: `assert user.errors.invalid?(:email)` was asserted twice.)
def test_invalid_with_empty_attributes
user = User.new
assert !user.valid?
assert user.errors.invalid?(:email)
assert user.errors.invalid?(:pass_crypt)
assert user.errors.invalid?(:display_name)
assert !user.errors.invalid?(:home_lat)
assert !user.errors.invalid?(:home_lon)
assert !user.errors.invalid?(:home_zoom)
end
# Reusing an existing email address must be rejected with the standard
# "taken" validation message.
def test_unique_email
new_user = User.new(:email => users(:normal_user).email,
:active => 1,
:pass_crypt => Digest::MD5.hexdigest('test'),
:display_name => "new user",
:data_public => 1,
:description => "desc")
assert !new_user.save
assert_equal ActiveRecord::Errors.default_error_messages[:taken], new_user.errors.on(:email)
end
# Reusing an existing display name must likewise be rejected.
def test_unique_display_name
new_user = User.new(:email => "tester@openstreetmap.org",
:active => 0,
:pass_crypt => Digest::MD5.hexdigest('test'),
:display_name => users(:normal_user).display_name,
:data_public => 1,
:description => "desc")
assert !new_user.save
assert_equal ActiveRecord::Errors.default_error_messages[:taken], new_user.errors.on(:display_name)
end
end
Check to see if the e-mail test works
require File.dirname(__FILE__) + '/../test_helper'
# Model tests for User validations: required attributes, uniqueness of
# email and display name, and email format checking.
class UserTest < Test::Unit::TestCase
fixtures :users
# A blank user should fail validation on the required fields but not on
# the optional home location fields.
# (Fixed: `assert user.errors.invalid?(:email)` was asserted twice.)
def test_invalid_with_empty_attributes
user = User.new
assert !user.valid?
assert user.errors.invalid?(:email)
assert user.errors.invalid?(:pass_crypt)
assert user.errors.invalid?(:display_name)
assert !user.errors.invalid?(:home_lat)
assert !user.errors.invalid?(:home_lon)
assert !user.errors.invalid?(:home_zoom)
end
# Reusing an existing email address must be rejected with the standard
# "taken" validation message.
def test_unique_email
new_user = User.new(:email => users(:normal_user).email,
:active => 1,
:pass_crypt => Digest::MD5.hexdigest('test'),
:display_name => "new user",
:data_public => 1,
:description => "desc")
assert !new_user.save
assert_equal ActiveRecord::Errors.default_error_messages[:taken], new_user.errors.on(:email)
end
# Reusing an existing display name must likewise be rejected.
def test_unique_display_name
new_user = User.new(:email => "tester@openstreetmap.org",
:active => 0,
:pass_crypt => Digest::MD5.hexdigest('test'),
:display_name => users(:normal_user).display_name,
:data_public => 1,
:description => "desc")
assert !new_user.save
assert_equal ActiveRecord::Errors.default_error_messages[:taken], new_user.errors.on(:display_name)
end
# Well-formed addresses must pass the format validation; malformed ones
# must fail.
def test_email_valid
ok = %w{ a@s.com test@shaunmcdonald.me.uk hello_local@ping-d.ng test_local@openstreetmap.org test-local@example.com }
bad = %w{ hi ht@ n@ @.com help@.me.uk help"hi.me.uk }
ok.each do |name|
user = users(:normal_user)
user.email = name
assert user.valid?, user.errors.full_messages
end
bad.each do |name|
user = users(:normal_user)
user.email = name
assert !user.valid?, "#{name} is valid when it shouldn't be"
end
end
end
|
#This file need to create the navigation in your app.
SemanticNavigation::Configuration.run do |config|
#What's the name of the active menu class will be (the dafault is 'active')
config.active_class = 'active';
#Create the menu ids automatically?
config.create_ids = true
#Show the submenu only when the menu is active
config.show_submenu = true
#That's how you can create your userbar menu
#config.userbar do |userbar|
# userbar.signin :controller => :session, :action => :signin, :if => current_user.nil?
# userbar.signout :controller => :session, :action => :signout, :if => !current_user.nil?
#end
#so you can use the helper 'render_userbar_menu' to render it.
#That's the creation of the navigation menu
config.navigation #do |navigation|
#navigation.first 'first', :controller => :first, :action => :act
#navigation.second 'second', :controller => :second, :action => :act
#end
end
Change the config template.
#This file need to create the navigation in your app.
SemanticNavigation::Configuration.run do |config|
#That's the creation of the navigation menu
config.navigation do |n|
n.main 'Main', :controller => :dashboard, :action => :index
n.about 'About', :controller => :about, :action => :index do |a|
a.company 'About company', :controller => :about, :action => :company
a.employes 'About our employes', :controller => :about, :action => :employes
end
n.feed_back 'Feedback', :controller => :feed_back, :action => :index
end
#You can render this menu by calling method render_navigation_menu
end
|
require 'vagrant-digitalocean/helpers/client'
module VagrantPlugins
module DigitalOcean
module Actions
# Middleware action that creates a new DigitalOcean droplet for the
# Vagrant machine and waits until it is reachable over SSH.
class Create
include Helpers::Client
include Vagrant::Util::Retryable
# app: next middleware in the chain; env: Vagrant action environment.
def initialize(app, env)
@app = app
@machine = env[:machine]
@client = client
@logger = Log4r::Logger.new('vagrant::digitalocean::create')
end
def call(env)
ssh_key_id = env[:ssh_key_id]
# resolve the configured size/image/region names to API ids;
# note the image listing is filtered to 'global' images only
size_id = @client
.request('/sizes')
.find_id(:sizes, :name => @machine.provider_config.size)
image_id = @client
.request('/images', { :filter => 'global' })
.find_id(:images, :name => @machine.provider_config.image)
region_id = @client
.request('/regions')
.find_id(:regions, :name => @machine.provider_config.region)
# submit new droplet request
result = @client.request('/droplets/new', {
:size_id => size_id,
:region_id => region_id,
:image_id => image_id,
:name => @machine.config.vm.hostname || @machine.name,
:ssh_key_ids => ssh_key_id
})
# wait for request to complete
env[:ui].info I18n.t('vagrant_digital_ocean.info.creating')
@client.wait_for_event(env, result['droplet']['event_id'])
# assign the machine id for reference in other commands
@machine.id = result['droplet']['id'].to_s
# refresh droplet state with provider and output ip address
droplet = Provider.droplet(@machine, :refresh => true)
env[:ui].info I18n.t('vagrant_digital_ocean.info.droplet_ip', {
:ip => droplet['ip_address']
})
# wait for ssh to be ready using the root user account
# (temporarily swap the configured username for 'root'; restored below)
user = @machine.config.ssh.username
@machine.config.ssh.username = 'root'
retryable(:tries => 120, :sleep => 10) do
next if env[:interrupted]
raise 'not ready' if !@machine.communicate.ready?
end
@machine.config.ssh.username = user
@app.call(env)
end
# Both the recover and terminate are stolen almost verbatim from
# the Vagrant AWS provider up action
def recover(env)
return if env['vagrant.error'].is_a?(Vagrant::Errors::VagrantError)
if @machine.state.id != :not_created
terminate(env)
end
end
# Destroys the partially-created droplet without asking for confirmation.
def terminate(env)
destroy_env = env.dup
destroy_env.delete(:interrupted)
destroy_env[:config_validate] = false
destroy_env[:force_confirm_destroy] = true
env[:action_runner].run(Actions.destroy, destroy_env)
end
end
end
end
end
Do not apply any filter on images reported by the API.
This allows snapshots or backups to be used as images for new droplets.
require 'vagrant-digitalocean/helpers/client'
module VagrantPlugins
module DigitalOcean
module Actions
class Create
include Helpers::Client
include Vagrant::Util::Retryable
def initialize(app, env)
@app = app
@machine = env[:machine]
@client = client
@logger = Log4r::Logger.new('vagrant::digitalocean::create')
end
def call(env)
ssh_key_id = env[:ssh_key_id]
size_id = @client
.request('/sizes')
.find_id(:sizes, :name => @machine.provider_config.size)
image_id = @client
.request('/images')
.find_id(:images, :name => @machine.provider_config.image)
region_id = @client
.request('/regions')
.find_id(:regions, :name => @machine.provider_config.region)
# submit new droplet request
result = @client.request('/droplets/new', {
:size_id => size_id,
:region_id => region_id,
:image_id => image_id,
:name => @machine.config.vm.hostname || @machine.name,
:ssh_key_ids => ssh_key_id
})
# wait for request to complete
env[:ui].info I18n.t('vagrant_digital_ocean.info.creating')
@client.wait_for_event(env, result['droplet']['event_id'])
# assign the machine id for reference in other commands
@machine.id = result['droplet']['id'].to_s
# refresh droplet state with provider and output ip address
droplet = Provider.droplet(@machine, :refresh => true)
env[:ui].info I18n.t('vagrant_digital_ocean.info.droplet_ip', {
:ip => droplet['ip_address']
})
# wait for ssh to be ready using the root user account
user = @machine.config.ssh.username
@machine.config.ssh.username = 'root'
retryable(:tries => 120, :sleep => 10) do
next if env[:interrupted]
raise 'not ready' if !@machine.communicate.ready?
end
@machine.config.ssh.username = user
@app.call(env)
end
# Both the recover and terminate are stolen almost verbatim from
# the Vagrant AWS provider up action
def recover(env)
return if env['vagrant.error'].is_a?(Vagrant::Errors::VagrantError)
if @machine.state.id != :not_created
terminate(env)
end
end
def terminate(env)
destroy_env = env.dup
destroy_env.delete(:interrupted)
destroy_env[:config_validate] = false
destroy_env[:force_confirm_destroy] = true
env[:action_runner].run(Actions.destroy, destroy_env)
end
end
end
end
end
|
require File.dirname(__FILE__) + '/../test_helper'
# Tests for User geo lookups, lending relationships and tagging.
class UserTest < ActiveSupport::TestCase
def setup
@bct = User.find_by_login('bct')
@medwards = User.find_by_login('medwards')
@pierre = User.find_by_login('pierre')
end
def test_owned
lhd = items(:lhd)
assert @medwards.owned.member?(lhd)
end
# distance_from should be symmetric; values presumably kilometers — confirm
def test_distance
assert_in_delta 2.47, @bct.distance_from(@medwards), 0.15
assert_in_delta 2.47, @medwards.distance_from(@bct), 0.15
assert_in_delta 2975, @bct.distance_from(@pierre), 5
assert_in_delta 2975, @pierre.distance_from(@bct), 5
end
# find_nearby_users(radius) should only return users within the radius
def test_find_local
near = @bct.find_nearby_users(3)
assert_equal [@medwards], near
far = @bct.find_nearby_users(3000)
assert_equal [@medwards, @pierre], far
end
def test_region
edmonton = Region.find_by_name('Edmonton')
montreal = Region.find_by_name('Montréal')
assert_equal edmonton, @bct.region
assert_equal edmonton, @medwards.region
assert_equal montreal, @pierre.region
end
def test_borrowed
lhd = items(:lhd)
assert_equal 1, @bct.borrowed.length
assert_equal lhd, @bct.borrowed.first
assert_equal 0, @pierre.borrowed.length
end
def test_borrowed_and_pending
assert_equal 1, @bct.borrowed_and_pending.length
assert_equal 1, @medwards.borrowed_and_pending.length
assert_equal 2, @pierre.borrowed_and_pending.length
end
def test_lent_and_pending
assert_equal 2, @medwards.lent_and_pending.length
assert_equal 2, @bct.lent_and_pending.length
end
def test_tag_counts
assert_equal([ ['politics', 2], ['spain', 1 ] ], @bct.tag_counts.to_a)
assert_equal([ ], @medwards.tag_counts.to_a)
end
def test_tagging
bct = users(:bct)
tags = bct.tags.sort
assert_equal ['engineering', 'science'], tags
# tags can be added
bct.tag_with ['decentralization']
# users can be found by tag
tagged_with = User.find_by_tag('decentralization')
assert_equal 1, tagged_with.length
assert_equal bct, tagged_with[0]
end
end
Stop failing tests when using SQLite.
require File.dirname(__FILE__) + '/../test_helper'
# Tests for User geo lookups, lending relationships and tagging.
class UserTest < ActiveSupport::TestCase
def setup
@bct = User.find_by_login('bct')
@medwards = User.find_by_login('medwards')
@pierre = User.find_by_login('pierre')
end
def test_owned
lhd = items(:lhd)
assert @medwards.owned.member?(lhd)
end
# distance_from should be symmetric; values presumably kilometers — confirm
def test_distance
assert_in_delta 2.47, @bct.distance_from(@medwards), 0.15
assert_in_delta 2.47, @medwards.distance_from(@bct), 0.15
assert_in_delta 2975, @bct.distance_from(@pierre), 5
assert_in_delta 2975, @pierre.distance_from(@bct), 5
end
# find_nearby_users(radius) should only return users within the radius
def test_find_local
# haha this is such a terrible way of doing this
if ActiveRecord::Base.connection.class.to_s.downcase.match /sqlite/
$stderr.puts "skipping test_find_local, sqlite doesn't have the ACOS function"
return
end
near = @bct.find_nearby_users(3)
assert_equal [@medwards], near
far = @bct.find_nearby_users(3000)
assert_equal [@medwards, @pierre], far
end
def test_region
edmonton = Region.find_by_name('Edmonton')
montreal = Region.find_by_name('Montréal')
assert_equal edmonton, @bct.region
assert_equal edmonton, @medwards.region
assert_equal montreal, @pierre.region
end
def test_borrowed
lhd = items(:lhd)
assert_equal 1, @bct.borrowed.length
assert_equal lhd, @bct.borrowed.first
assert_equal 0, @pierre.borrowed.length
end
def test_borrowed_and_pending
assert_equal 1, @bct.borrowed_and_pending.length
assert_equal 1, @medwards.borrowed_and_pending.length
assert_equal 2, @pierre.borrowed_and_pending.length
end
def test_lent_and_pending
assert_equal 2, @medwards.lent_and_pending.length
assert_equal 2, @bct.lent_and_pending.length
end
def test_tag_counts
assert_equal([ ['politics', 2], ['spain', 1 ] ], @bct.tag_counts.to_a)
assert_equal([ ], @medwards.tag_counts.to_a)
end
def test_tagging
bct = users(:bct)
tags = bct.tags.sort
assert_equal ['engineering', 'science'], tags
# tags can be added
bct.tag_with ['decentralization']
# users can be found by tag
tagged_with = User.find_by_tag('decentralization')
assert_equal 1, tagged_with.length
assert_equal bct, tagged_with[0]
end
end
|
# Service layer for Product documents: search, follow/unfollow, and batch
# maintenance of derived product meta data.
class ProductService < Versioneye::Service
# Full text search via Elasticsearch; falls back to a direct MongoDB
# lookup when the search backend raises.
# Languages have to be an array of strings.
def self.search(q, group_id = nil, languages = nil, page_count = 1)
EsProduct.search(q, group_id, languages, page_count)
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
MongoProduct.find_by(q, '', group_id, languages, 300).paginate(:page => page_count)
end
# This method fetches a product and initializes it for the UI.
# Clojure products may be stored under the Java language, hence the 2nd lookup.
def self.fetch_product( lang, prod_key, version = nil )
product = Product.fetch_product lang, prod_key
if product.nil? && lang.eql?( Product::A_LANGUAGE_CLOJURE )
product = Product.fetch_product Product::A_LANGUAGE_JAVA, prod_key
end
return nil if product.nil?
product.check_nil_version
product.version_newest = product.version
product.version = version if version
product.project_usage = ReferenceService.project_references( lang, prod_key ).count
product
end
# Adds the user to the product's followers. Returns true on success.
def self.follow language, prod_key, user
result = false
product = Product.fetch_product language, prod_key
product.users = Array.new if product && product.users.nil?
if product && user && !product.users.include?( user )
product.users.push user
product.followers = 0 if product.followers.nil?
product.followers += 1
result = product.save
end
result
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
false
end
# Removes the user from the product's followers. Returns true on success.
def self.unfollow language, prod_key, user
result = false
product = Product.fetch_product language, prod_key
product.users = Array.new if product && product.users.nil?
if product && user && product.users.include?( user )
product.users.delete(user)
product.followers = 0 if product.followers.nil?
product.followers -= 1
result = product.save
end
result
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
false
end
# Yields every product to the given block in pages of 1000.
def self.all_products_paged
count = Product.count()
page = 1000
iterations = count / page
iterations += 1
(0..iterations).each do |i|
skip = i * page
products = Product.all().skip(skip).limit(page)
yield products
co = i * page
log_msg = "all_products_paged iteration: #{i} - products processed: #{co}"
p log_msg # also echoed to stdout — presumably progress output for rake tasks; confirm
log.info log_msg
end
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
end
# Recomputes derived meta data for every product in the database.
def self.update_meta_data_global
all_products_paged do |products|
log.info " - update_meta_data_global - "
update_products products
end
end
def self.update_products products
products.each do |product|
self.update_meta_data product
end
end
# Refreshes the derived fields of a single product and persists it.
def self.update_meta_data product, update_used_by = true
self.update_version_data product, false
if update_used_by == true
self.update_used_by_count product, false
end
self.update_average_release_time product
self.update_followers_for product
product.save
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
end
# Updates product.version with the newest version number from product.versions
def self.update_version_data( product, persist = true )
return nil if product.nil?
versions = product.versions
return nil if versions.nil? || versions.empty?
newest_stable_version = VersionService.newest_version( versions )
return nil if newest_stable_version.to_s.eql?( product.version)
product.version = newest_stable_version.to_s
product.save if persist
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
end
def self.update_newest_version product
self.update_version_data product
end
# Recounts how many products depend on this one and refreshes the
# corresponding Reference document. Skips work if the count is unchanged.
def self.update_used_by_count product, persist = true
prod_keys = nil
if product.group_id && product.artifact_id
prod_keys = Dependency.where(:group_id => product.group_id, :artifact_id => product.artifact_id).distinct(:prod_key)
else
prod_keys = Dependency.where(:language => product.language, :dep_prod_key => product.prod_key).distinct(:prod_key)
end
count = prod_keys.count
return nil if count == product.used_by_count
reference = Reference.find_or_create_by(:language => product.language, :prod_key => product.prod_key )
reference.update_from prod_keys
if product.group_id && product.artifact_id
reference.group_id = product.group_id
reference.artifact_id = product.artifact_id
end
reference.save
product.used_by_count = count
product.save if persist
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
false
end
# Falls back to an estimate when no average can be computed.
def self.update_average_release_time product
average_release_time = VersionService.average_release_time( product.versions )
if average_release_time.nil?
average_release_time = VersionService.estimated_average_release_time( product.versions )
end
product.average_release_time = average_release_time
end
# Syncs the follower counter with the actual number of following users.
def self.update_followers_for product
return nil if product.followers == product.user_ids.count
product.followers = product.user_ids.count
product.save
end
# Recomputes follower counters for all products that have at least one follower.
def self.update_followers
products = Product.where( :'user_ids.0' => {'$exists' => true} )
products.each do |product|
self.update_followers_for product
end
end
# Deletes the product and all documents that reference it.
def self.remove product
EsProduct.remove( product )
archives = Versionarchive.where( :language => product.language, :prod_key => product.prod_key )
if archives && !archives.empty?
archives.each do |archive|
archive.delete
end
end
links = Versionlink.where( :language => product.language, :prod_key => product.prod_key )
if links && !links.empty?
links.each do |link|
link.delete
end
end
dependencies = Dependency.where( :language => product.language, :prod_key => product.prod_key )
if dependencies && !dependencies.empty?
dependencies.each do |dependency|
dependency.delete
end
end
product.remove
end
def self.most_referenced(language, page)
Product.by_language( language ).desc(:used_by_count).paginate(:page => page)
end
end
Sort reference keys by ref count.
# Service layer for Product documents: search, follow/unfollow, and batch
# maintenance of derived product meta data.
class ProductService < Versioneye::Service
# Full text search via Elasticsearch; falls back to a direct MongoDB
# lookup when the search backend raises.
# Languages have to be an array of strings.
def self.search(q, group_id = nil, languages = nil, page_count = 1)
EsProduct.search(q, group_id, languages, page_count)
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
MongoProduct.find_by(q, '', group_id, languages, 300).paginate(:page => page_count)
end
# This method fetches a product and initializes it for the UI.
# Clojure products may be stored under the Java language, hence the 2nd lookup.
def self.fetch_product( lang, prod_key, version = nil )
product = Product.fetch_product lang, prod_key
if product.nil? && lang.eql?( Product::A_LANGUAGE_CLOJURE )
product = Product.fetch_product Product::A_LANGUAGE_JAVA, prod_key
end
return nil if product.nil?
product.check_nil_version
product.version_newest = product.version
product.version = version if version
product.project_usage = ReferenceService.project_references( lang, prod_key ).count
product
end
# Adds the user to the product's followers. Returns true on success.
def self.follow language, prod_key, user
result = false
product = Product.fetch_product language, prod_key
product.users = Array.new if product && product.users.nil?
if product && user && !product.users.include?( user )
product.users.push user
product.followers = 0 if product.followers.nil?
product.followers += 1
result = product.save
end
result
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
false
end
# Removes the user from the product's followers. Returns true on success.
def self.unfollow language, prod_key, user
result = false
product = Product.fetch_product language, prod_key
product.users = Array.new if product && product.users.nil?
if product && user && product.users.include?( user )
product.users.delete(user)
product.followers = 0 if product.followers.nil?
product.followers -= 1
result = product.save
end
result
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
false
end
# Yields every product to the given block in pages of 1000.
def self.all_products_paged
count = Product.count()
page = 1000
iterations = count / page
iterations += 1
(0..iterations).each do |i|
skip = i * page
products = Product.all().skip(skip).limit(page)
yield products
co = i * page
log_msg = "all_products_paged iteration: #{i} - products processed: #{co}"
p log_msg # also echoed to stdout — presumably progress output for rake tasks; confirm
log.info log_msg
end
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
end
# Recomputes derived meta data for every product in the database.
def self.update_meta_data_global
all_products_paged do |products|
log.info " - update_meta_data_global - "
update_products products
end
end
def self.update_products products
products.each do |product|
self.update_meta_data product
end
end
# Refreshes the derived fields of a single product and persists it.
def self.update_meta_data product, update_used_by = true
self.update_version_data product, false
if update_used_by == true
self.update_used_by_count product, false
end
self.update_average_release_time product
self.update_followers_for product
product.save
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
end
# Updates product.version with the newest version number from product.versions
def self.update_version_data( product, persist = true )
return nil if product.nil?
versions = product.versions
return nil if versions.nil? || versions.empty?
newest_stable_version = VersionService.newest_version( versions )
return nil if newest_stable_version.to_s.eql?( product.version)
product.version = newest_stable_version.to_s
product.save if persist
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
end
def self.update_newest_version product
self.update_version_data product
end
# Recounts how many products depend on this one and refreshes the
# corresponding Reference document. Skips work if the count is unchanged.
def self.update_used_by_count product, persist = true
prod_keys = nil
if product.group_id && product.artifact_id
prod_keys = Dependency.where(:group_id => product.group_id, :artifact_id => product.artifact_id).distinct(:prod_key)
else
prod_keys = Dependency.where(:language => product.language, :dep_prod_key => product.prod_key).distinct(:prod_key)
end
count = prod_keys.count
return nil if count == product.used_by_count
# order the referencing prod keys by their own used_by_count, descending,
# so the Reference document lists the most-used products first
prod_keys_sorted = []
Product.where(:prod_key.in => prod_keys).desc(:used_by_count).each do |prod|
prod_keys_sorted << prod.prod_key if !prod_keys_sorted.include?( prod.prod_key )
end
reference = Reference.find_or_create_by(:language => product.language, :prod_key => product.prod_key )
reference.update_from prod_keys_sorted
if product.group_id && product.artifact_id
reference.group_id = product.group_id
reference.artifact_id = product.artifact_id
end
reference.save
product.used_by_count = count
product.save if persist
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
false
end
# Falls back to an estimate when no average can be computed.
def self.update_average_release_time product
average_release_time = VersionService.average_release_time( product.versions )
if average_release_time.nil?
average_release_time = VersionService.estimated_average_release_time( product.versions )
end
product.average_release_time = average_release_time
end
# Syncs the follower counter with the actual number of following users.
def self.update_followers_for product
return nil if product.followers == product.user_ids.count
product.followers = product.user_ids.count
product.save
end
# Recomputes follower counters for all products that have at least one follower.
def self.update_followers
products = Product.where( :'user_ids.0' => {'$exists' => true} )
products.each do |product|
self.update_followers_for product
end
end
# Deletes the product and all documents that reference it.
def self.remove product
EsProduct.remove( product )
archives = Versionarchive.where( :language => product.language, :prod_key => product.prod_key )
if archives && !archives.empty?
archives.each do |archive|
archive.delete
end
end
links = Versionlink.where( :language => product.language, :prod_key => product.prod_key )
if links && !links.empty?
links.each do |link|
link.delete
end
end
dependencies = Dependency.where( :language => product.language, :prod_key => product.prod_key )
if dependencies && !dependencies.empty?
dependencies.each do |dependency|
dependency.delete
end
end
product.remove
end
def self.most_referenced(language, page)
Product.by_language( language ).desc(:used_by_count).paginate(:page => page)
end
end
|
class ProjectService < Versioneye::Service
# Maps a dependency file name to its Project type constant, or nil for
# files that are not recognized project files.
# NOTE(review): dots in these patterns are unescaped, so they match any
# character (e.g. /pom.xml\z/ also matches "pomaxml") — confirm acceptable.
def self.type_by_filename filename
return nil if filename.to_s.empty?
return nil if filename.to_s.match(/\/node_modules\//) # Skip work directory of NPM.
return nil if filename.to_s.casecmp('CMakeLists.txt') == 0
return nil if filename.to_s.casecmp('robots.txt') == 0
return nil if filename.to_s.match(/robots.txt\z/i)
return nil if filename.to_s.match(/LICENSE.txt\z/i)
return nil if filename.to_s.match(/README.txt\z/i)
return nil if filename.to_s.match(/content.txt\z/i)
# drop any query string before matching
trimmed_name = filename.split('?')[0]
return Project::A_TYPE_RUBYGEMS if (!(/Gemfile\z/ =~ trimmed_name).nil?) or (!(/Gemfile.lock\z/ =~ trimmed_name).nil?)
return Project::A_TYPE_COMPOSER if (!(/composer.json\z/ =~ trimmed_name).nil?) or (!(/composer.lock\z/ =~ trimmed_name).nil?)
# pip: any remaining *.txt (e.g. requirements.txt) after the exclusions above
return Project::A_TYPE_PIP if (!(/\S*.txt\z/ =~ trimmed_name).nil?) or (!(/setup.py\z/ =~ trimmed_name).nil?) or (!(/pip.log\z/ =~ trimmed_name).nil?)
return Project::A_TYPE_NPM if (!(/package.json\z/ =~ trimmed_name).nil?)
return Project::A_TYPE_GRADLE if (!(/.gradle\z/ =~ trimmed_name).nil?)
return Project::A_TYPE_SBT if (!(/.sbt\z/ =~ trimmed_name).nil?)
return Project::A_TYPE_MAVEN2 if (!(/pom.xml\z/ =~ trimmed_name).nil?) or (!(/.pom\z/ =~ trimmed_name).nil?) or (!(/external_dependencies.xml\z/ =~ trimmed_name).nil?) or (!(/external-dependencies.xml\z/ =~ trimmed_name).nil?) or (!(/pom.json\z/ =~ trimmed_name).nil?)
return Project::A_TYPE_LEIN if (!(/project.clj\z/ =~ trimmed_name).nil?)
return Project::A_TYPE_BOWER if (!(/bower.json\z/ =~ trimmed_name).nil?)
return Project::A_TYPE_BIICODE if (!(/biicode.conf\z/ =~ trimmed_name).nil?)
return Project::A_TYPE_COCOAPODS if (!(/Podfile\z/ =~ trimmed_name).nil?) or (!(/.podfile\z/ =~ trimmed_name).nil?) or (!(/Podfile.lock\z/ =~ trimmed_name).nil?)
return Project::A_TYPE_CHEF if (!(/Berksfile.lock\z/ =~ trimmed_name).nil?) or (!(/Berksfile\z/ =~ trimmed_name).nil?) or (!(/metadata.rb\z/ =~ trimmed_name).nil?)
return nil
end
# Returns the name of the lock file belonging to the given project file, or
# nil when there is none. The match is exact: after cutting off a query
# string, the whole remaining name has to be the project file itself
# (`=~` index 0 combined with the \z anchor).
#
# Fix: literal dots are escaped so "composerXjson" no longer matches.
def self.corresponding_file filename
  return nil if filename.to_s.empty?
  trimmed_name = filename.split('?')[0]
  return 'Gemfile.lock'   if (/Gemfile\z/ =~ trimmed_name) == 0
  return 'composer.lock'  if (/composer\.json\z/ =~ trimmed_name) == 0
  return 'Podfile.lock'   if (/Podfile\z/ =~ trimmed_name) == 0
  return 'Berksfile.lock' if (/metadata\.rb\z/ =~ trimmed_name) == 0
  return 'Berksfile.lock' if (/Berksfile\z/ =~ trimmed_name) == 0
  nil
end
# Returns a Mongoid criteria with all projects visible to `user`.
#
# filter (all keys optional, the value 'ALL' disables a filter):
#   :team, :language, :version - exact match filters.
#   :name         - case insensitive substring match.
#   :scope        - 'all_public' lists public projects, 'all' lets admins see everything.
#   :organisation - organisation id or name to scope the listing to.
# sort - 'out_dated', 'license_violations' or anything else for the default name sort.
def self.index( user, filter = {}, sort = nil)
# Only top level (no parent) and persistent (non temp) projects.
filter_options = {:parent_id => nil, :temp => false}
filter_options[:team_ids] = filter[:team] if filter[:team] && filter[:team].to_s.casecmp('ALL') != 0
filter_options[:language] = filter[:language] if filter[:language] && filter[:language].to_s.casecmp('ALL') != 0
filter_options[:version] = filter[:version] if filter[:version] && filter[:version].to_s.casecmp('ALL') != 0
filter_options[:name] = /#{filter[:name]}/i if filter[:name] && !filter[:name].to_s.strip.empty?
if filter[:scope].to_s == 'all_public'
filter_options[:public] = true
elsif filter[:scope].to_s == 'all' && user.admin == true
# Do nothing. Admin can see ALL projects
else
# Scope to an organisation (looked up by id, then by name); members and
# admins see the orga's projects, everybody else falls back to their own.
organisation = nil
if filter[:organisation] && !filter[:organisation].to_s.strip.empty?
organisation = Organisation.find filter[:organisation].to_s
organisation = Organisation.where(:name => filter[:organisation].to_s).first if organisation.nil?
end
member_of_orga = OrganisationService.member?( organisation, user )
if organisation && ( member_of_orga || user.admin == true )
filter_options[:organisation_id] = organisation.ids
else
filter_options[:user_id] = user.ids
filter_options[:organisation_id] = nil
end
end
case sort
when 'out_dated'
Project.where( filter_options ).desc(:out_number_sum).asc(:name_downcase)
when 'license_violations'
Project.where( filter_options ).desc(:licenses_red_sum).asc(:name_downcase)
else
Project.where( filter_options ).asc(:name_downcase).desc(:licenses_red_sum)
end
end
# Returns a hash mapping a display name to a criteria of projects:
# the user's own (organisation-less) projects under user.fullname, plus one
# entry per "orga/team" combination the user belongs to. Temp projects are
# excluded everywhere.
def self.all_projects( user )
projects = {}
projects[user.fullname] = user.projects.parents.where(:organisation_id => nil).any_of({ :temp => false }, { :temp => nil } )
orgas = OrganisationService.index( user )
return projects if orgas.to_a.empty?
orgas.each do |orga|
teams = orga.teams_by user
next if teams.to_a.empty?
teams.each do |team|
projs = orga.projects.parents.where(:team_ids => team.ids).any_of({ :temp => false }, { :temp => nil } )
next if projs.to_a.empty?
projects["#{orga.name}/#{team.name}"] = projs
end
end
projects
end
# Fetches a single Project by its id.
# Returns nil (and logs the error message) when the lookup fails.
def self.find id
  begin
    Project.find_by_id( id )
  rescue => err
    log.error err.message
    nil
  end
end
# Fetches the child project `child_id` that belongs to parent `parent_id`.
# Returns nil (and logs the error message) when the query fails or no
# matching child exists.
def self.find_child parent_id, child_id
  begin
    Project.where( :id => child_id, :parent_id => parent_id ).first
  rescue => err
    log.error err.message
    nil
  end
end
# Builds a summary map for the project and all of its children,
# sorted by the number of problematic dependencies (descending).
def self.summary project_id
map = {}
project = find project_id
summary_single project, map
project.children.each do |child|
summary_single child, map
end
Hash[map.sort_by {|dep| -dep.last[:dependencies].count}]
end
# Adds one summary entry for `project` to `map`, keyed by the project id.
# The entry carries the counters plus the lists of problematic dependencies,
# license violations and security vulnerabilities.
def self.summary_single project, map = {}
name = project.filename
name = project.name if name.to_s.empty?
map[project.ids] = {:id => project.ids,
:name => name,
:dep_number => project.dep_number,
:dep_number_sum => project.dep_number,
:out_number => project.out_number,
:out_number_sum => project.out_number,
:unknown_number => project.unknown_number,
:unknown_number_sum => project.unknown_number,
:muted_dependencies_count => project.muted_dependencies_count,
:licenses_red => project.licenses_red,
:licenses_red_sum => project.licenses_red_sum,
:licenses_unknown => project.licenses_unknown,
:licenses_unknown_sum => project.licenses_unknown_sum,
:sv_count => project.sv_count,
:sv_count_sum => project.sv_count_sum,
:dependencies => [],
:licenses => [],
:sv => [] }
# Problematic dependencies: outdated or without a known product.
Projectdependency.any_of(
{:project_id => project.ids, :outdated => true},
{:project_id => project.ids, :prod_key => nil} ).each do |dep|
map[project.ids][:dependencies].push dep
end
# License problems: whitelist violations or missing license information.
Projectdependency.any_of(
{:project_id => project.ids, :lwl_violation => 'true'},
{:project_id => project.ids, :license_caches => nil},
{:project_id => project.ids, :license_caches.with_size => 0} ).each do |dep|
map[project.ids][:licenses].push dep
end
fill_sv project, map
map
end
# Persists a new project together with its dependencies.
# Enforces the GA / SCM uniqueness rules, inherits the organisation's
# default white lists and kicks off the follow-up number/security updates.
# Raises when the project is nil or can not be saved; returns the project.
def self.store project
raise "project is nil." if project.nil?
ensure_unique_ga( project )
ensure_unique_scm( project )
organisation = project.organisation
if organisation
# New projects inherit the organisation wide default white lists.
project.license_whitelist_id = organisation.default_lwl_id
project.component_whitelist_id = organisation.default_cwl_id
end
if project.save
project.save_dependencies
update_license_numbers!( project )
ProjectdependencyService.update_security project
SyncService.sync_project_async project # For Enterprise environment only!
else
err_msg = "Can't save project: #{project.errors.full_messages.to_json}"
log.error err_msg
raise err_msg
end
project
end
# Deletes all temporary projects which are not locked by a running process.
def self.remove_temp_projects
Project.where(:temp => true, :temp_lock => false).delete_all
end
# Ensures that no OTHER project with the same GroupId and ArtifactId exists.
# Can be turned off via Settings#projects_unique_ga. Returns true when the
# check passes; logs and raises when a different project with the same GA
# is found.
#
# Fix: no longer shadows the `project` parameter and no longer raises when
# the found project is the project being saved (consistent with
# ensure_unique_scm / ensure_unique_gav).
def self.ensure_unique_ga project
  return true if Settings.instance.projects_unique_ga == false
  return true if project.group_id.to_s.empty? && project.artifact_id.to_s.empty?
  db_project = Project.find_by_ga( project.group_id, project.artifact_id )
  return true if db_project.nil?
  return true if db_project.ids.eql?( project.ids ) # re-saving the same project is fine
  err_msg = "A project with same GroupId and ArtifactId exist already. Project ID: #{db_project.id.to_s}"
  log.error err_msg
  raise err_msg
end
# Ensures that no other project with the same GroupId, ArtifactId AND
# Version exists. Can be turned off via Settings#projects_unique_gav.
# Destroys the given (newly created) project and raises when a duplicate
# is found; re-saving the one existing project with this GAV is fine.
def self.ensure_unique_gav project
  return true if Settings.instance.projects_unique_gav == false
  return true if project.group_id.to_s.empty? && project.artifact_id.to_s.empty? && project.version.to_s.empty?
  db_projects = Project.where(:group_id => project.group_id, :artifact_id => project.artifact_id, :version => project.version )
  return true if db_projects.nil? || db_projects.empty?
  db_project = db_projects.first
  return true if db_project.ids.eql?( project.ids ) && db_projects.count == 1
  destroy project # Delete the newly created project to prevent duplicates in the database!
  log.error "A project with same GroupId, ArtifactId and Version exist already. Project ID: #{db_project.id.to_s}. GroupId: #{db_project.group_id}, ArtifactId: #{db_project.artifact_id}, Version: #{db_project.version}"
  raise "A project with same GroupId, ArtifactId and Version exist already. Project ID: #{db_project.id.to_s}."
end
# Ensures that the same SCM file (source + scm_fullname + scm_branch +
# s3_filename) is not monitored twice. Can be turned off via
# Settings#projects_unique_scm. Destroys the given project and raises when
# a duplicate is found.
#
# Fix: added the `db_projects.count == 1` guard so the check only passes
# when the single match is the project itself (consistent with the
# improved ensure_unique_gav).
def self.ensure_unique_scm project
  return true if Settings.instance.projects_unique_scm == false
  return true if project.scm_fullname.to_s.empty?
  db_projects = Project.where(:source => project.source, :scm_fullname => project.scm_fullname, :scm_branch => project.scm_branch, :s3_filename => project.s3_filename)
  return true if db_projects.nil? || db_projects.empty?
  db_project = db_projects.first
  return true if db_project.ids.eql?( project.ids ) && db_projects.count == 1
  destroy project # Delete the newly created project to prevent duplicates in the database!
  log.error "The project file is already monitored by VersionEye. Project ID: #{db_project.id.to_s}. scm_fullname: #{db_project.scm_fullname}, scm_branch: #{db_project.scm_branch}, filename: #{db_project.s3_filename}"
  raise "The project file is already monitored by VersionEye. Project ID: #{db_project.id.to_s}."
end
# Merges `subproject_id` into the user's project identified by
# GroupId/ArtifactId and refreshes the parent's aggregated numbers.
# Returns the result of #merge.
def self.merge_by_ga group_id, artifact_id, subproject_id, user_id
parent = Project.by_user_id(user_id).find_by_ga(group_id, artifact_id)
resp = merge( parent.id.to_s, subproject_id, user_id )
update_sums parent
resp
end
# Makes `subproject_id` a child of `project_id`.
# Returns false when either project or the user is missing, when the
# subproject already has a parent or children of its own, when the parent
# is itself a child, or when both ids are equal. Raises when the user is
# not a collaborator of the parent project.
def self.merge project_id, subproject_id, user_id
project = find project_id
subproject = find subproject_id
return false if project.nil? || subproject.nil?
return false if subproject.parent_id # subproject has already a parent project!
return false if project.parent_id # project is already a subproject!
return false if !subproject.children.empty? # subproject is a parent project!
return false if project.id.to_s.eql?(subproject.id.to_s) # project & subproject are the same!
user = User.find user_id
return false if user.nil?
if !project.is_collaborator?(user)
raise "User has no permission to merge this project!"
end
subproject.parent_id = project.id
# The child inherits the license whitelist of its new parent.
subproject.license_whitelist_id = project.license_whitelist_id
subproject.save
# Cached badges are stale now.
reset_badge project
reset_badge subproject
ProjectUpdateService.update_async project
true
end
# Detaches the child `subproject_id` from its parent `project_id`.
# Returns false when a project or the user is missing; raises when the
# user is not a collaborator of the parent project.
def self.unmerge project_id, subproject_id, user_id
project = find project_id
subproject = Project.where( :id => subproject_id, :parent_id => project_id ).first
return false if project.nil? || subproject.nil?
user = User.find user_id
return false if user.nil?
if !project.is_collaborator?(user)
raise "User has no permission to unmerge this project!"
end
subproject.parent_id = nil
subproject.save
# Cached badges and numbers of both projects are stale now.
reset_badge project
reset_badge subproject
ProjectUpdateService.update_async project
ProjectUpdateService.update_async subproject
true
end
# Deletes the project when `user` is a collaborator or an admin.
# Returns false for unknown projects; raises for missing permission.
def self.destroy_by user, project_id
project = Project.find_by_id( project_id )
return false if project.nil?
if project.is_collaborator?( user ) || user.admin == true
destroy project
else
raise "User has no permission to delete this project!"
end
end
# Deletes the project and all of its child projects, then refreshes the
# aggregated numbers of the former parent (if any). Returns true, or false
# for nil input.
def self.destroy project
return false if project.nil?
project.children.each do |child_project|
destroy_single child_project.id
end
parent = project.parent
destroy_single project.id
update_sums parent
return true
end
# Deletes a single project and its dependencies. No recursion into children.
def self.destroy_single project_id
project = Project.find_by_id( project_id )
return false if project.nil?
project.remove_dependencies
project.remove
end
# Returns a map with
# - :key => "language_prod_key"
# - :value => "Array of project IDs where the prod_key is used"
def self.user_product_index_map user, add_collaborated = true
indexes = Hash.new
projects = user.projects
if projects
project_prod_index projects, indexes
end
return indexes if add_collaborated == false
# Additionally include all organisation projects the user collaborates on.
collaborated_projects = []
orgas = OrganisationService.index( user )
orgas.each do |orga|
orga.projects.each do |project|
collaborated_projects << project if project.is_collaborator?( user )
end
end
project_prod_index collaborated_projects, indexes
indexes
end
# Fills `indexes` ("language_prod_key" => [{:project_id, :version_requested}, ...])
# with every dependency of the given projects that has a known product.
def self.project_prod_index projects, indexes
projects.each do |project|
next if project.nil?
project.dependencies.each do |dep|
next if dep.nil? or dep.product.nil?
product = dep.product
prod_id = "#{product.language_esc}_#{product.prod_key}"
indexes[prod_id] = [] unless indexes.has_key?(prod_id)
indexes[prod_id] << {:project_id => project[:_id].to_s, :version_requested => dep.version_requested}
end
end
indexes
end
# True when the project itself or any of its child projects has known
# security vulnerabilities.
def self.insecure?( project )
  return true if insecure_single?( project )
  project.children.any? { |child| insecure_single?( child ) }
end
# True when this single project (children ignored) has security
# vulnerabilities. PHP projects are only rated by their composer.lock file.
def self.insecure_single?( project )
return false if project.language.eql?(Product::A_LANGUAGE_PHP) && !project.filename.eql?('composer.lock')
project.sv_count > 0
end
# True when the project itself or any of its child projects has at least
# one outdated dependency.
def self.outdated?( project )
  return true if outdated_single?( project )
  project.children.any? { |child| outdated_single?( child ) }
end
# True when this single project (children ignored) has an outdated
# dependency. Development and test scoped dependencies are ignored.
def self.outdated_single?( project )
project.projectdependencies.each do |dep|
next if dep.scope.to_s.eql?(Dependency::A_SCOPE_DEVELOPMENT)
next if dep.scope.to_s.eql?(Dependency::A_SCOPE_TEST)
return true if ProjectdependencyService.outdated?( dep )
end
false
end
# Returns all outdated dependencies of the project.
# With force_update the outdated state is recalculated first.
def self.outdated_dependencies( project, force_update = false )
outdated_dependencies = Array.new
project.projectdependencies.each do |dep|
ProjectdependencyService.update_outdated!( dep ) if force_update
outdated_dependencies << dep if ProjectdependencyService.outdated?( dep )
end
outdated_dependencies
end
# Returns the projectdependencies which have unknown licenses.
# A dependency counts as unknown when its product is unknown or the
# product (at the requested version) has no license information.
def self.unknown_licenses( project )
unknown = Array.new
return unknown if project.nil? || project.projectdependencies.empty?
project.projectdependencies.each do |dep|
product = dep.product
if product.nil?
unknown << dep
next
end
product.version = dep.version_requested
unknown << dep if product.licenses.nil? || product.licenses.empty?
end
unknown
end
# Returns the projectdependencies which violate the
# license whitelist AND are not on the component whitelist.
# Returns [] when the project has no dependencies or no usable whitelist.
def self.red_licenses( project )
red = []
return red if project.nil? || project.projectdependencies.empty? || project.license_whitelist_id.nil?
whitelist = project.license_whitelist
return red if whitelist.nil?
return red if whitelist.license_elements.nil? || whitelist.license_elements.empty?
project.projectdependencies.each do |dep|
license_caches = dep.license_caches
# Dependencies without license information are handled by unknown_licenses.
next if license_caches.nil? || license_caches.empty?
red << dep if whitelisted?( license_caches, whitelist ) == false
end
red
end
# Recalculates and persists the license counters of the project.
# Returns nil for nil/empty projects.
def self.update_license_numbers!( project )
return nil if project.nil? || project.projectdependencies.empty?
ProjectdependencyService.update_licenses_security project
project.licenses_unknown = unknown_licenses( project ).count
project.licenses_red = red_licenses( project ).count
project.save
end
# Recalculates the aggregated *_sum numbers of a parent project from its
# own dependencies and the dependencies of all children. Duplicate
# dependencies (same language/key/version) across children are only
# counted once (via dep_hash). Invalidates the badge afterwards.
def self.update_sums( project )
return if project.nil?
children = project.children
if children.empty?
project.sum_own!
reset_badge project
return nil
end
dep_hash = {}
project.sum_reset!
children.each do |child_project|
update_numbers_for project, child_project, dep_hash
child_project.sum_own!
end
# The parent's own dependencies count into the sums as well.
update_numbers_for project, project, dep_hash
project.child_count = children.count
project.save
reset_badge project
project
end
# Invalidates all cached badges of the given project.
def self.reset_badge project
reset_badge_for project.ids
end
# Drops all badge variants (default, flat, flat-square, plastic) for the
# given project id from the cache and from the database.
def self.reset_badge_for project_id
cache.delete( project_id )
cache.delete( "#{project_id}__flat" )
cache.delete( "#{project_id}__flat-square" )
cache.delete( "#{project_id}__plastic" )
Badge.where( :key => project_id ).delete
Badge.where( :key => "#{project_id}__flat" ).delete
Badge.where( :key => "#{project_id}__flat-square" ).delete
Badge.where( :key => "#{project_id}__plastic" ).delete
end
private
# TODO optimize this by only loading affected deps.
def self.fill_sv project, map
# Adds every dependency with security vulnerabilities (non empty sv_ids)
# to map[project.ids][:sv]. Errors are logged and swallowed so one broken
# project does not break the whole summary.
# id = Moped::BSON::ObjectId.from_string(project.ids)
# id = project.ids
# deps = Projectdependency.collection.find(:project_id => id, 'sv_ids' => {'$not' => {'$size' => 0} } )
deps = Projectdependency.where(:project_id => project.ids )
deps.each do |dep|
map[project.ids][:sv].push dep if !dep.sv_ids.empty?
end
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
end
# Adds the dependency numbers of `child_project` to the *_sum counters of
# `project`. `dep_hash` de-duplicates dependencies across children by
# "language:prod_key:version" and is returned for reuse in the next call.
def self.update_numbers_for project, child_project, dep_hash = {}
lwl = project.license_whitelist
child_project.projectdependencies.each do |dep|
key = "#{dep.language}:#{dep.possible_prod_key}:#{dep.version_requested}"
# Count each unique dependency only once across all children.
next if dep_hash.include? key
product = dep.product
product.version = dep.version_requested if !product.nil?
dep_hash[key] = dep
project.dep_number_sum += 1
project.out_number_sum += 1 if dep.outdated
project.unknown_number_sum += 1 if dep.unknown?
project.licenses_unknown_sum += 1 if product.nil? || product.licenses.nil? || product.licenses.empty?
if lwl && red_license?( dep, lwl )
project.licenses_red_sum += 1
end
project.sv_count_sum += dep.sv_ids.count if !dep.sv_ids.empty?
end
dep_hash
end
# True when the dependency's cached licenses violate the whitelist.
# Pessimistic mode: red as soon as ONE license is not whitelisted.
# Optimistic mode: red only when NO license is whitelisted.
# Dependencies without license caches are never red here.
def self.red_license? projectdependency, whitelist
  caches = projectdependency.license_caches
  return false if caches.nil? || caches.empty?
  if whitelist.pessimistic_mode == true
    caches.any? { |cache| cache.is_whitelisted? == false }
  else
    caches.none? { |cache| cache.is_whitelisted? == true }
  end
end
# True when the cached licenses satisfy the whitelist.
# Pessimistic mode: EVERY license has to be whitelisted.
# Optimistic mode: ONE whitelisted license is enough.
def self.whitelisted? license_caches, whitelist
  if whitelist.pessimistic_mode == true
    license_caches.none? { |cache| cache.is_whitelisted? == false }
  else
    license_caches.any? { |cache| cache.is_whitelisted? == true }
  end
end
end
Improve ensure_unique_gav.
class ProjectService < Versioneye::Service
# Maps a dependency file name (plain name, path or URL) to one of the
# Project::A_TYPE_* constants. Returns nil for empty input, for files inside
# an NPM node_modules directory and for well known non project files
# (CMakeLists.txt, robots.txt, LICENSE.txt, README.txt, content.txt).
#
# Fix: literal dots in the file name patterns are now escaped, so e.g.
# "composerXjson" or "mygradle" are no longer mis-detected.
def self.type_by_filename filename
  name = filename.to_s
  return nil if name.empty?
  return nil if name.match(/\/node_modules\//) # Skip working directory of NPM.
  return nil if name.casecmp('CMakeLists.txt') == 0
  return nil if name.casecmp('robots.txt') == 0
  return nil if name.match(/robots\.txt\z/i)
  return nil if name.match(/LICENSE\.txt\z/i)
  return nil if name.match(/README\.txt\z/i)
  return nil if name.match(/content\.txt\z/i)

  # Cut off query strings (e.g. from URLs) before matching.
  trimmed_name = filename.split('?')[0]
  return Project::A_TYPE_RUBYGEMS  if trimmed_name =~ /Gemfile\z/ || trimmed_name =~ /Gemfile\.lock\z/
  return Project::A_TYPE_COMPOSER  if trimmed_name =~ /composer\.json\z/ || trimmed_name =~ /composer\.lock\z/
  # Any remaining *.txt file is treated as a pip requirements file.
  return Project::A_TYPE_PIP       if trimmed_name =~ /\S*\.txt\z/ || trimmed_name =~ /setup\.py\z/ || trimmed_name =~ /pip\.log\z/
  return Project::A_TYPE_NPM       if trimmed_name =~ /package\.json\z/
  return Project::A_TYPE_GRADLE    if trimmed_name =~ /\.gradle\z/
  return Project::A_TYPE_SBT       if trimmed_name =~ /\.sbt\z/
  return Project::A_TYPE_MAVEN2    if trimmed_name =~ /pom\.xml\z/ || trimmed_name =~ /\.pom\z/ || trimmed_name =~ /external_dependencies\.xml\z/ || trimmed_name =~ /external-dependencies\.xml\z/ || trimmed_name =~ /pom\.json\z/
  return Project::A_TYPE_LEIN      if trimmed_name =~ /project\.clj\z/
  return Project::A_TYPE_BOWER     if trimmed_name =~ /bower\.json\z/
  return Project::A_TYPE_BIICODE   if trimmed_name =~ /biicode\.conf\z/
  return Project::A_TYPE_COCOAPODS if trimmed_name =~ /Podfile\z/ || trimmed_name =~ /\.podfile\z/ || trimmed_name =~ /Podfile\.lock\z/
  return Project::A_TYPE_CHEF      if trimmed_name =~ /Berksfile\.lock\z/ || trimmed_name =~ /Berksfile\z/ || trimmed_name =~ /metadata\.rb\z/
  nil
end
# Returns the name of the lock file belonging to the given project file, or
# nil when there is none. The match is exact: after cutting off a query
# string, the whole remaining name has to be the project file itself
# (`=~` index 0 combined with the \z anchor).
#
# Fix: literal dots are escaped so "composerXjson" no longer matches.
def self.corresponding_file filename
  return nil if filename.to_s.empty?
  trimmed_name = filename.split('?')[0]
  return 'Gemfile.lock'   if (/Gemfile\z/ =~ trimmed_name) == 0
  return 'composer.lock'  if (/composer\.json\z/ =~ trimmed_name) == 0
  return 'Podfile.lock'   if (/Podfile\z/ =~ trimmed_name) == 0
  return 'Berksfile.lock' if (/metadata\.rb\z/ =~ trimmed_name) == 0
  return 'Berksfile.lock' if (/Berksfile\z/ =~ trimmed_name) == 0
  nil
end
def self.index( user, filter = {}, sort = nil)
filter_options = {:parent_id => nil, :temp => false}
filter_options[:team_ids] = filter[:team] if filter[:team] && filter[:team].to_s.casecmp('ALL') != 0
filter_options[:language] = filter[:language] if filter[:language] && filter[:language].to_s.casecmp('ALL') != 0
filter_options[:version] = filter[:version] if filter[:version] && filter[:version].to_s.casecmp('ALL') != 0
filter_options[:name] = /#{filter[:name]}/i if filter[:name] && !filter[:name].to_s.strip.empty?
if filter[:scope].to_s == 'all_public'
filter_options[:public] = true
elsif filter[:scope].to_s == 'all' && user.admin == true
# Do nothing. Admin can see ALL projects
else
organisation = nil
if filter[:organisation] && !filter[:organisation].to_s.strip.empty?
organisation = Organisation.find filter[:organisation].to_s
organisation = Organisation.where(:name => filter[:organisation].to_s).first if organisation.nil?
end
member_of_orga = OrganisationService.member?( organisation, user )
if organisation && ( member_of_orga || user.admin == true )
filter_options[:organisation_id] = organisation.ids
else
filter_options[:user_id] = user.ids
filter_options[:organisation_id] = nil
end
end
case sort
when 'out_dated'
Project.where( filter_options ).desc(:out_number_sum).asc(:name_downcase)
when 'license_violations'
Project.where( filter_options ).desc(:licenses_red_sum).asc(:name_downcase)
else
Project.where( filter_options ).asc(:name_downcase).desc(:licenses_red_sum)
end
end
def self.all_projects( user )
projects = {}
projects[user.fullname] = user.projects.parents.where(:organisation_id => nil).any_of({ :temp => false }, { :temp => nil } )
orgas = OrganisationService.index( user )
return projects if orgas.to_a.empty?
orgas.each do |orga|
teams = orga.teams_by user
next if teams.to_a.empty?
teams.each do |team|
projs = orga.projects.parents.where(:team_ids => team.ids).any_of({ :temp => false }, { :temp => nil } )
next if projs.to_a.empty?
projects["#{orga.name}/#{team.name}"] = projs
end
end
projects
end
def self.find id
Project.find_by_id( id )
rescue => e
log.error e.message
nil
end
def self.find_child parent_id, child_id
Project.where( :id => child_id, :parent_id => parent_id ).first
rescue => e
log.error e.message
nil
end
def self.summary project_id
map = {}
project = find project_id
summary_single project, map
project.children.each do |child|
summary_single child, map
end
Hash[map.sort_by {|dep| -dep.last[:dependencies].count}]
end
def self.summary_single project, map = {}
name = project.filename
name = project.name if name.to_s.empty?
map[project.ids] = {:id => project.ids,
:name => name,
:dep_number => project.dep_number,
:dep_number_sum => project.dep_number,
:out_number => project.out_number,
:out_number_sum => project.out_number,
:unknown_number => project.unknown_number,
:unknown_number_sum => project.unknown_number,
:muted_dependencies_count => project.muted_dependencies_count,
:licenses_red => project.licenses_red,
:licenses_red_sum => project.licenses_red_sum,
:licenses_unknown => project.licenses_unknown,
:licenses_unknown_sum => project.licenses_unknown_sum,
:sv_count => project.sv_count,
:sv_count_sum => project.sv_count_sum,
:dependencies => [],
:licenses => [],
:sv => [] }
Projectdependency.any_of(
{:project_id => project.ids, :outdated => true},
{:project_id => project.ids, :prod_key => nil} ).each do |dep|
map[project.ids][:dependencies].push dep
end
Projectdependency.any_of(
{:project_id => project.ids, :lwl_violation => 'true'},
{:project_id => project.ids, :license_caches => nil},
{:project_id => project.ids, :license_caches.with_size => 0} ).each do |dep|
map[project.ids][:licenses].push dep
end
fill_sv project, map
map
end
def self.store project
raise "project is nil." if project.nil?
ensure_unique_ga( project )
ensure_unique_scm( project )
organisation = project.organisation
if organisation
project.license_whitelist_id = organisation.default_lwl_id
project.component_whitelist_id = organisation.default_cwl_id
end
if project.save
project.save_dependencies
update_license_numbers!( project )
ProjectdependencyService.update_security project
SyncService.sync_project_async project # For Enterprise environment only!
else
err_msg = "Can't save project: #{project.errors.full_messages.to_json}"
log.error err_msg
raise err_msg
end
project
end
def self.remove_temp_projects
Project.where(:temp => true, :temp_lock => false).delete_all
end
# Ensures that no OTHER project with the same GroupId and ArtifactId exists.
# Can be turned off via Settings#projects_unique_ga. Returns true when the
# check passes; logs and raises when a different project with the same GA
# is found.
#
# Fix: no longer shadows the `project` parameter and no longer raises when
# the found project is the project being saved (consistent with
# ensure_unique_scm / ensure_unique_gav).
def self.ensure_unique_ga project
  return true if Settings.instance.projects_unique_ga == false
  return true if project.group_id.to_s.empty? && project.artifact_id.to_s.empty?
  db_project = Project.find_by_ga( project.group_id, project.artifact_id )
  return true if db_project.nil?
  return true if db_project.ids.eql?( project.ids ) # re-saving the same project is fine
  err_msg = "A project with same GroupId and ArtifactId exist already. Project ID: #{db_project.id.to_s}"
  log.error err_msg
  raise err_msg
end
# Ensures that no other project with the same GroupId, ArtifactId AND
# Version exists. Can be turned off via Settings#projects_unique_gav.
# Destroys the given (newly created) project and raises when a duplicate
# is found; re-saving the one existing project with this GAV is fine.
#
# Fix: `db_projects.empty` -> `db_projects.empty?` - the predicate was
# missing its question mark and raised NoMethodError on the criteria.
def self.ensure_unique_gav project
  return true if Settings.instance.projects_unique_gav == false
  return true if project.group_id.to_s.empty? && project.artifact_id.to_s.empty? && project.version.to_s.empty?
  db_projects = Project.where(:group_id => project.group_id, :artifact_id => project.artifact_id, :version => project.version )
  return true if db_projects.nil? || db_projects.empty?
  db_project = db_projects.first
  return true if db_project.ids.eql?( project.ids ) && db_projects.count == 1
  destroy project # Delete the newly created project to prevent duplicates in the database!
  log.error "A project with same GroupId, ArtifactId and Version exist already. Project ID: #{project.id.to_s}. GroupId: #{db_project.group_id}, ArtifactId: #{db_project.artifact_id}, Version: #{db_project.version}"
  raise "A project with same GroupId, ArtifactId and Version exist already. Project ID: #{project.id.to_s}."
end
# Ensures that the same SCM file (source + scm_fullname + scm_branch +
# s3_filename) is not monitored twice. Can be turned off via
# Settings#projects_unique_scm. Destroys the given project and raises when
# a duplicate is found; re-saving the single existing project is fine.
def self.ensure_unique_scm project
return true if Settings.instance.projects_unique_scm == false
return true if project.scm_fullname.to_s.empty?
db_projects = Project.where(:source => project.source, :scm_fullname => project.scm_fullname, :scm_branch => project.scm_branch, :s3_filename => project.s3_filename)
return true if db_projects.nil? || db_projects.empty?
db_project = db_projects.first
return true if db_project.ids.eql?( project.ids ) && db_projects.count == 1
destroy project # Delete the newly created project to prevent duplicates in the database!
log.error "The project file is already monitored by VersionEye. Project ID: #{db_project.id.to_s}. scm_fullname: #{db_project.scm_fullname}, scm_branch: #{db_project.scm_branch}, filename: #{db_project.s3_filename}"
raise "The project file is already monitored by VersionEye. Project ID: #{db_project.id.to_s}."
end
def self.merge_by_ga group_id, artifact_id, subproject_id, user_id
parent = Project.by_user_id(user_id).find_by_ga(group_id, artifact_id)
resp = merge( parent.id.to_s, subproject_id, user_id )
update_sums parent
resp
end
def self.merge project_id, subproject_id, user_id
project = find project_id
subproject = find subproject_id
return false if project.nil? || subproject.nil?
return false if subproject.parent_id # subproject has already a parent project!
return false if project.parent_id # project is already a subproject!
return false if !subproject.children.empty? # subproject is a parent project!
return false if project.id.to_s.eql?(subproject.id.to_s) # project & subproject are the same!
user = User.find user_id
return false if user.nil?
if !project.is_collaborator?(user)
raise "User has no permission to merge this project!"
end
subproject.parent_id = project.id
subproject.license_whitelist_id = project.license_whitelist_id
subproject.save
reset_badge project
reset_badge subproject
ProjectUpdateService.update_async project
true
end
def self.unmerge project_id, subproject_id, user_id
project = find project_id
subproject = Project.where( :id => subproject_id, :parent_id => project_id ).first
return false if project.nil? || subproject.nil?
user = User.find user_id
return false if user.nil?
if !project.is_collaborator?(user)
raise "User has no permission to unmerge this project!"
end
subproject.parent_id = nil
subproject.save
reset_badge project
reset_badge subproject
ProjectUpdateService.update_async project
ProjectUpdateService.update_async subproject
true
end
def self.destroy_by user, project_id
project = Project.find_by_id( project_id )
return false if project.nil?
if project.is_collaborator?( user ) || user.admin == true
destroy project
else
raise "User has no permission to delete this project!"
end
end
def self.destroy project
return false if project.nil?
project.children.each do |child_project|
destroy_single child_project.id
end
parent = project.parent
destroy_single project.id
update_sums parent
return true
end
def self.destroy_single project_id
project = Project.find_by_id( project_id )
return false if project.nil?
project.remove_dependencies
project.remove
end
# Returns a map with
# - :key => "language_prod_key"
# - :value => "Array of project IDs where the prod_key is used"
def self.user_product_index_map user, add_collaborated = true
indexes = Hash.new
projects = user.projects
if projects
project_prod_index projects, indexes
end
return indexes if add_collaborated == false
collaborated_projects = []
orgas = OrganisationService.index( user )
orgas.each do |orga|
orga.projects.each do |project|
collaborated_projects << project if project.is_collaborator?( user )
end
end
project_prod_index collaborated_projects, indexes
indexes
end
def self.project_prod_index projects, indexes
projects.each do |project|
next if project.nil?
project.dependencies.each do |dep|
next if dep.nil? or dep.product.nil?
product = dep.product
prod_id = "#{product.language_esc}_#{product.prod_key}"
indexes[prod_id] = [] unless indexes.has_key?(prod_id)
indexes[prod_id] << {:project_id => project[:_id].to_s, :version_requested => dep.version_requested}
end
end
indexes
end
def self.insecure?( project )
return true if insecure_single?( project )
project.children.each do |child_project|
return true if insecure_single?( child_project )
end
false
end
def self.insecure_single?( project )
return false if project.language.eql?(Product::A_LANGUAGE_PHP) && !project.filename.eql?('composer.lock')
project.sv_count > 0
end
def self.outdated?( project )
return true if outdated_single?( project )
project.children.each do |child_project|
return true if outdated_single?( child_project )
end
false
end
def self.outdated_single?( project )
project.projectdependencies.each do |dep|
next if dep.scope.to_s.eql?(Dependency::A_SCOPE_DEVELOPMENT)
next if dep.scope.to_s.eql?(Dependency::A_SCOPE_TEST)
return true if ProjectdependencyService.outdated?( dep )
end
false
end
def self.outdated_dependencies( project, force_update = false )
outdated_dependencies = Array.new
project.projectdependencies.each do |dep|
ProjectdependencyService.update_outdated!( dep ) if force_update
outdated_dependencies << dep if ProjectdependencyService.outdated?( dep )
end
outdated_dependencies
end
# Returns the projectdependencies which have unknown licenses
def self.unknown_licenses( project )
unknown = Array.new
return unknown if project.nil? || project.projectdependencies.empty?
project.projectdependencies.each do |dep|
product = dep.product
if product.nil?
unknown << dep
next
end
product.version = dep.version_requested
unknown << dep if product.licenses.nil? || product.licenses.empty?
end
unknown
end
# Returns the projectdependencies which violate the
# license whitelist AND are not on the component whitelist
def self.red_licenses( project )
red = []
return red if project.nil? || project.projectdependencies.empty? || project.license_whitelist_id.nil?
whitelist = project.license_whitelist
return red if whitelist.nil?
return red if whitelist.license_elements.nil? || whitelist.license_elements.empty?
project.projectdependencies.each do |dep|
license_caches = dep.license_caches
next if license_caches.nil? || license_caches.empty?
red << dep if whitelisted?( license_caches, whitelist ) == false
end
red
end
def self.update_license_numbers!( project )
return nil if project.nil? || project.projectdependencies.empty?
ProjectdependencyService.update_licenses_security project
project.licenses_unknown = unknown_licenses( project ).count
project.licenses_red = red_licenses( project ).count
project.save
end
def self.update_sums( project )
return if project.nil?
children = project.children
if children.empty?
project.sum_own!
reset_badge project
return nil
end
dep_hash = {}
project.sum_reset!
children.each do |child_project|
update_numbers_for project, child_project, dep_hash
child_project.sum_own!
end
update_numbers_for project, project, dep_hash
project.child_count = children.count
project.save
reset_badge project
project
end
def self.reset_badge project
reset_badge_for project.ids
end
def self.reset_badge_for project_id
cache.delete( project_id )
cache.delete( "#{project_id}__flat" )
cache.delete( "#{project_id}__flat-square" )
cache.delete( "#{project_id}__plastic" )
Badge.where( :key => project_id ).delete
Badge.where( :key => "#{project_id}__flat" ).delete
Badge.where( :key => "#{project_id}__flat-square" ).delete
Badge.where( :key => "#{project_id}__plastic" ).delete
end
private
# TODO optimize this by only loading affected deps.
def self.fill_sv project, map
# id = Moped::BSON::ObjectId.from_string(project.ids)
# id = project.ids
# deps = Projectdependency.collection.find(:project_id => id, 'sv_ids' => {'$not' => {'$size' => 0} } )
deps = Projectdependency.where(:project_id => project.ids )
deps.each do |dep|
map[project.ids][:sv].push dep if !dep.sv_ids.empty?
end
rescue => e
log.error e.message
log.error e.backtrace.join("\n")
end
def self.update_numbers_for project, child_project, dep_hash = {}
lwl = project.license_whitelist
child_project.projectdependencies.each do |dep|
key = "#{dep.language}:#{dep.possible_prod_key}:#{dep.version_requested}"
next if dep_hash.include? key
product = dep.product
product.version = dep.version_requested if !product.nil?
dep_hash[key] = dep
project.dep_number_sum += 1
project.out_number_sum += 1 if dep.outdated
project.unknown_number_sum += 1 if dep.unknown?
project.licenses_unknown_sum += 1 if product.nil? || product.licenses.nil? || product.licenses.empty?
if lwl && red_license?( dep, lwl )
project.licenses_red_sum += 1
end
project.sv_count_sum += dep.sv_ids.count if !dep.sv_ids.empty?
end
dep_hash
end
# True when the dependency's cached licenses violate the whitelist.
# Pessimistic mode: red as soon as ONE license is not whitelisted.
# Optimistic mode: red only when NO license is whitelisted.
# Dependencies without license caches are never red here.
def self.red_license? projectdependency, whitelist
  caches = projectdependency.license_caches
  return false if caches.nil? || caches.empty?
  if whitelist.pessimistic_mode == true
    caches.any? { |cache| cache.is_whitelisted? == false }
  else
    caches.none? { |cache| cache.is_whitelisted? == true }
  end
end
# True when the given license caches satisfy the whitelist.
#
# pessimistic mode: no license may report is_whitelisted? == false.
# optimistic mode:  a single whitelisted license is enough.
def self.whitelisted? license_caches, whitelist
  if whitelist.pessimistic_mode == true
    license_caches.none? { |license| license.is_whitelisted? == false }
  else
    license_caches.any? { |license| license.is_whitelisted? == true }
  end
end
end
|
require 'wulin_master/components/grid/column/column_filter'
require 'wulin_master/components/grid/column/column_attr'
module WulinMaster
  # One grid column: wraps either a plain model attribute or an association,
  # and knows how to label, sort, filter, format and serialize its values
  # for the client-side grid.
  class Column
    include WulinMaster::ColumnFilter
    include WulinMaster::ColumnAttr

    attr_accessor :name, :options

    # name       - column identifier (symbol or string)
    # grid_class - grid class this column belongs to; supplies the model
    # opts       - column options, merged over defaults (width 150, sortable)
    def initialize(name, grid_class, opts={})
      @name = name
      @grid_class = grid_class
      @options = {:width => 150, :sortable => true}.merge(opts)
    end

    # Header text shown in the grid; an explicit :label option wins.
    def label
      @options[:label] || @name.to_s.underscore.humanize
    end

    def singular_name
      @singular_name ||= self.reflection ? ActiveModel::Naming.singular(self.reflection.klass) : name.to_s.singularize
    end

    def datetime_format
      @options[:datetime_format] || WulinMaster.default_datetime_format
    end

    # SQL table name of the associated model; :join_aliased_as overrides it.
    def relation_table_name
      options[:join_aliased_as] || self.reflection.klass.table_name
    end

    def relation_klass_name
      @relation_klass_name ||= self.reflection.klass.name
    end

    def table_name
      self.reflection ? relation_table_name : self.model.table_name.to_s
    end

    def klass_name
      @class_name ||= self.reflection ? relation_klass_name : self.model.name
    end

    def field_name
      self.reflection ? reflection.name : name
    end

    # Builds the hash describing this column for the client-side grid.
    def to_column_model(screen_name)
      @options[:screen] = screen_name
      # if the option :choices is a Proc, keep it, and call it when using it
      if @options[:original_choices].nil? and @options[:choices].is_a?(Proc)
        @options[:original_choices] = @options[:choices].dup
        @options[:choices] = @options[:choices].call
      elsif @options[:original_choices].is_a?(Proc)
        @options[:choices] = @options[:original_choices].call
      end
      append_distinct_options if @options[:distinct]
      sort_col_name = @options[:sort_column] || full_name
      column_type = sql_type
      new_options = @options.dup
      h = {:id => full_name, :column_name => self.name, :singular_name => self.singular_name, :name => self.label, :table => table_name, :klass_name => klass_name, :field => field_name, :type => column_type, :sortColumn => sort_col_name}.merge(new_options)
      h.merge!(reflection_options) if reflection
      h
    end

    # Format a value
    # Called during json rendering
    def format(value)
      if @options[:simple_date]
        value.respond_to?(:strftime) ? value.strftime('%d %b') : value
      elsif @options[:simple_time]
        value.respond_to?(:strftime) ? value.strftime('%H:%M') : value
      else
        if value.class == ActiveSupport::TimeWithZone
          value.to_formatted_s(datetime_format)
        elsif value.class == Time
          value.strftime('%H:%M')
        elsif value.class.name == 'BSON::ObjectId'
          value.to_s
        else
          value
        end
      end
    end

    # Dynamically add some new options to the column
    def add_options(new_options={})
      @options.merge!(new_options)
    end

    # Appends an ORDER BY clause for this column to +query+.
    # Returns the query untouched (with a warning) when the column cannot
    # be sorted; +direction+ must be "ASC" or "DESC".
    def apply_order(query, direction)
      return query unless ["ASC", "DESC"].include?(direction)
      if @options[:sql_expression]
        query.order("#{@options[:sql_expression]} #{direction}, #{model.table_name}.id ASC")
      elsif self.reflection
        query.order("#{relation_table_name}.#{self.option_text_attribute} #{direction}, #{model.table_name}.id ASC")
      elsif is_table_column?
        query.order("#{model.table_name}.#{@name} #{direction}, #{model.table_name}.id ASC")
      else
        Rails.logger.warn "Sorting column ignored because this column can't be sorted: #{self.inspect}"
        query
      end
    end

    def model
      @model ||= @grid_class.model
    end

    def model_columns
      @model_columns ||= begin
        return [] unless model
        self.model.respond_to?(:all_columns) ? self.model.all_columns : self.model.columns
      end
    end

    # Database type of the column (:string, :integer, ...), the association
    # macro for reflections (:belongs_to, ...), or :unknown.
    def sql_type
      return :unknown if self.model.blank?
      if reflection
        options[:inner_formatter] ||= (options.delete(:formatter) || reflection.klass.columns.find{|c| c.name.to_s == self.name.to_s}.try(:type))
        return association_type
      end
      column = model_columns.find {|col| col.name.to_s == self.name.to_s}
      (column.try(:type) || association_type || :unknown).to_s.to_sym
    end

    # ActiveRecord reflection for this column, honoring the :through option.
    def reflection
      @reflection ||= self.model.reflections[(@options[:through] || @name).to_sym]
    end

    # Lazily sets :choices to the distinct-options fetch URL.
    def append_distinct_options
      @options[:choices] ||= begin
        params_hash = { :grid => @grid_class.name, :column => @name.to_s, :text_attr => form_name, klass: klass_name, :screen => @options[:screen] }
        "/wulin_master/fetch_distinct_options?#{params_hash.to_param}"
      end
    end

    # Choices URL plus option-text attribute for association columns.
    def reflection_options
      @options[:choices] ||= begin
        if self.reflection
          params_hash = { :grid => @grid_class.name, :column => @name.to_s, :text_attr => option_text_attribute, :screen => @options[:screen] }
          "/wulin_master/fetch_options?#{params_hash.to_param}"
        elsif @options[:distinct]
          params_hash = { :grid => @grid_class.name, :column => @name.to_s, :text_attr => form_name, klass: klass_name, :screen => @options[:screen] }
          "/wulin_master/fetch_distinct_options?#{params_hash.to_param}"
        else
          []
        end
      end
      { :choices => @options[:choices], :optionTextAttribute => self.option_text_attribute }
    end

    # For belongs_to association, the name of the attribute to display
    def option_text_attribute
      @options[:option_text_attribute].presence || (@options[:through] ? self.name : :name)
    end

    # Unique column id used in the generated column model.
    def full_name
      if @options[:option_text_attribute]
        "#{name}_#{@options[:option_text_attribute].to_s}"
      elsif @options[:through]
        "#{@options[:through]}_#{name}"
      elsif !model.column_names.include?(name.to_s) && model.reflections[name.to_sym]
        "#{name}_name"
      else
        name.to_s
      end
    end

    def foreign_key
      @foreign_key ||= self.reflection.try(:foreign_key).to_s
    end

    def form_name
      @form_name ||= foreign_key.presence || self.name
    end

    # Returns the sql names used to generate the select
    def sql_names
      if is_table_column?
        if self.reflection
          [self.model.table_name + "."+ foreign_key, self.reflection.klass.table_name + "." + option_text_attribute.to_s]
        else
          [self.model.table_name + "." + name.to_s]
        end
      else
        nil
      end
    end

    # True when the model validates presence of this column's attribute.
    def presence_required?
      !!self.model.validators.find{|validator| (validator.class == ActiveModel::Validations::PresenceValidator) && validator.attributes.include?(form_name.to_sym)}
    end

    # Returns the includes to add to the query
    def includes
      if self.reflection && (self.reflection.klass < ActiveRecord::Base)
        [(@options[:through] || @name).to_sym, association_through ? association_through.to_sym : nil].compact
      else
        []
      end
    end

    # Returns the joins to add to the query
    def joins
      if self.reflection && (self.reflection.klass < ActiveRecord::Base) && presence_required?
        [(@options[:through] || @name).to_sym]
      else
        []
      end
    end

    # Returns the value for the object in argument
    def value(object)
      case association_type.to_s
      when /^belongs_to$|^has_one$/
        object.send(@options[:through] || self.name).try(:send,option_text_attribute).to_s
      when 'has_and_belongs_to_many'
        ids = object.send("#{self.reflection.klass.name.underscore}_ids")
        object.send(self.reflection.name.to_s).map{|x| x.send(option_text_attribute)}.join(',')
      when 'has_many'
        object.send(self.name.to_s).collect{|obj| obj.send(option_text_attribute)}
      else
        self.format(object.send(self.name.to_s))
      end
    end

    # Returns the json for the object in argument
    def json(object)
      case association_type.to_s
      when 'belongs_to'
        value = "#{self.name}_#{option_text_attribute}" == foreign_key.to_s ? object.send(foreign_key) : object.send(@options[:through] || self.name).try(:send, option_text_attribute)
        {reflection.name => {:id => object.send(foreign_key), option_text_attribute => format(value)}}
      when 'has_one'
        association_object = object.send(@options[:through] || self.name)
        {reflection.name => {:id => association_object.try(:id), option_text_attribute => format(association_object.try(:send,option_text_attribute))}}
      when 'has_and_belongs_to_many'
        ids = object.send("#{self.reflection.klass.name.underscore}_ids")
        op_attribute = object.send(self.reflection.name.to_s).map{|x| x.send(option_text_attribute)}.join(',')
        {reflection.name => {id: ids, option_text_attribute => op_attribute}}
      when 'has_many'
        {reflection.name => object.send(self.name.to_s).collect{|obj| {:id => obj.id, option_text_attribute => format(obj.send(option_text_attribute))}}}
      else
        self.format(object.send(self.name.to_s))
      end
    end

    # True when the column should be rendered on +screen_name+,
    # based on the :only / :except options.
    def valid_in_screen(screen_name)
      screen_name = screen_name.to_s
      (@options[:only].blank? and @options[:except].blank?) ||
      (@options[:only].present? and @options[:only].map(&:to_s).include?(screen_name)) ||
      (@options[:except].present? and @options[:except].map(&:to_s).exclude?(screen_name))
    end

    def sortable?
      @options[:sortable] || is_table_column? || is_nosql_field? || related_column_filterable? || @options[:sql_expression]
    end
    alias_method :filterable?, :sortable?

    private

    def related_column_filterable?
      reflection and reflection.klass.column_names.include?(option_text_attribute.to_s)
    end

    # Fully-qualified SQL name for this column.
    def complete_column_name
      if @options[:sql_expression]
        "#{@options[:sql_expression]}"
      elsif is_table_column?
        "#{model.table_name}.#{self.name}"
      elsif self.reflection
        "#{self.reflection.klass.table_name}.#{self.name}"
      else
        self.name
      end
    end

    def column_type(model, column_name)
      all_columns = model.respond_to?(:all_columns) ? model.all_columns : model.columns
      column = all_columns.find {|col| col.name.to_s == column_name.to_s}
      (column.try(:type) || :unknown).to_s.to_sym
    end

    def is_table_column?
      self.model.respond_to?(:column_names) ? self.model.column_names.include?(self.name.to_s) : false
    end

    # NOTE(review): treats any model not including
    # ActiveModel::Serializers::JSON as a NoSQL field — confirm intent.
    def is_nosql_field?
      self.model.ancestors.exclude?(ActiveModel::Serializers::JSON)
    end

    def association_type
      self.reflection.try(:macro)
    end

    def association_through
      self.reflection ? self.reflection.try(:options)[:through] : nil
    end
  end
end
MISC - if the column's :type option is set to 'Datetime', format the value as a datetime even when its class is Time
require 'wulin_master/components/grid/column/column_filter'
require 'wulin_master/components/grid/column/column_attr'
module WulinMaster
  # One grid column: wraps either a plain model attribute or an association,
  # and knows how to label, sort, filter, format and serialize its values
  # for the client-side grid.
  class Column
    include WulinMaster::ColumnFilter
    include WulinMaster::ColumnAttr

    attr_accessor :name, :options

    # name       - column identifier (symbol or string)
    # grid_class - grid class this column belongs to; supplies the model
    # opts       - column options, merged over defaults (width 150, sortable)
    def initialize(name, grid_class, opts={})
      @name = name
      @grid_class = grid_class
      @options = {:width => 150, :sortable => true}.merge(opts)
    end

    # Header text shown in the grid; an explicit :label option wins.
    def label
      @options[:label] || @name.to_s.underscore.humanize
    end

    def singular_name
      @singular_name ||= self.reflection ? ActiveModel::Naming.singular(self.reflection.klass) : name.to_s.singularize
    end

    def datetime_format
      @options[:datetime_format] || WulinMaster.default_datetime_format
    end

    # SQL table name of the associated model; :join_aliased_as overrides it.
    def relation_table_name
      options[:join_aliased_as] || self.reflection.klass.table_name
    end

    def relation_klass_name
      @relation_klass_name ||= self.reflection.klass.name
    end

    def table_name
      self.reflection ? relation_table_name : self.model.table_name.to_s
    end

    def klass_name
      @class_name ||= self.reflection ? relation_klass_name : self.model.name
    end

    def field_name
      self.reflection ? reflection.name : name
    end

    # Builds the hash describing this column for the client-side grid.
    def to_column_model(screen_name)
      @options[:screen] = screen_name
      # if the option :choices is a Proc, keep it, and call it when using it
      if @options[:original_choices].nil? and @options[:choices].is_a?(Proc)
        @options[:original_choices] = @options[:choices].dup
        @options[:choices] = @options[:choices].call
      elsif @options[:original_choices].is_a?(Proc)
        @options[:choices] = @options[:original_choices].call
      end
      append_distinct_options if @options[:distinct]
      sort_col_name = @options[:sort_column] || full_name
      column_type = sql_type
      new_options = @options.dup
      h = {:id => full_name, :column_name => self.name, :singular_name => self.singular_name, :name => self.label, :table => table_name, :klass_name => klass_name, :field => field_name, :type => column_type, :sortColumn => sort_col_name}.merge(new_options)
      h.merge!(reflection_options) if reflection
      h
    end

    # Format a value
    # Called during json rendering
    def format(value)
      if @options[:simple_date]
        value.respond_to?(:strftime) ? value.strftime('%d %b') : value
      elsif @options[:simple_time]
        value.respond_to?(:strftime) ? value.strftime('%H:%M') : value
      else
        # NOTE(review): the :type option is compared against 'Datetime'
        # (lowercase t) — callers must use that exact spelling.
        if value.class == ActiveSupport::TimeWithZone or @options[:type] == 'Datetime'
          value.to_formatted_s(datetime_format)
        elsif value.class == Time
          value.strftime('%H:%M')
        elsif value.class.name == 'BSON::ObjectId'
          value.to_s
        else
          value
        end
      end
    end

    # Dynamically add some new options to the column
    def add_options(new_options={})
      @options.merge!(new_options)
    end

    # Appends an ORDER BY clause for this column to +query+.
    # Returns the query untouched (with a warning) when the column cannot
    # be sorted; +direction+ must be "ASC" or "DESC".
    def apply_order(query, direction)
      return query unless ["ASC", "DESC"].include?(direction)
      if @options[:sql_expression]
        query.order("#{@options[:sql_expression]} #{direction}, #{model.table_name}.id ASC")
      elsif self.reflection
        query.order("#{relation_table_name}.#{self.option_text_attribute} #{direction}, #{model.table_name}.id ASC")
      elsif is_table_column?
        query.order("#{model.table_name}.#{@name} #{direction}, #{model.table_name}.id ASC")
      else
        Rails.logger.warn "Sorting column ignored because this column can't be sorted: #{self.inspect}"
        query
      end
    end

    def model
      @model ||= @grid_class.model
    end

    def model_columns
      @model_columns ||= begin
        return [] unless model
        self.model.respond_to?(:all_columns) ? self.model.all_columns : self.model.columns
      end
    end

    # Database type of the column (:string, :integer, ...), the association
    # macro for reflections (:belongs_to, ...), or :unknown.
    def sql_type
      return :unknown if self.model.blank?
      if reflection
        options[:inner_formatter] ||= (options.delete(:formatter) || reflection.klass.columns.find{|c| c.name.to_s == self.name.to_s}.try(:type))
        return association_type
      end
      column = model_columns.find {|col| col.name.to_s == self.name.to_s}
      (column.try(:type) || association_type || :unknown).to_s.to_sym
    end

    # ActiveRecord reflection for this column, honoring the :through option.
    def reflection
      @reflection ||= self.model.reflections[(@options[:through] || @name).to_sym]
    end

    # Lazily sets :choices to the distinct-options fetch URL.
    def append_distinct_options
      @options[:choices] ||= begin
        params_hash = { :grid => @grid_class.name, :column => @name.to_s, :text_attr => form_name, klass: klass_name, :screen => @options[:screen] }
        "/wulin_master/fetch_distinct_options?#{params_hash.to_param}"
      end
    end

    # Choices URL plus option-text attribute for association columns.
    def reflection_options
      @options[:choices] ||= begin
        if self.reflection
          params_hash = { :grid => @grid_class.name, :column => @name.to_s, :text_attr => option_text_attribute, :screen => @options[:screen] }
          "/wulin_master/fetch_options?#{params_hash.to_param}"
        elsif @options[:distinct]
          params_hash = { :grid => @grid_class.name, :column => @name.to_s, :text_attr => form_name, klass: klass_name, :screen => @options[:screen] }
          "/wulin_master/fetch_distinct_options?#{params_hash.to_param}"
        else
          []
        end
      end
      { :choices => @options[:choices], :optionTextAttribute => self.option_text_attribute }
    end

    # For belongs_to association, the name of the attribute to display
    def option_text_attribute
      @options[:option_text_attribute].presence || (@options[:through] ? self.name : :name)
    end

    # Unique column id used in the generated column model.
    def full_name
      if @options[:option_text_attribute]
        "#{name}_#{@options[:option_text_attribute].to_s}"
      elsif @options[:through]
        "#{@options[:through]}_#{name}"
      elsif !model.column_names.include?(name.to_s) && model.reflections[name.to_sym]
        "#{name}_name"
      else
        name.to_s
      end
    end

    def foreign_key
      @foreign_key ||= self.reflection.try(:foreign_key).to_s
    end

    def form_name
      @form_name ||= foreign_key.presence || self.name
    end

    # Returns the sql names used to generate the select
    def sql_names
      if is_table_column?
        if self.reflection
          [self.model.table_name + "."+ foreign_key, self.reflection.klass.table_name + "." + option_text_attribute.to_s]
        else
          [self.model.table_name + "." + name.to_s]
        end
      else
        nil
      end
    end

    # True when the model validates presence of this column's attribute.
    def presence_required?
      !!self.model.validators.find{|validator| (validator.class == ActiveModel::Validations::PresenceValidator) && validator.attributes.include?(form_name.to_sym)}
    end

    # Returns the includes to add to the query
    def includes
      if self.reflection && (self.reflection.klass < ActiveRecord::Base)
        [(@options[:through] || @name).to_sym, association_through ? association_through.to_sym : nil].compact
      else
        []
      end
    end

    # Returns the joins to add to the query
    def joins
      if self.reflection && (self.reflection.klass < ActiveRecord::Base) && presence_required?
        [(@options[:through] || @name).to_sym]
      else
        []
      end
    end

    # Returns the value for the object in argument
    def value(object)
      case association_type.to_s
      when /^belongs_to$|^has_one$/
        object.send(@options[:through] || self.name).try(:send,option_text_attribute).to_s
      when 'has_and_belongs_to_many'
        ids = object.send("#{self.reflection.klass.name.underscore}_ids")
        object.send(self.reflection.name.to_s).map{|x| x.send(option_text_attribute)}.join(',')
      when 'has_many'
        object.send(self.name.to_s).collect{|obj| obj.send(option_text_attribute)}
      else
        self.format(object.send(self.name.to_s))
      end
    end

    # Returns the json for the object in argument
    def json(object)
      case association_type.to_s
      when 'belongs_to'
        value = "#{self.name}_#{option_text_attribute}" == foreign_key.to_s ? object.send(foreign_key) : object.send(@options[:through] || self.name).try(:send, option_text_attribute)
        {reflection.name => {:id => object.send(foreign_key), option_text_attribute => format(value)}}
      when 'has_one'
        association_object = object.send(@options[:through] || self.name)
        {reflection.name => {:id => association_object.try(:id), option_text_attribute => format(association_object.try(:send,option_text_attribute))}}
      when 'has_and_belongs_to_many'
        ids = object.send("#{self.reflection.klass.name.underscore}_ids")
        op_attribute = object.send(self.reflection.name.to_s).map{|x| x.send(option_text_attribute)}.join(',')
        {reflection.name => {id: ids, option_text_attribute => op_attribute}}
      when 'has_many'
        {reflection.name => object.send(self.name.to_s).collect{|obj| {:id => obj.id, option_text_attribute => format(obj.send(option_text_attribute))}}}
      else
        self.format(object.send(self.name.to_s))
      end
    end

    # True when the column should be rendered on +screen_name+,
    # based on the :only / :except options.
    def valid_in_screen(screen_name)
      screen_name = screen_name.to_s
      (@options[:only].blank? and @options[:except].blank?) ||
      (@options[:only].present? and @options[:only].map(&:to_s).include?(screen_name)) ||
      (@options[:except].present? and @options[:except].map(&:to_s).exclude?(screen_name))
    end

    def sortable?
      @options[:sortable] || is_table_column? || is_nosql_field? || related_column_filterable? || @options[:sql_expression]
    end
    alias_method :filterable?, :sortable?

    private

    def related_column_filterable?
      reflection and reflection.klass.column_names.include?(option_text_attribute.to_s)
    end

    # Fully-qualified SQL name for this column.
    def complete_column_name
      if @options[:sql_expression]
        "#{@options[:sql_expression]}"
      elsif is_table_column?
        "#{model.table_name}.#{self.name}"
      elsif self.reflection
        "#{self.reflection.klass.table_name}.#{self.name}"
      else
        self.name
      end
    end

    def column_type(model, column_name)
      all_columns = model.respond_to?(:all_columns) ? model.all_columns : model.columns
      column = all_columns.find {|col| col.name.to_s == column_name.to_s}
      (column.try(:type) || :unknown).to_s.to_sym
    end

    def is_table_column?
      self.model.respond_to?(:column_names) ? self.model.column_names.include?(self.name.to_s) : false
    end

    # NOTE(review): treats any model not including
    # ActiveModel::Serializers::JSON as a NoSQL field — confirm intent.
    def is_nosql_field?
      self.model.ancestors.exclude?(ActiveModel::Serializers::JSON)
    end

    def association_type
      self.reflection.try(:macro)
    end

    def association_through
      self.reflection ? self.reflection.try(:options)[:through] : nil
    end
  end
end
|
$:.push File.expand_path("../lib", __FILE__)

# Maintain your gem's version:
require "crowdint_auth/version"

# Describe your gem and declare its dependencies:
# crowdint_auth — Devise + Google Apps (OmniAuth) authentication for
# CrowdInt Rails applications.
Gem::Specification.new do |s|
  s.name = "crowdint_auth"
  s.version = CrowdintAuth::VERSION
  s.authors = ["David Padilla"]
  s.email = ["david@crowdint.com"]
  s.homepage = "https://github.com/dabit/crowdint_auth"
  s.summary = "Authenticate Rails apps with CrowdInt's Google Apps and Devise "
  s.description = "Authenticate Rails apps with CrowdInt's Google Apps and Devise "
  s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "README.md"]

  s.add_dependency "rails", "~> 4.0.0"
  s.add_dependency "devise", "~> 3.1.1"
  s.add_dependency "omniauth-google-apps"

  s.add_development_dependency 'rspec-rails'
end
Updated devise version
$:.push File.expand_path("../lib", __FILE__)

# Maintain your gem's version:
require "crowdint_auth/version"

# Describe your gem and declare its dependencies:
# crowdint_auth — Devise + Google Apps (OmniAuth) authentication for
# CrowdInt Rails applications.
Gem::Specification.new do |s|
  s.name = "crowdint_auth"
  s.version = CrowdintAuth::VERSION
  s.authors = ["David Padilla"]
  s.email = ["david@crowdint.com"]
  s.homepage = "https://github.com/dabit/crowdint_auth"
  s.summary = "Authenticate Rails apps with CrowdInt's Google Apps and Devise "
  s.description = "Authenticate Rails apps with CrowdInt's Google Apps and Devise "
  s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "README.md"]

  s.add_dependency "rails", "~> 4.0.0"
  s.add_dependency "devise", "~> 3.2.0"
  s.add_dependency "omniauth-google-apps"

  s.add_development_dependency 'rspec-rails'
end
|
# coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "gym/version"

# Gem specification for gym (part of the fastlane tools).
Gem::Specification.new do |spec|
  spec.name = "gym"
  spec.version = Gym::VERSION
  spec.authors = ["Felix Krause"]
  spec.email = ["gym@krausefx.com"]
  spec.summary = Gym::DESCRIPTION
  spec.description = Gym::DESCRIPTION
  spec.homepage = "https://fastlane.tools"
  spec.license = "MIT"
  spec.required_ruby_version = ">= 2.0.0"

  # bin/💪 is the alternate executable shipped alongside bin/gym.
  spec.files = Dir["lib/**/*"] + %w(bin/gym bin/💪 README.md LICENSE)
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_dependency 'fastlane_core', '>= 0.16.0', '< 1.0.0' # all shared code and dependencies
  spec.add_dependency 'xcpretty' # pretty xcodebuild output
  spec.add_dependency 'terminal-table' # print out build information

  # Development only
  spec.add_development_dependency "bundler"
  spec.add_development_dependency "fastlane", "~> 1.15.0" # yes, we use fastlane for testing
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rubocop"
  spec.add_development_dependency "rspec", "~> 3.1.0"
  spec.add_development_dependency "pry"
  spec.add_development_dependency "yard", "~> 0.8.7.4"
  spec.add_development_dependency "webmock", "~> 1.19.0"
  spec.add_development_dependency "coveralls"
end
Updated fastlane dependency
# coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "gym/version"

# Gem specification for gym (part of the fastlane tools).
Gem::Specification.new do |spec|
  spec.name = "gym"
  spec.version = Gym::VERSION
  spec.authors = ["Felix Krause"]
  spec.email = ["gym@krausefx.com"]
  spec.summary = Gym::DESCRIPTION
  spec.description = Gym::DESCRIPTION
  spec.homepage = "https://fastlane.tools"
  spec.license = "MIT"
  spec.required_ruby_version = ">= 2.0.0"

  # bin/💪 is the alternate executable shipped alongside bin/gym.
  spec.files = Dir["lib/**/*"] + %w(bin/gym bin/💪 README.md LICENSE)
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_dependency 'fastlane_core', '>= 0.16.0', '< 1.0.0' # all shared code and dependencies
  spec.add_dependency 'xcpretty' # pretty xcodebuild output
  spec.add_dependency 'terminal-table' # print out build information

  # Development only
  spec.add_development_dependency "bundler"
  spec.add_development_dependency "fastlane", ">= 1.25.0" # yes, we use fastlane for testing
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rubocop"
  spec.add_development_dependency "rspec", "~> 3.1.0"
  spec.add_development_dependency "pry"
  spec.add_development_dependency "yard", "~> 0.8.7.4"
  spec.add_development_dependency "webmock", "~> 1.19.0"
  spec.add_development_dependency "coveralls"
end
|
Added type definitions for openstack heat templates.
Added top level function Heat for declaring a heat template
Separated type generation into two classes - now that it works, it needs to be consolidated: there is too much copy-paste.
TODO:
Need to make sure that the generated heat templates will validate against heat
Need to make sure that cf templates are still valid.
# Example OpenStack Heat template declared via the top-level Heat DSL.
# NOTE(review): illustrative snippet — the flavor/image values are
# placeholders, not valid identifiers.
Heat {
  Description "Test"

  # String parameter with a default and a length constraint.
  Parameter("One") {
    String
    Default "Test"
    MaxLength 15
  }

  Parameter('Two') {
    String
    Default 'Test'
    MaxLength 15
  }

  # Exposes the base64-encoded value of parameter "One" as an output.
  Output(:One, FnBase64(Ref("One")))

  Server("MyInstance") {
    flavor "asdfa"
    image "asdad"
  }
}
|
$LOAD_PATH << File.expand_path('../lib', __FILE__)
require 'stately/version'

# Gem specification for stately, a state machine DSL for Ruby objects.
Gem::Specification.new do |s|
  s.name = 'stately'
  s.version = Stately::VERSION
  s.authors = ['Ryan Twomey']
  s.email = ['rtwomey@gmail.com']
  s.homepage = 'http://github.com/rtwomey/stately'
  s.summary = 'A simple, elegant state machine for Ruby'
  # FIX: replaced the 'TODO: Description of Stately.' placeholder, which
  # would ship verbatim in the published gem metadata.
  s.description = 'Add an elegant state machine to your ruby objects with a simple DSL'

  # FIX: '\n' in single quotes is a literal backslash-n, so split never
  # matched the real newlines in `git ls-files` output and s.files ended up
  # as a single giant entry; "\n" splits on actual newlines.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {spec}/*`.split("\n")

  s.add_development_dependency 'rspec', '~> 2.0'

  s.required_ruby_version = Gem::Requirement.new('>= 1.9.2')
  s.require_paths = ['lib']
end
Add gem description
$LOAD_PATH << File.expand_path('../lib', __FILE__)
require 'stately/version'

# Gem specification for stately, a state machine DSL for Ruby objects.
Gem::Specification.new do |s|
  s.name = 'stately'
  s.version = Stately::VERSION
  s.authors = ['Ryan Twomey']
  s.email = ['rtwomey@gmail.com']
  s.homepage = 'http://github.com/rtwomey/stately'
  s.summary = 'A simple, elegant state machine for Ruby'
  s.description = 'Add an elegant state machine to your ruby objects with a simple DSL'

  # File lists come straight from git so nothing untracked is packaged.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {spec}/*`.split("\n")

  s.add_development_dependency 'rspec', '~> 2.0'

  s.required_ruby_version = Gem::Requirement.new('>= 1.9.2')
  s.require_paths = ['lib']
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

# Gem specification for stathat (jeweler-generated; edit the Rakefile).
Gem::Specification.new do |s|
  s.name = %q{stathat}
  s.version = "0.0.3"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Patrick Crosby"]
  s.date = %q{2011-02-08}
  s.description = %q{Easily post stats to your StatHat account using this gem. Encapsulates full API.}
  s.email = %q{patrick@xblabs.com}
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/stathat.rb",
    "stathat.gemspec",
    "test/helper.rb",
    "test/test_stathat.rb"
  ]
  s.homepage = %q{http://github.com/patrickxb/stathat}
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{gem to access StatHat api}
  s.test_files = [
    "test/helper.rb",
    "test/test_stathat.rb"
  ]

  # Dependency declaration style varies with the installed RubyGems version.
  if s.respond_to? :specification_version then
    # FIX: removed the unused local `current_version =
    # Gem::Specification::CURRENT_SPECIFICATION_VERSION` — it was assigned
    # and never read.
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<minitest>, [">= 0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.5.2"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<rocco>, [">= 0"])
    else
      s.add_dependency(%q<minitest>, [">= 0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<rocco>, [">= 0"])
    end
  else
    s.add_dependency(%q<minitest>, [">= 0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<rocco>, [">= 0"])
  end
end
updated gemspec
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

# Gem specification for stathat (jeweler-generated; edit the Rakefile).
Gem::Specification.new do |s|
  s.name = %q{stathat}
  s.version = "0.0.4"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Patrick Crosby"]
  s.date = %q{2012-05-17}
  s.description = %q{Easily post stats to your StatHat account using this gem. Encapsulates full API.}
  s.email = %q{patrick@xblabs.com}
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/stathat.rb",
    "stathat.gemspec",
    "test/helper.rb",
    "test/test_stathat.rb"
  ]
  s.homepage = %q{http://github.com/patrickxb/stathat}
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.5.0}
  s.summary = %q{gem to access StatHat api}
  s.test_files = [
    "test/helper.rb",
    "test/test_stathat.rb"
  ]

  # Dependency declaration style varies with the installed RubyGems version.
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<minitest>, [">= 0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.5.2"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<rocco>, [">= 0"])
    else
      s.add_dependency(%q<minitest>, [">= 0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<rocco>, [">= 0"])
    end
  else
    s.add_dependency(%q<minitest>, [">= 0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<rocco>, [">= 0"])
  end
end
$:.unshift File.expand_path('../lib/', __FILE__)
require 'hss/version'

# Gem specification for hss, a regex-based SSH shortcut tool.
Gem::Specification.new do |s|
  s.name = 'hss'
  s.version = HSS::VERSION
  # Release date is stamped at gem build time.
  s.date = Time.now.strftime("%Y-%m-%d")
  s.summary = 'SSH helper'
  s.description = 'Regex-based SSH shortcut tool'
  s.authors = ['Les Aker']
  s.email = 'me@lesaker.org'
  s.homepage = 'https://github.com/akerl/hss'
  s.license = 'MIT'
  # File lists come straight from git so nothing untracked is packaged.
  s.files = `git ls-files`.split
  s.test_files = `git ls-files spec/*`.split
  s.executables = ['hss']

  s.add_development_dependency 'rubocop', '~> 0.42.0'
  s.add_development_dependency 'rake', '~> 11.2.0'
  s.add_development_dependency 'codecov', '~> 0.1.1'
  s.add_development_dependency 'rspec', '~> 3.5.0'
  s.add_development_dependency 'fuubar', '~> 2.1.0'
end
Updated version of fuubar to 2.2.0
$:.unshift File.expand_path('../lib/', __FILE__)
require 'hss/version'

# Gem specification for hss, a regex-based SSH shortcut tool.
Gem::Specification.new do |s|
  s.name = 'hss'
  s.version = HSS::VERSION
  # Release date is stamped at gem build time.
  s.date = Time.now.strftime("%Y-%m-%d")
  s.summary = 'SSH helper'
  s.description = 'Regex-based SSH shortcut tool'
  s.authors = ['Les Aker']
  s.email = 'me@lesaker.org'
  s.homepage = 'https://github.com/akerl/hss'
  s.license = 'MIT'
  # File lists come straight from git so nothing untracked is packaged.
  s.files = `git ls-files`.split
  s.test_files = `git ls-files spec/*`.split
  s.executables = ['hss']

  s.add_development_dependency 'rubocop', '~> 0.42.0'
  s.add_development_dependency 'rake', '~> 11.2.0'
  s.add_development_dependency 'codecov', '~> 0.1.1'
  s.add_development_dependency 'rspec', '~> 3.5.0'
  s.add_development_dependency 'fuubar', '~> 2.2.0'
end
|
$:.unshift File.expand_path('../lib/', __FILE__)
require 'hss/version'

# Gem specification for hss, a regex-based SSH shortcut tool.
Gem::Specification.new do |s|
  s.name = 'hss'
  s.version = HSS::VERSION
  # Release date is stamped at gem build time.
  s.date = Time.now.strftime("%Y-%m-%d")
  s.summary = 'SSH helper'
  s.description = 'Regex-based SSH shortcut tool'
  s.authors = ['Les Aker']
  s.email = 'me@lesaker.org'
  s.homepage = 'https://github.com/akerl/hss'
  s.license = 'MIT'
  # File lists come straight from git so nothing untracked is packaged.
  s.files = `git ls-files`.split
  s.test_files = `git ls-files spec/*`.split
  s.executables = ['hss']

  s.add_development_dependency 'rubocop', '~> 0.29.0'
  s.add_development_dependency 'rake', '~> 10.4.0'
  s.add_development_dependency 'coveralls', '~> 0.7.1'
  s.add_development_dependency 'rspec', '~> 3.2.0'
  s.add_development_dependency 'fuubar', '~> 2.0.0'
end
Updated version of coveralls to 0.8.0
$:.unshift File.expand_path('../lib/', __FILE__)
require 'hss/version'

# Gem specification for hss, a regex-based SSH shortcut tool.
Gem::Specification.new do |s|
  s.name = 'hss'
  s.version = HSS::VERSION
  # Release date is stamped at gem build time.
  s.date = Time.now.strftime("%Y-%m-%d")
  s.summary = 'SSH helper'
  s.description = 'Regex-based SSH shortcut tool'
  s.authors = ['Les Aker']
  s.email = 'me@lesaker.org'
  s.homepage = 'https://github.com/akerl/hss'
  s.license = 'MIT'
  # File lists come straight from git so nothing untracked is packaged.
  s.files = `git ls-files`.split
  s.test_files = `git ls-files spec/*`.split
  s.executables = ['hss']

  s.add_development_dependency 'rubocop', '~> 0.29.0'
  s.add_development_dependency 'rake', '~> 10.4.0'
  s.add_development_dependency 'coveralls', '~> 0.8.0'
  s.add_development_dependency 'rspec', '~> 3.2.0'
  s.add_development_dependency 'fuubar', '~> 2.0.0'
end
|
# DSL bits
module HttpdCookbook
  module Helpers
    # Small DSL for registering which OS package provides each httpd
    # module, keyed by a criteria hash (platform, httpd version, module).
    module ModuleInfoDSL
      # create big crash hash with other hashes as keys
      # {:platform=>"amazon", :httpd_version=>"2.4", :module=>"rev"}=>"mod_revocator",
      # {:platform=>"amazon", :httpd_version=>"2.4", :module=>"auth_form"}=>"mod_session",
      # {:platform=>"amazon", :httpd_version=>"2.4",:moadule=>"session_dbd"}=>"mod_session"
      #
      # options[:for]              - criteria hash shared by the batch
      # options[:are]              - list of module names to register
      # options[:found_in_package] - package name, or a Proc called per module
      def modules(options)
        criteria = options[:for]
        package_spec = options[:found_in_package]
        options[:are].each do |module_name|
          resolved = package_spec.is_a?(Proc) ? package_spec.call(module_name) : package_spec
          modules_list[criteria.merge(module: module_name)] = resolved
        end
      end

      # Registry backing store, created on first use.
      def modules_list
        @modules_list ||= {}
      end

      # dig them out: returns the package whose registered criteria are a
      # subset of +key+, or nil when nothing matches.
      def find(key)
        matching_key = modules_list.keys.find { |registered| key.merge(registered) == key }
        modules_list[matching_key]
      end
    end
  end
end
# Info bits
module HttpdCookbook
module Helpers
# Lookup table of httpd module name -> providing OS package, keyed by
# platform-criteria hashes. Populated at class-definition time through
# the ModuleInfoDSL `modules` macro and queried via ModuleInfo.find.
# (Duplicate entries that were previously listed twice — e.g. "proxy" —
# have been removed; they were harmless but misleading.)
class ModuleInfo
  extend ModuleInfoDSL
  #
  # debian packaging for apache 2.2
  #
  # shipped in the main server package
  modules for: { platform_family: 'debian', httpd_version: '2.2' },
          are: %w(
            actions alias asis auth_basic auth_digest authn_alias authn_anon
            authn_dbd authn_dbm authn_default authn_file authnz_ldap authz_dbm
            authz_default authz_groupfile authz_host authz_owner authz_user
            autoindex cache cern_meta cgid cgi charset_lite dav_fs dav_lock dav
            dbd deflate dir disk_cache dumpio env expires ext_filter file_cache
            filter headers ident imagemap include info ldap log_forensic mem_cache
            mime_magic mime negotiation proxy_ajp proxy_balancer proxy_connect
            proxy_ftp proxy_http proxy_scgi proxy reqtimeout rewrite setenvif
            speling ssl status substitute suexec unique_id userdir usertrack
            vhost_alias
          ),
          found_in_package: 'apache2'
  # third-party modules with predictable libapache2-mod-* packaging
  modules for: { platform_family: 'debian', httpd_version: '2.2' },
          are: %w(
            apparmor apreq2 auth_cas auth_kerb auth_memcookie auth_mysql
            auth_ntlm_winbind auth_openid auth_pam auth_pgsql auth_plain
            auth_pubtkt auth_radius auth_sys_group auth_tkt authn_sasl authn_webid
            authn_yubikey authnz_external authz_unixgroup bw dacs defensible dnssd
            encoding evasive fcgid fcgid_dbg geoip gnutls jk layout ldap_userdir
            ldap_userdir_dbg lisp log_sql log_sql_dbi log_sql_mysql log_sql_ssl
            macro mime_xattr modsecurity mono musicindex neko nss ocamlnet parser3
            passenger perl2 perl2_dev perl2_doc php5 php5filter proxy_html python
            python_doc qos random removeip rivet rivet_doc rpaf ruby ruid2 ruwsgi
            ruwsgi_dbg scgi shib2 spamhaus speedycgi suphp upload_progress uwsgi
            uwsgi_dbg vhost_hash_alias vhost_ldap wsgi wsgi_py3 xsendfile
          ),
          found_in_package: ->(name) { "libapache2-mod-#{name.tr('_', '-')}" }
  #
  # debian packaging for apache 2.4
  #
  modules for: { platform_family: 'debian', httpd_version: '2.4' },
          are: %w(
            access_compat actions alias allowmethods asis auth_basic
            auth_digest auth_form authn_anon authn_core authn_dbd authn_dbm
            authn_file authn_socache authnz_ldap authz_core authz_dbd authz_dbm
            authz_groupfile authz_host authz_owner authz_user autoindex buffer
            cache cache_disk cache_socache cgi cgid charset_lite data dav
            dav_fs dav_lock dbd deflate dialup dir dumpio echo env expires
            ext_filter file_cache filter headers heartbeat heartmonitor include
            info lbmethod_bybusyness lbmethod_byrequests lbmethod_bytraffic
            lbmethod_heartbeat ldap log_debug log_forensic macro mime mime_magic
            mpm_event mpm_prefork mpm_worker negotiation proxy proxy_ajp
            proxy_balancer proxy_connect proxy_express proxy_fcgi proxy_fdpass
            proxy_ftp proxy_html proxy_http proxy_scgi proxy_wstunnel ratelimit
            reflector remoteip reqtimeout request rewrite sed session
            session_cookie session_crypto session_dbd setenvif slotmem_plain
            slotmem_shm socache_dbm socache_memcache socache_shmcb speling ssl
            status substitute suexec unique_id userdir usertrack vhost_alias
            xml2enc
          ),
          found_in_package: 'apache2'
  modules for: { platform_family: 'debian', httpd_version: '2.4' },
          are: %w(
            apparmor auth_mysql auth_pgsql auth_plain perl2 perl2_dev
            perl2_doc php5 python python_doc wsgi reload_perl fastcgi
            authcassimple_perl authcookie_perl authenntlm_perl apreq2 auth_cas
            auth_kerb auth_mellon auth_memcookie auth_ntlm_winbind auth_openid
            auth_pubtkt auth_radius auth_tkt authn_sasl authn_webid
            authn_yubikey authnz_external authz_unixgroup axis2c bw dacs
            defensible dnssd encoding evasive fcgid fcgid_dbg geoip gnutls jk
            ldap_userdir ldap_userdir_dbg lisp log_slow log_sql log_sql_dbi
            log_sql_mysql log_sql_ssl mapcache mime_xattr mono musicindex neko
            netcgi_apache nss parser3 passenger php5filter qos removeip rivet
            rivet_doc rpaf ruid2 ruwsgi ruwsgi_dbg scgi security2 shib2
            spamhaus suphp svn upload_progress uwsgi uwsgi_dbg vhost_ldap
            watchcat webauth webauthldap webkdc wsgi_py3 xsendfile modsecurity
            mpm_itk request_perl sitecontrol_perl
          ),
          found_in_package: ->(name) { "libapache2-mod-#{name.tr('_', '-')}" }
  #
  # rhel-5
  #
  # shipped in server package
  modules for: { platform_family: 'rhel', platform_version: '5', httpd_version: '2.2' },
          are: %w(
            actions alias asis auth_basic auth_digest authn_alias authn_anon
            authn_dbd authn_dbm authn_default authn_file authnz_ldap
            authz_dbm authz_default authz_groupfile authz_host authz_owner
            authz_user autoindex cache cern_meta cgi cgid dav dav_fs dbd deflate
            dir disk_cache dumpio env expires ext_filter file_cache filter
            headers ident imagemap include info ldap log_config log_forensic
            logio mem_cache mime mime_magic negotiation proxy proxy_ajp
            proxy_balancer proxy_connect proxy_ftp proxy_http reqtimeout rewrite
            setenvif speling status substitute suexec unique_id userdir
            usertrack version vhost_alias
          ),
          found_in_package: 'httpd'
  # predictable package naming
  modules for: { platform_family: 'rhel', platform_version: '5', httpd_version: '2.2' },
          are: %w(
            auth_mysql ssl auth_kerb auth_pgsql authz_ldap dav_svn mono nss
            perl python revocator
          ),
          found_in_package: ->(name) { "mod_#{name}" }
  # outliers
  modules for: { platform_family: 'rhel', platform_version: '5', httpd_version: '2.2' },
          are: %w(authz_svn),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform_family: 'rhel', platform_version: '5', httpd_version: '2.2' },
          are: %w(php),
          found_in_package: ->(_name) { 'php' }
  modules for: { platform_family: 'rhel', platform_version: '5', httpd_version: '2.2' },
          are: %w(php-zts),
          found_in_package: ->(_name) { 'php-zts' }
  #
  # rhel-6
  #
  # shipped in server package
  modules for: { platform_family: 'rhel', platform_version: '6', httpd_version: '2.2' },
          are: %w(
            actions alias asis auth_basic auth_digest authn_alias authn_anon
            authn_dbd authn_dbm authn_default authn_file authnz_ldap authz_dbm
            authz_default authz_groupfile authz_host authz_owner authz_user
            autoindex cache cern_meta cgi cgid dav dav_fs dbd deflate dir
            disk_cache dumpio env expires ext_filter filter headers ident
            include info ldap log_config log_forensic logio mime mime_magic
            negotiation proxy proxy_ajp proxy_balancer proxy_connect
            proxy_ftp proxy_http proxy_scgi reqtimeout rewrite setenvif speling
            status substitute suexec unique_id userdir usertrack version
            vhost_alias
          ),
          found_in_package: 'httpd'
  # predictable package naming
  modules for: { platform_family: 'rhel', platform_version: '6', httpd_version: '2.2' },
          are: %w(
            auth_kerb auth_mysql auth_pgsql authz_ldap dav_svn dnssd nss
            perl revocator ssl wsgi
          ),
          found_in_package: ->(name) { "mod_#{name}" }
  # outliers
  modules for: { platform_family: 'rhel', platform_version: '6', httpd_version: '2.2' },
          are: %w(authz_svn),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform_family: 'rhel', platform_version: '6', httpd_version: '2.2' },
          are: %w(php),
          found_in_package: ->(_name) { 'php' }
  modules for: { platform_family: 'rhel', platform_version: '6', httpd_version: '2.2' },
          are: %w(php-zts),
          found_in_package: ->(_name) { 'php-zts' }
  #
  # rhel-7
  #
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(
            access_compat actions alias allowmethods asis auth_basic
            auth_digest authn_anon authn_core authn_dbd authn_dbm authn_file
            authn_socache authz_core authz_dbd authz_dbm authz_groupfile
            authz_host authz_owner authz_user autoindex buffer cache cache_disk
            cache_socache cgi cgid charset_lite data dav dav_fs dav_lock dbd
            deflate dialup dir dumpio echo env expires ext_filter file_cache
            filter headers heartbeat heartmonitor include info
            lbmethod_bybusyness lbmethod_byrequests lbmethod_bytraffic
            lbmethod_heartbeat log_config log_debug log_forensic logio lua
            macro mime mime_magic mpm_event mpm_prefork mpm_worker negotiation
            proxy proxy_ajp proxy_balancer proxy_connect proxy_express
            proxy_fcgi proxy_fdpass proxy_ftp proxy_http proxy_scgi
            proxy_wstunnel ratelimit reflector remoteip reqtimeout request
            rewrite sed setenvif slotmem_plain slotmem_shm socache_dbm
            socache_memcache socache_shmcb speling status substitute suexec
            systemd unique_id unixd userdir usertrack version vhost_alias watchdog
          ),
          found_in_package: 'httpd'
  # predictable package naming
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(
            auth_kerb dav_svn fcgid ldap nss proxy_html revocator security
            session ssl wsgi
          ),
          found_in_package: ->(name) { "mod_#{name}" }
  # outliers
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(authz_svn dontdothat),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(authnz_ldap),
          found_in_package: ->(_name) { 'mod_ldap' }
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(xml2enc),
          found_in_package: ->(_name) { 'mod_proxy_html' }
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(rev),
          found_in_package: ->(_name) { 'mod_revocator' }
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(auth_form session_cookie session_crypto session_dbd),
          found_in_package: ->(_name) { 'mod_session' }
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(php),
          found_in_package: ->(_name) { 'php' }
  #
  # fedora
  #
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(
            access_compat actions alias allowmethods asis auth_basic
            auth_digest authn_anon authn_core authn_dbd authn_dbm authn_file
            authn_socache authz_core authz_dbd authz_dbm authz_groupfile
            authz_host authz_owner authz_user autoindex buffer cache cache_disk
            cache_socache cgi cgid charset_lite data dav dav_fs dav_lock dbd
            deflate dialup dir dumpio echo env expires ext_filter file_cache
            filter headers heartbeat heartmonitor include info
            lbmethod_bybusyness lbmethod_byrequests lbmethod_bytraffic
            lbmethod_heartbeat log_config log_debug log_forensic logio lua
            macro mime mime_magic mpm_event mpm_prefork mpm_worker negotiation
            proxy proxy_ajp proxy_balancer proxy_connect proxy_express
            proxy_fcgi proxy_fdpass proxy_ftp proxy_http proxy_scgi
            proxy_wstunnel ratelimit reflector remoteip reqtimeout request
            rewrite sed setenvif slotmem_plain slotmem_shm socache_dbm
            socache_memcache socache_shmcb speling status substitute suexec
            systemd unique_id unixd userdir usertrack version vhost_alias watchdog
          ),
          found_in_package: 'httpd'
  # predictable package naming
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(
            auth_kerb dav_svn fcgid ldap nss proxy_html revocator security
            session ssl wsgi
          ),
          found_in_package: ->(name) { "mod_#{name}" }
  # outliers
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(authz_svn dontdothat),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(authnz_ldap),
          found_in_package: ->(_name) { 'mod_ldap' }
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(xml2enc),
          found_in_package: ->(_name) { 'mod_proxy_html' }
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(rev),
          found_in_package: ->(_name) { 'mod_revocator' }
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(auth_form session_cookie session_crypto session_dbd),
          found_in_package: ->(_name) { 'mod_session' }
  # Yeah I don't get it either
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(php),
          found_in_package: ->(_name) { 'php' }
  #
  # amazon
  #
  modules for: { platform: 'amazon', httpd_version: '2.2' },
          are: %w(
            actions alias asis auth_basic auth_digest authn_alias authn_anon
            authn_dbd authn_dbm authn_default authn_file authnz_ldap authz_dbm
            authz_default authz_groupfile authz_host authz_owner authz_user
            autoindex cache cern_meta cgi cgid dav dav_fs dbd deflate dir
            disk_cache dumpio env expires ext_filter file_cache filter headers
            ident include info ldap log_config log_forensic logio mime
            mime_magic negotiation proxy proxy_ajp proxy_balancer
            proxy_connect proxy_ftp proxy_http proxy_scgi reqtimeout rewrite
            setenvif speling status substitute suexec unique_id userdir
            usertrack version vhost_alias
          ),
          found_in_package: 'httpd'
  modules for: { platform: 'amazon', httpd_version: '2.2' },
          are: %w(
            perl-devel auth_kerb auth_mysql auth_pgsql
            authz_ldap dav_svn fcgid geoip nss perl proxy_html python security
            ssl wsgi
          ),
          found_in_package: ->(name) { "mod_#{name}" }
  modules for: { platform: 'amazon', httpd_version: '2.2' },
          are: %w(authz_svn),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(
            access_compat actions alias allowmethods asis auth_basic
            auth_digest authn_anon authn_core authn_dbd authn_dbm authn_file
            authn_socache authz_core authz_dbd authz_dbm authz_groupfile
            authz_host authz_owner authz_user autoindex buffer cache cache_disk
            cache_socache cgi cgid charset_lite data dav dav_fs dav_lock dbd
            deflate dialup dir dumpio echo env expires ext_filter file_cache
            filter headers heartbeat heartmonitor include info
            lbmethod_bybusyness lbmethod_byrequests lbmethod_bytraffic
            lbmethod_heartbeat log_config log_debug log_forensic logio lua
            macro mime mime_magic mpm_event mpm_prefork mpm_worker negotiation
            proxy proxy_ajp proxy_balancer proxy_connect proxy_express
            proxy_fcgi proxy_fdpass proxy_ftp proxy_http proxy_scgi
            proxy_wstunnel ratelimit reflector remoteip reqtimeout request
            rewrite sed setenvif slotmem_plain slotmem_shm socache_dbm
            socache_memcache socache_shmcb speling status substitute suexec
            unique_id unixd userdir usertrack version vhost_alias watchdog
          ),
          found_in_package: 'httpd24'
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(
            auth_kerb fcgid geoip ldap nss perl proxy_html security session
            ssl wsgi wsgi_py27
          ),
          found_in_package: ->(name) { "mod24_#{name}" }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(authz_svn dontdothat),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(authnz_ldap),
          found_in_package: ->(_name) { 'mod_ldap' }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(xml2enc),
          found_in_package: ->(_name) { 'mod_proxy_html' }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(rev),
          found_in_package: ->(_name) { 'mod_revocator' }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(auth_form session_cookie session_crypto session_dbd),
          found_in_package: ->(_name) { 'mod_session' }
  modules for: { platform: 'amazon', platform_version: '2015.03', httpd_version: '2.4' },
          are: %w(php),
          found_in_package: ->(_name) { 'php56' }
  #
  # suse
  #
  modules for: { platform_family: 'suse', httpd_version: '2.4' },
          are: %w(
            access_compat actions alias allowmethods asis auth_basic auth_digest
            auth_form authn_anon authn_core authn_dbd authn_dbm authn_file
            authn_socache authnz_ldap authz_core authz_dbd authz_dbm authz_groupfile
            authz_host authz_owner authz_user autoindex bucketeer buffer cache
            cache_disk cache_socache case_filter case_filter_in charset_lite
            core data dav dav_fs dav_lock dbd deflate dialup dir dumpio echo
            env expires ext_filter file_cache filter headers heartmonitor http
            imagemap include info lbmethod_bybusyness lbmethod_byrequests
            lbmethod_bytraffic lbmethod_heartbeat ldap log_config log_debug
            log_forensic logio lua macro mime mime_magic mpm_prefork negotiation
            proxy proxy_ajp proxy_balancer proxy_connect proxy_express proxy_fcgi
            proxy_fdpass proxy_ftp proxy_html proxy_http proxy_scgi proxy_wstunnel
            ratelimit reflector remoteip reqtimeout request rewrite sed session
            session_cookie session_crypto session_dbd setenvif slotmem_plain
            slotmem_shm so socache_dbm socache_memcache socache_shmcb speling
            ssl status substitute suexec systemd unique_id unixd userdir
            usertrack version vhost_alias watchdog xml2enc
          ),
          found_in_package: 'apache2'
  modules for: { platform_family: 'suse', httpd_version: '2.4' },
          are: %w(
            apparmor ntlm_winbind authn_otp dnssd evasive fcgid jk mono nss
            perl php5 proxy_uwsgi scgi security2 tidy uwsgi wsgi
          ),
          found_in_package: ->(name) { "apache2-mod_#{name}" }
  # MPMs ship in their own apache2-<mpm> packages. NOTE: this also
  # re-registers mpm_prefork, overriding the base 'apache2' mapping
  # above (hash key overwrite; last registration wins).
  modules for: { platform_family: 'suse', httpd_version: '2.4' },
          are: %w(mpm_worker mpm_prefork mpm_event),
          found_in_package: ->(name) { "apache2-#{name.gsub('mpm_', '')}" }
end
# Normalise a platform version into the key used in the module tables:
# RHEL-family (except Amazon), Debian proper (not Ubuntu, not */sid
# versions) and FreeBSD are keyed by major version only; everything
# else keeps its full version string.
def platform_version_key(platform, platform_family, platform_version)
  major_only =
    case platform_family
    when 'rhel' then platform != 'amazon'
    when 'debian' then platform != 'ubuntu' && platform_version !~ /sid$/
    when 'freebsd' then true
    else false
    end
  major_only ? platform_version.to_i.to_s : platform_version
end
# Resolve which OS package provides httpd module +name+ by looking it
# up in the ModuleInfo table under the normalised platform-version key.
# Returns nil when no mapping is registered.
def package_name_for_module(name, httpd_version, platform, platform_family, platform_version)
  criteria = {
    module: name,
    httpd_version: httpd_version,
    platform: platform,
    platform_family: platform_family,
    platform_version: platform_version_key(platform, platform_family, platform_version)
  }
  ModuleInfo.find(criteria)
end
end
end
Signed-off-by: Jennifer Davis <iennae@gmail.com>
Updating module hints so that security2 can be used on RHEL 7.2 based on @kareiva's suggested fix.
# DSL bits
module HttpdCookbook
  module Helpers
    # Class-level DSL for registering httpd module -> OS package mappings.
    # Builds one big cache hash whose keys are criteria hashes, e.g.
    #   {:platform=>"amazon", :httpd_version=>"2.4", :module=>"rev"}       => "mod_revocator",
    #   {:platform=>"amazon", :httpd_version=>"2.4", :module=>"auth_form"} => "mod_session",
    #   {:platform=>"amazon", :httpd_version=>"2.4", :module=>"session_dbd"} => "mod_session"
    module ModuleInfoDSL
      # Register every module listed in options[:are] under the criteria
      # hash in options[:for]. options[:found_in_package] is either a
      # literal package name or a Proc called with the module name.
      def modules(options)
        package_spec = options[:found_in_package]
        options[:are].each do |mod_name|
          resolved = package_spec.is_a?(Proc) ? package_spec.call(mod_name) : package_spec
          modules_list[options[:for].merge(module: mod_name)] = resolved
        end
      end

      # Lazily-initialised registry hash shared by the DSL methods.
      def modules_list
        @modules_list ||= {}
      end

      # Dig a package name back out: the first registered criteria hash
      # that is a subset of +key+ wins. Returns nil when nothing matches.
      def find(key)
        matching_key = modules_list.each_key.find { |candidate| key.merge(candidate) == key }
        modules_list[matching_key]
      end
    end
  end
end
# Info bits
module HttpdCookbook
module Helpers
# Lookup table of httpd module name -> providing OS package, keyed by
# platform-criteria hashes. Populated at class-definition time through
# the ModuleInfoDSL `modules` macro and queried via ModuleInfo.find.
# (Duplicate entries that were previously listed twice — e.g. "proxy" —
# have been removed; they were harmless but misleading.)
class ModuleInfo
  extend ModuleInfoDSL
  #
  # debian packaging for apache 2.2
  #
  # shipped in the main server package
  modules for: { platform_family: 'debian', httpd_version: '2.2' },
          are: %w(
            actions alias asis auth_basic auth_digest authn_alias authn_anon
            authn_dbd authn_dbm authn_default authn_file authnz_ldap authz_dbm
            authz_default authz_groupfile authz_host authz_owner authz_user
            autoindex cache cern_meta cgid cgi charset_lite dav_fs dav_lock dav
            dbd deflate dir disk_cache dumpio env expires ext_filter file_cache
            filter headers ident imagemap include info ldap log_forensic mem_cache
            mime_magic mime negotiation proxy_ajp proxy_balancer proxy_connect
            proxy_ftp proxy_http proxy_scgi proxy reqtimeout rewrite setenvif
            speling ssl status substitute suexec unique_id userdir usertrack
            vhost_alias
          ),
          found_in_package: 'apache2'
  # third-party modules with predictable libapache2-mod-* packaging
  modules for: { platform_family: 'debian', httpd_version: '2.2' },
          are: %w(
            apparmor apreq2 auth_cas auth_kerb auth_memcookie auth_mysql
            auth_ntlm_winbind auth_openid auth_pam auth_pgsql auth_plain
            auth_pubtkt auth_radius auth_sys_group auth_tkt authn_sasl authn_webid
            authn_yubikey authnz_external authz_unixgroup bw dacs defensible dnssd
            encoding evasive fcgid fcgid_dbg geoip gnutls jk layout ldap_userdir
            ldap_userdir_dbg lisp log_sql log_sql_dbi log_sql_mysql log_sql_ssl
            macro mime_xattr modsecurity mono musicindex neko nss ocamlnet parser3
            passenger perl2 perl2_dev perl2_doc php5 php5filter proxy_html python
            python_doc qos random removeip rivet rivet_doc rpaf ruby ruid2 ruwsgi
            ruwsgi_dbg scgi shib2 spamhaus speedycgi suphp upload_progress uwsgi
            uwsgi_dbg vhost_hash_alias vhost_ldap wsgi wsgi_py3 xsendfile
          ),
          found_in_package: ->(name) { "libapache2-mod-#{name.tr('_', '-')}" }
  #
  # debian packaging for apache 2.4
  #
  modules for: { platform_family: 'debian', httpd_version: '2.4' },
          are: %w(
            access_compat actions alias allowmethods asis auth_basic
            auth_digest auth_form authn_anon authn_core authn_dbd authn_dbm
            authn_file authn_socache authnz_ldap authz_core authz_dbd authz_dbm
            authz_groupfile authz_host authz_owner authz_user autoindex buffer
            cache cache_disk cache_socache cgi cgid charset_lite data dav
            dav_fs dav_lock dbd deflate dialup dir dumpio echo env expires
            ext_filter file_cache filter headers heartbeat heartmonitor include
            info lbmethod_bybusyness lbmethod_byrequests lbmethod_bytraffic
            lbmethod_heartbeat ldap log_debug log_forensic macro mime mime_magic
            mpm_event mpm_prefork mpm_worker negotiation proxy proxy_ajp
            proxy_balancer proxy_connect proxy_express proxy_fcgi proxy_fdpass
            proxy_ftp proxy_html proxy_http proxy_scgi proxy_wstunnel ratelimit
            reflector remoteip reqtimeout request rewrite sed session
            session_cookie session_crypto session_dbd setenvif slotmem_plain
            slotmem_shm socache_dbm socache_memcache socache_shmcb speling ssl
            status substitute suexec unique_id userdir usertrack vhost_alias
            xml2enc
          ),
          found_in_package: 'apache2'
  modules for: { platform_family: 'debian', httpd_version: '2.4' },
          are: %w(
            apparmor auth_mysql auth_pgsql auth_plain perl2 perl2_dev
            perl2_doc php5 python python_doc wsgi reload_perl fastcgi
            authcassimple_perl authcookie_perl authenntlm_perl apreq2 auth_cas
            auth_kerb auth_mellon auth_memcookie auth_ntlm_winbind auth_openid
            auth_pubtkt auth_radius auth_tkt authn_sasl authn_webid
            authn_yubikey authnz_external authz_unixgroup axis2c bw dacs
            defensible dnssd encoding evasive fcgid fcgid_dbg geoip gnutls jk
            ldap_userdir ldap_userdir_dbg lisp log_slow log_sql log_sql_dbi
            log_sql_mysql log_sql_ssl mapcache mime_xattr mono musicindex neko
            netcgi_apache nss parser3 passenger php5filter qos removeip rivet
            rivet_doc rpaf ruid2 ruwsgi ruwsgi_dbg scgi security2 shib2
            spamhaus suphp svn upload_progress uwsgi uwsgi_dbg vhost_ldap
            watchcat webauth webauthldap webkdc wsgi_py3 xsendfile modsecurity
            mpm_itk request_perl sitecontrol_perl
          ),
          found_in_package: ->(name) { "libapache2-mod-#{name.tr('_', '-')}" }
  #
  # rhel-5
  #
  # shipped in server package
  modules for: { platform_family: 'rhel', platform_version: '5', httpd_version: '2.2' },
          are: %w(
            actions alias asis auth_basic auth_digest authn_alias authn_anon
            authn_dbd authn_dbm authn_default authn_file authnz_ldap
            authz_dbm authz_default authz_groupfile authz_host authz_owner
            authz_user autoindex cache cern_meta cgi cgid dav dav_fs dbd deflate
            dir disk_cache dumpio env expires ext_filter file_cache filter
            headers ident imagemap include info ldap log_config log_forensic
            logio mem_cache mime mime_magic negotiation proxy proxy_ajp
            proxy_balancer proxy_connect proxy_ftp proxy_http reqtimeout rewrite
            setenvif speling status substitute suexec unique_id userdir
            usertrack version vhost_alias
          ),
          found_in_package: 'httpd'
  # predictable package naming
  modules for: { platform_family: 'rhel', platform_version: '5', httpd_version: '2.2' },
          are: %w(
            auth_mysql ssl auth_kerb auth_pgsql authz_ldap dav_svn mono nss
            perl python revocator
          ),
          found_in_package: ->(name) { "mod_#{name}" }
  # outliers
  modules for: { platform_family: 'rhel', platform_version: '5', httpd_version: '2.2' },
          are: %w(authz_svn),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform_family: 'rhel', platform_version: '5', httpd_version: '2.2' },
          are: %w(php),
          found_in_package: ->(_name) { 'php' }
  modules for: { platform_family: 'rhel', platform_version: '5', httpd_version: '2.2' },
          are: %w(php-zts),
          found_in_package: ->(_name) { 'php-zts' }
  #
  # rhel-6
  #
  # shipped in server package
  modules for: { platform_family: 'rhel', platform_version: '6', httpd_version: '2.2' },
          are: %w(
            actions alias asis auth_basic auth_digest authn_alias authn_anon
            authn_dbd authn_dbm authn_default authn_file authnz_ldap authz_dbm
            authz_default authz_groupfile authz_host authz_owner authz_user
            autoindex cache cern_meta cgi cgid dav dav_fs dbd deflate dir
            disk_cache dumpio env expires ext_filter filter headers ident
            include info ldap log_config log_forensic logio mime mime_magic
            negotiation proxy proxy_ajp proxy_balancer proxy_connect
            proxy_ftp proxy_http proxy_scgi reqtimeout rewrite setenvif speling
            status substitute suexec unique_id userdir usertrack version
            vhost_alias
          ),
          found_in_package: 'httpd'
  # predictable package naming
  modules for: { platform_family: 'rhel', platform_version: '6', httpd_version: '2.2' },
          are: %w(
            auth_kerb auth_mysql auth_pgsql authz_ldap dav_svn dnssd nss
            perl revocator ssl wsgi
          ),
          found_in_package: ->(name) { "mod_#{name}" }
  # outliers
  modules for: { platform_family: 'rhel', platform_version: '6', httpd_version: '2.2' },
          are: %w(authz_svn),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform_family: 'rhel', platform_version: '6', httpd_version: '2.2' },
          are: %w(php),
          found_in_package: ->(_name) { 'php' }
  modules for: { platform_family: 'rhel', platform_version: '6', httpd_version: '2.2' },
          are: %w(php-zts),
          found_in_package: ->(_name) { 'php-zts' }
  #
  # rhel-7
  #
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(
            access_compat actions alias allowmethods asis auth_basic
            auth_digest authn_anon authn_core authn_dbd authn_dbm authn_file
            authn_socache authz_core authz_dbd authz_dbm authz_groupfile
            authz_host authz_owner authz_user autoindex buffer cache cache_disk
            cache_socache cgi cgid charset_lite data dav dav_fs dav_lock dbd
            deflate dialup dir dumpio echo env expires ext_filter file_cache
            filter headers heartbeat heartmonitor include info
            lbmethod_bybusyness lbmethod_byrequests lbmethod_bytraffic
            lbmethod_heartbeat log_config log_debug log_forensic logio lua
            macro mime mime_magic mpm_event mpm_prefork mpm_worker negotiation
            proxy proxy_ajp proxy_balancer proxy_connect proxy_express
            proxy_fcgi proxy_fdpass proxy_ftp proxy_http proxy_scgi
            proxy_wstunnel ratelimit reflector remoteip reqtimeout request
            rewrite sed setenvif slotmem_plain slotmem_shm socache_dbm
            socache_memcache socache_shmcb speling status substitute suexec
            systemd unique_id unixd userdir usertrack version vhost_alias watchdog
          ),
          found_in_package: 'httpd'
  # predictable package naming
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(
            auth_kerb dav_svn fcgid ldap nss proxy_html revocator security
            session ssl wsgi
          ),
          found_in_package: ->(name) { "mod_#{name}" }
  # outliers
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(authz_svn dontdothat),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(authnz_ldap),
          found_in_package: ->(_name) { 'mod_ldap' }
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(xml2enc),
          found_in_package: ->(_name) { 'mod_proxy_html' }
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(rev),
          found_in_package: ->(_name) { 'mod_revocator' }
  # mod_security on RHEL 7 provides the security2 module
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(security2),
          found_in_package: ->(_name) { 'mod_security' }
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(auth_form session_cookie session_crypto session_dbd),
          found_in_package: ->(_name) { 'mod_session' }
  modules for: { platform_family: 'rhel', platform_version: '7', httpd_version: '2.4' },
          are: %w(php),
          found_in_package: ->(_name) { 'php' }
  #
  # fedora
  #
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(
            access_compat actions alias allowmethods asis auth_basic
            auth_digest authn_anon authn_core authn_dbd authn_dbm authn_file
            authn_socache authz_core authz_dbd authz_dbm authz_groupfile
            authz_host authz_owner authz_user autoindex buffer cache cache_disk
            cache_socache cgi cgid charset_lite data dav dav_fs dav_lock dbd
            deflate dialup dir dumpio echo env expires ext_filter file_cache
            filter headers heartbeat heartmonitor include info
            lbmethod_bybusyness lbmethod_byrequests lbmethod_bytraffic
            lbmethod_heartbeat log_config log_debug log_forensic logio lua
            macro mime mime_magic mpm_event mpm_prefork mpm_worker negotiation
            proxy proxy_ajp proxy_balancer proxy_connect proxy_express
            proxy_fcgi proxy_fdpass proxy_ftp proxy_http proxy_scgi
            proxy_wstunnel ratelimit reflector remoteip reqtimeout request
            rewrite sed setenvif slotmem_plain slotmem_shm socache_dbm
            socache_memcache socache_shmcb speling status substitute suexec
            systemd unique_id unixd userdir usertrack version vhost_alias watchdog
          ),
          found_in_package: 'httpd'
  # predictable package naming
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(
            auth_kerb dav_svn fcgid ldap nss proxy_html revocator security
            session ssl wsgi
          ),
          found_in_package: ->(name) { "mod_#{name}" }
  # outliers
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(authz_svn dontdothat),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(authnz_ldap),
          found_in_package: ->(_name) { 'mod_ldap' }
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(xml2enc),
          found_in_package: ->(_name) { 'mod_proxy_html' }
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(rev),
          found_in_package: ->(_name) { 'mod_revocator' }
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(auth_form session_cookie session_crypto session_dbd),
          found_in_package: ->(_name) { 'mod_session' }
  # Yeah I don't get it either
  modules for: { platform_family: 'fedora', httpd_version: '2.4' },
          are: %w(php),
          found_in_package: ->(_name) { 'php' }
  #
  # amazon
  #
  modules for: { platform: 'amazon', httpd_version: '2.2' },
          are: %w(
            actions alias asis auth_basic auth_digest authn_alias authn_anon
            authn_dbd authn_dbm authn_default authn_file authnz_ldap authz_dbm
            authz_default authz_groupfile authz_host authz_owner authz_user
            autoindex cache cern_meta cgi cgid dav dav_fs dbd deflate dir
            disk_cache dumpio env expires ext_filter file_cache filter headers
            ident include info ldap log_config log_forensic logio mime
            mime_magic negotiation proxy proxy_ajp proxy_balancer
            proxy_connect proxy_ftp proxy_http proxy_scgi reqtimeout rewrite
            setenvif speling status substitute suexec unique_id userdir
            usertrack version vhost_alias
          ),
          found_in_package: 'httpd'
  modules for: { platform: 'amazon', httpd_version: '2.2' },
          are: %w(
            perl-devel auth_kerb auth_mysql auth_pgsql
            authz_ldap dav_svn fcgid geoip nss perl proxy_html python security
            ssl wsgi
          ),
          found_in_package: ->(name) { "mod_#{name}" }
  modules for: { platform: 'amazon', httpd_version: '2.2' },
          are: %w(authz_svn),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(
            access_compat actions alias allowmethods asis auth_basic
            auth_digest authn_anon authn_core authn_dbd authn_dbm authn_file
            authn_socache authz_core authz_dbd authz_dbm authz_groupfile
            authz_host authz_owner authz_user autoindex buffer cache cache_disk
            cache_socache cgi cgid charset_lite data dav dav_fs dav_lock dbd
            deflate dialup dir dumpio echo env expires ext_filter file_cache
            filter headers heartbeat heartmonitor include info
            lbmethod_bybusyness lbmethod_byrequests lbmethod_bytraffic
            lbmethod_heartbeat log_config log_debug log_forensic logio lua
            macro mime mime_magic mpm_event mpm_prefork mpm_worker negotiation
            proxy proxy_ajp proxy_balancer proxy_connect proxy_express
            proxy_fcgi proxy_fdpass proxy_ftp proxy_http proxy_scgi
            proxy_wstunnel ratelimit reflector remoteip reqtimeout request
            rewrite sed setenvif slotmem_plain slotmem_shm socache_dbm
            socache_memcache socache_shmcb speling status substitute suexec
            unique_id unixd userdir usertrack version vhost_alias watchdog
          ),
          found_in_package: 'httpd24'
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(
            auth_kerb fcgid geoip ldap nss perl proxy_html security session
            ssl wsgi wsgi_py27
          ),
          found_in_package: ->(name) { "mod24_#{name}" }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(authz_svn dontdothat),
          found_in_package: ->(_name) { 'mod_dav_svn' }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(authnz_ldap),
          found_in_package: ->(_name) { 'mod_ldap' }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(xml2enc),
          found_in_package: ->(_name) { 'mod_proxy_html' }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(rev),
          found_in_package: ->(_name) { 'mod_revocator' }
  modules for: { platform: 'amazon', httpd_version: '2.4' },
          are: %w(auth_form session_cookie session_crypto session_dbd),
          found_in_package: ->(_name) { 'mod_session' }
  modules for: { platform: 'amazon', platform_version: '2015.03', httpd_version: '2.4' },
          are: %w(php),
          found_in_package: ->(_name) { 'php56' }
  #
  # suse
  #
  modules for: { platform_family: 'suse', httpd_version: '2.4' },
          are: %w(
            access_compat actions alias allowmethods asis auth_basic auth_digest
            auth_form authn_anon authn_core authn_dbd authn_dbm authn_file
            authn_socache authnz_ldap authz_core authz_dbd authz_dbm authz_groupfile
            authz_host authz_owner authz_user autoindex bucketeer buffer cache
            cache_disk cache_socache case_filter case_filter_in charset_lite
            core data dav dav_fs dav_lock dbd deflate dialup dir dumpio echo
            env expires ext_filter file_cache filter headers heartmonitor http
            imagemap include info lbmethod_bybusyness lbmethod_byrequests
            lbmethod_bytraffic lbmethod_heartbeat ldap log_config log_debug
            log_forensic logio lua macro mime mime_magic mpm_prefork negotiation
            proxy proxy_ajp proxy_balancer proxy_connect proxy_express proxy_fcgi
            proxy_fdpass proxy_ftp proxy_html proxy_http proxy_scgi proxy_wstunnel
            ratelimit reflector remoteip reqtimeout request rewrite sed session
            session_cookie session_crypto session_dbd setenvif slotmem_plain
            slotmem_shm so socache_dbm socache_memcache socache_shmcb speling
            ssl status substitute suexec systemd unique_id unixd userdir
            usertrack version vhost_alias watchdog xml2enc
          ),
          found_in_package: 'apache2'
  modules for: { platform_family: 'suse', httpd_version: '2.4' },
          are: %w(
            apparmor ntlm_winbind authn_otp dnssd evasive fcgid jk mono nss
            perl php5 proxy_uwsgi scgi security2 tidy uwsgi wsgi
          ),
          found_in_package: ->(name) { "apache2-mod_#{name}" }
  # MPMs ship in their own apache2-<mpm> packages. NOTE: this also
  # re-registers mpm_prefork, overriding the base 'apache2' mapping
  # above (hash key overwrite; last registration wins).
  modules for: { platform_family: 'suse', httpd_version: '2.4' },
          are: %w(mpm_worker mpm_prefork mpm_event),
          found_in_package: ->(name) { "apache2-#{name.gsub('mpm_', '')}" }
end
def platform_version_key(platform, platform_family, platform_version)
return platform_version.to_i.to_s if platform_family == 'rhel' && platform != 'amazon'
return platform_version.to_i.to_s if platform_family == 'debian' && !(platform == 'ubuntu' || platform_version =~ /sid$/)
return platform_version.to_i.to_s if platform_family == 'freebsd'
platform_version
end
def package_name_for_module(name, httpd_version, platform, platform_family, platform_version)
ModuleInfo.find(
module: name,
httpd_version: httpd_version,
platform: platform,
platform_family: platform_family,
platform_version: platform_version_key(platform, platform_family, platform_version)
)
end
end
end
|
Pod::Spec.new do |s|
  s.name = 'NotificationObserverHelper'
  s.version = '1.2'
  s.ios.deployment_target = '10.0'
  # Tag must exist in the repo and match s.version.
  s.source = { :git => 'https://github.com/srpoucse/Notification-Observer-Helper.git', :tag => "#{s.version}"}
  s.homepage = 'https://github.com/srpoucse/Notification-Observer-Helper.git'
  s.summary = 'iOS Notification Observer Helper Utility'
  s.license = { :type => 'MIT', :file => 'LICENSE'}
  s.authors = { 'RATNA PAUL SAKA' => 'pauljason89442@gmail.com' }
  s.source_files = 'NotificationObserverHelper/NotificationObserverHelper.swift'
  # Declare the Swift toolchain version so CocoaPods doesn't build with its
  # default; keeps this spec consistent with the published 4.2 release.
  s.swift_version = '4.2'
end
Update swift version to 4.2 in podspec.
# CocoaPods spec for NotificationObserverHelper.
Pod::Spec.new do |s|
s.name = 'NotificationObserverHelper'
s.version = '1.2'
s.ios.deployment_target = '10.0'
# The git tag must exist in the repo and match s.version.
s.source = { :git => 'https://github.com/srpoucse/Notification-Observer-Helper.git', :tag => "#{s.version}"}
s.homepage = 'https://github.com/srpoucse/Notification-Observer-Helper.git'
s.summary = 'iOS Notification Observer Helper Utility'
s.license = { :type => 'MIT', :file => 'LICENSE'}
s.authors = { 'RATNA PAUL SAKA' => 'pauljason89442@gmail.com' }
s.source_files = 'NotificationObserverHelper/NotificationObserverHelper.swift'
# Swift version used to compile the pod's sources.
s.swift_version = '4.2'
end |
require "vagrant"

module VagrantPlugins
  module Salt
    module Errors
      # Base error class for the Salt provisioner.
      #
      # Use the "vagrant.provisioners.salt" locale namespace so error
      # translations live alongside the other provisioners' errors instead of
      # a bare top-level "salt" namespace.
      class SaltError < Vagrant::Errors::VagrantError
        error_namespace("vagrant.provisioners.salt")
      end
    end
  end
end
Fix salt provisioner error namespace to be more consistent with other provisioners
require "vagrant"
module VagrantPlugins
  module Salt
    module Errors
      # Base error class for the Salt provisioner; locale translations for
      # its messages live under the "vagrant.provisioners.salt" namespace,
      # matching the other provisioners.
      class SaltError < Vagrant::Errors::VagrantError
        error_namespace("vagrant.provisioners.salt")
      end
    end
  end
end
Add monitor to check for the status of a raid array
This monitor will read '/proc/mdstat' and parse it for several values.
[n/m] where n is the total number of devices and m is the number of working devices.
[UUU_UU] where U means the specific component is up and _ means that it is down
It will also check for a recovery status in the case of a failed component
The alerting breaks down as follows
[green]:
n == m
no recovery state
[yellow]
n > m
recovery state
[red]
n > m
no recovery state
array is !active
In mdstat terms, "active" implies that the device is up; an active array can be in one of
several states such as sync, clean, etc.
#! /usr/bin/env ruby
# Exit status codes
EXIT_OK = 0
EXIT_WARNING = 1
EXIT_CRIT = 2
exit_code = EXIT_OK
RAID_INFO = "/Users/mjones/projects/elzar/org_cookbooks/monitor/data.test"
def read_file(raid_info)
a = File.open(raid_info,"r")
data = a.read
a.close
return data
end
raid_status= read_file(RAID_INFO)
matt_test = raid_status.split(/(md[0-9]*)/)
h = Hash.new
n = 0
k = ""
v = ""
matt_test.each do |data|
if n.even? and n != 0
v = data
h.store(k,v)
elsif n.odd?
k = data
end
n = n + 1
end
h.each do |key, value|
raid_state = value.split()[1]
total_dev = value.match(/[0-9]*\/[0-9]*/).to_s[0]
working_dev = value.match(/[0-9]*\/[0-9]*/).to_s[2]
failed_dev = value.match(/\[[U,_]*\]/).to_s.count "_"
recovery_state = value.include? "recovery"
puts recovery_state.inspect
line_out = "#{key} is #{raid_state}
#{total_dev} total devices
#{working_dev} working devices
#{failed_dev} failed devices"
# OPTIMIXE
# this should/can be written as a switch statement
if raid_state == "active" && working_dev >= total_dev && !recovery_state
puts line_out
elsif raid_state == "active" && working_dev < total_dev && recovery_state
puts line_out.concat " \n\t\t *RECOVERING*"
exit_code = EXIT_WARNING if exit_code <= EXIT_WARNING
elsif raid_state == "active" && working_dev < total_dev && !recovery_state
puts line_out.concat "\n\t\t *NOT RECOVERING*"
exit_code = EXIT_CRIT if exit_code <= EXIT_CRIT
elsif raid_state != "active"
puts line_out
exit_code = EXIT_CRIT if exit_code <= EXIT_CRIT
end
end
exit(exit_code)
|
# Builds image-tag attributes from a source image's dimensions and the
# browser window's characteristics.
class ImageTagBuilder
  # Source image dimensions, in pixels.
  attr_accessor :source_height, :source_width
  # Browser viewport characteristics.
  attr_accessor :window_device_pixel_ratio, :window_inner_width

  # Width to request from the image service — currently the source width.
  alias_method :image_call_width, :source_width
end
Shameless green first test is passing.
# Builds image-tag attributes from a source image's dimensions and the
# browser window's characteristics.
class ImageTagBuilder
  # Source image dimensions, in pixels.
  attr_accessor :source_height, :source_width
  # Browser viewport characteristics.
  attr_accessor :window_device_pixel_ratio, :window_inner_width

  # Dimensions to request from the image service — currently the source size.
  alias_method :image_call_width, :source_width
  alias_method :image_call_height, :source_height

  # Dimensions for the rendered tag's width/height attributes.
  alias_method :attribute_width, :source_width
  alias_method :attribute_height, :source_height
end
|
# frozen_string_literal: true
require "rack/session/abstract/id"
require "action_controller/metal/exceptions"
require "active_support/security_utils"
require "active_support/core_ext/string/strip"
module ActionController #:nodoc:
class InvalidAuthenticityToken < ActionControllerError #:nodoc:
end
class InvalidCrossOriginRequest < ActionControllerError #:nodoc:
end
# Controller actions are protected from Cross-Site Request Forgery (CSRF) attacks
# by including a token in the rendered HTML for your application. This token is
# stored as a random string in the session, to which an attacker does not have
# access. When a request reaches your application, \Rails verifies the received
# token with the token in the session. All requests are checked except GET requests
# as these should be idempotent. Keep in mind that all session-oriented requests
# should be CSRF protected, including JavaScript and HTML requests.
#
# Since HTML and JavaScript requests are typically made from the browser, we
# need to ensure to verify request authenticity for the web browser. We can
# use session-oriented authentication for these types of requests, by using
# the <tt>protect_from_forgery</tt> method in our controllers.
#
# GET requests are not protected since they don't have side effects like writing
# to the database and don't leak sensitive information. JavaScript requests are
# an exception: a third-party site can use a <script> tag to reference a JavaScript
# URL on your site. When your JavaScript response loads on their site, it executes.
# With carefully crafted JavaScript on their end, sensitive data in your JavaScript
# response may be extracted. To prevent this, only XmlHttpRequest (known as XHR or
# Ajax) requests are allowed to make GET requests for JavaScript responses.
#
# It's important to remember that XML or JSON requests are also affected and if
# you're building an API you should change forgery protection method in
# <tt>ApplicationController</tt> (by default: <tt>:exception</tt>):
#
# class ApplicationController < ActionController::Base
# protect_from_forgery unless: -> { request.format.json? }
# end
#
# CSRF protection is turned on with the <tt>protect_from_forgery</tt> method.
# By default <tt>protect_from_forgery</tt> protects your session with
# <tt>:null_session</tt> method, which provides an empty session
# during request.
#
# We may want to disable CSRF protection for APIs since they are typically
# designed to be state-less. That is, the request API client will handle
# the session for you instead of Rails.
#
# The token parameter is named <tt>authenticity_token</tt> by default. The name and
# value of this token must be added to every layout that renders forms by including
# <tt>csrf_meta_tags</tt> in the HTML +head+.
#
# Learn more about CSRF attacks and securing your application in the
# {Ruby on Rails Security Guide}[http://guides.rubyonrails.org/security.html].
module RequestForgeryProtection
extend ActiveSupport::Concern
include AbstractController::Helpers
include AbstractController::Callbacks
included do
# Sets the token parameter name for RequestForgery. Calling +protect_from_forgery+
# sets it to <tt>:authenticity_token</tt> by default.
config_accessor :request_forgery_protection_token
self.request_forgery_protection_token ||= :authenticity_token
# Holds the class which implements the request forgery protection.
config_accessor :forgery_protection_strategy
self.forgery_protection_strategy = nil
# Controls whether request forgery protection is turned on or not. Turned off by default only in test mode.
config_accessor :allow_forgery_protection
self.allow_forgery_protection = true if allow_forgery_protection.nil?
# Controls whether a CSRF failure logs a warning. On by default.
config_accessor :log_warning_on_csrf_failure
self.log_warning_on_csrf_failure = true
# Controls whether the Origin header is checked in addition to the CSRF token.
config_accessor :forgery_protection_origin_check
self.forgery_protection_origin_check = false
# Controls whether form-action/method specific CSRF tokens are used.
config_accessor :per_form_csrf_tokens
self.per_form_csrf_tokens = false
# Controls whether forgery protection is enabled by default.
config_accessor :default_protect_from_forgery
self.default_protect_from_forgery = false
helper_method :form_authenticity_token
helper_method :protect_against_forgery?
end
module ClassMethods
# Turn on request forgery protection. Bear in mind that GET and HEAD requests are not checked.
#
# class ApplicationController < ActionController::Base
# protect_from_forgery
# end
#
# class FooController < ApplicationController
# protect_from_forgery except: :index
# end
#
# You can disable forgery protection on controller by skipping the verification before_action:
#
# skip_before_action :verify_authenticity_token
#
# Valid Options:
#
# * <tt>:only/:except</tt> - Only apply forgery protection to a subset of actions. For example <tt>only: [ :create, :create_all ]</tt>.
# * <tt>:if/:unless</tt> - Turn off the forgery protection entirely depending on the passed Proc or method reference.
# * <tt>:prepend</tt> - By default, the verification of the authentication token will be added at the position of the
# protect_from_forgery call in your application. This means any callbacks added before are run first. This is useful
# when you want your forgery protection to depend on other callbacks, like authentication methods (Oauth vs Cookie auth).
#
# If you need to add verification to the beginning of the callback chain, use <tt>prepend: true</tt>.
# * <tt>:with</tt> - Set the method to handle unverified request.
#
# Valid unverified request handling methods are:
# * <tt>:exception</tt> - Raises ActionController::InvalidAuthenticityToken exception.
# * <tt>:reset_session</tt> - Resets the session.
# * <tt>:null_session</tt> - Provides an empty session during request but doesn't reset it completely. Used as default if <tt>:with</tt> option is not specified.
def protect_from_forgery(options = {})
options = options.reverse_merge(prepend: false)
self.forgery_protection_strategy = protection_method_class(options[:with] || :null_session)
self.request_forgery_protection_token ||= :authenticity_token
before_action :verify_authenticity_token, options
append_after_action :verify_same_origin_request
end
# Turn off request forgery protection. This is a wrapper for:
#
# skip_before_action :verify_authenticity_token
#
# See +skip_before_action+ for allowed options.
def skip_forgery_protection(options = {})
skip_before_action :verify_authenticity_token, options
end
private
def protection_method_class(name)
ActionController::RequestForgeryProtection::ProtectionMethods.const_get(name.to_s.classify)
rescue NameError
raise ArgumentError, "Invalid request forgery protection method, use :null_session, :exception, or :reset_session"
end
end
module ProtectionMethods
class NullSession
def initialize(controller)
@controller = controller
end
# This is the method that defines the application behavior when a request is found to be unverified.
def handle_unverified_request
request = @controller.request
request.session = NullSessionHash.new(request)
request.flash = nil
request.session_options = { skip: true }
request.cookie_jar = NullCookieJar.build(request, {})
end
private
class NullSessionHash < Rack::Session::Abstract::SessionHash #:nodoc:
def initialize(req)
super(nil, req)
@data = {}
@loaded = true
end
# no-op
def destroy; end
def exists?
true
end
end
class NullCookieJar < ActionDispatch::Cookies::CookieJar #:nodoc:
def write(*)
# nothing
end
end
end
class ResetSession
def initialize(controller)
@controller = controller
end
def handle_unverified_request
@controller.reset_session
end
end
class Exception
def initialize(controller)
@controller = controller
end
def handle_unverified_request
raise ActionController::InvalidAuthenticityToken
end
end
end
private
# The actual before_action that is used to verify the CSRF token.
# Don't override this directly. Provide your own forgery protection
# strategy instead. If you override, you'll disable same-origin
# <tt><script></tt> verification.
#
# Lean on the protect_from_forgery declaration to mark which actions are
# due for same-origin request verification. If protect_from_forgery is
# enabled on an action, this before_action flags its after_action to
# verify that JavaScript responses are for XHR requests, ensuring they
# follow the browser's same-origin policy.
def verify_authenticity_token # :doc:
mark_for_same_origin_verification!
if !verified_request?
if logger && log_warning_on_csrf_failure
if valid_request_origin?
logger.warn "Can't verify CSRF token authenticity."
else
logger.warn "HTTP Origin header (#{request.origin}) didn't match request.base_url (#{request.base_url})"
end
end
handle_unverified_request
end
end
def handle_unverified_request # :doc:
forgery_protection_strategy.new(self).handle_unverified_request
end
#:nodoc:
CROSS_ORIGIN_JAVASCRIPT_WARNING = "Security warning: an embedded " \
"<script> tag on another site requested protected JavaScript. " \
"If you know what you're doing, go ahead and disable forgery " \
"protection on this action to permit cross-origin JavaScript embedding."
private_constant :CROSS_ORIGIN_JAVASCRIPT_WARNING
# :startdoc:
# If +verify_authenticity_token+ was run (indicating that we have
# forgery protection enabled for this request) then also verify that
# we aren't serving an unauthorized cross-origin response.
def verify_same_origin_request # :doc:
if marked_for_same_origin_verification? && non_xhr_javascript_response?
if logger && log_warning_on_csrf_failure
logger.warn CROSS_ORIGIN_JAVASCRIPT_WARNING
end
raise ActionController::InvalidCrossOriginRequest, CROSS_ORIGIN_JAVASCRIPT_WARNING
end
end
# GET requests are checked for cross-origin JavaScript after rendering.
def mark_for_same_origin_verification! # :doc:
@marked_for_same_origin_verification = request.get?
end
# If the +verify_authenticity_token+ before_action ran, verify that
# JavaScript responses are only served to same-origin GET requests.
def marked_for_same_origin_verification? # :doc:
@marked_for_same_origin_verification ||= false
end
# Check for cross-origin JavaScript responses.
def non_xhr_javascript_response? # :doc:
content_type =~ %r(\Atext/javascript) && !request.xhr?
end
AUTHENTICITY_TOKEN_LENGTH = 32
# Returns true or false if a request is verified. Checks:
#
# * Is it a GET or HEAD request? GETs should be safe and idempotent
# * Does the form_authenticity_token match the given token value from the params?
# * Does the X-CSRF-Token header match the form_authenticity_token?
def verified_request? # :doc:
!protect_against_forgery? || request.get? || request.head? ||
(valid_request_origin? && any_authenticity_token_valid?)
end
# Checks if any of the authenticity tokens from the request are valid.
def any_authenticity_token_valid? # :doc:
request_authenticity_tokens.any? do |token|
valid_authenticity_token?(session, token)
end
end
# Possible authenticity tokens sent in the request.
def request_authenticity_tokens # :doc:
[form_authenticity_param, request.x_csrf_token]
end
# Sets the token value for the current session.
def form_authenticity_token(form_options: {})
masked_authenticity_token(session, form_options: form_options)
end
# Creates a masked version of the authenticity token that varies
# on each request. The masking is used to mitigate SSL attacks
# like BREACH.
def masked_authenticity_token(session, form_options: {}) # :doc:
action, method = form_options.values_at(:action, :method)
raw_token = if per_form_csrf_tokens && action && method
action_path = normalize_action_path(action)
per_form_csrf_token(session, action_path, method)
else
real_csrf_token(session)
end
one_time_pad = SecureRandom.random_bytes(AUTHENTICITY_TOKEN_LENGTH)
encrypted_csrf_token = xor_byte_strings(one_time_pad, raw_token)
masked_token = one_time_pad + encrypted_csrf_token
Base64.strict_encode64(masked_token)
end
# Checks the client's masked token to see if it matches the
# session token. Essentially the inverse of
# +masked_authenticity_token+.
def valid_authenticity_token?(session, encoded_masked_token) # :doc:
if encoded_masked_token.nil? || encoded_masked_token.empty? || !encoded_masked_token.is_a?(String)
return false
end
begin
masked_token = Base64.strict_decode64(encoded_masked_token)
rescue ArgumentError # encoded_masked_token is invalid Base64
return false
end
# See if it's actually a masked token or not. In order to
# deploy this code, we should be able to handle any unmasked
# tokens that we've issued without error.
if masked_token.length == AUTHENTICITY_TOKEN_LENGTH
# This is actually an unmasked token. This is expected if
# you have just upgraded to masked tokens, but should stop
# happening shortly after installing this gem.
compare_with_real_token masked_token, session
elsif masked_token.length == AUTHENTICITY_TOKEN_LENGTH * 2
csrf_token = unmask_token(masked_token)
compare_with_real_token(csrf_token, session) ||
valid_per_form_csrf_token?(csrf_token, session)
else
false # Token is malformed.
end
end
def unmask_token(masked_token) # :doc:
# Split the token into the one-time pad and the encrypted
# value and decrypt it.
one_time_pad = masked_token[0...AUTHENTICITY_TOKEN_LENGTH]
encrypted_csrf_token = masked_token[AUTHENTICITY_TOKEN_LENGTH..-1]
xor_byte_strings(one_time_pad, encrypted_csrf_token)
end
def compare_with_real_token(token, session) # :doc:
ActiveSupport::SecurityUtils.fixed_length_secure_compare(token, real_csrf_token(session))
end
def valid_per_form_csrf_token?(token, session) # :doc:
if per_form_csrf_tokens
correct_token = per_form_csrf_token(
session,
normalize_action_path(request.fullpath),
request.request_method
)
ActiveSupport::SecurityUtils.fixed_length_secure_compare(token, correct_token)
else
false
end
end
def real_csrf_token(session) # :doc:
session[:_csrf_token] ||= SecureRandom.base64(AUTHENTICITY_TOKEN_LENGTH)
Base64.strict_decode64(session[:_csrf_token])
end
def per_form_csrf_token(session, action_path, method) # :doc:
OpenSSL::HMAC.digest(
OpenSSL::Digest::SHA256.new,
real_csrf_token(session),
[action_path, method.downcase].join("#")
)
end
def xor_byte_strings(s1, s2) # :doc:
s2_bytes = s2.bytes
s1.each_byte.with_index { |c1, i| s2_bytes[i] ^= c1 }
s2_bytes.pack("C*")
end
# The form's authenticity parameter. Override to provide your own.
def form_authenticity_param # :doc:
params[request_forgery_protection_token]
end
# Checks if the controller allows forgery protection.
def protect_against_forgery? # :doc:
allow_forgery_protection
end
NULL_ORIGIN_MESSAGE = <<-MSG.strip_heredoc
The browser returned a 'null' origin for a request with origin-based forgery protection turned on. This usually
means you have the 'no-referrer' Referrer-Policy header enabled, or that you the request came from a site that
refused to give its origin. This makes it impossible for Rails to verify the source of the requests. Likely the
best solution is to change your referrer policy to something less strict like same-origin or strict-same-origin.
If you cannot change the referrer policy, you can disable origin checking with the
Rails.application.config.action_controller.forgery_protection_origin_check setting.
MSG
# Checks if the request originated from the same origin by looking at the
# Origin header.
def valid_request_origin? # :doc:
if forgery_protection_origin_check
# We accept blank origin headers because some user agents don't send it.
raise InvalidAuthenticityToken, NULL_ORIGIN_MESSAGE if request.origin == "null"
request.origin.nil? || request.origin == request.base_url
else
true
end
end
def normalize_action_path(action_path) # :doc:
uri = URI.parse(action_path)
uri.path.chomp("/")
end
end
end
fixes #27157 CSRF protection documentation
* removed reference to GET requests where it applies also to other HTTP verbs
* updated documentation to try and better explain how CSRF protection
works with XHR, and the potential exposure with CORS
# frozen_string_literal: true
require "rack/session/abstract/id"
require "action_controller/metal/exceptions"
require "active_support/security_utils"
require "active_support/core_ext/string/strip"
module ActionController #:nodoc:
class InvalidAuthenticityToken < ActionControllerError #:nodoc:
end
class InvalidCrossOriginRequest < ActionControllerError #:nodoc:
end
# Controller actions are protected from Cross-Site Request Forgery (CSRF) attacks
# by including a token in the rendered HTML for your application. This token is
# stored as a random string in the session, to which an attacker does not have
# access. When a request reaches your application, \Rails verifies the received
# token with the token in the session. All requests are checked except GET requests
# as these should be idempotent. Keep in mind that all session-oriented requests
# are CSRF protected by default, including JavaScript and HTML requests.
#
# Since HTML and JavaScript requests are typically made from the browser, we
# need to ensure to verify request authenticity for the web browser. We can
# use session-oriented authentication for these types of requests, by using
# the <tt>protect_from_forgery</tt> method in our controllers.
#
# GET requests are not protected since they don't have side effects like writing
# to the database and don't leak sensitive information. JavaScript requests are
# an exception: a third-party site can use a <script> tag to reference a JavaScript
# URL on your site. When your JavaScript response loads on their site, it executes.
# With carefully crafted JavaScript on their end, sensitive data in your JavaScript
# response may be extracted. To prevent this, only XmlHttpRequest (known as XHR or
# Ajax) requests are allowed to make requests for JavaScript responses.
#
# It's important to remember that XML or JSON requests are also checked by default. If
# you're building an API or an SPA you could change forgery protection method in
# <tt>ApplicationController</tt> (by default: <tt>:exception</tt>):
#
# class ApplicationController < ActionController::Base
# protect_from_forgery unless: -> { request.format.json? }
# end
#
# It is generally safe to exclude XHR requests from CSRF protection
# (like the code snippet above does), because XHR requests can only be made from
# the same origin. Note however that any cross-origin third party domain
# allowed via {CORS}[https://en.wikipedia.org/wiki/Cross-origin_resource_sharing]
# will also be able to create XHR requests. Be sure to check your
# CORS whitelist before disabling forgery protection for XHR.
#
# CSRF protection is turned on with the <tt>protect_from_forgery</tt> method.
# By default <tt>protect_from_forgery</tt> protects your session with
# <tt>:null_session</tt> method, which provides an empty session
# during request.
#
# We may want to disable CSRF protection for APIs since they are typically
# designed to be state-less. That is, the request API client will handle
# the session for you instead of Rails.
#
# The token parameter is named <tt>authenticity_token</tt> by default. The name and
# value of this token must be added to every layout that renders forms by including
# <tt>csrf_meta_tags</tt> in the HTML +head+.
#
# Learn more about CSRF attacks and securing your application in the
# {Ruby on Rails Security Guide}[http://guides.rubyonrails.org/security.html].
module RequestForgeryProtection
extend ActiveSupport::Concern
include AbstractController::Helpers
include AbstractController::Callbacks
included do
# Sets the token parameter name for RequestForgery. Calling +protect_from_forgery+
# sets it to <tt>:authenticity_token</tt> by default.
config_accessor :request_forgery_protection_token
self.request_forgery_protection_token ||= :authenticity_token
# Holds the class which implements the request forgery protection.
config_accessor :forgery_protection_strategy
self.forgery_protection_strategy = nil
# Controls whether request forgery protection is turned on or not. Turned off by default only in test mode.
config_accessor :allow_forgery_protection
self.allow_forgery_protection = true if allow_forgery_protection.nil?
# Controls whether a CSRF failure logs a warning. On by default.
config_accessor :log_warning_on_csrf_failure
self.log_warning_on_csrf_failure = true
# Controls whether the Origin header is checked in addition to the CSRF token.
config_accessor :forgery_protection_origin_check
self.forgery_protection_origin_check = false
# Controls whether form-action/method specific CSRF tokens are used.
config_accessor :per_form_csrf_tokens
self.per_form_csrf_tokens = false
# Controls whether forgery protection is enabled by default.
config_accessor :default_protect_from_forgery
self.default_protect_from_forgery = false
helper_method :form_authenticity_token
helper_method :protect_against_forgery?
end
module ClassMethods
# Turn on request forgery protection. Bear in mind that GET and HEAD requests are not checked.
#
# class ApplicationController < ActionController::Base
# protect_from_forgery
# end
#
# class FooController < ApplicationController
# protect_from_forgery except: :index
# end
#
# You can disable forgery protection on controller by skipping the verification before_action:
#
# skip_before_action :verify_authenticity_token
#
# Valid Options:
#
# * <tt>:only/:except</tt> - Only apply forgery protection to a subset of actions. For example <tt>only: [ :create, :create_all ]</tt>.
# * <tt>:if/:unless</tt> - Turn off the forgery protection entirely depending on the passed Proc or method reference.
# * <tt>:prepend</tt> - By default, the verification of the authentication token will be added at the position of the
# protect_from_forgery call in your application. This means any callbacks added before are run first. This is useful
# when you want your forgery protection to depend on other callbacks, like authentication methods (Oauth vs Cookie auth).
#
# If you need to add verification to the beginning of the callback chain, use <tt>prepend: true</tt>.
# * <tt>:with</tt> - Set the method to handle unverified request.
#
# Valid unverified request handling methods are:
# * <tt>:exception</tt> - Raises ActionController::InvalidAuthenticityToken exception.
# * <tt>:reset_session</tt> - Resets the session.
# * <tt>:null_session</tt> - Provides an empty session during request but doesn't reset it completely. Used as default if <tt>:with</tt> option is not specified.
def protect_from_forgery(options = {})
options = options.reverse_merge(prepend: false)
self.forgery_protection_strategy = protection_method_class(options[:with] || :null_session)
self.request_forgery_protection_token ||= :authenticity_token
before_action :verify_authenticity_token, options
append_after_action :verify_same_origin_request
end
# Turn off request forgery protection. This is a wrapper for:
#
# skip_before_action :verify_authenticity_token
#
# See +skip_before_action+ for allowed options.
def skip_forgery_protection(options = {})
skip_before_action :verify_authenticity_token, options
end
private
def protection_method_class(name)
ActionController::RequestForgeryProtection::ProtectionMethods.const_get(name.to_s.classify)
rescue NameError
raise ArgumentError, "Invalid request forgery protection method, use :null_session, :exception, or :reset_session"
end
end
module ProtectionMethods
class NullSession
def initialize(controller)
@controller = controller
end
# This is the method that defines the application behavior when a request is found to be unverified.
def handle_unverified_request
request = @controller.request
request.session = NullSessionHash.new(request)
request.flash = nil
request.session_options = { skip: true }
request.cookie_jar = NullCookieJar.build(request, {})
end
private
class NullSessionHash < Rack::Session::Abstract::SessionHash #:nodoc:
def initialize(req)
super(nil, req)
@data = {}
@loaded = true
end
# no-op
def destroy; end
def exists?
true
end
end
class NullCookieJar < ActionDispatch::Cookies::CookieJar #:nodoc:
def write(*)
# nothing
end
end
end
class ResetSession
def initialize(controller)
@controller = controller
end
def handle_unverified_request
@controller.reset_session
end
end
class Exception
def initialize(controller)
@controller = controller
end
def handle_unverified_request
raise ActionController::InvalidAuthenticityToken
end
end
end
private
# The actual before_action that is used to verify the CSRF token.
# Don't override this directly. Provide your own forgery protection
# strategy instead. If you override, you'll disable same-origin
# <tt><script></tt> verification.
#
# Lean on the protect_from_forgery declaration to mark which actions are
# due for same-origin request verification. If protect_from_forgery is
# enabled on an action, this before_action flags its after_action to
# verify that JavaScript responses are for XHR requests, ensuring they
# follow the browser's same-origin policy.
def verify_authenticity_token # :doc:
mark_for_same_origin_verification!
if !verified_request?
if logger && log_warning_on_csrf_failure
if valid_request_origin?
logger.warn "Can't verify CSRF token authenticity."
else
logger.warn "HTTP Origin header (#{request.origin}) didn't match request.base_url (#{request.base_url})"
end
end
handle_unverified_request
end
end
def handle_unverified_request # :doc:
forgery_protection_strategy.new(self).handle_unverified_request
end
#:nodoc:
CROSS_ORIGIN_JAVASCRIPT_WARNING = "Security warning: an embedded " \
"<script> tag on another site requested protected JavaScript. " \
"If you know what you're doing, go ahead and disable forgery " \
"protection on this action to permit cross-origin JavaScript embedding."
private_constant :CROSS_ORIGIN_JAVASCRIPT_WARNING
# :startdoc:
# If +verify_authenticity_token+ was run (indicating that we have
# forgery protection enabled for this request) then also verify that
# we aren't serving an unauthorized cross-origin response.
def verify_same_origin_request # :doc:
if marked_for_same_origin_verification? && non_xhr_javascript_response?
if logger && log_warning_on_csrf_failure
logger.warn CROSS_ORIGIN_JAVASCRIPT_WARNING
end
raise ActionController::InvalidCrossOriginRequest, CROSS_ORIGIN_JAVASCRIPT_WARNING
end
end
# GET requests are checked for cross-origin JavaScript after rendering.
def mark_for_same_origin_verification! # :doc:
@marked_for_same_origin_verification = request.get?
end
# If the +verify_authenticity_token+ before_action ran, verify that
# JavaScript responses are only served to same-origin GET requests.
def marked_for_same_origin_verification? # :doc:
@marked_for_same_origin_verification ||= false
end
# Check for cross-origin JavaScript responses.
def non_xhr_javascript_response? # :doc:
content_type =~ %r(\Atext/javascript) && !request.xhr?
end
AUTHENTICITY_TOKEN_LENGTH = 32
# Returns true or false if a request is verified. Checks:
#
# * Is it a GET or HEAD request? GETs should be safe and idempotent
# * Does the form_authenticity_token match the given token value from the params?
# * Does the X-CSRF-Token header match the form_authenticity_token?
def verified_request? # :doc:
  return true unless protect_against_forgery?
  return true if request.get? || request.head?
  valid_request_origin? && any_authenticity_token_valid?
end
# Checks if any of the authenticity tokens from the request are valid.
def any_authenticity_token_valid? # :doc:
  request_authenticity_tokens.any? { |candidate| valid_authenticity_token?(session, candidate) }
end
# Possible authenticity tokens sent in the request: the form parameter
# followed by the X-CSRF-Token request header.
def request_authenticity_tokens # :doc:
  tokens = [form_authenticity_param]
  tokens << request.x_csrf_token
  tokens
end
# Sets the token value for the current session.
# Returns the masked token to embed in forms; +form_options+ may carry
# +:action+ and +:method+ to request a per-form token instead of the
# session-wide one.
def form_authenticity_token(form_options: {})
masked_authenticity_token(session, form_options: form_options)
end
# Creates a masked version of the authenticity token that varies
# on each request. The masking is used to mitigate SSL attacks
# like BREACH.
def masked_authenticity_token(session, form_options: {}) # :doc:
action, method = form_options.values_at(:action, :method)
# Use a per-form token only when the feature is enabled AND both the
# action and method are known; otherwise fall back to the session token.
raw_token = if per_form_csrf_tokens && action && method
action_path = normalize_action_path(action)
per_form_csrf_token(session, action_path, method)
else
real_csrf_token(session)
end
# XOR the raw token with a fresh random pad and prepend the pad, so the
# Base64 output differs on every request even though the token does not.
one_time_pad = SecureRandom.random_bytes(AUTHENTICITY_TOKEN_LENGTH)
encrypted_csrf_token = xor_byte_strings(one_time_pad, raw_token)
masked_token = one_time_pad + encrypted_csrf_token
Base64.strict_encode64(masked_token)
end
# Checks the client's masked token to see if it matches the
# session token. Essentially the inverse of
# +masked_authenticity_token+. Returns false (never raises) for any
# missing, malformed, or mismatching token.
def valid_authenticity_token?(session, encoded_masked_token) # :doc:
  # Reject non-String values BEFORE calling String methods on them:
  # request params can carry arrays/hashes/numbers, and the previous
  # order of checks called +empty?+ on objects that may not respond to
  # it, raising NoMethodError instead of returning false.
  # (+nil+ is covered by the +is_a?(String)+ check.)
  if !encoded_masked_token.is_a?(String) || encoded_masked_token.empty?
    return false
  end
  begin
    masked_token = Base64.strict_decode64(encoded_masked_token)
  rescue ArgumentError # encoded_masked_token is invalid Base64
    return false
  end
  # See if it's actually a masked token or not. In order to
  # deploy this code, we should be able to handle any unmasked
  # tokens that we've issued without error.
  if masked_token.length == AUTHENTICITY_TOKEN_LENGTH
    # This is actually an unmasked token. This is expected if
    # you have just upgraded to masked tokens, but should stop
    # happening shortly after installing this gem.
    compare_with_real_token masked_token, session
  elsif masked_token.length == AUTHENTICITY_TOKEN_LENGTH * 2
    csrf_token = unmask_token(masked_token)
    compare_with_real_token(csrf_token, session) ||
      valid_per_form_csrf_token?(csrf_token, session)
  else
    false # Token is malformed.
  end
end
# Recovers the raw CSRF token from a masked one: the first
# AUTHENTICITY_TOKEN_LENGTH bytes are the one-time pad, the remainder is
# the XOR-encrypted token; XOR-ing the two halves yields the raw token.
def unmask_token(masked_token) # :doc:
  pad = masked_token[0...AUTHENTICITY_TOKEN_LENGTH]
  encrypted = masked_token[AUTHENTICITY_TOKEN_LENGTH..-1]
  xor_byte_strings(pad, encrypted)
end
# Constant-time comparison of +token+ against the session's real CSRF
# token, to avoid leaking information through timing side channels.
def compare_with_real_token(token, session) # :doc:
ActiveSupport::SecurityUtils.fixed_length_secure_compare(token, real_csrf_token(session))
end
# Checks +token+ against the per-form CSRF token derived from the current
# request's path and HTTP method. Always false when per-form tokens are
# disabled.
def valid_per_form_csrf_token?(token, session) # :doc:
  return false unless per_form_csrf_tokens

  action_path = normalize_action_path(request.fullpath)
  correct_token = per_form_csrf_token(session, action_path, request.request_method)
  ActiveSupport::SecurityUtils.fixed_length_secure_compare(token, correct_token)
end
# Returns the session's raw (binary) CSRF token, generating and storing a
# fresh Base64-encoded one in the session on first use.
def real_csrf_token(session) # :doc:
  encoded = (session[:_csrf_token] ||= SecureRandom.base64(AUTHENTICITY_TOKEN_LENGTH))
  Base64.strict_decode64(encoded)
end
# Derives a per-form token as HMAC-SHA256 of "action_path#method" keyed by
# the session's real CSRF token.
def per_form_csrf_token(session, action_path, method) # :doc:
  message = [action_path, method.downcase].join("#")
  OpenSSL::HMAC.digest(OpenSSL::Digest::SHA256.new, real_csrf_token(session), message)
end
# XORs byte string +s1+ into a copy of +s2+ and returns the result as a
# new binary string; neither argument is mutated. Iterates over +s1+, so
# +s1+ must be no longer than +s2+.
def xor_byte_strings(s1, s2) # :doc:
  mixed = s2.bytes
  s1.bytes.each_with_index do |byte, idx|
    mixed[idx] ^= byte
  end
  mixed.pack("C*")
end
# The form's authenticity parameter. Override to provide your own.
# Reads the request parameter named by +request_forgery_protection_token+.
def form_authenticity_param # :doc:
params[request_forgery_protection_token]
end
# Checks if the controller allows forgery protection.
# Backed by the +allow_forgery_protection+ configuration setting.
def protect_against_forgery? # :doc:
allow_forgery_protection
end
# Message raised when origin-based forgery protection receives a 'null'
# Origin header and therefore cannot verify the request source.
# Fixed garbled wording: "or that you the request came from" -> "or that
# the request came from".
NULL_ORIGIN_MESSAGE = <<-MSG.strip_heredoc
The browser returned a 'null' origin for a request with origin-based forgery protection turned on. This usually
means you have the 'no-referrer' Referrer-Policy header enabled, or that the request came from a site that
refused to give its origin. This makes it impossible for Rails to verify the source of the requests. Likely the
best solution is to change your referrer policy to something less strict like same-origin or strict-same-origin.
If you cannot change the referrer policy, you can disable origin checking with the
Rails.application.config.action_controller.forgery_protection_origin_check setting.
MSG
# Checks if the request originated from the same origin by looking at the
# Origin header. Raises InvalidAuthenticityToken for a 'null' origin,
# which cannot be verified.
def valid_request_origin? # :doc:
  return true unless forgery_protection_origin_check

  # We accept blank origin headers because some user agents don't send it.
  raise InvalidAuthenticityToken, NULL_ORIGIN_MESSAGE if request.origin == "null"
  request.origin.nil? || request.origin == request.base_url
end
# Normalizes a form action URL to its bare path with no trailing slash,
# so per-form tokens are stable across query strings and "/"-suffixed
# variants of the same action.
def normalize_action_path(action_path) # :doc:
  URI.parse(action_path).path.chomp("/")
end
end
end
|
require 'active_record/connection_adapters/abstract_adapter'
module ActiveRecord
module ConnectionAdapters
class PostgreSQLAdapter < AbstractAdapter
# Per-OID type casters for values coming back from PostgreSQL as strings.
# Changes vs. the previous revision:
# * Wtf#type_cast no longer dumps every value to stdout (debug leftover).
# * Casters that coerce (to_i/to_f/date/time/decimal/bool) now return nil
#   for SQL NULL instead of silently producing 0/0.0/false.
module OID
  # Debugging scaffold: returns the value unchanged. The stray
  # `p :wtf => value` debug print was removed so casting no longer has a
  # stdout side effect.
  class Wtf
    def type_cast(value)
      value
    end
  end

  # Pass-through cast for types kept as strings (text, name, varchar, ...).
  class Identity
    def type_cast(value)
      value
    end
  end

  # bytea columns: unescape the PG wire encoding; nil stays nil.
  class Bytea
    def type_cast(value)
      PGconn.unescape_bytea value if value
    end
  end

  # money columns: strip the locale-dependent currency symbol and group
  # separators, then convert to a decimal.
  class Money
    def type_cast(value)
      return if value.nil?
      # Because money output is formatted according to the locale, there are two
      # cases to consider (note the decimal separators):
      # (1) $12,345,678.12
      # (2) $12.345.678,12
      case value
      when /^-?\D+[\d,]+\.\d{2}$/ # (1)
        value.gsub!(/[^-\d.]/, '')
      when /^-?\D+[\d.]+,\d{2}$/ # (2)
        value.gsub!(/[^-\d,]/, '').sub!(/,/, '.')
      end
      ConnectionAdapters::Column.value_to_decimal value
    end
  end

  # Array-like types; currently returned as the raw string.
  class Vector
    attr_reader :delim, :subtype

    # +delim+ corresponds to the `typdelim` column in the pg_types
    # table. +subtype+ is derived from the `typelem` column in the
    # pg_types table.
    def initialize(delim, subtype)
      @delim = delim
      @subtype = subtype
    end

    # FIXME: this should probably split on +delim+ and use +subtype+
    # to cast the values. Unfortunately, the current Rails behavior
    # is to just return the string.
    def type_cast(value)
      value
    end
  end

  # Integer types. NULL stays nil instead of being coerced to 0 by +to_i+.
  class Integer
    def type_cast(value)
      return if value.nil?
      value.to_i
    end
  end

  class Boolean
    def type_cast(value)
      return if value.nil?
      ConnectionAdapters::Column.value_to_boolean value
    end
  end

  class Timestamp
    def type_cast(value)
      return if value.nil?
      # FIXME: probably we can improve this since we know it is PG
      # specific
      ConnectionAdapters::Column.string_to_time value
    end
  end

  class Date
    def type_cast(value)
      return if value.nil?
      # FIXME: probably we can improve this since we know it is PG
      # specific
      ConnectionAdapters::Column.value_to_date value
    end
  end

  class Time
    def type_cast(value)
      return if value.nil?
      # FIXME: probably we can improve this since we know it is PG
      # specific
      ConnectionAdapters::Column.string_to_dummy_time value
    end
  end

  class Float
    def type_cast(value)
      return if value.nil?
      value.to_f
    end
  end

  # Map of pg_type OIDs to caster instances. Instances are shared across
  # OIDs that have the same textual representation.
  TYPE_MAP = {} # :nodoc:
  TYPE_MAP[23] = OID::Integer.new # int4
  TYPE_MAP[20] = TYPE_MAP[23] # int8
  TYPE_MAP[21] = TYPE_MAP[23] # int2
  TYPE_MAP[26] = TYPE_MAP[23] # oid
  TYPE_MAP[25] = OID::Identity.new # text
  TYPE_MAP[19] = TYPE_MAP[25] # name
  TYPE_MAP[1043] = TYPE_MAP[25] # varchar
  # FIXME: why are we keeping these types as strings?
  TYPE_MAP[3614] = TYPE_MAP[25] # tsvector
  TYPE_MAP[1186] = TYPE_MAP[25] # interval
  TYPE_MAP[650] = TYPE_MAP[25] # cidr
  TYPE_MAP[869] = TYPE_MAP[25] # inet
  TYPE_MAP[829] = TYPE_MAP[25] # macaddr
  TYPE_MAP[1560] = TYPE_MAP[25] # bit
  TYPE_MAP[1562] = TYPE_MAP[25] # varbit
  # FIXME: I don't think this is correct. We should probably be returning a parsed date,
  # but the tests pass with a string returned.
  TYPE_MAP[1184] = OID::Identity.new # timestamptz
  TYPE_MAP[790] = OID::Money.new # money
  TYPE_MAP[17] = OID::Bytea.new # bytea
  TYPE_MAP[16] = OID::Boolean.new # bool
  TYPE_MAP[700] = OID::Float.new # float4
  TYPE_MAP[701] = TYPE_MAP[700] # float8
  TYPE_MAP[1114] = OID::Timestamp.new # timestamp
  TYPE_MAP[1082] = OID::Date.new # date
  TYPE_MAP[1083] = OID::Time.new # time
  TYPE_MAP[1009] = OID::Vector.new(',', TYPE_MAP[25]) # _text
  TYPE_MAP[1007] = OID::Vector.new(',', TYPE_MAP[23]) # _int4
  TYPE_MAP[600] = OID::Vector.new(',', TYPE_MAP[701]) # point
  TYPE_MAP[601] = OID::Vector.new(',', TYPE_MAP[600]) # lseg
  TYPE_MAP[602] = OID::Identity.new # path
  TYPE_MAP[603] = OID::Vector.new(';', TYPE_MAP[600]) # box
  TYPE_MAP[604] = OID::Identity.new # polygon
  TYPE_MAP[718] = OID::Identity.new # circle
end
end
end
end
return early from typecasting if the value is nil
require 'active_record/connection_adapters/abstract_adapter'
module ActiveRecord
module ConnectionAdapters
class PostgreSQLAdapter < AbstractAdapter
# Per-OID type casters for values coming back from PostgreSQL as strings.
# All coercing casters return nil for SQL NULL (early return) instead of
# coercing it to 0/0.0/false. Change vs. previous revision: the stray
# debug print in Wtf#type_cast was removed.
module OID
  # Debugging scaffold: returns the value unchanged. The stray
  # `p :wtf => value` debug print was removed so casting no longer has a
  # stdout side effect.
  class Wtf
    def type_cast(value)
      value
    end
  end

  # Pass-through cast for types kept as strings (text, name, varchar, ...).
  class Identity
    def type_cast(value)
      value
    end
  end

  # bytea columns: unescape the PG wire encoding; nil stays nil.
  class Bytea
    def type_cast(value)
      PGconn.unescape_bytea value if value
    end
  end

  # money columns: strip the locale-dependent currency symbol and group
  # separators, then convert to a decimal.
  class Money
    def type_cast(value)
      return if value.nil?
      # Because money output is formatted according to the locale, there are two
      # cases to consider (note the decimal separators):
      # (1) $12,345,678.12
      # (2) $12.345.678,12
      case value
      when /^-?\D+[\d,]+\.\d{2}$/ # (1)
        value.gsub!(/[^-\d.]/, '')
      when /^-?\D+[\d.]+,\d{2}$/ # (2)
        value.gsub!(/[^-\d,]/, '').sub!(/,/, '.')
      end
      ConnectionAdapters::Column.value_to_decimal value
    end
  end

  # Array-like types; currently returned as the raw string.
  class Vector
    attr_reader :delim, :subtype

    # +delim+ corresponds to the `typdelim` column in the pg_types
    # table. +subtype+ is derived from the `typelem` column in the
    # pg_types table.
    def initialize(delim, subtype)
      @delim = delim
      @subtype = subtype
    end

    # FIXME: this should probably split on +delim+ and use +subtype+
    # to cast the values. Unfortunately, the current Rails behavior
    # is to just return the string.
    def type_cast(value)
      value
    end
  end

  class Integer
    def type_cast(value)
      return if value.nil?
      # Fall back to truthiness (1/0) for objects without +to_i+, e.g.
      # true/false. NOTE(review): the inline rescue also hides unrelated
      # errors raised by a custom +to_i+ implementation.
      value.to_i rescue value ? 1 : 0
    end
  end

  class Boolean
    def type_cast(value)
      return if value.nil?
      ConnectionAdapters::Column.value_to_boolean value
    end
  end

  class Timestamp
    def type_cast(value)
      return if value.nil?
      # FIXME: probably we can improve this since we know it is PG
      # specific
      ConnectionAdapters::Column.string_to_time value
    end
  end

  class Date
    def type_cast(value)
      return if value.nil?
      # FIXME: probably we can improve this since we know it is PG
      # specific
      ConnectionAdapters::Column.value_to_date value
    end
  end

  class Time
    def type_cast(value)
      return if value.nil?
      # FIXME: probably we can improve this since we know it is PG
      # specific
      ConnectionAdapters::Column.string_to_dummy_time value
    end
  end

  class Float
    def type_cast(value)
      return if value.nil?
      value.to_f
    end
  end

  # Map of pg_type OIDs to caster instances. Instances are shared across
  # OIDs that have the same textual representation.
  TYPE_MAP = {} # :nodoc:
  TYPE_MAP[23] = OID::Integer.new # int4
  TYPE_MAP[20] = TYPE_MAP[23] # int8
  TYPE_MAP[21] = TYPE_MAP[23] # int2
  TYPE_MAP[26] = TYPE_MAP[23] # oid
  TYPE_MAP[25] = OID::Identity.new # text
  TYPE_MAP[19] = TYPE_MAP[25] # name
  TYPE_MAP[1043] = TYPE_MAP[25] # varchar
  # FIXME: why are we keeping these types as strings?
  TYPE_MAP[3614] = TYPE_MAP[25] # tsvector
  TYPE_MAP[1186] = TYPE_MAP[25] # interval
  TYPE_MAP[650] = TYPE_MAP[25] # cidr
  TYPE_MAP[869] = TYPE_MAP[25] # inet
  TYPE_MAP[829] = TYPE_MAP[25] # macaddr
  TYPE_MAP[1560] = TYPE_MAP[25] # bit
  TYPE_MAP[1562] = TYPE_MAP[25] # varbit
  # FIXME: I don't think this is correct. We should probably be returning a parsed date,
  # but the tests pass with a string returned.
  TYPE_MAP[1184] = OID::Identity.new # timestamptz
  TYPE_MAP[790] = OID::Money.new # money
  TYPE_MAP[17] = OID::Bytea.new # bytea
  TYPE_MAP[16] = OID::Boolean.new # bool
  TYPE_MAP[700] = OID::Float.new # float4
  TYPE_MAP[701] = TYPE_MAP[700] # float8
  TYPE_MAP[1114] = OID::Timestamp.new # timestamp
  TYPE_MAP[1082] = OID::Date.new # date
  TYPE_MAP[1083] = OID::Time.new # time
  TYPE_MAP[1009] = OID::Vector.new(',', TYPE_MAP[25]) # _text
  TYPE_MAP[1007] = OID::Vector.new(',', TYPE_MAP[23]) # _int4
  TYPE_MAP[600] = OID::Vector.new(',', TYPE_MAP[701]) # point
  TYPE_MAP[601] = OID::Vector.new(',', TYPE_MAP[600]) # lseg
  TYPE_MAP[602] = OID::Identity.new # path
  TYPE_MAP[603] = OID::Vector.new(';', TYPE_MAP[600]) # box
  TYPE_MAP[604] = OID::Identity.new # polygon
  TYPE_MAP[718] = OID::Identity.new # circle
end
end
end
end
|
module PickupRandomizer
# Maps each randomizable villager (OoE) to the event flag set when that
# villager has been rescued.
VILLAGER_NAME_TO_EVENT_FLAG = {
:villagerjacob => 0x2A,
:villagerabram => 0x2D,
:villageraeon => 0x3C,
:villagereugen => 0x38,
:villagermonica => 0x4F,
:villagerlaura => 0x32,
:villagermarcel => 0x40,
:villagerserge => 0x47,
:villageranna => 0x4B,
:villagerdaniela => 0x57,
:villageririna => 0x53,
:villagergeorge => 0x0D,
}
RANDOMIZABLE_VILLAGER_NAMES = VILLAGER_NAME_TO_EVENT_FLAG.keys
# Per-portrait (PoR) entity data: the special-object subtype used for the
# portrait entity, and the destination area/sector/room it leads to.
PORTRAIT_NAME_TO_DATA = {
:portraitcityofhaze => {subtype: 0x1A, area_index: 1, sector_index: 0, room_index: 0x1A},
:portraitsandygrave => {subtype: 0x1A, area_index: 3, sector_index: 0, room_index: 0},
:portraitnationoffools => {subtype: 0x1A, area_index: 5, sector_index: 0, room_index: 0x21},
:portraitforestofdoom => {subtype: 0x1A, area_index: 7, sector_index: 0, room_index: 0},
:portraitdarkacademy => {subtype: 0x76, area_index: 8, sector_index: 1, room_index: 6},
:portraitburntparadise => {subtype: 0x76, area_index: 6, sector_index: 0, room_index: 0x20},
:portraitforgottencity => {subtype: 0x76, area_index: 4, sector_index: 0, room_index: 0},
:portrait13thstreet => {subtype: 0x76, area_index: 2, sector_index: 0, room_index: 7},
:portraitnestofevil => {subtype: 0x86, area_index: 9, sector_index: 0, room_index: 0},
}
# Precompute the portrait entity's var_a/var_b fields: var_a is the
# destination area index; var_b packs the sector index into bits 6-9 and
# the room index into bits 0-5.
PORTRAIT_NAME_TO_DATA.each do |portrait_name, portrait_data|
portrait_data[:var_a] = portrait_data[:area_index]
portrait_data[:var_b] = ((portrait_data[:sector_index] << 6) & 0x03C0) | (portrait_data[:room_index] & 0x003F)
end
PORTRAIT_NAMES = PORTRAIT_NAME_TO_DATA.keys
# Bidirectional lookups between a portrait name and its destination area.
AREA_INDEX_TO_PORTRAIT_NAME = PORTRAIT_NAME_TO_DATA.map do |name, data|
[data[:area_index], name]
end.to_h
PORTRAIT_NAME_TO_AREA_INDEX = PORTRAIT_NAME_TO_DATA.map do |name, data|
[name, data[:area_index]]
end.to_h
# Entity location string ("area-sector-room_entity") where each portrait
# is found in the vanilla game.
PORTRAIT_NAME_TO_DEFAULT_ENTITY_LOCATION = {
:portraitcityofhaze => "00-01-00_00",
:portraitsandygrave => "00-04-12_00",
:portraitnationoffools => "00-06-01_00",
:portraitforestofdoom => "00-08-01_02",
:portraitdarkacademy => "00-0B-00_04",
:portraitburntparadise => "00-0B-00_03",
:portraitforgottencity => "00-0B-00_01",
:portrait13thstreet => "00-0B-00_02",
:portraitnestofevil => "00-00-05_00",
}
# Difficulty grouping used to avoid placing late-game portraits too early
# when enemies cannot be rebalanced.
EARLY_GAME_PORTRAITS = [
:portraitcityofhaze,
:portraitsandygrave,
:portraitnationoffools,
:portraitforestofdoom,
]
LATE_GAME_PORTRAITS = [
:portraitdarkacademy,
:portraitburntparadise,
:portraitforgottencity,
:portrait13thstreet
]
# Top-level entry point for pickup randomization. Seeds the logic checker
# with the items the player starts with (per game), applies a few one-off
# ROM patches to avoid softlocks/overlapping items, runs the configured
# fill algorithm (:forward or :assumed), then verifies the seed is
# beatable. Yields progress fractions to +block+ in forward-fill mode.
def randomize_pickups_completably(&block)
spoiler_log.puts
spoiler_log.puts "Randomizing pickups:"
# Register the items each game starts with so the checker's reachability
# logic accounts for them.
case GAME
when "dos"
checker.add_item(0x43) # knife
checker.add_item(0x91) # casual clothes
checker.add_item(0x3D) # seal 1
if options[:unlock_boss_doors]
checker.add_item(0x3E) # seal 2
checker.add_item(0x3F) # seal 3
checker.add_item(0x40) # seal 4
checker.add_item(0x41) # seal 5
end
when "por"
checker.add_item(0x61) # starting vampire killer
checker.add_item(0x6C) # encyclopedia
checker.add_item(0xAA) # casual clothes
# In the corridor where Behemoth chases you, change the code of the platform to not permanently disappear.
# This is so the player can't get stuck if they miss an important item up there.
game.fs.load_overlay(79)
game.fs.write(0x022EC638, [0xEA000003].pack("V"))
# Room in Sandy Grave that has two overlapping Charm Necklaces.
# We don't want these to overlap as the player could easily think it's just one item and not see the one beneath it.
# Move one a bit to the left and the other a bit to the right. Also give one a different pickup flag.
item_a = game.areas[3].sectors[0].rooms[0x13].entities[0]
item_b = game.areas[3].sectors[0].rooms[0x13].entities[1]
item_a.x_pos = 0x120
item_b.x_pos = 0x140
pickup_flag = get_unused_pickup_flag()
item_b.var_a = pickup_flag
use_pickup_flag(pickup_flag)
item_a.write_to_rom()
item_b.write_to_rom()
when "ooe"
checker.add_item(0xE6) # casual clothes
checker.add_item(0x6F) # lizard tail
checker.add_item(0x72) # glyph union
checker.add_item(0x1E) # torpor. the player will get enough of these as it is
# Give the player the glyph sleeve in Ecclesia like in hard mode.
# To do this just get rid of the entity hider that hides it on normal mode.
entity_hider = game.areas[2].sectors[0].rooms[4].entities[6]
entity_hider.type = 0
entity_hider.write_to_rom()
# But we also need to give the chest a unique flag, because it shares the flag with the one from Minera in normal mode.
sleeve_chest = game.areas[2].sectors[0].rooms[4].entities[7]
pickup_flag = get_unused_pickup_flag()
sleeve_chest.var_b = pickup_flag
use_pickup_flag(pickup_flag)
sleeve_chest.write_to_rom()
# We also make sure the chest in Minera appears even on hard mode.
entity_hider = game.areas[8].sectors[2].rooms[7].entities[1]
entity_hider.type = 0
entity_hider.write_to_rom()
checker.add_item(0x73) # glyph sleeve
# Room in the Final Approach that has two overlapping chests both containing diamonds.
# We don't want these to overlap as the player could easily think it's just one item and not see the one beneath it.
# Move one a bit to the left and the other a bit to the right. Also give one a different pickup flag.
chest_a = game.areas[0].sectors[0xA].rooms[0xB].entities[1]
chest_b = game.areas[0].sectors[0xA].rooms[0xB].entities[2]
chest_a.x_pos = 0xE0
chest_b.x_pos = 0x130
pickup_flag = get_unused_pickup_flag()
chest_b.var_b = pickup_flag
use_pickup_flag(pickup_flag)
chest_a.write_to_rom()
chest_b.write_to_rom()
end
@locations_randomized_to_have_useful_pickups = []
@rooms_by_progression_order_accessed = []
@rooms_that_already_have_an_event = []
# Record every room that already contains an event object (subtypes
# 0x5F..0x88) so the randomizer won't place a second event (e.g. a
# villager) in the same room.
game.each_room do |room|
room.entities.each do |entity|
if entity.is_special_object? && (0x5F..0x88).include?(entity.subtype)
room_str = "%02X-%02X-%02X" % [room.area_index, room.sector_index, room.room_index]
@rooms_that_already_have_an_event << room_str
break
end
end
end
# Run the selected fill algorithm. Only forward fill reports progress.
if @progression_fill_mode == :forward
total_progression_pickups = checker.all_progression_pickups.length
place_progression_pickups_forward_fill() do |progression_pickups_placed|
percent_done = progression_pickups_placed.to_f / total_progression_pickups
yield percent_done
end
elsif @progression_fill_mode == :assumed
place_progression_pickups_assumed_fill()
else
raise "Unknown progression fill mode: #{@progression_fill_mode}"
end
# Sanity check: the fill algorithms should always produce a beatable
# seed; failing here indicates a logic bug, not bad luck.
if !checker.game_beatable?
item_names = checker.current_items.map do |global_id|
if global_id.is_a?(Symbol)
global_id
else
checker.defs.invert[global_id]
end
end.compact
raise "Bug: Game is not beatable on this seed!\nThis error shouldn't happen.\nSeed: #{@seed}\n\nItems:\n#{item_names.join(", ")}"
end
if GAME == "por" && options[:randomize_portraits]
# Remove the extra portraits at the end of 13th Street, Forgotten City, Burnt Paradise, and Dark Academy.
# (The one return portrait back to where you entered this portrait from is not removed, and is updated elsewhere in the code.)
[
"02-02-16_01",
"02-02-16_03",
"02-02-16_04",
"04-01-07_02",
"04-01-07_03",
"04-01-07_04",
"06-00-06_01",
"06-00-06_02",
"06-00-06_04",
"08-00-08_01",
"08-00-08_02",
"08-00-08_03",
].each do |entity_str|
portrait = game.entity_by_str(entity_str)
portrait.type = 0
portrait.write_to_rom()
end
end
rescue StandardError => e
#output_map_rando_error_debug_info()
raise e
end
# Assumed-fill placement: repeatedly scatters all progression pickups into
# random valid locations, then simulates play to verify every pickup is
# reachable; retries until a beatable arrangement is found. Afterwards it
# writes the chosen placements to the ROM and the spoiler log.
# Fix: the "Couldn't place ... anywhere" raise message had a stray "}".
def place_progression_pickups_assumed_fill
verbose = false
# This attribute is modified when adding a villager to a room.
orig_rooms_that_already_have_an_event = @rooms_that_already_have_an_event.dup
# First place things that are not randomized in their normal locations.
nonrandomized_item_locations = get_nonrandomized_item_locations()
orig_current_items = checker.current_items.dup
if room_rando?
orig_return_portraits = checker.return_portraits.dup
end
pickups_available = checker.all_progression_pickups - checker.current_items - nonrandomized_item_locations.values
# Because DoS has two bat transformation souls, which both allow a ton of progression, at least one of them tends to be placed very early.
# So we change it so that only one of the two is available to be randomly placed to reduce the chance of early bat.
# (The remaining second one will be placed non-randomly later.)
if GAME == "dos" && pickups_available.include?(0x104) && pickups_available.include?(0xFC)
bat_to_keep = [0x104, 0xFC].sample(random: rng)
bat_to_remove = (bat_to_keep == 0x104 ? 0xFC : 0x104)
pickups_available.delete(bat_to_remove)
elsif GAME == "dos" && pickups_available.include?(0xFC)
bat_to_keep = 0x104
bat_to_remove = 0xFC
pickups_available.delete(bat_to_remove)
else
bat_to_keep = nil
bat_to_remove = nil
end
if room_rando?
# Temporarily give all progress items and check what locations are available.
# Those are all the valid locations on this seed, excluding rooms, subrooms, and portraits that are unused.
checker.all_progression_pickups.each do |pickup|
next if checker.current_items.include?(pickup)
checker.add_item(pickup)
end
locations_available, _ = checker.get_accessible_locations_and_doors()
checker.restore_current_items(orig_current_items)
else
locations_available = checker.all_locations.keys
# Don't put items in removed portraits.
if GAME == "por" && options[:por_short_mode]
area_indexes_of_removed_portraits = @portraits_to_remove.map do |portrait_name|
PickupRandomizer::PORTRAIT_NAME_TO_AREA_INDEX[portrait_name]
end
locations_available.reject! do |location|
area_indexes_of_removed_portraits.include?(location[0,2].to_i(16))
end
end
end
locations_available -= nonrandomized_item_locations.keys
locations_accessible_at_start = nil
if room_rando?
locations_accessible_at_start, _ = checker.get_accessible_locations_and_doors()
end
# Place pickups in completely random locations, and then check if the resulting seed is beatable.
# Repeat this until a beatable seed is found.
num_failures = 0
while true
@done_item_locations = nonrandomized_item_locations.dup
@rooms_that_already_have_an_event = orig_rooms_that_already_have_an_event.dup
progression_spheres = decide_progression_pickups_for_assumed_fill(
pickups_available,
locations_available,
locations_accessible_at_start: locations_accessible_at_start
)
if progression_spheres != :failure
puts "Total number of assumed fill failures: #{num_failures}"
break
end
num_failures += 1
puts "Assumed fill failure ##{num_failures}" if num_failures % 100 == 0
# Roll back checker state mutated by the failed attempt before retrying.
checker.restore_current_items(orig_current_items)
if room_rando?
checker.restore_return_portraits(orig_return_portraits)
end
end
# Restore this since any villagers we decided on during the previous step haven't actually been placed yet.
@rooms_that_already_have_an_event = orig_rooms_that_already_have_an_event.dup
@progression_spheres = progression_spheres
if bat_to_keep
# If we had to remove one of the two bats from being randomly placed, we now go and place it non-randomly.
# We simply place it in the last possible progression sphere we can find.
# (Which specific location within that sphere is still chosen randomly.)
placed_bat_to_remove = false
@progression_spheres.reverse_each do |sphere|
locations_accessed_in_this_sphere = sphere[:locs]
progress_locations_accessed_in_this_sphere = sphere[:progress_locs]
unused_locs = locations_accessed_in_this_sphere - progress_locations_accessed_in_this_sphere
valid_unused_locs = filter_locations_valid_for_pickup(
unused_locs,
bat_to_remove
)
if valid_unused_locs.any?
location_for_bat_to_remove = valid_unused_locs.sample(random: rng)
@done_item_locations[location_for_bat_to_remove] = bat_to_remove
progress_locations_accessed_in_this_sphere = locations_accessed_in_this_sphere.select do |location|
progress_locations_accessed_in_this_sphere.include?(location) || location == location_for_bat_to_remove
end
sphere[:progress_locs] = progress_locations_accessed_in_this_sphere
placed_bat_to_remove = true
break
end
end
if !placed_bat_to_remove
raise "Couldn't place #{checker.defs.invert[bat_to_remove]} anywhere"
end
end
# Now actually place the pickups in the locations we decided on, and write to the spoiler log.
already_seen_room_strs = []
sphere_index = 0
@progression_spheres.each do |sphere|
progress_locations_accessed_in_this_sphere = sphere[:progress_locs]
doors_accessed_in_this_sphere = sphere[:doors]
spoiler_str = "#{sphere_index+1}:"
spoiler_log.puts spoiler_str
puts spoiler_str if verbose
progress_locations_accessed_in_this_sphere.each do |location|
@locations_randomized_to_have_useful_pickups << location
next if nonrandomized_item_locations.has_key?(location)
pickup_global_id = @done_item_locations[location]
change_entity_location_to_pickup_global_id(location, pickup_global_id)
spoiler_str = get_item_placement_spoiler_string(location, pickup_global_id)
spoiler_log.puts spoiler_str
puts spoiler_str if verbose
end
if room_rando?
rooms_accessed_in_this_sphere = doors_accessed_in_this_sphere.map{|door_str| door_str[0,8]}
# Remove duplicate rooms caused by accessing a new door in an old room.
rooms_accessed_in_this_sphere -= already_seen_room_strs
@rooms_by_progression_order_accessed << rooms_accessed_in_this_sphere
already_seen_room_strs += rooms_accessed_in_this_sphere
end
sphere_index += 1
end
end
# One attempt of assumed fill: scatters every remaining progression pickup
# into a random valid location, then simulates collection sphere-by-sphere.
# Returns the list of progression spheres (each a hash with :locs,
# :progress_locs and :doors) on success, or :failure if some placed pickup
# turned out to be unreachable (the caller then retries).
def decide_progression_pickups_for_assumed_fill(pickups_available, locations_available, locations_accessible_at_start: nil)
remaining_progress_items = pickups_available.dup
remaining_locations = locations_available.dup
# If the player starts inside a portrait area, that portrait must be
# placed inside Dracula's Castle so the rest of the game stays reachable.
if GAME == "por" && options[:randomize_starting_room] && options[:randomize_portraits]
starting_portrait_name = AREA_INDEX_TO_PORTRAIT_NAME[@starting_room.area_index]
if starting_portrait_name
starting_portrait_location_in_castle = pick_starting_portrait_location_in_castle()
@done_item_locations[starting_portrait_location_in_castle] = starting_portrait_name
return_portrait = get_primary_return_portrait_for_portrait(starting_portrait_name)
checker.add_return_portrait(return_portrait.room.room_str, starting_portrait_location_in_castle)
remaining_progress_items.delete(starting_portrait_name)
end
end
if room_rando?
# Place the very first item somewhere that is definitely reachable within the first sphere.
# This is for the sake of performance - tons of attempts where there isn't a single item accessible at the start is just a waste of time.
if locations_accessible_at_start.nil?
locations_accessible_at_start, _ = checker.get_accessible_locations_and_doors()
end
possible_first_items = remaining_progress_items.dup
if GAME == "por" && (!room_rando? || !options[:rebalance_enemies_in_room_rando])
# Don't allow putting late game portraits at the very start.
possible_first_items -= LATE_GAME_PORTRAITS
end
possible_first_items.shuffle!(random: rng)
while true
if possible_first_items.empty?
raise "No possible item to place first in assumed fill"
end
possible_first_item = possible_first_items.pop()
possible_locations = filter_locations_valid_for_pickup(locations_accessible_at_start, possible_first_item)
if possible_locations.empty?
next
end
remaining_progress_items.delete(possible_first_item)
location = possible_locations.sample(random: rng)
remaining_locations.delete(location)
@done_item_locations[location] = possible_first_item
if RANDOMIZABLE_VILLAGER_NAMES.include?(possible_first_item)
# Villager
room_str = location[0,8]
@rooms_that_already_have_an_event << room_str
end
break
end
end
# Scatter every remaining progression pickup into a random valid location,
# with no reachability constraint yet (that's what the simulation below checks).
remaining_progress_items.each do |pickup_global_id|
possible_locations = filter_locations_valid_for_pickup(remaining_locations, pickup_global_id)
if possible_locations.empty?
raise "No locations to place pickup"
end
location = possible_locations.sample(random: rng)
remaining_locations.delete(location)
@done_item_locations[location] = pickup_global_id
if RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
# Villager
room_str = location[0,8]
@rooms_that_already_have_an_event << room_str
end
end
# Simulate playthrough: each iteration of the loop is one progression
# sphere. If a sphere grants access to no new progression locations, the
# arrangement is unbeatable and the attempt fails.
inaccessible_progress_locations = @done_item_locations.keys
accessible_progress_locations = []
accessible_doors = []
progression_spheres = []
while true
if room_rando?
curr_accessible_locations, curr_accessible_doors = checker.get_accessible_locations_and_doors()
locations_accessed_in_this_sphere = curr_accessible_locations
doors_accessed_in_this_sphere = curr_accessible_doors - accessible_doors
else
locations_accessed_in_this_sphere = checker.get_accessible_locations()
end
progress_locations_accessed_in_this_sphere = locations_accessed_in_this_sphere & inaccessible_progress_locations
if progress_locations_accessed_in_this_sphere.empty?
#if room_rando?
#  puts "Starting room: #{@starting_room}"
#  puts "Num progression spheres at time of failure: #{progression_spheres.size}"
#  puts "Num accessible locations at time of failure: #{curr_accessible_locations.size}"
#  accesible_progress_locs = (@done_item_locations.keys & curr_accessible_locations)
#  puts "Num accessible progress locations at time of failure: #{accesible_progress_locs.size}"
#  puts "Total progress locations at time of failure: #{@done_item_locations.keys.size}"
#  accessible_area_indexes = curr_accessible_doors.map{|x| x[0,2].to_i(16)}.uniq
#  puts "All accessible areas: #{accessible_area_indexes}"
#  
#  inaccessible_item_locations = (@done_item_locations.keys - accesible_progress_locs)
#  puts "Inaccessible item locations:"
#  p inaccessible_item_locations
#  puts "Inaccessible items:"
#  p inaccessible_item_locations.map{|loc| @done_item_locations[loc]}
#else
#  puts "Starting room: #{@starting_room}"
#  puts "Num progression spheres at time of failure: #{progression_spheres.size}"
#  puts "Total progress locations at time of failure: #{@done_item_locations.keys.size}"
#  puts "Num accessible progress locations at time of failure: #{accessible_progress_locations.size}"
#  puts "Num inaccessible progress locations at time of failure: #{inaccessible_progress_locations.size}"
#  puts "Inaccessible locations: #{inaccessible_progress_locations}"
#  accessible_area_indexes = (accessible_progress_locations+locations_accessed_in_this_sphere).map{|x| x[0,2].to_i(16)}.uniq
#  puts "All accessible areas: #{accessible_area_indexes}"
#end
return :failure
end
# Collect everything newly reachable this sphere.
pickups_obtained_in_this_sphere = []
progress_locations_accessed_in_this_sphere.each do |location|
pickup_global_id = @done_item_locations[location]
pickups_obtained_in_this_sphere << pickup_global_id
checker.add_item(pickup_global_id)
end
if GAME == "por" && progression_spheres.size == 0 && (!room_rando? || !options[:rebalance_enemies_in_room_rando])
# If portraits are randomized but we can't rebalance enemies, try to avoid placing late game portraits in the early game.
if (pickups_obtained_in_this_sphere & LATE_GAME_PORTRAITS).any?
return :failure
end
end
accessible_progress_locations += progress_locations_accessed_in_this_sphere
inaccessible_progress_locations -= progress_locations_accessed_in_this_sphere
progression_spheres << {
locs: locations_accessed_in_this_sphere,
progress_locs: progress_locations_accessed_in_this_sphere,
doors: doors_accessed_in_this_sphere,
}
if inaccessible_progress_locations.empty?
break
end
end
return progression_spheres
end
# Builds a map of location string => pickup ID for pickup categories the
# user chose NOT to randomize (boss souls, villagers, portraits), keeping
# their vanilla contents. Each such location is also marked as already
# holding a useful pickup so the randomizer won't reuse it.
def get_nonrandomized_item_locations
  vanilla_locations = {}

  # Records one vanilla placement and reserves its location.
  record_vanilla = lambda do |location, pickup_global_id|
    vanilla_locations[location] = pickup_global_id
    @locations_randomized_to_have_useful_pickups << location
  end

  # Vanilla boss souls (DoS and OoE only).
  if ["dos", "ooe"].include?(GAME) && !options[:randomize_boss_souls]
    checker.enemy_locations.each do |location|
      record_vanilla.call(location, get_entity_skill_drop_by_entity_location(location))
    end
  end

  # Vanilla villagers (OoE only).
  if GAME == "ooe" && !options[:randomize_villagers]
    checker.villager_locations.each do |location|
      record_vanilla.call(location, get_villager_name_by_entity_location(location))
    end
  end

  # Vanilla portraits (PoR only).
  if GAME == "por" && !options[:randomize_portraits]
    checker.portrait_locations.each do |location|
      # Don't count removed portraits in short mode as portrait locations.
      next if @portrait_locations_to_remove.include?(location)
      record_vanilla.call(location, get_portrait_name_by_entity_location(location))
    end
  end

  vanilla_locations
end
# Places every progression pickup (the pickups required to beat the game)
# using a forward-fill algorithm: repeatedly compute which locations are
# reachable with the items placed so far, choose a pickup (weighted towards
# less-immediately-useful ones), and place it in a reachable spot.
# Yields the running count of placed progression pickups after each placement.
# Raises when no valid spot exists for a chosen pickup (logic bug or bad seed).
def place_progression_pickups_forward_fill(&block)
  previous_accessible_locations = []
  progression_pickups_placed = 0
  total_progression_pickups = checker.all_progression_pickups.length
  on_leftovers = false

  if GAME == "por" && options[:randomize_starting_room] && options[:randomize_portraits]
    starting_portrait_name = AREA_INDEX_TO_PORTRAIT_NAME[@starting_room.area_index]
    if starting_portrait_name
      starting_portrait_location_in_castle = pick_starting_portrait_location_in_castle()
      change_entity_location_to_pickup_global_id(starting_portrait_location_in_castle, starting_portrait_name)
      @locations_randomized_to_have_useful_pickups << starting_portrait_location_in_castle
    end
  end

  verbose = false

  # First place progression pickups needed to beat the game.
  spoiler_log.puts "Placing main route progression pickups:"
  while true
    if room_rando?
      possible_locations, accessible_doors = checker.get_accessible_locations_and_doors()
      accessible_rooms = accessible_doors.map{|door_str| door_str[0,8]}
      @rooms_by_progression_order_accessed << accessible_rooms
    else
      possible_locations = checker.get_accessible_locations()
    end
    possible_locations -= @locations_randomized_to_have_useful_pickups
    puts "Total possible locations: #{possible_locations.size}" if verbose

    pickups_by_locations = checker.pickups_by_current_num_locations_they_access()
    if starting_portrait_name
      # Don't place the starting portrait anywhere, it's already in Dracula's Castle.
      pickups_by_locations.delete(starting_portrait_name)
    end
    if GAME == "por" && options[:randomize_portraits] && (!room_rando? || !options[:rebalance_enemies_in_room_rando])
      # If portraits are randomized but we can't rebalance enemies, try to avoid placing late game portraits in the early game.
      if progression_pickups_placed < 5
        pickups_by_locations_filtered = pickups_by_locations.reject do |pickup, usefulness|
          LATE_GAME_PORTRAITS.include?(pickup)
        end
        if pickups_by_locations_filtered.any?
          pickups_by_locations = pickups_by_locations_filtered
        end
      end
    end
    pickups_by_usefulness = pickups_by_locations.select{|pickup, num_locations| num_locations > 0}
    currently_useless_pickups = pickups_by_locations.select{|pickup, num_locations| num_locations == 0}
    puts "Num useless pickups: #{currently_useless_pickups.size}" if verbose

    placing_currently_useless_pickup = false

    if pickups_by_usefulness.any?
      max_usefulness = pickups_by_usefulness.values.max
      weights = pickups_by_usefulness.map do |pickup, usefulness|
        # Weight less useful pickups as being more likely to be chosen.
        weight = max_usefulness - usefulness + 1
        weight = Math.sqrt(weight)
        if checker.preferences[pickup]
          weight *= checker.preferences[pickup]
        end
        weight
      end
      ps = weights.map{|w| w.to_f / weights.reduce(:+)}
      useful_pickups = pickups_by_usefulness.keys
      weighted_useful_pickups = useful_pickups.zip(ps).to_h
      # Weighted random choice via exponential sampling (rand ** (1/weight)).
      pickup_global_id = weighted_useful_pickups.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first
      weighted_useful_pickups_names = weighted_useful_pickups.map do |global_id, weight|
        "%.2f %s" % [weight, checker.defs.invert[global_id]]
      end
      #puts "Weighted less useful pickups: [" + weighted_useful_pickups_names.join(", ") + "]"
    elsif pickups_by_locations.any? && checker.game_beatable?
      # The player can access all locations.
      # So we just randomly place one progression pickup.
      if !on_leftovers
        spoiler_log.puts "Placing leftover progression pickups:"
        on_leftovers = true
      end
      pickup_global_id = pickups_by_locations.keys.sample(random: rng)
    elsif pickups_by_locations.any?
      # No locations can access new areas, but the game isn't beatable yet.
      # This means any new areas will need at least two new items to access.
      # So just place a random pickup for now.
      valid_pickups = pickups_by_locations.keys
      if GAME == "ooe" && options[:randomize_villagers]
        valid_villagers = valid_pickups & RANDOMIZABLE_VILLAGER_NAMES
        if checker.albus_fight_accessible?
          if valid_villagers.any?
            # Once Albus is accessible, prioritize placing villagers over other pickups.
            valid_pickups = valid_villagers
          end
        else
          # Don't start placing villagers until Albus is accessible.
          valid_pickups -= RANDOMIZABLE_VILLAGER_NAMES
        end
        if valid_pickups.empty?
          # But if the only things left to place are villagers, we have no choice but to place them before Albus is accessible.
          valid_pickups = pickups_by_locations.keys
        end
      elsif GAME == "dos" && room_rando? && accessible_rooms.include?("00-06-00")
        # Player has access to the Subterranean Hell room with the huge spikes.
        # To get through this room you need either rahab and bone ark or rahab, puppet master, and skeleton ape.
        # The logic can have trouble placing the items necessary to get through this room, since skeleton ape and bone ark are useless everywhere else, and rahab is only useful in a handful of rooms - so if the player doesn't have access to any places that make rahab useful by itself, the randomizer might just try to place every other item, filling up all available item locations, and never place rahab.
        # So we add a special case here to 100% guaranteed place rahab (assuming the player has access to under 15 item locations). From there the randomizer can figure out that it should place bone ark or puppet master and skeleton ape.
        if valid_pickups.include?(0x145) && possible_locations.length < 15
          valid_pickups = [0x145] # Rahab
        end
      end
      pickup_global_id = valid_pickups.sample(random: rng)
      placing_currently_useless_pickup = true
      puts "Placing currently useless pickup." if verbose
    else
      # All progression pickups placed.
      break
    end

    pickup_name = checker.defs.invert[pickup_global_id].to_s
    puts "Trying to place #{pickup_name}" if verbose

    if !options[:randomize_boss_souls]
      # If randomize boss souls option is off, don't allow putting random things in these locations.
      accessible_unused_boss_locations = possible_locations & checker.enemy_locations
      accessible_unused_boss_locations.each do |location|
        possible_locations.delete(location)
        @locations_randomized_to_have_useful_pickups << location
        # Also, give the player what this boss drops so the checker takes this into account.
        pickup_global_id = get_entity_skill_drop_by_entity_location(location)
        checker.add_item(pickup_global_id) unless pickup_global_id.nil?
      end
      # Restart the loop so a fresh pickup is chosen with the boss drops accounted for.
      next if accessible_unused_boss_locations.length > 0
    end
    if !options[:randomize_villagers] && GAME == "ooe"
      # If randomize villagers option is off, don't allow putting random things in these locations.
      accessible_unused_villager_locations = possible_locations & checker.villager_locations
      accessible_unused_villager_locations.each do |location|
        possible_locations.delete(location)
        @locations_randomized_to_have_useful_pickups << location
        # Also, give the player this villager so the checker takes this into account.
        villager_name = get_villager_name_by_entity_location(location)
        checker.add_item(villager_name)
      end
      next if accessible_unused_villager_locations.length > 0
    end
    if !options[:randomize_portraits] && GAME == "por"
      # If randomize portraits option is off, don't allow putting random things in these locations.
      accessible_unused_portrait_locations = possible_locations & checker.portrait_locations
      accessible_unused_portrait_locations -= @portrait_locations_to_remove # Don't count removed portraits in short mode as portrait locations.
      accessible_unused_portrait_locations.each do |location|
        possible_locations.delete(location)
        @locations_randomized_to_have_useful_pickups << location
        # Also, give the player this portrait so the checker takes this into account.
        portrait_name = get_portrait_name_by_entity_location(location)
        checker.add_item(portrait_name)
      end
      next if accessible_unused_portrait_locations.length > 0
    end

    new_possible_locations = possible_locations - previous_accessible_locations.flatten
    filtered_new_possible_locations = filter_locations_valid_for_pickup(new_possible_locations, pickup_global_id)
    puts "Filtered new possible locations: #{filtered_new_possible_locations.size}" if verbose
    puts " " + filtered_new_possible_locations.join(", ") if verbose

    # NOTE: the block-local below was previously named possible_locations,
    # which clobbered the outer local of the same name; renamed to be safe.
    valid_previous_accessible_regions = previous_accessible_locations.map do |previous_accessible_region|
      region_locations = previous_accessible_region.dup
      region_locations -= @locations_randomized_to_have_useful_pickups
      region_locations = filter_locations_valid_for_pickup(region_locations, pickup_global_id)
      region_locations = nil if region_locations.empty?
      region_locations
    end.compact

    possible_locations_to_choose_from = filtered_new_possible_locations.dup
    if placing_currently_useless_pickup
      # Place items that don't immediately open up new areas anywhere in the game, with no weighting towards later areas.
      valid_accessible_locations = previous_accessible_locations.map do |previous_accessible_region|
        region_locations = previous_accessible_region.dup
        region_locations -= @locations_randomized_to_have_useful_pickups
        region_locations = filter_locations_valid_for_pickup(region_locations, pickup_global_id)
        region_locations = nil if region_locations.empty?
        region_locations
      end.compact.flatten
      valid_accessible_locations += filtered_new_possible_locations
      possible_locations_to_choose_from = valid_accessible_locations
    elsif filtered_new_possible_locations.empty? && valid_previous_accessible_regions.any?
      # No new locations, so select an old location.
      if on_leftovers
        # Just placing a leftover progression pickup.
        # Weighted to be more likely to select locations you got access to later rather than earlier.
        i = 1
        weights = valid_previous_accessible_regions.map do |region|
          # Weight later accessible regions as more likely than earlier accessible regions (exponential)
          weight = i**2
          i += 1
          weight
        end
        ps = weights.map{|w| w.to_f / weights.reduce(:+)}
        weighted_accessible_regions = valid_previous_accessible_regions.zip(ps).to_h
        previous_accessible_region = weighted_accessible_regions.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first
        possible_locations_to_choose_from = previous_accessible_region
      else
        # Placing a main route progression pickup, just not one that immediately opens up new areas.
        # Always place in the most recent accessible region.
        possible_locations_to_choose_from = valid_previous_accessible_regions.last
        puts "No new locations, using previous accessible location, total available: #{valid_previous_accessible_regions.last.size}" if verbose
      end
    elsif filtered_new_possible_locations.empty? && valid_previous_accessible_regions.empty?
      # No new locations, but there's no old locations either.
      if @locations_randomized_to_have_useful_pickups.size < 2
        # If we're still very early in placing items yet there's no accessible spots, then the room/map randomizer must have resulted in a bad start.
        # So we place the this progression item in the starting room.
        entity = @starting_room.add_new_entity()
        entity.x_pos = @starting_x_pos
        entity.y_pos = @starting_y_pos
        @coll = RoomCollision.new(@starting_room, game.fs)
        # Bugfix: read the collision object via the ivar directly — the old
        # code assigned @coll but then read bare `coll`, which relies on a
        # reader method existing.
        floor_y = @coll.get_floor_y(entity, allow_jumpthrough: true)
        entity.y_pos = floor_y - 0x18
        location = "#{@starting_room.room_str}_%02X" % (@starting_room.entities.length-1)
        possible_locations_to_choose_from = [location]
      else
        possible_locations_to_choose_from = []
      end
    elsif filtered_new_possible_locations.size <= 5 && valid_previous_accessible_regions.last && valid_previous_accessible_regions.last.size >= 15
      # There aren't many new locations unlocked by the last item we placed.
      # But there are a lot of other locations unlocked by the one we placed before that.
      # So we give it a chance to put it in one of those last spots, instead of the new spots.
      # The chance is proportional to how few new locations there are. 1 = 70%, 2 = 60%, 3 = 50%, 4 = 40%, 5 = 30%.
      # Bugfix: this used to multiply by 10 instead of 0.10, which made the
      # chance 10.3-40.3 (i.e. always taken) for 1-4 new locations instead of
      # the percentages documented above.
      chance = 0.30 + (5-filtered_new_possible_locations.size)*0.10
      if rng.rand() <= chance
        possible_locations_to_choose_from = valid_previous_accessible_regions.last
        puts "Not many new locations, using previous accessible location, total available: #{valid_previous_accessible_regions.last.size}" if verbose
      end
    end

    previous_accessible_locations << new_possible_locations

    if possible_locations_to_choose_from.empty?
      item_names = checker.current_items.map do |global_id|
        checker.defs.invert[global_id]
      end.compact
      raise "Bug: Failed to find any spots to place pickup.\nSeed: #{@seed}\n\nItems:\n#{item_names.join(", ")}"
    end

    #puts "Possible locations: #{possible_locations_to_choose_from.join(", ")}" if verbose
    location = possible_locations_to_choose_from.sample(random: rng)
    @locations_randomized_to_have_useful_pickups << location

    spoiler_str = get_item_placement_spoiler_string(location, pickup_global_id)
    spoiler_log.puts spoiler_str
    puts spoiler_str if verbose

    change_entity_location_to_pickup_global_id(location, pickup_global_id)
    checker.add_item(pickup_global_id)

    progression_pickups_placed += 1
    yield(progression_pickups_placed)
  end

  # Disabled debug dump of accessible doors (note the `&& false`).
  if room_rando? && false
    File.open("accessible_doors.txt", "w") do |f|
      accessible_doors.each do |accessible_door|
        f.puts accessible_door
      end
    end
  end

  spoiler_log.puts "All progression pickups placed successfully."
end
# Chooses a random valid location inside Dracula's Castle (area 00) to hold
# the portrait leading back to the player's starting area.
# Only valid in PoR with both starting room and portraits randomized.
def pick_starting_portrait_location_in_castle
  unless GAME == "por" && options[:randomize_starting_room] && options[:randomize_portraits]
    raise "Cannot choose random location for starting portrait with these settings"
  end

  starting_portrait_name = AREA_INDEX_TO_PORTRAIT_NAME[@starting_room.area_index]
  raise "Starting area is not in a portrait" if starting_portrait_name.nil?

  # The starting room randomizer started the player in a portrait.
  # This is problematic because the portrait randomizer will traditionally never place a portrait back to Dracula's castle, making it inaccessible.
  # So we need to place the starting portrait at a random location in Dracula's Castle and register it with the logic.
  # First pick a random valid location.
  candidates = filter_locations_valid_for_pickup(checker.all_locations.keys, starting_portrait_name)

  # Throw out locations in rooms the room randomizer didn't use.
  unused_room_strs = @unused_rooms.map(&:room_str)
  candidates = candidates.reject{|location| unused_room_strs.include?(location[0,8])}

  # Restrict to Dracula's Castle (area index 0).
  candidates = candidates.select{|location| location[0,2].to_i(16) == 0}

  candidates.sample(random: rng)
end
# Finds the return-portrait entity inside the destination room of the given
# portrait. Returns nil if the destination room has no such entity.
def get_primary_return_portrait_for_portrait(portrait_name)
  data = PORTRAIT_NAME_TO_DATA[portrait_name]
  dest_room = game.areas[data[:area_index]].sectors[data[:sector_index]].rooms[data[:room_index]]
  # 0x1A/0x76/0x86/0x87 are the special-object subtypes used for return portraits.
  dest_room.entities.find do |entity|
    entity.is_special_object? && [0x1A, 0x76, 0x86, 0x87].include?(entity.subtype)
  end
end
# Formats one "Pickup: AreaName (location) (tags)" line for the spoiler log.
def get_item_placement_spoiler_string(location, pickup_global_id)
  pickup_str =
    if RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
      # Villager
      # Remove the word villager and capitalize the name.
      pickup_global_id[8..-1].capitalize
    elsif PORTRAIT_NAMES.include?(pickup_global_id)
      # Portrait
      # Remove the word portrait and capitalize the name.
      pickup_global_id[8..-1].capitalize
    else
      raw_name = checker.defs.invert[pickup_global_id].to_s
      raw_name.tr("_", " ").split.map(&:capitalize).join(" ")
    end

  match = location.match(/^(\h\h)-(\h\h)-(\h\h)_(\h+)$/)
  area_index, sector_index, room_index, entity_index = match.captures.map{|capture| capture.to_i(16)}

  if SECTOR_INDEX_TO_SECTOR_NAME[area_index]
    area_name = SECTOR_INDEX_TO_SECTOR_NAME[area_index][sector_index]
    if area_name == "Condemned Tower & Mine of Judgment"
      # This sector covers two logical areas; disambiguate by room index.
      area_name = MapRandomizer::CONDEMNED_TOWER_ROOM_INDEXES.include?(room_index) ? "Condemned Tower" : "Mine of Judgment"
    end
  else
    area_name = AREA_INDEX_TO_AREA_NAME[area_index]
  end

  # Accumulate descriptive tags in the same order the old code interpolated them.
  tags = ""
  tags << " (Boss)" if checker.enemy_locations.include?(location)
  tags << " (Event)" if checker.event_locations.include?(location)
  tags << " (Easter Egg)" if checker.easter_egg_locations.include?(location)
  tags << " (Hidden)" if checker.hidden_locations.include?(location)
  tags << " (Mirror)" if checker.mirror_locations.include?(location)

  location_str = "#{area_name} (#{location})#{tags}"
  " %-18s %s" % [pickup_str+":", location_str]
end
# Debug helper for map-rando logic errors: dumps the list of accessed room
# strings to a log file and renders per-area map images with accessed rooms
# marked (via the save-room marker).
def output_map_rando_error_debug_info
  return unless options[:randomize_maps]

  accessed_room_strs = @rooms_by_progression_order_accessed.flatten.uniq

  # When debugging logic errors in map rando, output a list of what room strings were accessible at the end.
  File.open("./logs/accessed rooms debug #{GAME} #{seed}.txt", "w") do |f|
    accessed_room_strs.each{|room_str| f.puts(room_str)}
  end

  # And also output an image of the map with accessible rooms highlighted in red.
  game.areas.each_index do |area_index|
    map = game.get_map(area_index, 0)
    map.tiles.each do |tile|
      next if tile.sector_index.nil? || tile.room_index.nil?
      room_str_for_tile = "%02X-%02X-%02X" % [area_index, tile.sector_index, tile.room_index]
      # Accessed rooms get the save marker; all other markers are cleared.
      tile.is_save = accessed_room_strs.include?(room_str_for_tile)
      tile.is_warp = false
      tile.is_entrance = false
    end
    hardcoded_transition_rooms = (GAME == "dos" ? @transition_rooms : [])
    filename = "./logs/map debug #{GAME} area %02X #{seed}.png" % area_index
    renderer.render_map(map, scale=3, hardcoded_transition_rooms=hardcoded_transition_rooms).save(filename)
  end
end
# Fills every remaining accessible location with a non-progression pickup
# (money, item, max up, or skill), with the type chosen per-location using
# the weights from @difficulty_settings. Locations that ended up unreachable
# get their items deleted instead of filled.
def place_non_progression_pickups
  remaining_locations = checker.get_accessible_locations() - @locations_randomized_to_have_useful_pickups
  remaining_locations.shuffle!(random: rng)

  # In room rando, some items may be unreachable.
  # We don't want the player to see these items in a different subroom and think the randomizer is bugged, so we delete them.
  inaccessible_remaining_locations = checker.all_locations.keys - @locations_randomized_to_have_useful_pickups - remaining_locations
  remove_inaccessible_items(inaccessible_remaining_locations)

  if GAME == "ooe"
    # Do event glyphs first. This is so they don't reuse a glyph already used by a glyph statue.
    # If the player got the one from the glyph statue first then the one in the event/puzzle wouldn't appear, breaking the event/puzzle.
    ooe_event_glyph_locations = remaining_locations.select{|location| checker.event_locations.include?(location)}
    ooe_event_glyph_locations.each do |location|
      pickup_global_id = get_unplaced_non_progression_skill()
      change_entity_location_to_pickup_global_id(location, pickup_global_id)
    end
    remaining_locations -= ooe_event_glyph_locations
  end

  # Guarantees exactly one of the hardcoded ring IDs below is placed in DoS/PoR.
  chaos_ring_placed = false
  remaining_locations.each_with_index do |location, i|
    if checker.enemy_locations.include?(location)
      # Boss
      pickup_global_id = get_unplaced_non_progression_skill()
    elsif ["dos", "por"].include?(GAME) && (checker.event_locations.include?(location) || checker.easter_egg_locations.include?(location))
      # Event item
      pickup_global_id = get_unplaced_non_progression_item()
    elsif GAME == "ooe" && location == "08-02-06_01"
      # Tin man's strength ring blue chest. Can't be a glyph.
      pickup_global_id = get_unplaced_non_progression_item_that_can_be_an_arm_shifted_immediate()
    elsif GAME == "dos" && checker.mirror_locations.include?(location)
      # Soul candles shouldn't be placed in mirrors, as they will appear even outside the mirror.
      pickup_global_id = get_unplaced_non_progression_item()
    elsif GAME == "dos" && !chaos_ring_placed
      # 0xCD is presumably the DoS Chaos Ring (per the flag name) — TODO confirm.
      pickup_global_id = 0xCD
      chaos_ring_placed = true
    elsif GAME == "por" && !chaos_ring_placed
      # 0x12C is presumably the PoR equivalent — TODO confirm.
      pickup_global_id = 0x12C
      chaos_ring_placed = true
    else
      # Pickup
      # Select the type of pickup weighed by difficulty options.
      weights = {
        money: @difficulty_settings[:money_placement_weight],
        item: @difficulty_settings[:item_placement_weight],
      }
      if GAME == "por" || GAME == "ooe"
        weights[:max_up] = @difficulty_settings[:max_up_placement_weight]
      end
      case GAME
      when "dos"
        weights[:skill] = @difficulty_settings[:soul_candle_placement_weight]
      when "por"
        weights[:skill] = @difficulty_settings[:por_skill_placement_weight]
      when "ooe"
        weights[:skill] = @difficulty_settings[:glyph_placement_weight]
      end
      # Normalize the weights to probabilities.
      weighted_pickup_types = {}
      weights_sum = weights.values.reduce(:+)
      weights.each do |type, weight|
        weighted_pickup_types[type] = weight.to_f / weights_sum
      end
      # Weighted random choice via exponential sampling (rand ** (1/weight)).
      random_pickup_type = weighted_pickup_types.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first
      case random_pickup_type
      when :money
        pickup_global_id = :money
      when :max_up
        pickup_global_id = @max_up_items.sample(random: rng)
      when :skill
        pickup_global_id = get_unplaced_non_progression_skill()
      when :item
        if checker.hidden_locations.include?(location)
          # Don't let relics be inside breakable walls in OoE.
          # This is because they need to be inside a chest, and chests can't be hidden.
          pickup_global_id = get_unplaced_non_progression_item_except_ooe_relics()
        else
          pickup_global_id = get_unplaced_non_progression_item()
        end
      end
    end

    # Record the placement (only pickups that belong to the non-progression pool are tracked).
    if all_non_progression_pickups.include?(pickup_global_id)
      @used_non_progression_pickups << pickup_global_id
    end

    change_entity_location_to_pickup_global_id(location, pickup_global_id)
  end
end
# Computes the full pool of non-progression pickups: every pickup ID minus
# progression pickups, non-randomizable IDs, max ups, and (when needed) the
# magical ticket. Raises if called more than once / too early.
def initialize_all_non_progression_pickups
  unless @all_non_progression_pickups.nil?
    raise "all_non_progression_pickups was initialized too early."
  end

  pool = PICKUP_GLOBAL_ID_RANGE.to_a
  pool -= checker.all_progression_pickups
  pool -= NONRANDOMIZABLE_PICKUP_GLOBAL_IDS
  pool -= @max_up_items
  # Exclude the magical ticket from the pool when infinite magical tickets are needed.
  pool -= [MAGICAL_TICKET_GLOBAL_ID] if needs_infinite_magical_tickets?
  @all_non_progression_pickups = pool
end
# Returns a copy of `locations` filtered down to the spots where the given
# progression pickup may legally be placed. Each restriction below removes a
# category of locations that would break events, logic, or the pickup's
# in-game representation. The input array is not mutated.
def filter_locations_valid_for_pickup(locations, pickup_global_id)
  # Work on a copy so the caller's array is untouched.
  locations = locations.dup
  if ITEM_GLOBAL_ID_RANGE.include?(pickup_global_id)
    # If the pickup is an item instead of a skill, don't let bosses drop it.
    locations -= checker.enemy_locations
  end
  # Don't let progression items be in certain problematic locations. (This function is only called for progression items.)
  locations -= checker.no_progression_locations
  if GAME == "dos" && SKILL_GLOBAL_ID_RANGE.include?(pickup_global_id)
    # Don't let events give you souls in DoS.
    locations -= checker.event_locations
    locations -= checker.easter_egg_locations
    # Don't let soul candles be inside mirrors. They don't get hidden, and are accessible without Paranoia.
    locations -= checker.mirror_locations
    # Don't let soul candles be inside specific locations that can be broken without reaching them.
    locations -= checker.no_soul_locations
  end
  if GAME == "dos" && MAGIC_SEAL_GLOBAL_ID_RANGE.include?(pickup_global_id)
    # Magic seals can't be given by easter egg locations.
    locations -= checker.easter_egg_locations
  end
  if GAME == "ooe" && ITEM_GLOBAL_ID_RANGE.include?(pickup_global_id)
    # Don't let events give you items in OoE.
    locations -= checker.event_locations
  end
  if GAME == "ooe" && !ITEM_GLOBAL_ID_RANGE.include?(pickup_global_id)
    # Glyphs/villagers can't be in the special blue chest spawned by the searchlights when you kill a Tin Man.
    locations -= ["08-02-06_01"]
  end
  if GAME == "ooe" && (!pickup_global_id.is_a?(Integer) || !game.fs.check_integer_can_be_an_arm_shifted_immediate?(pickup_global_id))
    # The pickup ID is a hardcoded arm shifted immediate for the special blue chest spawned by the searchlights when you kill a Tin Man.
    locations -= ["08-02-06_01"]
  end
  if GAME == "ooe" && (0x6F..0x74).include?(pickup_global_id)
    # Don't let relics be inside breakable walls in OoE.
    # This is because they need to be inside a chest, and chests can't be hidden.
    locations -= checker.hidden_locations
  end
  if RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
    # Villagers can't be hidden, an event glyph, or a boss drop.
    locations -= checker.hidden_locations
    locations -= checker.event_locations
    locations -= checker.enemy_locations
    # Villagers can't appear in Dracula's Castle since the castle can't be unlocked until you have all villagers.
    locations.reject! do |location|
      area_index = location[0,2].to_i(16)
      area_index == 0
    end
    # Locations too close to the top of the room shouldn't be villagers, as the Torpor glyph would spawn above the screen and not be absorbable.
    locations_too_high_to_be_a_villager = ["00-05-07_01", "00-05-07_02", "00-05-08_02", "00-05-08_03", "00-05-0C_01", "00-06-09_00", "0D-00-04_00", "0D-00-0C_00"]
    locations -= locations_too_high_to_be_a_villager
    # Two villagers shouldn't be placed in the same room, or their events will conflict and not work correctly.
    locations.reject! do |location|
      room_str = location[0,8]
      @rooms_that_already_have_an_event.include?(room_str)
    end
  end
  if PORTRAIT_NAMES.include?(pickup_global_id)
    bad_portrait_locations = [
      "05-02-0C_01", # Legion's room. If a portrait gets placed here the player won't be able to activate Legion because using a portrait doesn't set the pickup flag Legion checks.
      "05-01-13_00", # This location overlaps a ring of flaming skulls that would damage the player on return.
      "06-01-0D_02", # This location overlaps a ring of flaming skulls that would damage the player on return.
      "03-00-12_00", # Enemies overlap this location.
      "04-00-12_00", # Enemies overlap this location.
    ]
    locations.select! do |location|
      !bad_portrait_locations.include?(location)
    end
    if !room_rando? && pickup_global_id != :portraitnestofevil
      # This is the location where Nest of Evil was in vanilla.
      # If room rando is off, you need to do the quest with the map percentages to unlock this location.
      # That quest requires you to be able to access the other 8 portraits, so we can't allow any of them to be placed here.
      locations -= ["00-00-05_00"]
    end
  end
  if GAME == "ooe" && SKILL_GLOBAL_ID_RANGE.include?(pickup_global_id)
    # Don't put progression glyph in certain locations where the player could easily get them early.
    locations -= checker.no_glyph_locations
  end
  locations
end
# Picks a random not-yet-placed non-progression pickup whose global ID is in
# valid_ids, records it as used, removes it from the unplaced pool, and
# returns it. If the pool has no valid candidate left, it is refilled (so
# duplicates get placed) and the pick is retried recursively.
def get_unplaced_non_progression_pickup(valid_ids: PICKUP_GLOBAL_ID_RANGE.to_a)
  candidates = @unplaced_non_progression_pickups.select{|id| valid_ids.include?(id)}
  chosen_id = candidates.sample(random: rng)

  if chosen_id.nil?
    # Ran out of unplaced pickups, so place a duplicate instead.
    refill = all_non_progression_pickups().select{|id| valid_ids.include?(id)}
    @unplaced_non_progression_pickups += refill
    @unplaced_non_progression_pickups -= checker.current_items
    # If a glyph has already been placed as an event glyph, do not place it again somewhere.
    # If the player gets one from a glyph statue first, then the one in the event/puzzle won't appear.
    @unplaced_non_progression_pickups -= @glyphs_placed_as_event_glyphs
    return get_unplaced_non_progression_pickup(valid_ids: valid_ids)
  end

  @unplaced_non_progression_pickups.delete(chosen_id)
  @used_non_progression_pickups << chosen_id
  chosen_id
end
# Random unplaced non-progression item (item ID range only, never a skill).
def get_unplaced_non_progression_item
  get_unplaced_non_progression_pickup(valid_ids: ITEM_GLOBAL_ID_RANGE.to_a)
end
# Random unplaced non-progression item whose ID+1 can be encoded as an ARM
# shifted immediate (required where the item ID is hardcoded into an instruction).
def get_unplaced_non_progression_item_that_can_be_an_arm_shifted_immediate
  valid_ids = ITEM_GLOBAL_ID_RANGE.to_a.select do |item_id|
    game.fs.check_integer_can_be_an_arm_shifted_immediate?(item_id+1)
  end
  get_unplaced_non_progression_pickup(valid_ids: valid_ids)
end
# Random unplaced non-progression skill (skill ID range only).
def get_unplaced_non_progression_skill
  get_unplaced_non_progression_pickup(valid_ids: SKILL_GLOBAL_ID_RANGE.to_a)
end
# Random unplaced non-progression item, excluding OoE relic IDs (0x6F..0x74).
def get_unplaced_non_progression_item_except_ooe_relics
  valid_ids = ITEM_GLOBAL_ID_RANGE.to_a
  valid_ids -= (0x6F..0x74).to_a if GAME == "ooe"
  get_unplaced_non_progression_pickup(valid_ids: valid_ids)
end
# Random unplaced glyph drawn from the projectile-glyph ID ranges.
def get_unplaced_non_progression_projectile_glyph
  projectile_glyph_ids = (0x16..0x18).to_a + (0x1C..0x32).to_a + (0x34..0x36).to_a
  get_unplaced_non_progression_pickup(valid_ids: projectile_glyph_ids)
end
# Random unplaced non-progression pickup usable as an enemy drop
# (excludes items with overpowered hardcoded effects).
def get_unplaced_non_progression_pickup_for_enemy_drop
  valid_ids = PICKUP_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  get_unplaced_non_progression_pickup(valid_ids: valid_ids)
end
# Random unplaced non-progression item usable as an enemy drop
# (excludes items with overpowered hardcoded effects).
def get_unplaced_non_progression_item_for_enemy_drop
  valid_ids = ITEM_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  get_unplaced_non_progression_pickup(valid_ids: valid_ids)
end
# Random unplaced non-progression item usable as an enemy drop, excluding
# OP-hardcoded-effect items and (in OoE) relic IDs 0x6F..0x74.
def get_unplaced_non_progression_item_except_ooe_relics_for_enemy_drop
  valid_ids = ITEM_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  valid_ids -= (0x6F..0x74).to_a if GAME == "ooe"
  get_unplaced_non_progression_pickup(valid_ids: valid_ids)
end
# Parses a location string of the form "AA-SS-RR_EE" (hex area-sector-room
# plus entity index) and returns the entity it refers to.
def get_entity_by_location_str(location)
  match = location.match(/^(\h\h)-(\h\h)-(\h\h)_(\h+)$/)
  area_index, sector_index, room_index, entity_index = match.captures.map{|capture| capture.to_i(16)}
  room = game.areas[area_index].sectors[sector_index].rooms[room_index]
  room.entities[entity_index]
end
# Rewrites the entity at `location` so that it grants `pickup_global_id`
# instead of whatever it originally held.
#
# `pickup_global_id` may be an Integer (item/skill global ID), the symbol
# :money, a villager name symbol (OoE), or a portrait name symbol (PoR).
#
# Handles many special cases in order:
# * Events/easter eggs with hardcoded pickups are delegated to
#   change_hardcoded_event_pickup.
# * The OoE Tin Man searchlight chest, which is spawned from code.
# * OoE villagers and PoR portraits, which replace the entity entirely.
# * Bosses (entity type 1), whose drop is stored in the enemy DNA.
# * Regular item/skill/glyph placement for DoS/PoR and OoE.
def change_entity_location_to_pickup_global_id(location, pickup_global_id)
  entity = get_entity_by_location_str(location)
  if checker.event_locations.include?(location) || checker.easter_egg_locations.include?(location)
    # Event with a hardcoded item/glyph.
    change_hardcoded_event_pickup(entity, pickup_global_id)
    return
  end
  if GAME == "ooe" && location == "08-02-06_01" # Strength Ring blue chest spawned by the searchlights after you kill the Tin Man
    if entity.var_a != 2
      raise "Searchlights are not of type 2 (Tin Man spawn)"
    end
    # The spawned chest's contents are hardcoded in code, so patch the immediate there.
    game.fs.replace_arm_shifted_immediate_integer(0x022A194C, pickup_global_id+1)
  elsif RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
    # Villager
    if GAME != "ooe"
      raise "Tried to place villager in #{GAME}"
    end
    # Record that this room now hosts an event so no second event gets placed here.
    room_str = location[0,8]
    @rooms_that_already_have_an_event << room_str
    entity.type = 2
    entity.subtype = 0x89
    entity.var_a = VILLAGER_NAME_TO_EVENT_FLAG[pickup_global_id]
    entity.var_b = 0
    entity.write_to_rom()
    if pickup_global_id == :villageranna
      # Anna must have Tom in her room, or her event will crash the game.
      room = entity.room
      cat = Entity.new(room, room.fs)
      cat.x_pos = entity.x_pos
      cat.y_pos = entity.y_pos
      cat.type = 2
      cat.subtype = 0x3F
      cat.var_a = 3
      cat.var_b = 1
      room.entities << cat
      room.write_entities_to_rom()
      # Remove the Tom in Anna's original room since he's not needed there.
      original_cat = game.areas[7].sectors[0].rooms[6].entities[2]
      original_cat.type = 0
      original_cat.write_to_rom()
    end
  elsif PORTRAIT_NAMES.include?(pickup_global_id)
    # Portrait
    if GAME != "por"
      raise "Tried to place portrait in #{GAME}"
    end
    portrait_data = PORTRAIT_NAME_TO_DATA[pickup_global_id]
    entity.type = SPECIAL_OBJECT_ENTITY_TYPE
    entity.subtype = portrait_data[:subtype]
    entity.var_a = portrait_data[:var_a]
    entity.var_b = portrait_data[:var_b]
    # Move the portrait to a short distance above the closest floor so it looks good and is enterable.
    coll = RoomCollision.new(entity.room, game.fs)
    floor_y = coll.get_floor_y(entity, allow_jumpthrough: true)
    entity_original_y_pos = entity.y_pos
    entity.y_pos = floor_y - 0x50 # Portraits should float 5 tiles off the ground.
    entity.write_to_rom()
    curr_area_index = entity.room.area_index
    curr_sector_index = entity.room.sector_index
    curr_room_index = entity.room.room_index
    # Find the return portrait.
    dest_area_index = portrait_data[:area_index]
    dest_sector_index = portrait_data[:sector_index]
    dest_room_index = portrait_data[:room_index]
    dest_room = game.areas[dest_area_index].sectors[dest_sector_index].rooms[dest_room_index]
    # NOTE(review): the block param shadows the outer `entity` local; inside
    # this block it refers to each candidate entity of dest_room, not the
    # placed portrait.
    dest_portrait = dest_room.entities.find{|entity| entity.is_special_object? && [0x1A, 0x76, 0x86, 0x87].include?(entity.subtype)}
    return_portraits = [dest_portrait]
    # Update the list of x/y positions the player returns at in the por_distinct_return_portrait_positions patch.
    return_x = entity.x_pos
    return_y = floor_y
    game.fs.write(0x02309010+dest_area_index*4, [return_x, return_y].pack("vv"))
    # If there's a small breakable wall containing this portrait we remove it.
    # Not only does the breakable wall not hide the portrait, but when the player returns they would be put out of bounds by it.
    breakable_wall_x_range = (entity.x_pos-8..entity.x_pos+8)
    breakable_wall_y_range = (entity_original_y_pos-8..entity_original_y_pos+8)
    breakable_wall_entity = entity.room.entities.find do |e|
      e.is_special_object? && e.subtype == 0x3B && breakable_wall_x_range.include?(e.x_pos) && breakable_wall_y_range.include?(e.y_pos)
    end
    if breakable_wall_entity
      breakable_wall_entity.type = 0
      breakable_wall_entity.write_to_rom()
    end
    # Also update the bonus return portrait at the end of some areas.
    case dest_area_index
    when 2 # 13th Street
      return_portraits << game.entity_by_str("02-02-16_02")
    when 4 # Forgotten City
      return_portraits << game.entity_by_str("04-01-07_01")
    when 6 # Burnt Paradise
      return_portraits << game.entity_by_str("06-00-06_03")
    when 8 # Dark Academy
      return_portraits << game.entity_by_str("08-00-08_04")
    end
    return_portraits.each do |return_portrait|
      # var_a/var_b encode the destination: area index, and sector/room packed
      # as (sector << 6) | room.
      return_portrait.var_a = curr_area_index
      return_portrait.var_b = ((curr_sector_index & 0xF) << 6) | (curr_room_index & 0x3F)
      # Portrait frame graphic depends on which area the portrait leads back to.
      return_portrait.subtype = case curr_area_index
      when 1, 3, 5, 7 # City of Haze, Sandy Grave, Nation of Fools, or Forest of Doom.
        0x1A
      when 2, 4, 6, 8 # 13th Street, Forgotten City, Burnt Paradise, or Dark Academy.
        0x76
      when 0, 9 # Dracula's Castle or Nest of Evil.
        if [2, 4, 6, 8].include?(dest_area_index)
          # Use the alt portrait frame when returning to Dracula's Castle from 13th Street, Forgotten City, Burnt Paradise, or Dark Academy.
          0x87
        else
          0x86
        end
      else
        puts "Unknown area to portrait into: %02X" % curr_area_index
      end
      # Set highest bit of var B to indicate that this is a return portrait to the por_distinct_return_portrait_positions patch.
      return_portrait.var_b = 0x8000 | return_portrait.var_b
      return_portrait.write_to_rom()
      if room_rando?
        # Tell the room rando logic about this return portrait.
        checker.add_return_portrait(return_portrait.room.room_str, location)
      end
    end
    if dest_area_index == 7 # Forest of Doom
      # Remove the event from the original Forest of Doom portrait room since the portrait is no longer there.
      forest_event = game.entity_by_str("00-08-01_03")
      forest_event.type = 0
      forest_event.write_to_rom()
    end
  elsif entity.type == 1
    # Boss
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if !PICKUP_SUBTYPES_FOR_SKILLS.include?(item_type)
      raise "Can't make boss drop required item"
    end
    if GAME == "dos" && entity.room.sector_index == 9 && entity.room.room_index == 1
      # Aguni. He's not placed in the room so we hardcode him.
      enemy_dna = game.enemy_dnas[0x70]
    else
      enemy_dna = game.enemy_dnas[entity.subtype]
    end
    case GAME
    when "dos"
      enemy_dna["Soul"] = item_index
    when "ooe"
      enemy_dna["Glyph"] = pickup_global_id + 1
    else
      raise "Boss soul randomizer is bugged for #{LONG_GAME_NAME}."
    end
    enemy_dna.write_to_rom()
  elsif GAME == "dos" || GAME == "por"
    if GAME == "por" && location == "05-02-0C_01"
      # Cog's location. We always make this location use pickup flag 0x10 since Legion is hardcoded to check that flag, not whether you own the cog.
      pickup_flag = 0x10
      is_cog = true
    else
      pickup_flag = get_unused_pickup_flag_for_entity(entity)
      is_cog = false
    end
    if pickup_global_id == :money
      if entity.is_hidden_pickup? || is_cog || rng.rand <= 0.80
        # 80% chance to be a money bag
        # Hidden pickups have to be a bag since chests can't be hidden in a wall.
        # The cog location has to be a bag since chests can't have a pickup flag so they wouldn't be able to activate legion.
        if entity.is_hidden_pickup?
          entity.type = 7
        else
          entity.type = 4
        end
        entity.subtype = 1
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = rng.rand(4..6) # 500G, 1000G, 2000G
      else
        # 20% chance to be a money chest
        entity.type = 2
        entity.subtype = 1
        if GAME == "dos"
          entity.var_a = 0x10
        else
          entity.var_a = [0xE, 0xF, 0x12].sample(random: rng)
        end
        # We didn't use the pickup flag, so put it back
        @unused_pickup_flags << pickup_flag
      end
      entity.write_to_rom()
      return
    end
    # Make sure Chaos/Magus Ring isn't easily available.
    if GAME == "dos" && pickup_global_id == 0xCD # Chaos Ring
      entity.type = 2
      entity.subtype = 0x4C # All-souls-owned item
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = pickup_global_id + 1
      entity.write_to_rom()
      return
    elsif GAME == "por" && pickup_global_id == 0x12C # Magus Ring
      entity.type = 6 # All-quests-complete item
      entity.subtype = 7
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = 6
      entity.write_to_rom()
      return
    end
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if PICKUP_SUBTYPES_FOR_SKILLS.include?(item_type)
      case GAME
      when "dos"
        # Soul candle
        entity.type = 2
        entity.subtype = 1
        entity.var_a = 0
        entity.var_b = item_index
        # We didn't use the pickup flag, so put it back
        @unused_pickup_flags << pickup_flag
      when "por"
        # Skill
        if entity.is_hidden_pickup?
          entity.type = 7
        else
          entity.type = 4
        end
        entity.subtype = item_type
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = item_index
      end
    else
      # Item
      if entity.is_hidden_pickup?
        entity.type = 7
      else
        entity.type = 4
      end
      entity.subtype = item_type
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = item_index
    end
    entity.write_to_rom()
  elsif GAME == "ooe"
    pickup_flag = get_unused_pickup_flag_for_entity(entity)
    # Glyphs sit 0x20 higher than items; temporarily shift down so placement
    # math below is uniform, then shift back before writing (see end of branch).
    if entity.is_glyph? && !entity.is_hidden_pickup?
      entity.y_pos += 0x20
    end
    if pickup_global_id == :money
      if entity.is_hidden_pickup?
        entity.type = 7
      else
        entity.type = 4
      end
      entity.subtype = 1
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = rng.rand(4..6) # 500G, 1000G, 2000G
      entity.write_to_rom()
      return
    end
    if (0x6F..0x74).include?(pickup_global_id)
      # Relic. Must go in a chest, if you leave it lying on the ground it won't autoequip.
      entity.type = 2
      entity.subtype = 0x16
      entity.var_a = pickup_global_id + 1
      entity.var_b = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.write_to_rom()
      return
    end
    if pickup_global_id >= 0x6F
      # Item
      if entity.is_hidden_pickup?
        entity.type = 7
        entity.subtype = 0xFF
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = pickup_global_id + 1
      else
        case rng.rand
        when 0.00..0.70
          # 70% chance for a red chest
          entity.type = 2
          entity.subtype = 0x16
          entity.var_a = pickup_global_id + 1
          entity.var_b = pickup_flag
          use_pickup_flag(pickup_flag)
        when 0.70..0.95
          # 15% chance for an item on the ground
          entity.type = 4
          entity.subtype = 0xFF
          entity.var_a = pickup_flag
          use_pickup_flag(pickup_flag)
          entity.var_b = pickup_global_id + 1
        else
          # 5% chance for a hidden blue chest
          entity.type = 2
          entity.subtype = 0x17
          entity.var_a = pickup_global_id + 1
          entity.var_b = pickup_flag
          use_pickup_flag(pickup_flag)
        end
      end
    else
      # Glyph
      if entity.is_hidden_pickup?
        entity.type = 7
        entity.subtype = 2
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = pickup_global_id + 1
      else
        puzzle_glyph_ids = [0x1D, 0x1F, 0x20, 0x22, 0x24, 0x26, 0x27, 0x2A, 0x2B, 0x2F, 0x30, 0x31, 0x32, 0x46, 0x4E]
        if puzzle_glyph_ids.include?(pickup_global_id)
          # Free glyph
          entity.type = 4
          entity.subtype = 2
          entity.var_a = pickup_flag
          use_pickup_flag(pickup_flag)
          entity.var_b = pickup_global_id + 1
        else
          # Glyph statue
          entity.type = 2
          entity.subtype = 2
          entity.var_a = 0
          entity.var_b = pickup_global_id + 1
          # We didn't use the pickup flag, so put it back
          @unused_pickup_flags << pickup_flag
        end
      end
    end
    # Undo the temporary y shift applied above before committing the entity.
    if entity.is_glyph? && !entity.is_hidden_pickup?
      entity.y_pos -= 0x20
    end
    entity.write_to_rom()
  end
end
# Deletes item entities the player can never reach, by zeroing their type.
# Events and bosses are intentionally left in place.
def remove_inaccessible_items(inaccessible_remaining_locations)
  inaccessible_remaining_locations.each do |location_str|
    entity = get_entity_by_location_str(location_str)
    # Don't delete inaccessible events/bosses, just in case.
    next if checker.event_locations.include?(location_str) || entity.type == 1
    entity.type = 0
    entity.write_to_rom()
  end
end
# Returns a pickup flag for the given entity, preferring to reuse the flag the
# entity already carries. Falls back to popping a fresh one from the unused
# pool when the entity's own flag is missing, already taken, or (in OoE)
# reserved for glyph statues.
def get_unused_pickup_flag_for_entity(entity)
  pickup_flag = nil
  if entity.is_item_chest?
    pickup_flag = entity.var_b
  elsif entity.is_pickup?
    pickup_flag = entity.var_a
  elsif GAME == "dos" && entity.is_special_object? && entity.subtype == 0x4D # Easter egg item
    pickup_flag = entity.var_b
  elsif GAME == "dos" && entity.is_special_object? && entity.subtype == 0x4C # All-souls-obtained item
    pickup_flag = entity.var_a
  end
  if GAME == "ooe" && (0..0x51).include?(pickup_flag)
    # In OoE, these pickup flags are used by glyph statues automatically and we can't control those.
    # Therefore we need to reassign pickups that were free glyphs in the original game a new pickup flag, so it doesn't conflict with where those glyphs (Rapidus Fio and Volaticus) got moved to when randomized.
    pickup_flag = nil
  end
  return pickup_flag unless pickup_flag.nil? || @used_pickup_flags.include?(pickup_flag)
  fresh_flag = @unused_pickup_flags.pop
  raise "No pickup flag for this item, this error shouldn't happen" if fresh_flag.nil?
  fresh_flag
end
# Pops a fresh pickup flag off the pool of unused flags.
# Raises if the pool has been exhausted.
def get_unused_pickup_flag
  flag = @unused_pickup_flags.pop
  raise "No pickup flag for this item, this error shouldn't happen" if flag.nil?
  flag
end
# Marks pickup_flag as taken, then scrubs every used flag out of the unused
# pool (covers flags that were handed out earlier and later re-added).
def use_pickup_flag(pickup_flag)
  @used_pickup_flags.push(pickup_flag)
  @unused_pickup_flags = @unused_pickup_flags - @used_pickup_flags
end
# Looks up the global skill ID (soul in DoS, glyph in OoE) dropped by the
# enemy entity at the given location string. Returns nil for a DoS enemy with
# no soul (0xFF). Raises if the entity is not an enemy.
def get_entity_skill_drop_by_entity_location(location)
  entity = get_entity_by_location_str(location)
  raise "Not an enemy: #{location}" unless entity.type == 1
  enemy_dna =
    if GAME == "dos" && entity.room.sector_index == 9 && entity.room.room_index == 1
      # Aguni. He's not placed in the room so we hardcode him.
      game.enemy_dnas[0x70]
    else
      game.enemy_dnas[entity.subtype]
    end
  case GAME
  when "dos"
    skill_local_id = enemy_dna["Soul"]
    return nil if skill_local_id == 0xFF
  when "ooe"
    skill_local_id = enemy_dna["Glyph"] - 1
  else
    raise "Boss soul randomizer is bugged for #{LONG_GAME_NAME}."
  end
  skill_local_id + SKILL_GLOBAL_ID_RANGE.begin
end
# Returns the villager name symbol for the villager entity at the given
# location string (OoE only). Raises if the entity is not a villager.
def get_villager_name_by_entity_location(location)
  entity = get_entity_by_location_str(location)
  is_villager = GAME == "ooe" && entity.type == 2 && [0x89, 0x6D].include?(entity.subtype)
  raise "Not a villager: #{location}" unless is_villager
  VILLAGER_NAME_TO_EVENT_FLAG.invert[entity.var_a]
end
# Returns the portrait name symbol for the portrait entity at the given
# location string (PoR only). Raises if the entity is not a portrait.
def get_portrait_name_by_entity_location(location)
  entity = get_entity_by_location_str(location)
  is_portrait = GAME == "por" && entity.is_special_object? && [0x1A, 0x76, 0x86, 0x87].include?(entity.subtype)
  raise "Not a portrait: #{location} #{entity.inspect}" unless is_portrait
  AREA_INDEX_TO_PORTRAIT_NAME[entity.var_a]
end
# Delegates patching of an event's hardcoded pickup to the handler for the
# current game. Unknown games are silently ignored.
def change_hardcoded_event_pickup(event_entity, pickup_global_id)
  case GAME
  when "dos" then dos_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  when "por" then por_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  when "ooe" then ooe_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  end
end
# Patches DoS events whose pickup is hardcoded in code rather than stored on
# the entity: Mina's Talisman (subtype 0x65) and easter egg items (0x4D).
# All addresses below are hardcoded offsets into the DoS binary/overlays.
def dos_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  event_entity.room.sector.load_necessary_overlay()
  if event_entity.subtype == 0x65 # Mina's Talisman
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if MAGIC_SEAL_GLOBAL_ID_RANGE.include?(pickup_global_id)
      # Magic seal. These need to call a different function to be properly given.
      seal_index = pickup_global_id - 0x3D
      # Seal given when watching the event
      game.fs.write(0x021CB9F4, [seal_index].pack("C"))
      game.fs.write(0x021CB9FC, [0xEB006ECF].pack("V")) # Call func 021E7540
      # Seal given when skipping the event
      game.fs.write(0x021CBC14, [seal_index].pack("C"))
      game.fs.write(0x021CBC1C, [0xEB006E47].pack("V")) # Call func 021E7540
    else
      # Regular item.
      # Item given when watching the event
      game.fs.write(0x021CB9F4, [item_type].pack("C"))
      game.fs.write(0x021CB9F8, [item_index].pack("C"))
      # Item given when skipping the event
      game.fs.write(0x021CBC14, [item_type].pack("C"))
      game.fs.write(0x021CBC18, [item_index].pack("C"))
    end
    # Item name shown in the corner of the screen when watching the event.
    game.fs.write(0x021CBA08, [item_type].pack("C"))
    game.fs.write(0x021CBA0C, [item_index].pack("C"))
    # Also display the item's name in the corner when skipping the event.
    # We add a few new lines of code in free space for this.
    code = [0xE3A00000, 0xE3A010F0, 0xEBFDB6FD, 0xE1A00005, 0xEA042E64]
    game.fs.write(0x020C027C, code.pack("V*"))
    # NOTE(review): this single-byte write lands on the first stub word
    # (0xE3A00000, i.e. mov r0, #0), presumably patching its immediate so r0
    # carries pickup_global_id+1 into the called function — confirm against a
    # disassembly before changing either write.
    game.fs.write(0x020C027C, [pickup_global_id+1].pack("C"))
    # Branch from the event-skip path into the stub above.
    game.fs.write(0x021CBC20, [0xEAFBD195].pack("V"))
  elsif event_entity.subtype == 0x4D # Easter egg item
    # Change what item is actually placed into your inventory when you get the easter egg.
    easter_egg_index = event_entity.var_a
    game.fs.write(0x0222BE34 + easter_egg_index*0xC, [pickup_global_id+1].pack("v"))
    # Update the pickup flag.
    pickup_flag = get_unused_pickup_flag_for_entity(event_entity)
    event_entity.var_b = pickup_flag
    use_pickup_flag(pickup_flag)
    # Make the easter egg special object use the same palette list as actual item icons, since that gives access to all 3 icon palettes, while the actual object's palette only has the first.
    sprite_info = SpecialObjectType.new(0x4D, game.fs).extract_gfx_and_palette_and_sprite_from_create_code
    item = game.items[pickup_global_id]
    icon_palette_pointer = 0x022C4684
    game.fs.write(0x021AF5CC, [icon_palette_pointer].pack("V"))
    icon_palette_index = (item["Icon"] & 0xFF00) >> 8
    sprite = sprite_info.sprite
    sprite.frames[easter_egg_index].parts.first.palette_index = icon_palette_index
    sprite.write_to_rom()
    # Now update the actual item visual on the object's GFX page so it visually shows the correct item.
    sprite_info = SpecialObjectType.new(0x4D, game.fs).extract_gfx_and_palette_and_sprite_from_create_code # We extract sprite info again to get the updated palette pointer after we changed it.
    gfx = sprite_info.gfx_pages.first
    palettes = renderer.generate_palettes(sprite_info.palette_pointer, 16)
    chunky_image = renderer.render_gfx_page(gfx, palettes[icon_palette_index], gfx.canvas_width)
    new_icon = renderer.render_icon_by_item(item)
    # Each easter egg's icon occupies a 16px-wide slot on the GFX page.
    x_offset = 16*easter_egg_index
    y_offset = 0
    chunky_image.replace!(new_icon, x_offset, y_offset)
    renderer.save_gfx_page(chunky_image, gfx, sprite_info.palette_pointer, 16, icon_palette_index)
  end
end
# PoR counterpart of the hardcoded-event patchers. Only loads the sector's
# overlay; no byte patches are applied here — apparently no PoR event needs
# its pickup hardcoded in code (unlike DoS/OoE).
def por_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  event_entity.room.sector.load_necessary_overlay()
end
# Patches OoE events whose glyph reward is hardcoded in code. Some events
# (Magnes, Cerberus) cannot be patched safely and are converted into plain
# free-glyph pickups instead. All addresses are hardcoded offsets into the
# OoE binary/overlays.
def ooe_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  event_entity.room.sector.load_necessary_overlay()
  # Track glyphs handed out via events (read elsewhere by the randomizer).
  @glyphs_placed_as_event_glyphs << pickup_global_id
  if event_entity.subtype == 0x8A # Magnes
    # Get rid of the event, turn it into a normal free glyph
    # We can't keep the event because it automatically equips Magnes even if the glyph it gives is not Magnes.
    # Changing what it equips would just make the event not work right, so we may as well remove it.
    pickup_flag = get_unused_pickup_flag()
    event_entity.type = 4
    event_entity.subtype = 2
    event_entity.var_a = pickup_flag
    use_pickup_flag(pickup_flag)
    event_entity.var_b = pickup_global_id + 1
    event_entity.x_pos = 0x80
    event_entity.y_pos = 0x2B0
    event_entity.write_to_rom()
  elsif event_entity.subtype == 0x69 # Dominus Hatred
    game.fs.write(0x02230A7C, [pickup_global_id+1].pack("C"))
    game.fs.write(0x022C25D8, [pickup_global_id+1].pack("C"))
  elsif event_entity.subtype == 0x6F # Dominus Anger
    game.fs.write(0x02230A84, [pickup_global_id+1].pack("C"))
    game.fs.write(0x022C25DC, [pickup_global_id+1].pack("C"))
  elsif event_entity.subtype == 0x81 # Cerberus
    # Get rid of the event, turn it into a normal free glyph
    # We can't keep the event because it has special programming to always spawn them in order even if you get to the locations out of order.
    pickup_flag = get_unused_pickup_flag()
    event_entity.type = 4
    event_entity.subtype = 2
    event_entity.var_a = pickup_flag
    use_pickup_flag(pickup_flag)
    event_entity.var_b = pickup_global_id + 1
    event_entity.x_pos = 0x80
    event_entity.y_pos = 0x60
    event_entity.write_to_rom()
    other_cerberus_events = event_entity.room.entities.select{|e| e.is_special_object? && [0x82, 0x83].include?(e.subtype)}
    other_cerberus_events.each do |event|
      # Delete these others, we don't want the events.
      event.type = 0
      event.write_to_rom()
    end
  else
    # Per-event address table: [glyph ID write, pickup-flag read, pickup-flag
    # write, optional second pickup-flag read]. Entries with only one address
    # (Cubus, Dominus Agony) just have their spawned glyph ID patched.
    glyph_id_location, pickup_flag_read_location, pickup_flag_write_location, second_pickup_flag_read_location = case event_entity.subtype
    when 0x2F # Luminatio
      [0x022C4894, 0x022C483C, 0x022C4888]
    when 0x3B # Pneuma
      [0x022C28E8, 0x022C2880, 0x022C28DC, 0x022C279C]
    when 0x44 # Lapiste
      [0x022C2CB0, 0x022C2C24, 0x022C2CA0]
    when 0x54 # Vol Umbra
      [0x022C2FBC, 0x022C2F70, 0x022C2FB4]
    when 0x4C # Vol Fulgur
      [0x022C2490, 0x022C2404, 0x022C2480]
    when 0x52 # Vol Ignis
      [0x0221F1A0, 0x0221F148, 0x0221F194]
    when 0x47 # Vol Grando
      [0x022C230C, 0x022C2584, 0x022C22FC]
    when 0x40 # Cubus
      [0x022C31DC]
    when 0x53 # Morbus
      [0x022C2354, 0x022C2318, 0x022C2344]
    when 0x76 # Dominus Agony
      [0x022C25BC]
    else
      return
    end
    # What glyph is actually spawned.
    game.fs.write(glyph_id_location, [pickup_global_id+1].pack("C"))
    if pickup_flag_write_location
      # The pickup flag set when you absorb the glyph.
      pickup_flag = pickup_global_id+2
      game.fs.write(pickup_flag_write_location, [pickup_flag].pack("C"))
    end
    if pickup_flag_read_location
      # The pickup flag read to decide whether you've completed this puzzle yet or not.
      # This is determined by two lines of code:
      # The first loads the word in the bitfield containing the correct bit (0x20 bits in each word):
      pickup_flag_word_offset = 0x40 + 4*(pickup_flag/0x20)
      game.fs.write(pickup_flag_read_location, [pickup_flag_word_offset].pack("C"))
      game.fs.write(second_pickup_flag_read_location, [pickup_flag_word_offset].pack("C")) if second_pickup_flag_read_location
      # The second does a tst on the exact bit within that word:
      pickup_flag_bit_index = pickup_flag % 0x20
      game.fs.replace_hardcoded_bit_constant(pickup_flag_read_location+4, pickup_flag_bit_index)
      game.fs.replace_hardcoded_bit_constant(second_pickup_flag_read_location+4, pickup_flag_bit_index) if second_pickup_flag_read_location
    end
  end
end
end
# List non-randomized progress items in spoiler log too
module PickupRandomizer
# Maps each OoE villager name symbol to the event flag set when that villager
# has been rescued. Key order matters: RANDOMIZABLE_VILLAGER_NAMES below is
# derived from it.
VILLAGER_NAME_TO_EVENT_FLAG = {
  :villagerjacob => 0x2A,
  :villagerabram => 0x2D,
  :villageraeon => 0x3C,
  :villagereugen => 0x38,
  :villagermonica => 0x4F,
  :villagerlaura => 0x32,
  :villagermarcel => 0x40,
  :villagerserge => 0x47,
  :villageranna => 0x4B,
  :villagerdaniela => 0x57,
  :villageririna => 0x53,
  :villagergeorge => 0x0D,
}
RANDOMIZABLE_VILLAGER_NAMES = VILLAGER_NAME_TO_EVENT_FLAG.keys
# PoR portrait definitions: entity subtype (portrait frame) plus the
# area/sector/room the portrait leads to. var_a/var_b are filled in below, so
# this hash is deliberately left mutable (not frozen).
PORTRAIT_NAME_TO_DATA = {
  :portraitcityofhaze => {subtype: 0x1A, area_index: 1, sector_index: 0, room_index: 0x1A},
  :portraitsandygrave => {subtype: 0x1A, area_index: 3, sector_index: 0, room_index: 0},
  :portraitnationoffools => {subtype: 0x1A, area_index: 5, sector_index: 0, room_index: 0x21},
  :portraitforestofdoom => {subtype: 0x1A, area_index: 7, sector_index: 0, room_index: 0},
  :portraitdarkacademy => {subtype: 0x76, area_index: 8, sector_index: 1, room_index: 6},
  :portraitburntparadise => {subtype: 0x76, area_index: 6, sector_index: 0, room_index: 0x20},
  :portraitforgottencity => {subtype: 0x76, area_index: 4, sector_index: 0, room_index: 0},
  :portrait13thstreet => {subtype: 0x76, area_index: 2, sector_index: 0, room_index: 7},
  :portraitnestofevil => {subtype: 0x86, area_index: 9, sector_index: 0, room_index: 0},
}
# Precompute the entity vars for each portrait: var_a is the destination area,
# var_b packs sector/room as (sector << 6) | room — the same encoding return
# portraits use in change_entity_location_to_pickup_global_id.
PORTRAIT_NAME_TO_DATA.each do |portrait_name, portrait_data|
  portrait_data[:var_a] = portrait_data[:area_index]
  portrait_data[:var_b] = ((portrait_data[:sector_index] << 6) & 0x03C0) | (portrait_data[:room_index] & 0x003F)
end
PORTRAIT_NAMES = PORTRAIT_NAME_TO_DATA.keys
AREA_INDEX_TO_PORTRAIT_NAME = PORTRAIT_NAME_TO_DATA.map do |name, data|
  [data[:area_index], name]
end.to_h
PORTRAIT_NAME_TO_AREA_INDEX = PORTRAIT_NAME_TO_DATA.map do |name, data|
  [name, data[:area_index]]
end.to_h
# Where each portrait entity sits in the vanilla game (location strings in
# "AA-SS-RR_EE" format).
PORTRAIT_NAME_TO_DEFAULT_ENTITY_LOCATION = {
  :portraitcityofhaze => "00-01-00_00",
  :portraitsandygrave => "00-04-12_00",
  :portraitnationoffools => "00-06-01_00",
  :portraitforestofdoom => "00-08-01_02",
  :portraitdarkacademy => "00-0B-00_04",
  :portraitburntparadise => "00-0B-00_03",
  :portraitforgottencity => "00-0B-00_01",
  :portrait13thstreet => "00-0B-00_02",
  :portraitnestofevil => "00-00-05_00",
}
EARLY_GAME_PORTRAITS = [
  :portraitcityofhaze,
  :portraitsandygrave,
  :portraitnationoffools,
  :portraitforestofdoom,
]
LATE_GAME_PORTRAITS = [
  :portraitdarkacademy,
  :portraitburntparadise,
  :portraitforgottencity,
  :portrait13thstreet
]
# Top-level driver for progression pickup randomization. Seeds the checker
# with the items each game starts with, applies per-game ROM fixups (all
# entity indices/addresses below are hardcoded for the specific game), then
# delegates to the configured fill algorithm. Yields progress (a float in
# 0..1) to the given block during forward fill.
# Raises if the resulting seed is not beatable (should never happen).
def randomize_pickups_completably(&block)
  spoiler_log.puts
  spoiler_log.puts "Progression pickup locations:"
  case GAME
  when "dos"
    checker.add_item(0x43) # knife
    checker.add_item(0x91) # casual clothes
    checker.add_item(0x3D) # seal 1
    if options[:unlock_boss_doors]
      checker.add_item(0x3E) # seal 2
      checker.add_item(0x3F) # seal 3
      checker.add_item(0x40) # seal 4
      checker.add_item(0x41) # seal 5
    end
  when "por"
    checker.add_item(0x61) # starting vampire killer
    checker.add_item(0x6C) # encyclopedia
    checker.add_item(0xAA) # casual clothes
    # In the corridor where Behemoth chases you, change the code of the platform to not permanently disappear.
    # This is so the player can't get stuck if they miss an important item up there.
    game.fs.load_overlay(79)
    game.fs.write(0x022EC638, [0xEA000003].pack("V"))
    # Room in Sandy Grave that has two overlapping Charm Necklaces.
    # We don't want these to overlap as the player could easily think it's just one item and not see the one beneath it.
    # Move one a bit to the left and the other a bit to the right. Also give one a different pickup flag.
    item_a = game.areas[3].sectors[0].rooms[0x13].entities[0]
    item_b = game.areas[3].sectors[0].rooms[0x13].entities[1]
    item_a.x_pos = 0x120
    item_b.x_pos = 0x140
    pickup_flag = get_unused_pickup_flag()
    item_b.var_a = pickup_flag
    use_pickup_flag(pickup_flag)
    item_a.write_to_rom()
    item_b.write_to_rom()
  when "ooe"
    checker.add_item(0xE6) # casual clothes
    checker.add_item(0x6F) # lizard tail
    checker.add_item(0x72) # glyph union
    checker.add_item(0x1E) # torpor. the player will get enough of these as it is
    # Give the player the glyph sleeve in Ecclesia like in hard mode.
    # To do this just get rid of the entity hider that hides it on normal mode.
    entity_hider = game.areas[2].sectors[0].rooms[4].entities[6]
    entity_hider.type = 0
    entity_hider.write_to_rom()
    # But we also need to give the chest a unique flag, because it shares the flag with the one from Minera in normal mode.
    sleeve_chest = game.areas[2].sectors[0].rooms[4].entities[7]
    pickup_flag = get_unused_pickup_flag()
    sleeve_chest.var_b = pickup_flag
    use_pickup_flag(pickup_flag)
    sleeve_chest.write_to_rom()
    # We also make sure the chest in Minera appears even on hard mode.
    entity_hider = game.areas[8].sectors[2].rooms[7].entities[1]
    entity_hider.type = 0
    entity_hider.write_to_rom()
    checker.add_item(0x73) # glyph sleeve
    # Room in the Final Approach that has two overlapping chests both containing diamonds.
    # We don't want these to overlap as the player could easily think it's just one item and not see the one beneath it.
    # Move one a bit to the left and the other a bit to the right. Also give one a different pickup flag.
    chest_a = game.areas[0].sectors[0xA].rooms[0xB].entities[1]
    chest_b = game.areas[0].sectors[0xA].rooms[0xB].entities[2]
    chest_a.x_pos = 0xE0
    chest_b.x_pos = 0x130
    pickup_flag = get_unused_pickup_flag()
    chest_b.var_b = pickup_flag
    use_pickup_flag(pickup_flag)
    chest_a.write_to_rom()
    chest_b.write_to_rom()
  end
  @locations_randomized_to_have_useful_pickups = []
  @rooms_by_progression_order_accessed = []
  @rooms_that_already_have_an_event = []
  # Record every room that already contains an event object (subtypes
  # 0x5F-0x88) so villager placement won't add a second event to them.
  game.each_room do |room|
    room.entities.each do |entity|
      if entity.is_special_object? && (0x5F..0x88).include?(entity.subtype)
        room_str = "%02X-%02X-%02X" % [room.area_index, room.sector_index, room.room_index]
        @rooms_that_already_have_an_event << room_str
        break
      end
    end
  end
  if @progression_fill_mode == :forward
    total_progression_pickups = checker.all_progression_pickups.length
    place_progression_pickups_forward_fill() do |progression_pickups_placed|
      percent_done = progression_pickups_placed.to_f / total_progression_pickups
      yield percent_done
    end
  elsif @progression_fill_mode == :assumed
    place_progression_pickups_assumed_fill()
  else
    raise "Unknown progression fill mode: #{@progression_fill_mode}"
  end
  # Sanity check: with everything placed, the checker must report the game
  # beatable; otherwise the fill logic has a bug.
  if !checker.game_beatable?
    item_names = checker.current_items.map do |global_id|
      if global_id.is_a?(Symbol)
        global_id
      else
        checker.defs.invert[global_id]
      end
    end.compact
    raise "Bug: Game is not beatable on this seed!\nThis error shouldn't happen.\nSeed: #{@seed}\n\nItems:\n#{item_names.join(", ")}"
  end
  if GAME == "por" && options[:randomize_portraits]
    # Remove the extra portraits at the end of 13th Street, Forgotten City, Burnt Paradise, and Dark Academy.
    # (The one return portrait back to where you entered this portrait from is not removed, and is updated elsewhere in the code.)
    [
      "02-02-16_01",
      "02-02-16_03",
      "02-02-16_04",
      "04-01-07_02",
      "04-01-07_03",
      "04-01-07_04",
      "06-00-06_01",
      "06-00-06_02",
      "06-00-06_04",
      "08-00-08_01",
      "08-00-08_02",
      "08-00-08_03",
    ].each do |entity_str|
      portrait = game.entity_by_str(entity_str)
      portrait.type = 0
      portrait.write_to_rom()
    end
  end
rescue StandardError => e
  # Debug hook: the map-rando dump call is disabled; re-raise unchanged.
  #output_map_rando_error_debug_info()
  raise e
end
# Places all progression pickups using assumed fill: repeatedly shuffle items
# into completely random valid locations, then keep the first arrangement the
# checker can actually beat. On success, writes the chosen placements to the
# ROM and the spoiler log, grouped into progression spheres.
#
# Side effects: populates @done_item_locations, @progression_spheres,
# @locations_randomized_to_have_useful_pickups and (in room rando)
# @rooms_by_progression_order_accessed.
#
# Fix: the bat-placement failure message previously contained a stray "}"
# from a doubled interpolation brace.
def place_progression_pickups_assumed_fill
  verbose = false
  # This attribute is modified when adding a villager to a room.
  orig_rooms_that_already_have_an_event = @rooms_that_already_have_an_event.dup
  # First place things that are not randomized in their normal locations.
  nonrandomized_item_locations = get_nonrandomized_item_locations()
  orig_current_items = checker.current_items.dup
  if room_rando?
    orig_return_portraits = checker.return_portraits.dup
  end
  pickups_available = checker.all_progression_pickups - checker.current_items - nonrandomized_item_locations.values
  # Because DoS has two bat transformation souls, which both allow a ton of progression, at least one of them tends to be placed very early.
  # So we change it so that only one of the two is available to be randomly placed to reduce the chance of early bat.
  # (The remaining second one will be placed non-randomly later.)
  if GAME == "dos" && pickups_available.include?(0x104) && pickups_available.include?(0xFC)
    bat_to_keep = [0x104, 0xFC].sample(random: rng)
    bat_to_remove = (bat_to_keep == 0x104 ? 0xFC : 0x104)
    pickups_available.delete(bat_to_remove)
  elsif GAME == "dos" && pickups_available.include?(0xFC)
    bat_to_keep = 0x104
    bat_to_remove = 0xFC
    pickups_available.delete(bat_to_remove)
  else
    bat_to_keep = nil
    bat_to_remove = nil
  end
  if room_rando?
    # Temporarily give all progress items and check what locations are available.
    # Those are all the valid locations on this seed, excluding rooms, subrooms, and portraits that are unused.
    checker.all_progression_pickups.each do |pickup|
      next if checker.current_items.include?(pickup)
      checker.add_item(pickup)
    end
    locations_available, _ = checker.get_accessible_locations_and_doors()
    checker.restore_current_items(orig_current_items)
  else
    locations_available = checker.all_locations.keys
    # Don't put items in removed portraits.
    if GAME == "por" && options[:por_short_mode]
      area_indexes_of_removed_portraits = @portraits_to_remove.map do |portrait_name|
        PickupRandomizer::PORTRAIT_NAME_TO_AREA_INDEX[portrait_name]
      end
      locations_available.reject! do |location|
        area_indexes_of_removed_portraits.include?(location[0,2].to_i(16))
      end
    end
  end
  locations_available -= nonrandomized_item_locations.keys
  locations_accessible_at_start = nil
  if room_rando?
    locations_accessible_at_start, _ = checker.get_accessible_locations_and_doors()
  end
  # Place pickups in completely random locations, and then check if the resulting seed is beatable.
  # Repeat this until a beatable seed is found.
  num_failures = 0
  while true
    @done_item_locations = nonrandomized_item_locations.dup
    @rooms_that_already_have_an_event = orig_rooms_that_already_have_an_event.dup
    progression_spheres = decide_progression_pickups_for_assumed_fill(
      pickups_available,
      locations_available,
      locations_accessible_at_start: locations_accessible_at_start
    )
    if progression_spheres != :failure
      puts "Total number of assumed fill failures: #{num_failures}"
      break
    end
    num_failures += 1
    puts "Assumed fill failure ##{num_failures}" if num_failures % 100 == 0
    # Reset checker state before trying again.
    checker.restore_current_items(orig_current_items)
    if room_rando?
      checker.restore_return_portraits(orig_return_portraits)
    end
  end
  # Restore this since any villagers we decided on during the previous step haven't actually been placed yet.
  @rooms_that_already_have_an_event = orig_rooms_that_already_have_an_event.dup
  @progression_spheres = progression_spheres
  if bat_to_keep
    # If we had to remove one of the two bats from being randomly placed, we now go and place it non-randomly.
    # We simply place it in the last possible progression sphere we can find.
    # (Which specific location within that sphere is still chosen randomly.)
    placed_bat_to_remove = false
    @progression_spheres.reverse_each do |sphere|
      locations_accessed_in_this_sphere = sphere[:locs]
      progress_locations_accessed_in_this_sphere = sphere[:progress_locs]
      unused_locs = locations_accessed_in_this_sphere - progress_locations_accessed_in_this_sphere
      valid_unused_locs = filter_locations_valid_for_pickup(
        unused_locs,
        bat_to_remove
      )
      if valid_unused_locs.any?
        location_for_bat_to_remove = valid_unused_locs.sample(random: rng)
        @done_item_locations[location_for_bat_to_remove] = bat_to_remove
        # Rebuild the sphere's progress list (preserving original order) with
        # the bat's location included.
        progress_locations_accessed_in_this_sphere = locations_accessed_in_this_sphere.select do |location|
          progress_locations_accessed_in_this_sphere.include?(location) || location == location_for_bat_to_remove
        end
        sphere[:progress_locs] = progress_locations_accessed_in_this_sphere
        placed_bat_to_remove = true
        break
      end
    end
    if !placed_bat_to_remove
      raise "Couldn't place #{checker.defs.invert[bat_to_remove]} anywhere"
    end
  end
  # Now actually place the pickups in the locations we decided on, and write to the spoiler log.
  already_seen_room_strs = []
  sphere_index = 0
  @progression_spheres.each do |sphere|
    progress_locations_accessed_in_this_sphere = sphere[:progress_locs]
    doors_accessed_in_this_sphere = sphere[:doors]
    spoiler_str = "#{sphere_index+1}:"
    spoiler_log.puts spoiler_str
    puts spoiler_str if verbose
    progress_locations_accessed_in_this_sphere.each do |location|
      pickup_global_id = @done_item_locations[location]
      next unless checker.all_progression_pickups.include?(pickup_global_id)
      @locations_randomized_to_have_useful_pickups << location
      # Non-randomized items are already where they belong; only log them.
      unless nonrandomized_item_locations.has_key?(location)
        change_entity_location_to_pickup_global_id(location, pickup_global_id)
      end
      spoiler_str = get_item_placement_spoiler_string(location, pickup_global_id)
      if nonrandomized_item_locations.has_key?(location)
        spoiler_str += " (Not randomized)"
      end
      spoiler_log.puts spoiler_str
      puts spoiler_str if verbose
    end
    if room_rando?
      rooms_accessed_in_this_sphere = doors_accessed_in_this_sphere.map{|door_str| door_str[0,8]}
      # Remove duplicate rooms caused by accessing a new door in an old room.
      rooms_accessed_in_this_sphere -= already_seen_room_strs
      @rooms_by_progression_order_accessed << rooms_accessed_in_this_sphere
      already_seen_room_strs += rooms_accessed_in_this_sphere
    end
    sphere_index += 1
  end
end
def decide_progression_pickups_for_assumed_fill(pickups_available, locations_available, locations_accessible_at_start: nil)
remaining_progress_items = pickups_available.dup
remaining_locations = locations_available.dup
if GAME == "por" && options[:randomize_starting_room] && options[:randomize_portraits]
starting_portrait_name = AREA_INDEX_TO_PORTRAIT_NAME[@starting_room.area_index]
if starting_portrait_name
starting_portrait_location_in_castle = pick_starting_portrait_location_in_castle()
@done_item_locations[starting_portrait_location_in_castle] = starting_portrait_name
return_portrait = get_primary_return_portrait_for_portrait(starting_portrait_name)
checker.add_return_portrait(return_portrait.room.room_str, starting_portrait_location_in_castle)
remaining_progress_items.delete(starting_portrait_name)
end
end
if room_rando?
# Place the very first item somewhere that is definitely reachable within the first sphere.
# This is for the sake of performance - tons of attempts where there isn't a single item accessible at the start is just a waste of time.
if locations_accessible_at_start.nil?
locations_accessible_at_start, _ = checker.get_accessible_locations_and_doors()
end
possible_first_items = remaining_progress_items.dup
if GAME == "por" && (!room_rando? || !options[:rebalance_enemies_in_room_rando])
# Don't allow putting late game portraits at the very start.
possible_first_items -= LATE_GAME_PORTRAITS
end
possible_first_items.shuffle!(random: rng)
while true
if possible_first_items.empty?
raise "No possible item to place first in assumed fill"
end
possible_first_item = possible_first_items.pop()
possible_locations = filter_locations_valid_for_pickup(locations_accessible_at_start, possible_first_item)
if possible_locations.empty?
next
end
remaining_progress_items.delete(possible_first_item)
location = possible_locations.sample(random: rng)
remaining_locations.delete(location)
@done_item_locations[location] = possible_first_item
if RANDOMIZABLE_VILLAGER_NAMES.include?(possible_first_item)
# Villager
room_str = location[0,8]
@rooms_that_already_have_an_event << room_str
end
break
end
end
remaining_progress_items.each do |pickup_global_id|
possible_locations = filter_locations_valid_for_pickup(remaining_locations, pickup_global_id)
if possible_locations.empty?
raise "No locations to place pickup"
end
location = possible_locations.sample(random: rng)
remaining_locations.delete(location)
@done_item_locations[location] = pickup_global_id
if RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
# Villager
room_str = location[0,8]
@rooms_that_already_have_an_event << room_str
end
end
inaccessible_progress_locations = @done_item_locations.keys
accessible_progress_locations = []
accessible_doors = []
progression_spheres = []
while true
if room_rando?
curr_accessible_locations, curr_accessible_doors = checker.get_accessible_locations_and_doors()
locations_accessed_in_this_sphere = curr_accessible_locations
doors_accessed_in_this_sphere = curr_accessible_doors - accessible_doors
else
locations_accessed_in_this_sphere = checker.get_accessible_locations()
end
progress_locations_accessed_in_this_sphere = locations_accessed_in_this_sphere & inaccessible_progress_locations
if progress_locations_accessed_in_this_sphere.empty?
#if room_rando?
# puts "Starting room: #{@starting_room}"
# puts "Num progression spheres at time of failure: #{progression_spheres.size}"
# puts "Num accessible locations at time of failure: #{curr_accessible_locations.size}"
# accesible_progress_locs = (@done_item_locations.keys & curr_accessible_locations)
# puts "Num accessible progress locations at time of failure: #{accesible_progress_locs.size}"
# puts "Total progress locations at time of failure: #{@done_item_locations.keys.size}"
# accessible_area_indexes = curr_accessible_doors.map{|x| x[0,2].to_i(16)}.uniq
# puts "All accessible areas: #{accessible_area_indexes}"
#
# inaccessible_item_locations = (@done_item_locations.keys - accesible_progress_locs)
# puts "Inaccessible item locations:"
# p inaccessible_item_locations
# puts "Inaccessible items:"
# p inaccessible_item_locations.map{|loc| @done_item_locations[loc]}
#else
# puts "Starting room: #{@starting_room}"
# puts "Num progression spheres at time of failure: #{progression_spheres.size}"
# puts "Total progress locations at time of failure: #{@done_item_locations.keys.size}"
# puts "Num accessible progress locations at time of failure: #{accessible_progress_locations.size}"
# puts "Num inaccessible progress locations at time of failure: #{inaccessible_progress_locations.size}"
# puts "Inaccessible locations: #{inaccessible_progress_locations}"
# accessible_area_indexes = (accessible_progress_locations+locations_accessed_in_this_sphere).map{|x| x[0,2].to_i(16)}.uniq
# puts "All accessible areas: #{accessible_area_indexes}"
#end
return :failure
end
pickups_obtained_in_this_sphere = []
progress_locations_accessed_in_this_sphere.each do |location|
pickup_global_id = @done_item_locations[location]
pickups_obtained_in_this_sphere << pickup_global_id
checker.add_item(pickup_global_id)
end
if GAME == "por" && progression_spheres.size == 0 && (!room_rando? || !options[:rebalance_enemies_in_room_rando])
# If portraits are randomized but we can't rebalance enemies, try to avoid placing late game portraits in the early game.
if (pickups_obtained_in_this_sphere & LATE_GAME_PORTRAITS).any?
return :failure
end
end
accessible_progress_locations += progress_locations_accessed_in_this_sphere
inaccessible_progress_locations -= progress_locations_accessed_in_this_sphere
progression_spheres << {
locs: locations_accessed_in_this_sphere,
progress_locs: progress_locations_accessed_in_this_sphere,
doors: doors_accessed_in_this_sphere,
}
if inaccessible_progress_locations.empty?
break
end
end
return progression_spheres
end
def get_nonrandomized_item_locations
nonrandomized_done_item_locations = {}
if !options[:randomize_boss_souls] && ["dos", "ooe"].include?(GAME)
# Vanilla boss souls.
checker.enemy_locations.each do |location|
pickup_global_id = get_entity_skill_drop_by_entity_location(location)
nonrandomized_done_item_locations[location] = pickup_global_id
@locations_randomized_to_have_useful_pickups << location
end
end
if !options[:randomize_villagers] && GAME == "ooe"
# Vanilla villagers.
checker.villager_locations.each do |location|
pickup_global_id = get_villager_name_by_entity_location(location)
nonrandomized_done_item_locations[location] = pickup_global_id
@locations_randomized_to_have_useful_pickups << location
end
end
if !options[:randomize_portraits] && GAME == "por"
# Vanilla portraits.
checker.portrait_locations.each do |location|
# Don't count removed portraits in short mode as portrait locations.
next if @portrait_locations_to_remove.include?(location)
pickup_global_id = get_portrait_name_by_entity_location(location)
nonrandomized_done_item_locations[location] = pickup_global_id
@locations_randomized_to_have_useful_pickups << location
end
end
return nonrandomized_done_item_locations
end
def place_progression_pickups_forward_fill(&block)
previous_accessible_locations = []
progression_pickups_placed = 0
total_progression_pickups = checker.all_progression_pickups.length
on_leftovers = false
if GAME == "por" && options[:randomize_starting_room] && options[:randomize_portraits]
starting_portrait_name = AREA_INDEX_TO_PORTRAIT_NAME[@starting_room.area_index]
if starting_portrait_name
starting_portrait_location_in_castle = pick_starting_portrait_location_in_castle()
change_entity_location_to_pickup_global_id(starting_portrait_location_in_castle, starting_portrait_name)
@locations_randomized_to_have_useful_pickups << starting_portrait_location_in_castle
end
end
verbose = false
# First place progression pickups needed to beat the game.
spoiler_log.puts "Placing main route progression pickups:"
while true
if room_rando?
possible_locations, accessible_doors = checker.get_accessible_locations_and_doors()
accessible_rooms = accessible_doors.map{|door_str| door_str[0,8]}
@rooms_by_progression_order_accessed << accessible_rooms
else
possible_locations = checker.get_accessible_locations()
end
possible_locations -= @locations_randomized_to_have_useful_pickups
puts "Total possible locations: #{possible_locations.size}" if verbose
pickups_by_locations = checker.pickups_by_current_num_locations_they_access()
if starting_portrait_name
# Don't place the starting portrait anywhere, it's already in Dracula's Castle.
pickups_by_locations.delete(starting_portrait_name)
end
if GAME == "por" && options[:randomize_portraits] && (!room_rando? || !options[:rebalance_enemies_in_room_rando])
# If portraits are randomized but we can't rebalance enemies, try to avoid placing late game portraits in the early game.
if progression_pickups_placed < 5
pickups_by_locations_filtered = pickups_by_locations.reject do |pickup, usefulness|
LATE_GAME_PORTRAITS.include?(pickup)
end
if pickups_by_locations_filtered.any?
pickups_by_locations = pickups_by_locations_filtered
end
end
end
pickups_by_usefulness = pickups_by_locations.select{|pickup, num_locations| num_locations > 0}
currently_useless_pickups = pickups_by_locations.select{|pickup, num_locations| num_locations == 0}
puts "Num useless pickups: #{currently_useless_pickups.size}" if verbose
placing_currently_useless_pickup = false
if pickups_by_usefulness.any?
max_usefulness = pickups_by_usefulness.values.max
weights = pickups_by_usefulness.map do |pickup, usefulness|
# Weight less useful pickups as being more likely to be chosen.
weight = max_usefulness - usefulness + 1
weight = Math.sqrt(weight)
if checker.preferences[pickup]
weight *= checker.preferences[pickup]
end
weight
end
ps = weights.map{|w| w.to_f / weights.reduce(:+)}
useful_pickups = pickups_by_usefulness.keys
weighted_useful_pickups = useful_pickups.zip(ps).to_h
pickup_global_id = weighted_useful_pickups.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first
weighted_useful_pickups_names = weighted_useful_pickups.map do |global_id, weight|
"%.2f %s" % [weight, checker.defs.invert[global_id]]
end
#puts "Weighted less useful pickups: [" + weighted_useful_pickups_names.join(", ") + "]"
elsif pickups_by_locations.any? && checker.game_beatable?
# The player can access all locations.
# So we just randomly place one progression pickup.
if !on_leftovers
spoiler_log.puts "Placing leftover progression pickups:"
on_leftovers = true
end
pickup_global_id = pickups_by_locations.keys.sample(random: rng)
elsif pickups_by_locations.any?
# No locations can access new areas, but the game isn't beatable yet.
# This means any new areas will need at least two new items to access.
# So just place a random pickup for now.
valid_pickups = pickups_by_locations.keys
if GAME == "ooe" && options[:randomize_villagers]
valid_villagers = valid_pickups & RANDOMIZABLE_VILLAGER_NAMES
if checker.albus_fight_accessible?
if valid_villagers.any?
# Once Albus is accessible, prioritize placing villagers over other pickups.
valid_pickups = valid_villagers
end
else
# Don't start placing villagers until Albus is accessible.
valid_pickups -= RANDOMIZABLE_VILLAGER_NAMES
end
if valid_pickups.empty?
# But if the only things left to place are villagers, we have no choice but to place them before Albus is accessible.
valid_pickups = pickups_by_locations.keys
end
elsif GAME == "dos" && room_rando? && accessible_rooms.include?("00-06-00")
# Player has access to the Subterranean Hell room with the huge spikes.
# To get through this room you need either rahab and bone ark or rahab, puppet master, and skeleton ape.
# The logic can have trouble placing the items necessary to get through this room, since skeleton ape and bone ark are useless everywhere else, and rahab is only useful in a handful of rooms - so if the player doesn't have access to any places that make rahab useful by itself, the randomizer might just try to place every other item, filling up all available item locations, and never place rahab.
# So we add a special case here to 100% guaranteed place rahab (assuming the player has access to under 15 item locations). From there the randomizer can figure out that it should place bone ark or puppet master and skeleton ape.
if valid_pickups.include?(0x145) && possible_locations.length < 15
valid_pickups = [0x145] # Rahab
end
end
pickup_global_id = valid_pickups.sample(random: rng)
placing_currently_useless_pickup = true
puts "Placing currently useless pickup." if verbose
else
# All progression pickups placed.
break
end
pickup_name = checker.defs.invert[pickup_global_id].to_s
puts "Trying to place #{pickup_name}" if verbose
if !options[:randomize_boss_souls]
# If randomize boss souls option is off, don't allow putting random things in these locations.
accessible_unused_boss_locations = possible_locations & checker.enemy_locations
accessible_unused_boss_locations.each do |location|
possible_locations.delete(location)
@locations_randomized_to_have_useful_pickups << location
# Also, give the player what this boss drops so the checker takes this into account.
pickup_global_id = get_entity_skill_drop_by_entity_location(location)
checker.add_item(pickup_global_id) unless pickup_global_id.nil?
end
next if accessible_unused_boss_locations.length > 0
end
if !options[:randomize_villagers] && GAME == "ooe"
# If randomize villagers option is off, don't allow putting random things in these locations.
accessible_unused_villager_locations = possible_locations & checker.villager_locations
accessible_unused_villager_locations.each do |location|
possible_locations.delete(location)
@locations_randomized_to_have_useful_pickups << location
# Also, give the player this villager so the checker takes this into account.
villager_name = get_villager_name_by_entity_location(location)
checker.add_item(villager_name)
end
next if accessible_unused_villager_locations.length > 0
end
if !options[:randomize_portraits] && GAME == "por"
# If randomize portraits option is off, don't allow putting random things in these locations.
accessible_unused_portrait_locations = possible_locations & checker.portrait_locations
accessible_unused_portrait_locations -= @portrait_locations_to_remove # Don't count removed portraits in short mode as portrait locations.
accessible_unused_portrait_locations.each do |location|
possible_locations.delete(location)
@locations_randomized_to_have_useful_pickups << location
# Also, give the player this portrait so the checker takes this into account.
portrait_name = get_portrait_name_by_entity_location(location)
checker.add_item(portrait_name)
end
next if accessible_unused_portrait_locations.length > 0
end
new_possible_locations = possible_locations - previous_accessible_locations.flatten
filtered_new_possible_locations = filter_locations_valid_for_pickup(new_possible_locations, pickup_global_id)
puts "Filtered new possible locations: #{filtered_new_possible_locations.size}" if verbose
puts " " + filtered_new_possible_locations.join(", ") if verbose
valid_previous_accessible_regions = previous_accessible_locations.map do |previous_accessible_region|
possible_locations = previous_accessible_region.dup
possible_locations -= @locations_randomized_to_have_useful_pickups
possible_locations = filter_locations_valid_for_pickup(possible_locations, pickup_global_id)
possible_locations = nil if possible_locations.empty?
possible_locations
end.compact
possible_locations_to_choose_from = filtered_new_possible_locations.dup
if placing_currently_useless_pickup
# Place items that don't immediately open up new areas anywhere in the game, with no weighting towards later areas.
valid_accessible_locations = previous_accessible_locations.map do |previous_accessible_region|
possible_locations = previous_accessible_region.dup
possible_locations -= @locations_randomized_to_have_useful_pickups
possible_locations = filter_locations_valid_for_pickup(possible_locations, pickup_global_id)
possible_locations = nil if possible_locations.empty?
possible_locations
end.compact.flatten
valid_accessible_locations += filtered_new_possible_locations
possible_locations_to_choose_from = valid_accessible_locations
elsif filtered_new_possible_locations.empty? && valid_previous_accessible_regions.any?
# No new locations, so select an old location.
if on_leftovers
# Just placing a leftover progression pickup.
# Weighted to be more likely to select locations you got access to later rather than earlier.
i = 1
weights = valid_previous_accessible_regions.map do |region|
# Weight later accessible regions as more likely than earlier accessible regions (exponential)
weight = i**2
i += 1
weight
end
ps = weights.map{|w| w.to_f / weights.reduce(:+)}
weighted_accessible_regions = valid_previous_accessible_regions.zip(ps).to_h
previous_accessible_region = weighted_accessible_regions.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first
possible_locations_to_choose_from = previous_accessible_region
else
# Placing a main route progression pickup, just not one that immediately opens up new areas.
# Always place in the most recent accessible region.
possible_locations_to_choose_from = valid_previous_accessible_regions.last
puts "No new locations, using previous accessible location, total available: #{valid_previous_accessible_regions.last.size}" if verbose
end
elsif filtered_new_possible_locations.empty? && valid_previous_accessible_regions.empty?
# No new locations, but there's no old locations either.
if @locations_randomized_to_have_useful_pickups.size < 2
# If we're still very early in placing items yet there's no accessible spots, then the room/map randomizer must have resulted in a bad start.
# So we place the this progression item in the starting room.
entity = @starting_room.add_new_entity()
entity.x_pos = @starting_x_pos
entity.y_pos = @starting_y_pos
@coll = RoomCollision.new(@starting_room, game.fs)
floor_y = coll.get_floor_y(entity, allow_jumpthrough: true)
entity.y_pos = floor_y - 0x18
location = "#{@starting_room.room_str}_%02X" % (@starting_room.entities.length-1)
possible_locations_to_choose_from = [location]
else
possible_locations_to_choose_from = []
end
elsif filtered_new_possible_locations.size <= 5 && valid_previous_accessible_regions.last && valid_previous_accessible_regions.last.size >= 15
# There aren't many new locations unlocked by the last item we placed.
# But there are a lot of other locations unlocked by the one we placed before that.
# So we give it a chance to put it in one of those last spots, instead of the new spots.
# The chance is proportional to how few new locations there are. 1 = 70%, 2 = 60%, 3 = 50%, 4 = 40%, 5 = 30%.
chance = 0.30 + (5-filtered_new_possible_locations.size)*10
if rng.rand() <= chance
possible_locations_to_choose_from = valid_previous_accessible_regions.last
puts "Not many new locations, using previous accessible location, total available: #{valid_previous_accessible_regions.last.size}" if verbose
end
end
previous_accessible_locations << new_possible_locations
if possible_locations_to_choose_from.empty?
item_names = checker.current_items.map do |global_id|
checker.defs.invert[global_id]
end.compact
raise "Bug: Failed to find any spots to place pickup.\nSeed: #{@seed}\n\nItems:\n#{item_names.join(", ")}"
end
#puts "Possible locations: #{possible_locations_to_choose_from.join(", ")}" if verbose
location = possible_locations_to_choose_from.sample(random: rng)
@locations_randomized_to_have_useful_pickups << location
spoiler_str = get_item_placement_spoiler_string(location, pickup_global_id)
spoiler_log.puts spoiler_str
puts spoiler_str if verbose
change_entity_location_to_pickup_global_id(location, pickup_global_id)
checker.add_item(pickup_global_id)
progression_pickups_placed += 1
yield(progression_pickups_placed)
end
if room_rando? && false
File.open("accessible_doors.txt", "w") do |f|
accessible_doors.each do |accessible_door|
f.puts accessible_door
end
end
end
spoiler_log.puts "All progression pickups placed successfully."
end
def pick_starting_portrait_location_in_castle
if GAME != "por" || !options[:randomize_starting_room] || !options[:randomize_portraits]
raise "Cannot choose random location for starting portrait with these settings"
end
starting_portrait_name = AREA_INDEX_TO_PORTRAIT_NAME[@starting_room.area_index]
if starting_portrait_name.nil?
raise "Starting area is not in a portrait"
end
# The starting room randomizer started the player in a portrait.
# This is problematic because the portrait randomizer will traditionally never place a portrait back to Dracula's castle, making it inaccessible.
# So we need to place the starting portrait at a random location in Dracula's Castle and register it with the logic.
# First pick a random valid location.
possible_portrait_locations = checker.all_locations.keys
possible_portrait_locations = filter_locations_valid_for_pickup(possible_portrait_locations, starting_portrait_name)
unused_room_strs = @unused_rooms.map{|room| room.room_str}
possible_portrait_locations.reject! do |location|
room_str = location[0,8]
unused_room_strs.include?(room_str)
end
possible_portrait_locations.select! do |location|
area_index = location[0,2].to_i(16)
area_index == 0
end
starting_portrait_location_in_castle = possible_portrait_locations.sample(random: rng)
return starting_portrait_location_in_castle
end
def get_primary_return_portrait_for_portrait(portrait_name)
portrait_data = PORTRAIT_NAME_TO_DATA[portrait_name]
dest_area_index = portrait_data[:area_index]
dest_sector_index = portrait_data[:sector_index]
dest_room_index = portrait_data[:room_index]
dest_room = game.areas[dest_area_index].sectors[dest_sector_index].rooms[dest_room_index]
return_portrait = dest_room.entities.find do |entity|
entity.is_special_object? && [0x1A, 0x76, 0x86, 0x87].include?(entity.subtype)
end
return return_portrait
end
def get_item_placement_spoiler_string(location, pickup_global_id)
if RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
# Villager
# Remove the word villager and capitalize the name.
pickup_str = pickup_global_id[8..-1].capitalize
elsif PORTRAIT_NAMES.include?(pickup_global_id)
# Portrait
# Remove the word portrait and capitalize the name.
pickup_str = pickup_global_id[8..-1].capitalize
else
pickup_str = checker.defs.invert[pickup_global_id].to_s
pickup_str = pickup_str.tr("_", " ").split.map(&:capitalize).join(" ")
end
location =~ /^(\h\h)-(\h\h)-(\h\h)_(\h+)$/
area_index, sector_index, room_index, entity_index = $1.to_i(16), $2.to_i(16), $3.to_i(16), $4.to_i(16)
if SECTOR_INDEX_TO_SECTOR_NAME[area_index]
area_name = SECTOR_INDEX_TO_SECTOR_NAME[area_index][sector_index]
if area_name == "Condemned Tower & Mine of Judgment"
if MapRandomizer::CONDEMNED_TOWER_ROOM_INDEXES.include?(room_index)
area_name = "Condemned Tower"
else
area_name = "Mine of Judgment"
end
end
else
area_name = AREA_INDEX_TO_AREA_NAME[area_index]
end
is_enemy_str = checker.enemy_locations.include?(location) ? " (Boss)" : ""
is_event_str = checker.event_locations.include?(location) ? " (Event)" : ""
is_easter_egg_str = checker.easter_egg_locations.include?(location) ? " (Easter Egg)" : ""
is_hidden_str = checker.hidden_locations.include?(location) ? " (Hidden)" : ""
is_mirror_str = checker.mirror_locations.include?(location) ? " (Mirror)" : ""
location_str = "#{area_name} (#{location})#{is_enemy_str}#{is_event_str}#{is_easter_egg_str}#{is_hidden_str}#{is_mirror_str}"
spoiler_str = " %-18s %s" % [pickup_str+":", location_str]
return spoiler_str
end
def output_map_rando_error_debug_info
return unless options[:randomize_maps]
# When debugging logic errors in map rando, output a list of what room strings were accessible at the end.
File.open("./logs/accessed rooms debug #{GAME} #{seed}.txt", "w") do |f|
for room_str in @rooms_by_progression_order_accessed.flatten.uniq
f.puts(room_str)
end
end
# And also output an image of the map with accessible rooms highlighted in red.
unique_rooms_accessed = @rooms_by_progression_order_accessed.flatten.uniq
game.areas.each_index do |area_index|
map = game.get_map(area_index, 0)
for tile in map.tiles
if tile.sector_index.nil? || tile.room_index.nil?
next
end
room_str_for_tile = "%02X-%02X-%02X" % [area_index, tile.sector_index, tile.room_index]
if unique_rooms_accessed.include?(room_str_for_tile)
tile.is_save = true
tile.is_warp = false
tile.is_entrance = false
else
tile.is_save = false
tile.is_warp = false
tile.is_entrance = false
end
end
hardcoded_transition_rooms = (GAME == "dos" ? @transition_rooms : [])
filename = "./logs/map debug #{GAME} area %02X #{seed}.png" % area_index
renderer.render_map(map, scale=3, hardcoded_transition_rooms=hardcoded_transition_rooms).save(filename)
end
end
def place_non_progression_pickups
remaining_locations = checker.get_accessible_locations() - @locations_randomized_to_have_useful_pickups
remaining_locations.shuffle!(random: rng)
# In room rando, some items may be unreachable.
# We don't want the player to see these items in a different subroom and think the randomizer is bugged, so we delete them.
inaccessible_remaining_locations = checker.all_locations.keys - @locations_randomized_to_have_useful_pickups - remaining_locations
remove_inaccessible_items(inaccessible_remaining_locations)
if GAME == "ooe"
# Do event glyphs first. This is so they don't reuse a glyph already used by a glyph statue.
# If the player got the one from the glyph statue first then the one in the event/puzzle wouldn't appear, breaking the event/puzzle.
ooe_event_glyph_locations = remaining_locations.select{|location| checker.event_locations.include?(location)}
ooe_event_glyph_locations.each do |location|
pickup_global_id = get_unplaced_non_progression_skill()
change_entity_location_to_pickup_global_id(location, pickup_global_id)
end
remaining_locations -= ooe_event_glyph_locations
end
chaos_ring_placed = false
remaining_locations.each_with_index do |location, i|
if checker.enemy_locations.include?(location)
# Boss
pickup_global_id = get_unplaced_non_progression_skill()
elsif ["dos", "por"].include?(GAME) && (checker.event_locations.include?(location) || checker.easter_egg_locations.include?(location))
# Event item
pickup_global_id = get_unplaced_non_progression_item()
elsif GAME == "ooe" && location == "08-02-06_01"
# Tin man's strength ring blue chest. Can't be a glyph.
pickup_global_id = get_unplaced_non_progression_item_that_can_be_an_arm_shifted_immediate()
elsif GAME == "dos" && checker.mirror_locations.include?(location)
# Soul candles shouldn't be placed in mirrors, as they will appear even outside the mirror.
pickup_global_id = get_unplaced_non_progression_item()
elsif GAME == "dos" && !chaos_ring_placed
pickup_global_id = 0xCD
chaos_ring_placed = true
elsif GAME == "por" && !chaos_ring_placed
pickup_global_id = 0x12C
chaos_ring_placed = true
else
# Pickup
# Select the type of pickup weighed by difficulty options.
weights = {
money: @difficulty_settings[:money_placement_weight],
item: @difficulty_settings[:item_placement_weight],
}
if GAME == "por" || GAME == "ooe"
weights[:max_up] = @difficulty_settings[:max_up_placement_weight]
end
case GAME
when "dos"
weights[:skill] = @difficulty_settings[:soul_candle_placement_weight]
when "por"
weights[:skill] = @difficulty_settings[:por_skill_placement_weight]
when "ooe"
weights[:skill] = @difficulty_settings[:glyph_placement_weight]
end
weighted_pickup_types = {}
weights_sum = weights.values.reduce(:+)
weights.each do |type, weight|
weighted_pickup_types[type] = weight.to_f / weights_sum
end
random_pickup_type = weighted_pickup_types.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first
case random_pickup_type
when :money
pickup_global_id = :money
when :max_up
pickup_global_id = @max_up_items.sample(random: rng)
when :skill
pickup_global_id = get_unplaced_non_progression_skill()
when :item
if checker.hidden_locations.include?(location)
# Don't let relics be inside breakable walls in OoE.
# This is because they need to be inside a chest, and chests can't be hidden.
pickup_global_id = get_unplaced_non_progression_item_except_ooe_relics()
else
pickup_global_id = get_unplaced_non_progression_item()
end
end
end
if all_non_progression_pickups.include?(pickup_global_id)
@used_non_progression_pickups << pickup_global_id
end
change_entity_location_to_pickup_global_id(location, pickup_global_id)
end
end
def initialize_all_non_progression_pickups
if !@all_non_progression_pickups.nil?
raise "all_non_progression_pickups was initialized too early."
end
@all_non_progression_pickups = begin
all_non_progression_pickups = PICKUP_GLOBAL_ID_RANGE.to_a - checker.all_progression_pickups
all_non_progression_pickups -= NONRANDOMIZABLE_PICKUP_GLOBAL_IDS
all_non_progression_pickups -= @max_up_items
if needs_infinite_magical_tickets?
all_non_progression_pickups -= [MAGICAL_TICKET_GLOBAL_ID]
end
all_non_progression_pickups
end
end
def filter_locations_valid_for_pickup(locations, pickup_global_id)
locations = locations.dup
if ITEM_GLOBAL_ID_RANGE.include?(pickup_global_id)
# If the pickup is an item instead of a skill, don't let bosses drop it.
locations -= checker.enemy_locations
end
# Don't let progression items be in certain problematic locations. (This function is only called for progression items.)
locations -= checker.no_progression_locations
if GAME == "dos" && SKILL_GLOBAL_ID_RANGE.include?(pickup_global_id)
# Don't let events give you souls in DoS.
locations -= checker.event_locations
locations -= checker.easter_egg_locations
# Don't let soul candles be inside mirrors. They don't get hidden, and are accessible without Paranoia.
locations -= checker.mirror_locations
# Don't let soul candles be inside specific locations that can be broken without reaching them.
locations -= checker.no_soul_locations
end
if GAME == "dos" && MAGIC_SEAL_GLOBAL_ID_RANGE.include?(pickup_global_id)
# Magic seals can't be given by easter egg locations.
locations -= checker.easter_egg_locations
end
if GAME == "ooe" && ITEM_GLOBAL_ID_RANGE.include?(pickup_global_id)
# Don't let events give you items in OoE.
locations -= checker.event_locations
end
if GAME == "ooe" && !ITEM_GLOBAL_ID_RANGE.include?(pickup_global_id)
# Glyphs/villagers can't be in the special blue chest spawned by the searchlights when you kill a Tin Man.
locations -= ["08-02-06_01"]
end
if GAME == "ooe" && (!pickup_global_id.is_a?(Integer) || !game.fs.check_integer_can_be_an_arm_shifted_immediate?(pickup_global_id))
# The pickup ID is a hardcoded arm shifted immediate for the special blue chest spawned by the searchlights when you kill a Tin Man.
locations -= ["08-02-06_01"]
end
if GAME == "ooe" && (0x6F..0x74).include?(pickup_global_id)
# Don't let relics be inside breakable walls in OoE.
# This is because they need to be inside a chest, and chests can't be hidden.
locations -= checker.hidden_locations
end
if RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
# Villagers can't be hidden, an event glyph, or a boss drop.
locations -= checker.hidden_locations
locations -= checker.event_locations
locations -= checker.enemy_locations
# Villagers can't appear in Dracula's Castle since the castle can't be unlocked until you have all villagers.
locations.reject! do |location|
area_index = location[0,2].to_i(16)
area_index == 0
end
# Locations too close to the top of the room shouldn't be villagers, as the Torpor glyph would spawn above the screen and not be absorbable.
locations_too_high_to_be_a_villager = ["00-05-07_01", "00-05-07_02", "00-05-08_02", "00-05-08_03", "00-05-0C_01", "00-06-09_00", "0D-00-04_00", "0D-00-0C_00"]
locations -= locations_too_high_to_be_a_villager
# Two villagers shouldn't be placed in the same room, or their events will conflict and not work correctly.
locations.reject! do |location|
room_str = location[0,8]
@rooms_that_already_have_an_event.include?(room_str)
end
end
if PORTRAIT_NAMES.include?(pickup_global_id)
bad_portrait_locations = [
"05-02-0C_01", # Legion's room. If a portrait gets placed here the player won't be able to activate Legion because using a portrait doesn't set the pickup flag Legion checks.
"05-01-13_00", # This location overlaps a ring of flaming skulls that would damage the player on return.
"06-01-0D_02", # This location overlaps a ring of flaming skulls that would damage the player on return.
"03-00-12_00", # Enemies overlap this location.
"04-00-12_00", # Enemies overlap this location.
]
locations.select! do |location|
!bad_portrait_locations.include?(location)
end
if !room_rando? && pickup_global_id != :portraitnestofevil
# This is the location where Nest of Evil was in vanilla.
# If room rando is off, you need to do the quest with the map percentages to unlock this location.
# That quest requires you to be able to access the other 8 portraits, so we can't allow any of them to be placed here.
locations -= ["00-00-05_00"]
end
end
if GAME == "ooe" && SKILL_GLOBAL_ID_RANGE.include?(pickup_global_id)
# Don't put progression glyph in certain locations where the player could easily get them early.
locations -= checker.no_glyph_locations
end
locations
end
def get_unplaced_non_progression_pickup(valid_ids: PICKUP_GLOBAL_ID_RANGE.to_a)
  # Picks a random not-yet-placed non-progression pickup whose global ID is in
  # valid_ids, removes it from the unplaced pool and records it as used.
  # If the pool has no valid candidates left it is replenished from
  # all_non_progression_pickups (minus items the player already has and glyphs
  # already placed as event glyphs), so duplicates can then be placed.
  # Raises if even the replenished pool contains no valid candidate.
  valid_possible_items = @unplaced_non_progression_pickups.select do |unplaced_id|
    valid_ids.include?(unplaced_id)
  end
  pickup_global_id = valid_possible_items.sample(random: rng)
  if pickup_global_id.nil?
    # Ran out of unplaced pickups, so place a duplicate instead.
    @unplaced_non_progression_pickups += all_non_progression_pickups().select do |candidate_id|
      valid_ids.include?(candidate_id)
    end
    @unplaced_non_progression_pickups -= checker.current_items
    # If a glyph has already been placed as an event glyph, do not place it again somewhere.
    # If the player gets one from a glyph statue first, then the one in the event/puzzle won't appear.
    @unplaced_non_progression_pickups -= @glyphs_placed_as_event_glyphs
    # Guard against infinite recursion when the replenished pool still has no
    # valid candidates (previously this would recurse forever).
    if @unplaced_non_progression_pickups.none? { |unplaced_id| valid_ids.include?(unplaced_id) }
      raise "No unplaced non-progression pickups available for the given valid IDs"
    end
    return get_unplaced_non_progression_pickup(valid_ids: valid_ids)
  end
  @unplaced_non_progression_pickups.delete(pickup_global_id)
  @used_non_progression_pickups << pickup_global_id
  return pickup_global_id
end
def get_unplaced_non_progression_item
  # Pick an unplaced non-progression pickup, restricted to item IDs only.
  get_unplaced_non_progression_pickup(valid_ids: ITEM_GLOBAL_ID_RANGE.to_a)
end
def get_unplaced_non_progression_item_that_can_be_an_arm_shifted_immediate
  # Items are written to the ROM as ID+1, so it is the ID+1 value that must be
  # encodable as an ARM shifted immediate.
  encodable_ids = ITEM_GLOBAL_ID_RANGE.select do |item_id|
    game.fs.check_integer_can_be_an_arm_shifted_immediate?(item_id + 1)
  end
  get_unplaced_non_progression_pickup(valid_ids: encodable_ids)
end
def get_unplaced_non_progression_skill
  # Pick an unplaced non-progression pickup, restricted to skill IDs only.
  get_unplaced_non_progression_pickup(valid_ids: SKILL_GLOBAL_ID_RANGE.to_a)
end
def get_unplaced_non_progression_item_except_ooe_relics
  # Like get_unplaced_non_progression_item, but in OoE the relic item IDs
  # (0x6F..0x74) are excluded from the candidate pool.
  candidate_ids = ITEM_GLOBAL_ID_RANGE.to_a
  candidate_ids -= (0x6F..0x74).to_a if GAME == "ooe"
  get_unplaced_non_progression_pickup(valid_ids: candidate_ids)
end
def get_unplaced_non_progression_projectile_glyph
  # Pick an unplaced pickup restricted to the glyph IDs that fire projectiles.
  projectile_glyph_ids = [*0x16..0x18, *0x1C..0x32, *0x34..0x36]
  get_unplaced_non_progression_pickup(valid_ids: projectile_glyph_ids)
end
def get_unplaced_non_progression_pickup_for_enemy_drop
  # Enemy drops exclude items whose hardcoded effects would be overpowered.
  droppable_ids = PICKUP_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  get_unplaced_non_progression_pickup(valid_ids: droppable_ids)
end
def get_unplaced_non_progression_item_for_enemy_drop
  # Item-only variant of get_unplaced_non_progression_pickup_for_enemy_drop.
  droppable_ids = ITEM_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  get_unplaced_non_progression_pickup(valid_ids: droppable_ids)
end
def get_unplaced_non_progression_item_except_ooe_relics_for_enemy_drop
  # Like get_unplaced_non_progression_item_for_enemy_drop, but in OoE the
  # relic item IDs (0x6F..0x74) are also excluded.
  droppable_ids = ITEM_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  droppable_ids -= (0x6F..0x74).to_a if GAME == "ooe"
  get_unplaced_non_progression_pickup(valid_ids: droppable_ids)
end
def get_entity_by_location_str(location)
  # Parses a location string of the form "AA-SS-RR_EE" (hex area, sector,
  # room, entity indexes) and returns the corresponding entity object.
  # Raises a clear error on a malformed location string (previously a bad
  # string crashed with a confusing NoMethodError on nil).
  match = location.match(/^(\h\h)-(\h\h)-(\h\h)_(\h+)$/)
  raise "Invalid location string: #{location}" if match.nil?
  area_index, sector_index, room_index, entity_index = match.captures.map { |capture| capture.to_i(16) }
  room = game.areas[area_index].sectors[sector_index].rooms[room_index]
  room.entities[entity_index]
end
def change_entity_location_to_pickup_global_id(location, pickup_global_id)
  # Rewrites the entity at the given location string so it grants
  # pickup_global_id instead of its original pickup, converting the entity to
  # whatever entity type can hold that pickup for the current game.
  # pickup_global_id may be an integer global ID, :money, a villager name
  # symbol (OoE), or a portrait name symbol (PoR).
  entity = get_entity_by_location_str(location)
  if checker.event_locations.include?(location) || checker.easter_egg_locations.include?(location)
    # Event with a hardcoded item/glyph.
    change_hardcoded_event_pickup(entity, pickup_global_id)
    return
  end
  if GAME == "ooe" && location == "08-02-06_01" # Strength Ring blue chest spawned by the searchlights after you kill the Tin Man
    if entity.var_a != 2
      raise "Searchlights are not of type 2 (Tin Man spawn)"
    end
    # The chest's item is a hardcoded ARM shifted immediate in the binary.
    game.fs.replace_arm_shifted_immediate_integer(0x022A194C, pickup_global_id+1)
  elsif RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
    # Villager
    if GAME != "ooe"
      raise "Tried to place villager in #{GAME}"
    end
    # Record the room so two villager events never share one room.
    room_str = location[0,8]
    @rooms_that_already_have_an_event << room_str
    entity.type = 2
    entity.subtype = 0x89
    entity.var_a = VILLAGER_NAME_TO_EVENT_FLAG[pickup_global_id]
    entity.var_b = 0
    entity.write_to_rom()
    if pickup_global_id == :villageranna
      # Anna must have Tom in her room, or her event will crash the game.
      room = entity.room
      cat = Entity.new(room, room.fs)
      cat.x_pos = entity.x_pos
      cat.y_pos = entity.y_pos
      cat.type = 2
      cat.subtype = 0x3F
      cat.var_a = 3
      cat.var_b = 1
      room.entities << cat
      room.write_entities_to_rom()
      # Remove the Tom in Anna's original room since he's not needed there.
      original_cat = game.areas[7].sectors[0].rooms[6].entities[2]
      original_cat.type = 0
      original_cat.write_to_rom()
    end
  elsif PORTRAIT_NAMES.include?(pickup_global_id)
    # Portrait
    if GAME != "por"
      raise "Tried to place portrait in #{GAME}"
    end
    portrait_data = PORTRAIT_NAME_TO_DATA[pickup_global_id]
    entity.type = SPECIAL_OBJECT_ENTITY_TYPE
    entity.subtype = portrait_data[:subtype]
    entity.var_a = portrait_data[:var_a]
    entity.var_b = portrait_data[:var_b]
    # Move the portrait to a short distance above the closest floor so it looks good and is enterable.
    coll = RoomCollision.new(entity.room, game.fs)
    floor_y = coll.get_floor_y(entity, allow_jumpthrough: true)
    entity_original_y_pos = entity.y_pos
    entity.y_pos = floor_y - 0x50 # Portraits should float 5 tiles off the ground.
    entity.write_to_rom()
    curr_area_index = entity.room.area_index
    curr_sector_index = entity.room.sector_index
    curr_room_index = entity.room.room_index
    # Find the return portrait.
    dest_area_index = portrait_data[:area_index]
    dest_sector_index = portrait_data[:sector_index]
    dest_room_index = portrait_data[:room_index]
    dest_room = game.areas[dest_area_index].sectors[dest_sector_index].rooms[dest_room_index]
    dest_portrait = dest_room.entities.find{|entity| entity.is_special_object? && [0x1A, 0x76, 0x86, 0x87].include?(entity.subtype)}
    return_portraits = [dest_portrait]
    # Update the list of x/y positions the player returns at in the por_distinct_return_portrait_positions patch.
    return_x = entity.x_pos
    return_y = floor_y
    game.fs.write(0x02309010+dest_area_index*4, [return_x, return_y].pack("vv"))
    # If there's a small breakable wall containing this portrait we remove it.
    # Not only does the breakable wall not hide the portrait, but when the player returns they would be put out of bounds by it.
    breakable_wall_x_range = (entity.x_pos-8..entity.x_pos+8)
    breakable_wall_y_range = (entity_original_y_pos-8..entity_original_y_pos+8)
    breakable_wall_entity = entity.room.entities.find do |e|
      e.is_special_object? && e.subtype == 0x3B && breakable_wall_x_range.include?(e.x_pos) && breakable_wall_y_range.include?(e.y_pos)
    end
    if breakable_wall_entity
      breakable_wall_entity.type = 0
      breakable_wall_entity.write_to_rom()
    end
    # Also update the bonus return portrait at the end of some areas.
    case dest_area_index
    when 2 # 13th Street
      return_portraits << game.entity_by_str("02-02-16_02")
    when 4 # Forgotten City
      return_portraits << game.entity_by_str("04-01-07_01")
    when 6 # Burnt Paradise
      return_portraits << game.entity_by_str("06-00-06_03")
    when 8 # Dark Academy
      return_portraits << game.entity_by_str("08-00-08_04")
    end
    return_portraits.each do |return_portrait|
      # Point each return portrait back at the room this portrait was placed in.
      return_portrait.var_a = curr_area_index
      return_portrait.var_b = ((curr_sector_index & 0xF) << 6) | (curr_room_index & 0x3F)
      return_portrait.subtype = case curr_area_index
      when 1, 3, 5, 7 # City of Haze, Sandy Grave, Nation of Fools, or Forest of Doom.
        0x1A
      when 2, 4, 6, 8 # 13th Street, Forgotten City, Burnt Paradise, or Dark Academy.
        0x76
      when 0, 9 # Dracula's Castle or Nest of Evil.
        if [2, 4, 6, 8].include?(dest_area_index)
          # Use the alt portrait frame when returning to Dracula's Castle from 13th Street, Forgotten City, Burnt Paradise, or Dark Academy.
          0x87
        else
          0x86
        end
      else
        puts "Unknown area to portrait into: %02X" % curr_area_index
      end
      # Set highest bit of var B to indicate that this is a return portrait to the por_distinct_return_portrait_positions patch.
      return_portrait.var_b = 0x8000 | return_portrait.var_b
      return_portrait.write_to_rom()
      if room_rando?
        # Tell the room rando logic about this return portrait.
        checker.add_return_portrait(return_portrait.room.room_str, location)
      end
    end
    if dest_area_index == 7 # Forest of Doom
      # Remove the event from the original Forest of Doom portrait room since the portrait is no longer there.
      forest_event = game.entity_by_str("00-08-01_03")
      forest_event.type = 0
      forest_event.write_to_rom()
    end
  elsif entity.type == 1
    # Boss: the pickup becomes the boss's soul/glyph drop in its enemy DNA.
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if !PICKUP_SUBTYPES_FOR_SKILLS.include?(item_type)
      raise "Can't make boss drop required item"
    end
    if GAME == "dos" && entity.room.sector_index == 9 && entity.room.room_index == 1
      # Aguni. He's not placed in the room so we hardcode him.
      enemy_dna = game.enemy_dnas[0x70]
    else
      enemy_dna = game.enemy_dnas[entity.subtype]
    end
    case GAME
    when "dos"
      enemy_dna["Soul"] = item_index
    when "ooe"
      enemy_dna["Glyph"] = pickup_global_id + 1
    else
      raise "Boss soul randomizer is bugged for #{LONG_GAME_NAME}."
    end
    enemy_dna.write_to_rom()
  elsif GAME == "dos" || GAME == "por"
    if GAME == "por" && location == "05-02-0C_01"
      # Cog's location. We always make this location use pickup flag 0x10 since Legion is hardcoded to check that flag, not whether you own the cog.
      pickup_flag = 0x10
      is_cog = true
    else
      pickup_flag = get_unused_pickup_flag_for_entity(entity)
      is_cog = false
    end
    if pickup_global_id == :money
      if entity.is_hidden_pickup? || is_cog || rng.rand <= 0.80
        # 80% chance to be a money bag
        # Hidden pickups have to be a bag since chests can't be hidden in a wall.
        # The cog location has to be a bag since chests can't have a pickup flag so they wouldn't be able to activate legion.
        if entity.is_hidden_pickup?
          entity.type = 7
        else
          entity.type = 4
        end
        entity.subtype = 1
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = rng.rand(4..6) # 500G, 1000G, 2000G
      else
        # 20% chance to be a money chest
        entity.type = 2
        entity.subtype = 1
        if GAME == "dos"
          entity.var_a = 0x10
        else
          entity.var_a = [0xE, 0xF, 0x12].sample(random: rng)
        end
        # We didn't use the pickup flag, so put it back
        @unused_pickup_flags << pickup_flag
      end
      entity.write_to_rom()
      return
    end
    # Make sure Chaos/Magus Ring isn't easily available.
    if GAME == "dos" && pickup_global_id == 0xCD # Chaos Ring
      entity.type = 2
      entity.subtype = 0x4C # All-souls-owned item
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = pickup_global_id + 1
      entity.write_to_rom()
      return
    elsif GAME == "por" && pickup_global_id == 0x12C # Magus Ring
      entity.type = 6 # All-quests-complete item
      entity.subtype = 7
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = 6
      entity.write_to_rom()
      return
    end
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if PICKUP_SUBTYPES_FOR_SKILLS.include?(item_type)
      case GAME
      when "dos"
        # Soul candle
        entity.type = 2
        entity.subtype = 1
        entity.var_a = 0
        entity.var_b = item_index
        # We didn't use the pickup flag, so put it back
        @unused_pickup_flags << pickup_flag
      when "por"
        # Skill
        if entity.is_hidden_pickup?
          entity.type = 7
        else
          entity.type = 4
        end
        entity.subtype = item_type
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = item_index
      end
    else
      # Item
      if entity.is_hidden_pickup?
        entity.type = 7
      else
        entity.type = 4
      end
      entity.subtype = item_type
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = item_index
    end
    entity.write_to_rom()
  elsif GAME == "ooe"
    pickup_flag = get_unused_pickup_flag_for_entity(entity)
    # NOTE(review): glyph entities get a temporary +0x20 y offset here and the
    # inverse at the end if the entity is still a glyph afterwards — presumably
    # free glyphs hover 0x20 higher than ground items; confirm.
    if entity.is_glyph? && !entity.is_hidden_pickup?
      entity.y_pos += 0x20
    end
    if pickup_global_id == :money
      if entity.is_hidden_pickup?
        entity.type = 7
      else
        entity.type = 4
      end
      entity.subtype = 1
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = rng.rand(4..6) # 500G, 1000G, 2000G
      entity.write_to_rom()
      return
    end
    if (0x6F..0x74).include?(pickup_global_id)
      # Relic. Must go in a chest, if you leave it lying on the ground it won't autoequip.
      entity.type = 2
      entity.subtype = 0x16
      entity.var_a = pickup_global_id + 1
      entity.var_b = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.write_to_rom()
      return
    end
    if pickup_global_id >= 0x6F
      # Item
      if entity.is_hidden_pickup?
        entity.type = 7
        entity.subtype = 0xFF
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = pickup_global_id + 1
      else
        case rng.rand
        when 0.00..0.70
          # 70% chance for a red chest
          entity.type = 2
          entity.subtype = 0x16
          entity.var_a = pickup_global_id + 1
          entity.var_b = pickup_flag
          use_pickup_flag(pickup_flag)
        when 0.70..0.95
          # 15% chance for an item on the ground
          entity.type = 4
          entity.subtype = 0xFF
          entity.var_a = pickup_flag
          use_pickup_flag(pickup_flag)
          entity.var_b = pickup_global_id + 1
        else
          # 5% chance for a hidden blue chest
          entity.type = 2
          entity.subtype = 0x17
          entity.var_a = pickup_global_id + 1
          entity.var_b = pickup_flag
          use_pickup_flag(pickup_flag)
        end
      end
    else
      # Glyph
      if entity.is_hidden_pickup?
        entity.type = 7
        entity.subtype = 2
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = pickup_global_id + 1
      else
        puzzle_glyph_ids = [0x1D, 0x1F, 0x20, 0x22, 0x24, 0x26, 0x27, 0x2A, 0x2B, 0x2F, 0x30, 0x31, 0x32, 0x46, 0x4E]
        if puzzle_glyph_ids.include?(pickup_global_id)
          # Free glyph
          entity.type = 4
          entity.subtype = 2
          entity.var_a = pickup_flag
          use_pickup_flag(pickup_flag)
          entity.var_b = pickup_global_id + 1
        else
          # Glyph statue
          entity.type = 2
          entity.subtype = 2
          entity.var_a = 0
          entity.var_b = pickup_global_id + 1
          # We didn't use the pickup flag, so put it back
          @unused_pickup_flags << pickup_flag
        end
      end
    end
    if entity.is_glyph? && !entity.is_hidden_pickup?
      entity.y_pos -= 0x20
    end
    entity.write_to_rom()
  end
end
def remove_inaccessible_items(inaccessible_remaining_locations)
  # Deletes the pickup entities at locations the player can never reach.
  inaccessible_remaining_locations.each do |location|
    entity = get_entity_by_location_str(location)
    # Don't delete inaccessible events/bosses, just in case.
    next if checker.event_locations.include?(location) || entity.type == 1
    entity.type = 0
    entity.write_to_rom()
  end
end
def get_unused_pickup_flag_for_entity(entity)
  # Returns the pickup flag to use for the given entity, preferring the flag
  # the entity already stores; falls back to popping a fresh unused flag when
  # the stored one is absent, reserved (OoE), or already taken.
  pickup_flag =
    if entity.is_item_chest?
      entity.var_b
    elsif entity.is_pickup?
      entity.var_a
    elsif GAME == "dos" && entity.is_special_object? && entity.subtype == 0x4D # Easter egg item
      entity.var_b
    elsif GAME == "dos" && entity.is_special_object? && entity.subtype == 0x4C # All-souls-obtained item
      entity.var_a
    end
  if GAME == "ooe" && (0..0x51).include?(pickup_flag)
    # In OoE, these pickup flags are used by glyph statues automatically and we can't control those.
    # Therefore we need to reassign pickups that were free glyphs in the original game a new pickup flag, so it doesn't conflict with where those glyphs (Rapidus Fio and Volaticus) got moved to when randomized.
    pickup_flag = nil
  end
  if pickup_flag.nil? || @used_pickup_flags.include?(pickup_flag)
    pickup_flag = @unused_pickup_flags.pop
    if pickup_flag.nil?
      raise "No pickup flag for this item, this error shouldn't happen"
    end
  end
  pickup_flag
end
def get_unused_pickup_flag()
  # Takes the next free pickup flag from the pool; the pool should always be
  # large enough, so an empty pool is treated as an internal error.
  flag = @unused_pickup_flags.pop
  raise "No pickup flag for this item, this error shouldn't happen" if flag.nil?
  flag
end
def use_pickup_flag(pickup_flag)
  # Marks the flag as consumed and prunes every consumed flag from the pool
  # of available ones.
  @used_pickup_flags.push(pickup_flag)
  @unused_pickup_flags = @unused_pickup_flags - @used_pickup_flags
end
def get_entity_skill_drop_by_entity_location(location)
  # Returns the global skill ID (soul in DoS, glyph in OoE) dropped by the
  # enemy entity at the given location string, or nil when the enemy drops
  # nothing. Raises if the entity there is not an enemy, or for games this
  # randomizer doesn't support.
  entity = get_entity_by_location_str(location)
  if entity.type != 1
    raise "Not an enemy: #{location}"
  end
  if GAME == "dos" && entity.room.sector_index == 9 && entity.room.room_index == 1
    # Aguni. He's not placed in the room so we hardcode him.
    enemy_dna = game.enemy_dnas[0x70]
  else
    enemy_dna = game.enemy_dnas[entity.subtype]
  end
  case GAME
  when "dos"
    skill_local_id = enemy_dna["Soul"]
    if skill_local_id == 0xFF
      # 0xFF marks an enemy with no soul drop.
      return nil
    end
  when "ooe"
    # Glyph IDs are stored 1-based in the DNA; convert to 0-based.
    skill_local_id = enemy_dna["Glyph"] - 1
  else
    raise "Boss soul randomizer is bugged for #{LONG_GAME_NAME}."
  end
  # Convert the game-local skill index to a global pickup ID.
  skill_global_id = skill_local_id + SKILL_GLOBAL_ID_RANGE.begin
  return skill_global_id
end
def get_villager_name_by_entity_location(location)
  # Looks up which villager the entity at the given location represents by
  # reverse-mapping its event flag. Raises if it isn't an OoE villager object.
  entity = get_entity_by_location_str(location)
  is_villager = GAME == "ooe" && entity.type == 2 && [0x89, 0x6D].include?(entity.subtype)
  raise "Not a villager: #{location}" unless is_villager
  VILLAGER_NAME_TO_EVENT_FLAG.invert[entity.var_a]
end
def get_portrait_name_by_entity_location(location)
  # Looks up which portrait the entity at the given location represents via
  # its destination area index. Raises if it isn't a PoR portrait object.
  entity = get_entity_by_location_str(location)
  is_portrait = GAME == "por" && entity.is_special_object? && [0x1A, 0x76, 0x86, 0x87].include?(entity.subtype)
  raise "Not a portrait: #{location} #{entity.inspect}" unless is_portrait
  AREA_INDEX_TO_PORTRAIT_NAME[entity.var_a]
end
def change_hardcoded_event_pickup(event_entity, pickup_global_id)
  # Dispatches to the game-specific handler that patches the event's
  # hardcoded pickup. Unknown games are silently ignored, as before.
  handler = {
    "dos" => :dos_change_hardcoded_event_pickup,
    "por" => :por_change_hardcoded_event_pickup,
    "ooe" => :ooe_change_hardcoded_event_pickup,
  }[GAME]
  send(handler, event_entity, pickup_global_id) if handler
end
def dos_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  # Rewrites a DoS event that gives a hardcoded pickup (Mina's Talisman event
  # or an easter egg item) so it gives pickup_global_id instead, by patching
  # the hardcoded IDs directly in the game binary.
  event_entity.room.sector.load_necessary_overlay()
  if event_entity.subtype == 0x65 # Mina's Talisman
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if MAGIC_SEAL_GLOBAL_ID_RANGE.include?(pickup_global_id)
      # Magic seal. These need to call a different function to be properly given.
      seal_index = pickup_global_id - 0x3D
      # Seal given when watching the event
      game.fs.write(0x021CB9F4, [seal_index].pack("C"))
      game.fs.write(0x021CB9FC, [0xEB006ECF].pack("V")) # Call func 021E7540
      # Seal given when skipping the event
      game.fs.write(0x021CBC14, [seal_index].pack("C"))
      game.fs.write(0x021CBC1C, [0xEB006E47].pack("V")) # Call func 021E7540
    else
      # Regular item.
      # Item given when watching the event
      game.fs.write(0x021CB9F4, [item_type].pack("C"))
      game.fs.write(0x021CB9F8, [item_index].pack("C"))
      # Item given when skipping the event
      game.fs.write(0x021CBC14, [item_type].pack("C"))
      game.fs.write(0x021CBC18, [item_index].pack("C"))
    end
    # Item name shown in the corner of the screen when watching the event.
    game.fs.write(0x021CBA08, [item_type].pack("C"))
    game.fs.write(0x021CBA0C, [item_index].pack("C"))
    # Also display the item's name in the corner when skipping the event.
    # We add a few new lines of code in free space for this.
    code = [0xE3A00000, 0xE3A010F0, 0xEBFDB6FD, 0xE1A00005, 0xEA042E64]
    game.fs.write(0x020C027C, code.pack("V*"))
    # NOTE(review): this write targets the same address (0x020C027C) as the
    # code block just written above and overwrites its first byte — confirm
    # the intended offset.
    game.fs.write(0x020C027C, [pickup_global_id+1].pack("C"))
    game.fs.write(0x021CBC20, [0xEAFBD195].pack("V"))
  elsif event_entity.subtype == 0x4D # Easter egg item
    # Change what item is actually placed into your inventory when you get the easter egg.
    easter_egg_index = event_entity.var_a
    game.fs.write(0x0222BE34 + easter_egg_index*0xC, [pickup_global_id+1].pack("v"))
    # Update the pickup flag.
    pickup_flag = get_unused_pickup_flag_for_entity(event_entity)
    event_entity.var_b = pickup_flag
    use_pickup_flag(pickup_flag)
    # Make the easter egg special object use the same palette list as actual item icons, since that gives access to all 3 icon palettes, while the actual object's palette only has the first.
    sprite_info = SpecialObjectType.new(0x4D, game.fs).extract_gfx_and_palette_and_sprite_from_create_code
    item = game.items[pickup_global_id]
    icon_palette_pointer = 0x022C4684
    game.fs.write(0x021AF5CC, [icon_palette_pointer].pack("V"))
    icon_palette_index = (item["Icon"] & 0xFF00) >> 8
    sprite = sprite_info.sprite
    sprite.frames[easter_egg_index].parts.first.palette_index = icon_palette_index
    sprite.write_to_rom()
    # Now update the actual item visual on the object's GFX page so it visually shows the correct item.
    sprite_info = SpecialObjectType.new(0x4D, game.fs).extract_gfx_and_palette_and_sprite_from_create_code # We extract sprite info again to get the updated palette pointer after we changed it.
    gfx = sprite_info.gfx_pages.first
    palettes = renderer.generate_palettes(sprite_info.palette_pointer, 16)
    chunky_image = renderer.render_gfx_page(gfx, palettes[icon_palette_index], gfx.canvas_width)
    new_icon = renderer.render_icon_by_item(item)
    x_offset = 16*easter_egg_index
    y_offset = 0
    chunky_image.replace!(new_icon, x_offset, y_offset)
    renderer.save_gfx_page(chunky_image, gfx, sprite_info.palette_pointer, 16, icon_palette_index)
  end
end
def por_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  # PoR handler: only ensures the sector's overlay is loaded, matching the
  # other games' handlers. NOTE(review): pickup_global_id is intentionally
  # unused here — no PoR event pickups are patched; confirm.
  event_entity.room.sector.load_necessary_overlay
end
def ooe_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  # Rewrites an OoE event that gives a hardcoded glyph so it gives
  # pickup_global_id instead. Some events are converted into plain free glyph
  # entities because their scripted behavior can't be retargeted; the rest
  # have their hardcoded glyph IDs and pickup flags patched in the binary.
  event_entity.room.sector.load_necessary_overlay()
  # Remember this glyph so the placement code never also places it elsewhere.
  @glyphs_placed_as_event_glyphs << pickup_global_id
  if event_entity.subtype == 0x8A # Magnes
    # Get rid of the event, turn it into a normal free glyph
    # We can't keep the event because it automatically equips Magnes even if the glyph it gives is not Magnes.
    # Changing what it equips would just make the event not work right, so we may as well remove it.
    pickup_flag = get_unused_pickup_flag()
    event_entity.type = 4
    event_entity.subtype = 2
    event_entity.var_a = pickup_flag
    use_pickup_flag(pickup_flag)
    event_entity.var_b = pickup_global_id + 1
    event_entity.x_pos = 0x80
    event_entity.y_pos = 0x2B0
    event_entity.write_to_rom()
  elsif event_entity.subtype == 0x69 # Dominus Hatred
    game.fs.write(0x02230A7C, [pickup_global_id+1].pack("C"))
    game.fs.write(0x022C25D8, [pickup_global_id+1].pack("C"))
  elsif event_entity.subtype == 0x6F # Dominus Anger
    game.fs.write(0x02230A84, [pickup_global_id+1].pack("C"))
    game.fs.write(0x022C25DC, [pickup_global_id+1].pack("C"))
  elsif event_entity.subtype == 0x81 # Cerberus
    # Get rid of the event, turn it into a normal free glyph
    # We can't keep the event because it has special programming to always spawn them in order even if you get to the locations out of order.
    pickup_flag = get_unused_pickup_flag()
    event_entity.type = 4
    event_entity.subtype = 2
    event_entity.var_a = pickup_flag
    use_pickup_flag(pickup_flag)
    event_entity.var_b = pickup_global_id + 1
    event_entity.x_pos = 0x80
    event_entity.y_pos = 0x60
    event_entity.write_to_rom()
    other_cerberus_events = event_entity.room.entities.select{|e| e.is_special_object? && [0x82, 0x83].include?(e.subtype)}
    other_cerberus_events.each do |event|
      # Delete these others, we don't want the events.
      event.type = 0
      event.write_to_rom()
    end
  else
    # Per-puzzle binary addresses of the hardcoded glyph ID and the pickup
    # flag reads/writes; entries without flag addresses only patch the glyph.
    glyph_id_location, pickup_flag_read_location, pickup_flag_write_location, second_pickup_flag_read_location = case event_entity.subtype
    when 0x2F # Luminatio
      [0x022C4894, 0x022C483C, 0x022C4888]
    when 0x3B # Pneuma
      [0x022C28E8, 0x022C2880, 0x022C28DC, 0x022C279C]
    when 0x44 # Lapiste
      [0x022C2CB0, 0x022C2C24, 0x022C2CA0]
    when 0x54 # Vol Umbra
      [0x022C2FBC, 0x022C2F70, 0x022C2FB4]
    when 0x4C # Vol Fulgur
      [0x022C2490, 0x022C2404, 0x022C2480]
    when 0x52 # Vol Ignis
      [0x0221F1A0, 0x0221F148, 0x0221F194]
    when 0x47 # Vol Grando
      [0x022C230C, 0x022C2584, 0x022C22FC]
    when 0x40 # Cubus
      [0x022C31DC]
    when 0x53 # Morbus
      [0x022C2354, 0x022C2318, 0x022C2344]
    when 0x76 # Dominus Agony
      [0x022C25BC]
    else
      # Unknown event subtype: nothing we know how to patch.
      return
    end
    # What glyph is actually spawned.
    game.fs.write(glyph_id_location, [pickup_global_id+1].pack("C"))
    if pickup_flag_write_location
      # The pickup flag set when you absorb the glyph.
      pickup_flag = pickup_global_id+2
      game.fs.write(pickup_flag_write_location, [pickup_flag].pack("C"))
    end
    if pickup_flag_read_location
      # The pickup flag read to decide whether you've completed this puzzle yet or not.
      # This is determined by two lines of code:
      # The first loads the word in the bitfield containing the correct bit (0x20 bits in each word):
      pickup_flag_word_offset = 0x40 + 4*(pickup_flag/0x20)
      game.fs.write(pickup_flag_read_location, [pickup_flag_word_offset].pack("C"))
      game.fs.write(second_pickup_flag_read_location, [pickup_flag_word_offset].pack("C")) if second_pickup_flag_read_location
      # The second does a tst on the exact bit within that word:
      pickup_flag_bit_index = pickup_flag % 0x20
      game.fs.replace_hardcoded_bit_constant(pickup_flag_read_location+4, pickup_flag_bit_index)
      game.fs.replace_hardcoded_bit_constant(second_pickup_flag_read_location+4, pickup_flag_bit_index) if second_pickup_flag_read_location
    end
  end
end
end
|
Add watchr config file watchr.rb
# config file for watchr http://github.com/mynyml/watchr
# install: gem install watchr
# run: watchr watchr.rb
# note: make sure that you have jstd server running (server.sh) and a browser captured
# NOTE(review): the `&>` redirects below reopen the same log files and clobber
# the header lines written by the preceding `echo` — confirm this is intended.
watch( '(src|test|example)/' ) do
  %x{ echo "\n\ntest run started @ `date`" > logs/jstd.log; ./test.sh &> logs/jstd.log & }
  %x{ echo "\n\nlint started @ `date`" > logs/lint.log; rake lint &> logs/lint.log & }
end
|
Regenerate gemspec for version 0.1.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = "sumitup"
  s.version = "0.1.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Justin Ball"]
  s.date = "2012-03-02"
  s.description = "Given an html document or fragment this gem will build a summary of the content."
  s.email = "justinball@gmail.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    ".rspec",
    "Gemfile",
    "Gemfile.lock",
    "Guardfile",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/sumitup.rb",
    "lib/sumitup/parser.rb",
    "spec/spec_helper.rb",
    "spec/sumitup/parser_spec.rb"
  ]
  s.homepage = "http://github.com/tatemae/sumitup"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.12"
  s.summary = "Generate a summary of html content"
  # Dependency declarations, with jeweler-generated fallbacks for RubyGems
  # versions that predate specification_version 3 or the runtime/development
  # dependency distinction.
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<sanitize>, [">= 0"])
      s.add_development_dependency(%q<growl>, [">= 0"])
      s.add_development_dependency(%q<rspec>, ["~> 2.8.0"])
      s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.3"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<guard>, [">= 1.0.0"])
      s.add_development_dependency(%q<guard-rspec>, [">= 0.6.0"])
      s.add_development_dependency(%q<guard-bundler>, [">= 0.1.3"])
      s.add_development_dependency(%q<ruby-debug>, [">= 0"])
    else
      s.add_dependency(%q<sanitize>, [">= 0"])
      s.add_dependency(%q<growl>, [">= 0"])
      s.add_dependency(%q<rspec>, ["~> 2.8.0"])
      s.add_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.3"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<guard>, [">= 1.0.0"])
      s.add_dependency(%q<guard-rspec>, [">= 0.6.0"])
      s.add_dependency(%q<guard-bundler>, [">= 0.1.3"])
      s.add_dependency(%q<ruby-debug>, [">= 0"])
    end
  else
    s.add_dependency(%q<sanitize>, [">= 0"])
    s.add_dependency(%q<growl>, [">= 0"])
    s.add_dependency(%q<rspec>, ["~> 2.8.0"])
    s.add_dependency(%q<rdoc>, ["~> 3.12"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.3"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<guard>, [">= 1.0.0"])
    s.add_dependency(%q<guard-rspec>, [">= 0.6.0"])
    s.add_dependency(%q<guard-bundler>, [">= 0.1.3"])
    s.add_dependency(%q<ruby-debug>, [">= 0"])
  end
end
|
#
# Be sure to run `pod lib lint CPYPageViewController.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name             = 'CPYPageViewController'
  s.version          = '0.1.2'
  s.summary          = 'A page view controller with tab integration.'
  # This description is used to generate tags and improve search results.
  # * Think: What does it do? Why did you write it? What is the focus?
  # * Try to keep it short, snappy and to the point.
  # (Note: the description below is a plain single-line string; the template's
  # multi-line DESC heredoc form is not used here.)
  s.description      = 'A page view controller with tab integration. Also you can use page view controller only.'
  s.homepage         = 'https://github.com/cielpy/CPYPageViewController'
  # s.screenshots     = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'cielpy' => 'beijiu572@gmail.com' }
  s.source           = { :git => 'https://github.com/cielpy/CPYPageViewController.git', :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/cielpy5'
  s.ios.deployment_target = '8.0'
  s.source_files = 'CPYPageViewController/Classes/**/*'
  # s.resource_bundles = {
  #   'CPYPageViewController' => ['CPYPageViewController/Assets/*.png']
  # }
  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'AFNetworking', '~> 2.3'
end
Bump version to 0.1.3
#
# Be sure to run `pod lib lint CPYPageViewController.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name             = 'CPYPageViewController'
  s.version          = '0.1.3'
  s.summary          = 'A page view controller with tab integration.'
  # This description is used to generate tags and improve search results.
  # * Think: What does it do? Why did you write it? What is the focus?
  # * Try to keep it short, snappy and to the point.
  # (Note: the description below is a plain single-line string; the template's
  # multi-line DESC heredoc form is not used here.)
  s.description      = 'A page view controller with tab integration. Also you can use page view controller only.'
  s.homepage         = 'https://github.com/cielpy/CPYPageViewController'
  # s.screenshots     = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'cielpy' => 'beijiu572@gmail.com' }
  s.source           = { :git => 'https://github.com/cielpy/CPYPageViewController.git', :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/cielpy5'
  s.ios.deployment_target = '8.0'
  s.source_files = 'CPYPageViewController/Classes/**/*'
  # s.resource_bundles = {
  #   'CPYPageViewController' => ['CPYPageViewController/Assets/*.png']
  # }
  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'AFNetworking', '~> 2.3'
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

# Gem metadata for coral_core 0.2.29. This file is regenerated by jeweler
# from the Rakefile ('rake gemspec'); hand edits will be overwritten.
Gem::Specification.new do |s|
  s.name = "coral_core"
  s.version = "0.2.29"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Adrian Webb"]
  s.date = "2014-01-11"
  # Long-form description (README content rendered into the spec by jeweler).
  s.description = "= coral_core\n\nThis library provides core data elements and utilities used in other Coral gems.\n\nThe Coral core library contains functionality that is utilized by other\nCoral gems by providing basic utilities like Git, Shell, Disk, and Data\nmanipulation libraries, a UI system, and a core data model that supports\nEvents, Commands, Repositories, and Memory (version controlled JSON \nobjects). This library is only used as a starting point for other systems.\n\nNote: This library is still very early in development!\n\n== Contributing to coral_core\n \n* Check out the latest {major}.{minor} branch to make sure the feature hasn't \n been implemented or the bug hasn't been fixed yet.\n* Check out the issue tracker to make sure someone already hasn't requested \n it and/or contributed it.\n* Fork the project.\n* Start a feature/bugfix branch.\n* Commit and push until you are happy with your contribution.\n* Make sure to add tests for it. This is important so I don't break it in a \n future version unintentionally.\n* Please try not to mess with the Rakefile, version, or history. If you want \n to have your own version, or is otherwise necessary, that is fine, but \n please isolate to its own commit so I can cherry-pick around it.\n\n== Copyright\n\nLicensed under GPLv3. See LICENSE.txt for further details.\n\nCopyright (c) 2013 Adrian Webb <adrian.webb@coraltech.net>\nCoral Technology Group LLC"
  s.email = "adrian.webb@coraltech.net"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  # Complete file manifest shipped in the gem (maintained by jeweler).
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "coral_core.gemspec",
    "lib/coral_core.rb",
    "lib/coral_core/command.rb",
    "lib/coral_core/config.rb",
    "lib/coral_core/config/collection.rb",
    "lib/coral_core/config/file.rb",
    "lib/coral_core/config/options.rb",
    "lib/coral_core/core.rb",
    "lib/coral_core/event.rb",
    "lib/coral_core/event/puppet_event.rb",
    "lib/coral_core/event/regexp_event.rb",
    "lib/coral_core/memory.rb",
    "lib/coral_core/mixin/config_collection.rb",
    "lib/coral_core/mixin/config_ops.rb",
    "lib/coral_core/mixin/config_options.rb",
    "lib/coral_core/mixin/lookup.rb",
    "lib/coral_core/mixin/macro/object_interface.rb",
    "lib/coral_core/mixin/macro/plugin_interface.rb",
    "lib/coral_core/mixin/settings.rb",
    "lib/coral_core/mixin/sub_config.rb",
    "lib/coral_core/mod/hash.rb",
    "lib/coral_core/mod/hiera_backend.rb",
    "lib/coral_core/plugin.rb",
    "lib/coral_core/repository.rb",
    "lib/coral_core/resource.rb",
    "lib/coral_core/template.rb",
    "lib/coral_core/template/environment.rb",
    "lib/coral_core/template/json.rb",
    "lib/coral_core/template/wrapper.rb",
    "lib/coral_core/template/yaml.rb",
    "lib/coral_core/util/cli.rb",
    "lib/coral_core/util/data.rb",
    "lib/coral_core/util/disk.rb",
    "lib/coral_core/util/git.rb",
    "lib/coral_core/util/interface.rb",
    "lib/coral_core/util/process.rb",
    "lib/coral_core/util/shell.rb",
    "spec/coral_core/interface_spec.rb",
    "spec/coral_mock_input.rb",
    "spec/coral_test_kernel.rb",
    "spec/spec_helper.rb"
  ]
  s.homepage = "http://github.com/coraltech/ruby-coral_core"
  s.licenses = ["GPLv3"]
  s.rdoc_options = ["--title", "Coral Core library", "--main", "README.rdoc", "--line-numbers"]
  s.require_paths = ["lib"]
  s.required_ruby_version = Gem::Requirement.new(">= 1.8.1")
  s.rubyforge_project = "coral_core"
  s.rubygems_version = "1.8.11"
  s.summary = "Provides core data elements and utilities used in other Coral gems"

  # Dependency declarations use the newest RubyGems API available and fall
  # back for progressively older RubyGems versions (jeweler boilerplate).
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<log4r>, ["~> 1.1"])
      s.add_runtime_dependency(%q<i18n>, ["~> 0.6"])
      s.add_runtime_dependency(%q<deep_merge>, ["~> 1.0"])
      s.add_runtime_dependency(%q<multi_json>, ["~> 1.7"])
      s.add_runtime_dependency(%q<grit>, ["~> 2.5"])
      s.add_runtime_dependency(%q<fog>, ["~> 1"])
      s.add_runtime_dependency(%q<rgen>, ["~> 0.6"])
      s.add_runtime_dependency(%q<facter>, ["~> 1.7"])
      s.add_runtime_dependency(%q<puppet>, ["~> 3.2"])
      s.add_development_dependency(%q<bundler>, ["~> 1.2"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8"])
      s.add_development_dependency(%q<rspec>, ["~> 2.10"])
      s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_development_dependency(%q<yard>, ["~> 0.8"])
    else
      s.add_dependency(%q<log4r>, ["~> 1.1"])
      s.add_dependency(%q<i18n>, ["~> 0.6"])
      s.add_dependency(%q<deep_merge>, ["~> 1.0"])
      s.add_dependency(%q<multi_json>, ["~> 1.7"])
      s.add_dependency(%q<grit>, ["~> 2.5"])
      s.add_dependency(%q<fog>, ["~> 1"])
      s.add_dependency(%q<rgen>, ["~> 0.6"])
      s.add_dependency(%q<facter>, ["~> 1.7"])
      s.add_dependency(%q<puppet>, ["~> 3.2"])
      s.add_dependency(%q<bundler>, ["~> 1.2"])
      s.add_dependency(%q<jeweler>, ["~> 1.8"])
      s.add_dependency(%q<rspec>, ["~> 2.10"])
      s.add_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_dependency(%q<yard>, ["~> 0.8"])
    end
  else
    s.add_dependency(%q<log4r>, ["~> 1.1"])
    s.add_dependency(%q<i18n>, ["~> 0.6"])
    s.add_dependency(%q<deep_merge>, ["~> 1.0"])
    s.add_dependency(%q<multi_json>, ["~> 1.7"])
    s.add_dependency(%q<grit>, ["~> 2.5"])
    s.add_dependency(%q<fog>, ["~> 1"])
    s.add_dependency(%q<rgen>, ["~> 0.6"])
    s.add_dependency(%q<facter>, ["~> 1.7"])
    s.add_dependency(%q<puppet>, ["~> 3.2"])
    s.add_dependency(%q<bundler>, ["~> 1.2"])
    s.add_dependency(%q<jeweler>, ["~> 1.8"])
    s.add_dependency(%q<rspec>, ["~> 2.10"])
    s.add_dependency(%q<rdoc>, ["~> 3.12"])
    s.add_dependency(%q<yard>, ["~> 0.8"])
  end
end
Update the gemspec file manifest to include the new machine, network, and node plugin files.
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

# Gem metadata for coral_core 0.2.29 with the machine/network/node plugin
# files included in the manifest. Regenerated by jeweler ('rake gemspec');
# hand edits will be overwritten.
Gem::Specification.new do |s|
  s.name = "coral_core"
  s.version = "0.2.29"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Adrian Webb"]
  s.date = "2014-01-11"
  # Long-form description (README content rendered into the spec by jeweler).
  s.description = "= coral_core\n\nThis library provides core data elements and utilities used in other Coral gems.\n\nThe Coral core library contains functionality that is utilized by other\nCoral gems by providing basic utilities like Git, Shell, Disk, and Data\nmanipulation libraries, a UI system, and a core data model that supports\nEvents, Commands, Repositories, and Memory (version controlled JSON \nobjects). This library is only used as a starting point for other systems.\n\nNote: This library is still very early in development!\n\n== Contributing to coral_core\n \n* Check out the latest {major}.{minor} branch to make sure the feature hasn't \n been implemented or the bug hasn't been fixed yet.\n* Check out the issue tracker to make sure someone already hasn't requested \n it and/or contributed it.\n* Fork the project.\n* Start a feature/bugfix branch.\n* Commit and push until you are happy with your contribution.\n* Make sure to add tests for it. This is important so I don't break it in a \n future version unintentionally.\n* Please try not to mess with the Rakefile, version, or history. If you want \n to have your own version, or is otherwise necessary, that is fine, but \n please isolate to its own commit so I can cherry-pick around it.\n\n== Copyright\n\nLicensed under GPLv3. See LICENSE.txt for further details.\n\nCopyright (c) 2013 Adrian Webb <adrian.webb@coraltech.net>\nCoral Technology Group LLC"
  s.email = "adrian.webb@coraltech.net"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  # Complete file manifest shipped in the gem (maintained by jeweler).
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "coral_core.gemspec",
    "lib/coral/machine/fog.rb",
    "lib/coral/network/default.rb",
    "lib/coral/node/default.rb",
    "lib/coral_core.rb",
    "lib/coral_core/command.rb",
    "lib/coral_core/config.rb",
    "lib/coral_core/config/collection.rb",
    "lib/coral_core/config/file.rb",
    "lib/coral_core/config/options.rb",
    "lib/coral_core/core.rb",
    "lib/coral_core/event.rb",
    "lib/coral_core/event/puppet_event.rb",
    "lib/coral_core/event/regexp_event.rb",
    "lib/coral_core/memory.rb",
    "lib/coral_core/mixin/config_collection.rb",
    "lib/coral_core/mixin/config_ops.rb",
    "lib/coral_core/mixin/config_options.rb",
    "lib/coral_core/mixin/lookup.rb",
    "lib/coral_core/mixin/macro/object_interface.rb",
    "lib/coral_core/mixin/macro/plugin_interface.rb",
    "lib/coral_core/mixin/settings.rb",
    "lib/coral_core/mixin/sub_config.rb",
    "lib/coral_core/mod/hash.rb",
    "lib/coral_core/mod/hiera_backend.rb",
    "lib/coral_core/plugin.rb",
    "lib/coral_core/plugin/machine.rb",
    "lib/coral_core/plugin/network.rb",
    "lib/coral_core/plugin/node.rb",
    "lib/coral_core/repository.rb",
    "lib/coral_core/resource.rb",
    "lib/coral_core/template.rb",
    "lib/coral_core/template/environment.rb",
    "lib/coral_core/template/json.rb",
    "lib/coral_core/template/wrapper.rb",
    "lib/coral_core/template/yaml.rb",
    "lib/coral_core/util/cli.rb",
    "lib/coral_core/util/data.rb",
    "lib/coral_core/util/disk.rb",
    "lib/coral_core/util/git.rb",
    "lib/coral_core/util/interface.rb",
    "lib/coral_core/util/process.rb",
    "lib/coral_core/util/shell.rb",
    "spec/coral_core/interface_spec.rb",
    "spec/coral_mock_input.rb",
    "spec/coral_test_kernel.rb",
    "spec/spec_helper.rb"
  ]
  s.homepage = "http://github.com/coraltech/ruby-coral_core"
  s.licenses = ["GPLv3"]
  s.rdoc_options = ["--title", "Coral Core library", "--main", "README.rdoc", "--line-numbers"]
  s.require_paths = ["lib"]
  s.required_ruby_version = Gem::Requirement.new(">= 1.8.1")
  s.rubyforge_project = "coral_core"
  s.rubygems_version = "1.8.11"
  s.summary = "Provides core data elements and utilities used in other Coral gems"

  # Dependency declarations use the newest RubyGems API available and fall
  # back for progressively older RubyGems versions (jeweler boilerplate).
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<log4r>, ["~> 1.1"])
      s.add_runtime_dependency(%q<i18n>, ["~> 0.6"])
      s.add_runtime_dependency(%q<deep_merge>, ["~> 1.0"])
      s.add_runtime_dependency(%q<multi_json>, ["~> 1.7"])
      s.add_runtime_dependency(%q<grit>, ["~> 2.5"])
      s.add_runtime_dependency(%q<fog>, ["~> 1"])
      s.add_runtime_dependency(%q<rgen>, ["~> 0.6"])
      s.add_runtime_dependency(%q<facter>, ["~> 1.7"])
      s.add_runtime_dependency(%q<puppet>, ["~> 3.2"])
      s.add_development_dependency(%q<bundler>, ["~> 1.2"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8"])
      s.add_development_dependency(%q<rspec>, ["~> 2.10"])
      s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_development_dependency(%q<yard>, ["~> 0.8"])
    else
      s.add_dependency(%q<log4r>, ["~> 1.1"])
      s.add_dependency(%q<i18n>, ["~> 0.6"])
      s.add_dependency(%q<deep_merge>, ["~> 1.0"])
      s.add_dependency(%q<multi_json>, ["~> 1.7"])
      s.add_dependency(%q<grit>, ["~> 2.5"])
      s.add_dependency(%q<fog>, ["~> 1"])
      s.add_dependency(%q<rgen>, ["~> 0.6"])
      s.add_dependency(%q<facter>, ["~> 1.7"])
      s.add_dependency(%q<puppet>, ["~> 3.2"])
      s.add_dependency(%q<bundler>, ["~> 1.2"])
      s.add_dependency(%q<jeweler>, ["~> 1.8"])
      s.add_dependency(%q<rspec>, ["~> 2.10"])
      s.add_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_dependency(%q<yard>, ["~> 0.8"])
    end
  else
    s.add_dependency(%q<log4r>, ["~> 1.1"])
    s.add_dependency(%q<i18n>, ["~> 0.6"])
    s.add_dependency(%q<deep_merge>, ["~> 1.0"])
    s.add_dependency(%q<multi_json>, ["~> 1.7"])
    s.add_dependency(%q<grit>, ["~> 2.5"])
    s.add_dependency(%q<fog>, ["~> 1"])
    s.add_dependency(%q<rgen>, ["~> 0.6"])
    s.add_dependency(%q<facter>, ["~> 1.7"])
    s.add_dependency(%q<puppet>, ["~> 3.2"])
    s.add_dependency(%q<bundler>, ["~> 1.2"])
    s.add_dependency(%q<jeweler>, ["~> 1.8"])
    s.add_dependency(%q<rspec>, ["~> 2.10"])
    s.add_dependency(%q<rdoc>, ["~> 3.12"])
    s.add_dependency(%q<yard>, ["~> 0.8"])
  end
end
|
#
# Be sure to run `pod lib lint NAME.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#

# CocoaPods spec for UITableViewDataSource-RACExtensions 0.1.5, a small
# ReactiveCocoa extension for driving a UITableView from a signal.
Pod::Spec.new do |s|
  s.name = "UITableViewDataSource-RACExtensions"
  s.version = "0.1.5"
  s.summary = "RACify your UITableViewDataSource"
  # %{...} is a plain string literal; the leading/trailing whitespace is
  # harmless in a description.
  s.description = %{
  UITableViewDataSource-RACExtensions adds a single method
  to UITableViewControllers called `rac_dataSource` and it
  requires a signal be passed to it.

  The signal that you pass to `rac_dataSource` is used to
  populate the UITableView.
  }
  s.homepage = "https://github.com/michaelavila/UITableViewDataSource-RACExtensions"
  s.license = 'MIT'
  s.author = { "Michael Avila" => "me@michaelavila.com" }
  # NOTE(review): this is an SSH clone URL; public podspecs normally use an
  # https:// URL so anonymous users can fetch the pod — confirm before
  # pushing to the public specs repo.
  s.source = { :git => "git@github.com:michaelavila/UITableViewDataSource-RACExtensions.git", :tag => s.version.to_s }

  s.platform = :ios, '5.0'
  s.requires_arc = true

  s.source_files = 'Classes/**/*.{h,m}'

  s.dependency 'ReactiveCocoa', '~> 2.3.1'

  # s.public_header_files = 'Classes/**/*.h'
  # s.frameworks = 'SomeFramework', 'AnotherFramework'
end
Release 0.1.6
#
# Be sure to run `pod lib lint NAME.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#

# CocoaPods spec for UITableViewDataSource-RACExtensions 0.1.6, a small
# ReactiveCocoa extension for driving a UITableView from a signal.
Pod::Spec.new do |s|
  s.name = "UITableViewDataSource-RACExtensions"
  s.version = "0.1.6"
  s.summary = "RACify your UITableViewDataSource"
  # %{...} is a plain string literal; the leading/trailing whitespace is
  # harmless in a description.
  s.description = %{
  UITableViewDataSource-RACExtensions adds a single method
  to UITableViewControllers called `rac_dataSource` and it
  requires a signal be passed to it.

  The signal that you pass to `rac_dataSource` is used to
  populate the UITableView.
  }
  s.homepage = "https://github.com/michaelavila/UITableViewDataSource-RACExtensions"
  s.license = 'MIT'
  s.author = { "Michael Avila" => "me@michaelavila.com" }
  # NOTE(review): this is an SSH clone URL; public podspecs normally use an
  # https:// URL so anonymous users can fetch the pod — confirm before
  # pushing to the public specs repo.
  s.source = { :git => "git@github.com:michaelavila/UITableViewDataSource-RACExtensions.git", :tag => s.version.to_s }

  s.platform = :ios, '5.0'
  s.requires_arc = true

  s.source_files = 'Classes/**/*.{h,m}'

  s.dependency 'ReactiveCocoa', '~> 2.3.1'

  # s.public_header_files = 'Classes/**/*.h'
  # s.frameworks = 'SomeFramework', 'AnotherFramework'
end
|
# Gem metadata for reactive_rails_generator 0.1.4 — Rails generators for
# React.rb components.
Gem::Specification.new do |s|
  s.name        = 'reactive_rails_generator'
  s.version     = '0.1.4'
  s.date        = '2016-01-17'
  s.summary     = "React.rb generators for rails"
  s.description = "This gem provide rails generators for react.rb"
  s.authors     = ["Loic Boutet"]
  s.email       = 'loic@boutet.com'
  s.add_runtime_dependency "rails", [">= 4.0.0"]
  # Explicit file manifest: the generator entry point, the generators, and
  # the component template.
  s.files       = ["lib/reactive_rails_generator.rb",
                   "lib/generators/reactrb/install_generator.rb",
                   "lib/generators/reactrb/component_generator.rb",
                   "lib/generators/reactrb/templates/component_template.rb"
                  ]
  # FIX: homepage previously pointed at 'http://rubygems.org/gems/hola' — a
  # leftover from the RubyGems "hola" packaging tutorial; point it at this
  # gem's actual repository instead.
  s.homepage    = 'https://github.com/loicboutet/reactive-rails-generator'
  s.license     = 'MIT'
end
Add the missing file entry for router_generator to the gemspec manifest.
# Gem metadata for reactive_rails_generator 0.1.5 — Rails generators for
# React.rb components, now including the router generator.
Gem::Specification.new do |s|
  s.name        = 'reactive_rails_generator'
  s.version     = '0.1.5'
  s.date        = '2016-01-17'
  s.summary     = "React.rb generators for rails"
  s.description = "This gem provide rails generators for react.rb"
  s.authors     = ["Loic Boutet"]
  s.email       = 'loic@boutet.com'
  s.add_runtime_dependency "rails", [">= 4.0.0"]
  # FIX: the comma after "router_generator.rb" was missing. Ruby concatenates
  # adjacent string literals, so the manifest contained one bogus entry
  # ("...router_generator.rblib/generators/...") and neither real file was
  # packaged.
  s.files       = ["lib/reactive_rails_generator.rb",
                   "lib/generators/reactrb/install_generator.rb",
                   "lib/generators/reactrb/component_generator.rb",
                   "lib/generators/reactrb/router_generator.rb",
                   "lib/generators/reactrb/templates/component_template.rb"
                  ]
  s.homepage    = 'https://github.com/loicboutet/reactive-rails-generator'
  s.license     = 'MIT'
end
|
# Homebrew cask for Avast Secure Browser 97.x.
cask "avast-secure-browser" do
  version "97.0.1528.72"
  # No pinned checksum: the download URL is unversioned, so the vendor
  # replaces the package in place.
  sha256 :no_check

  url "https://cdn-download.avastbrowser.com/AvastSecureBrowserSetup.pkg",
      verified: "cdn-download.avastbrowser.com/"
  name "Avast Secure Browser"
  desc "Web browser focusing on privacy"
  homepage "https://www.avast.com/secure-browser#mac"

  # The latest version is read from the Info.plist inside the downloaded pkg.
  livecheck do
    url :url
    strategy :extract_plist
  end

  pkg "AvastSecureBrowserSetup.pkg"

  uninstall quit:    "com.avast.browser",
            pkgutil: "com.avast.browser"

  zap trash: [
        "~/Library/Application Support/AVAST Software/Browser",
        "~/Library/Caches/AVAST Software/Browser",
        "~/Library/Caches/com.avast.browser",
        "~/Library/Preferences/com.avast.AvastSecureBrowser.plist",
        "~/Library/Preferences/com.avast.browser.plist",
        "~/Library/Saved Application State/com.avast.browser.savedState",
      ],
      rmdir: [
        "~/Library/Application Support/AVAST Software",
        "~/Library/Caches/AVAST Software",
      ]
end
Update avast-secure-browser from 97.0.1528.72 to 99.0.1962.83 (#121413)
# Homebrew cask for Avast Secure Browser 99.x.
cask "avast-secure-browser" do
  version "99.0.1962.83"
  # No pinned checksum: the download URL is unversioned, so the vendor
  # replaces the package in place.
  sha256 :no_check

  url "https://cdn-download.avastbrowser.com/AvastSecureBrowserSetup.pkg",
      verified: "cdn-download.avastbrowser.com/"
  name "Avast Secure Browser"
  desc "Web browser focusing on privacy"
  homepage "https://www.avast.com/secure-browser#mac"

  # The latest version is read from the Info.plist inside the downloaded pkg.
  livecheck do
    url :url
    strategy :extract_plist
  end

  pkg "AvastSecureBrowserSetup.pkg"

  uninstall quit:    "com.avast.browser",
            pkgutil: "com.avast.browser"

  zap trash: [
        "~/Library/Application Support/AVAST Software/Browser",
        "~/Library/Caches/AVAST Software/Browser",
        "~/Library/Caches/com.avast.browser",
        "~/Library/Preferences/com.avast.AvastSecureBrowser.plist",
        "~/Library/Preferences/com.avast.browser.plist",
        "~/Library/Saved Application State/com.avast.browser.savedState",
      ],
      rmdir: [
        "~/Library/Application Support/AVAST Software",
        "~/Library/Caches/AVAST Software",
      ]
end
|
# Homebrew cask (legacy :v1 DSL) for Dropbox's experimental forum build
# 3.3.32; the version is embedded in the download URL.
cask :v1 => 'dropbox-experimental' do
  version '3.3.32'
  sha256 'efa9263d948870082fa3e334aa825563074aeff9d49bb417a63eee22deb93e4f'

  url "https://dl.dropboxusercontent.com/u/17/Dropbox%20#{version}.dmg"
  # Homepage is the forum thread announcing this experimental build.
  homepage 'https://www.dropboxforum.com/hc/communities/public/questions/202383225-Experimental-Build-3-3-32'
  license :gratis

  app 'Dropbox.app'
end
Updated dropbox-experimental to 3.3.34
# Homebrew cask (legacy :v1 DSL) for Dropbox's experimental forum build
# 3.3.34; the version is embedded in the download URL.
cask :v1 => 'dropbox-experimental' do
  version '3.3.34'
  sha256 '5bf86b86e08461640ee20ed7e7bfcf59fef86a9d99f706a8063e0eb7b375ab8a'

  url "https://dl.dropboxusercontent.com/u/17/Dropbox%20#{version}.dmg"
  # Homepage is the forum thread announcing this experimental build.
  homepage 'https://www.dropboxforum.com/hc/communities/public/questions/202522965-Experimental-Build-3-3-34'
  license :gratis

  app 'Dropbox.app'
end
|
# Homebrew cask for the Inconsolata LGC font, checked out from the
# repository's svn trunk (so it always tracks the latest sources).
cask 'font-inconsolata-lgc' do
  # FIX: dropped the stale commented-out "version '1.1.0'" line that
  # contradicted :latest, and added the missing `name` stanza.
  version :latest
  sha256 :no_check

  url 'https://github.com/DeLaGuardo/Inconsolata-LGC/trunk',
      using: :svn,
      trust_cert: true
  name 'Inconsolata LGC'
  homepage 'https://github.com/DeLaGuardo/Inconsolata-LGC'

  font 'inconsolatalgc.ttf'
  font 'inconsolatalgcbold.ttf'
  font 'inconsolatalgcbolditalic.ttf'
  font 'inconsolatalgcitalic.ttf'
end
Update font-inconsolata-lgc to latest (#749)
# Homebrew cask for the Inconsolata LGC font, checked out from the
# repository's svn trunk (so it always tracks the latest sources and
# no checksum can be pinned).
cask 'font-inconsolata-lgc' do
  version :latest
  sha256 :no_check

  url 'https://github.com/DeLaGuardo/Inconsolata-LGC/trunk',
      using: :svn,
      trust_cert: true
  name 'Inconsolata LGC'
  homepage 'https://github.com/DeLaGuardo/Inconsolata-LGC'

  font 'inconsolatalgc.ttf'
  font 'inconsolatalgcbold.ttf'
  font 'inconsolatalgcbolditalic.ttf'
  font 'inconsolatalgcitalic.ttf'
end
|
# Homebrew cask for Jerome Lebel's fork of MongoHub.
cask 'jeromelebel-mongohub' do
  version '3.1.4'
  sha256 '9fdeb27aa87004f6c9c35c7736836ad952f11e1d678cda6e6215a8bdd617bbd3'

  # mongohub.s3.amazonaws.com/ was verified as official when first introduced to the cask
  url 'https://mongohub.s3.amazonaws.com/MongoHub.zip'
  # New releases are detected from the GitHub releases Atom feed.
  appcast 'https://github.com/jeromelebel/MongoHub-Mac/releases.atom'
  name 'MongoHub'
  homepage 'https://github.com/jeromelebel/MongoHub-Mac'

  app 'MongoHub.app'
end
jeromelebel-mongohub.rb: Add trailing slash to verified comment
# Homebrew cask for Jerome Lebel's fork of MongoHub.
cask 'jeromelebel-mongohub' do
  version '3.1.4'
  sha256 '9fdeb27aa87004f6c9c35c7736836ad952f11e1d678cda6e6215a8bdd617bbd3'

  # mongohub.s3.amazonaws.com/ was verified as official when first introduced to the cask
  url 'https://mongohub.s3.amazonaws.com/MongoHub.zip'
  # New releases are detected from the GitHub releases Atom feed.
  appcast 'https://github.com/jeromelebel/MongoHub-Mac/releases.atom'
  name 'MongoHub'
  homepage 'https://github.com/jeromelebel/MongoHub-Mac'

  app 'MongoHub.app'
end
|
# Homebrew cask for Mac Media Key Forwarder 2.8.
cask 'macmediakeyforwarder' do
  version '2.8'
  sha256 'db7375b7d7c34f0430dc728c41f32ba29d753d789529ec80e63ba12493e57df7'

  url "http://milgra.com/downloads/mmkf/MacMediaKeyForwarder#{version}.zip"
  # Versions are scraped from the plain download directory listing.
  appcast 'http://milgra.com/downloads/mmkf/'
  name 'Mac Media Key Forwarder'
  homepage 'http://milgra.com/mac-media-key-forwarder.html'

  depends_on macos: '>= :sierra'

  app 'MacMediaKeyForwarder.app'

  zap trash: '~/Library/Preferences/com.milgra.hsmke.plist'
end
Update MacMediaKeyForwarder from 2.8 to 3.0 (#70433)
* Update MacMediaKeyForwarder from 2.8 to 3.0
Author has made an update for macOS Catalina which can be confirmed here:
https://github.com/milgra/macmediakeyforwarder/issues/82
* Fixed macmediakeyforwarder-3.0 sha256 hash
# Homebrew cask for Mac Media Key Forwarder 3.0 (Catalina-compatible update).
cask 'macmediakeyforwarder' do
  version '3.0'
  sha256 '704e22a01e4d964c7a86ae33073658c90cc80cfdd4ece10bae06fee49d1ecb2d'

  url "http://milgra.com/downloads/mmkf/MacMediaKeyForwarder#{version}.zip"
  # Versions are scraped from the plain download directory listing.
  appcast 'http://milgra.com/downloads/mmkf/'
  name 'Mac Media Key Forwarder'
  homepage 'http://milgra.com/mac-media-key-forwarder.html'

  depends_on macos: '>= :sierra'

  app 'MacMediaKeyForwarder.app'

  zap trash: '~/Library/Preferences/com.milgra.hsmke.plist'
end
|
#
# Copyright 2012-2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Omnibus software definition that builds Chef itself from git master,
# installs it as a gem, and app-bundles the chef and ohai executables.
name "chef"
default_version "master"

source git: "git://github.com/chef/chef"
relative_path "chef"

if windows?
  dependency "ruby-windows"
  dependency "libyaml-windows"
  dependency "openssl-windows"
  dependency "ruby-windows-devkit"
  dependency "ruby-windows-devkit-bash"
  dependency "cacerts"
  dependency "rubygems"
else
  dependency "ruby"
  dependency "rubygems"
  dependency "libffi"
end

dependency "bundler"
dependency "ohai"
dependency "appbundler"

build do
  env = with_standard_compiler_flags(with_embedded_path)

  if windows?
    # Normally we would symlink the required unix tools.
    # However with the introduction of git-cache to speed up omnibus builds,
    # we can't do that anymore since git on windows doesn't support symlinks.
    # https://groups.google.com/forum/#!topic/msysgit/arTTH5GmHRk
    # Therefore we copy the tools to the necessary places.
    # We need tar for 'knife cookbook site install' to function correctly
    {
      'tar.exe'          => 'bsdtar.exe',
      'libarchive-2.dll' => 'libarchive-2.dll',
      'libexpat-1.dll'   => 'libexpat-1.dll',
      'liblzma-1.dll'    => 'liblzma-1.dll',
      'libbz2-2.dll'     => 'libbz2-2.dll',
      'libz-1.dll'       => 'libz-1.dll',
    }.each do |target, to|
      copy "#{install_dir}/embedded/mingw/bin/#{to}", "#{install_dir}/bin/#{target}"
    end

    bundle "install --without server docgen", env: env
    gem "build chef-{windows,x86-mingw32}.gemspec", env: env
    gem "install chef*mingw32.gem" \
        " --no-ri --no-rdoc" \
        " --verbose", env: env

    block "Build Event Log Dll" do
      Dir.chdir software.project_dir do
        rake = windows_safe_path("#{install_dir}/embedded/bin/rake")
        # FIX: removed a stray trailing double-quote that was being passed to
        # the shell as part of the rake task name.
        `#{rake} -rdevkit build_eventlog` if File.exist? "#{software.project_dir}/ext/win32-eventlog"
      end
    end
  else
    # install the whole bundle first
    bundle "install --without server docgen", env: env

    # custom patch to replace Digest with Openssl
    patch source: 'digest_openssl_fix.patch'

    # Install components that live inside Chef's git repo. For now this is just
    # 'chef-config'
    bundle "exec rake install_components", env: env

    gem "build chef.gemspec", env: env

    # Don't use -n #{install_dir}/bin. Appbundler will take care of them later
    gem "install chef*.gem " \
        " --no-ri --no-rdoc", env: env
  end

  # Extra gems installed outside the bundle (ruby-shadow has no Windows/AIX
  # support). Block params renamed so they don't shadow the DSL's `name` and
  # `version` methods.
  auxiliary_gems = {}
  auxiliary_gems['ruby-shadow'] = '>= 0.0.0' unless aix? || windows?

  auxiliary_gems.each do |gem_name, gem_version|
    gem "install #{gem_name}" \
        " --version '#{gem_version}'" \
        " --no-ri --no-rdoc" \
        " --verbose", env: env
  end

  appbundle 'chef'
  appbundle 'ohai'

  # Clean up documentation trees to shrink the package.
  delete "#{install_dir}/embedded/docs"
  delete "#{install_dir}/embedded/share/man"
  delete "#{install_dir}/embedded/share/doc"
  delete "#{install_dir}/embedded/share/gtk-doc"
  delete "#{install_dir}/embedded/ssl/man"
  delete "#{install_dir}/embedded/man"
  delete "#{install_dir}/embedded/info"
end
Remove the 'install_components' subcomponent build step for the non-Windows platforms as well.
#
# Copyright 2012-2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Omnibus software definition that builds Chef itself from git master,
# installs it as a gem, and app-bundles the chef and ohai executables.
name "chef"
default_version "master"

source git: "git://github.com/chef/chef"
relative_path "chef"

if windows?
  dependency "ruby-windows"
  dependency "libyaml-windows"
  dependency "openssl-windows"
  dependency "ruby-windows-devkit"
  dependency "ruby-windows-devkit-bash"
  dependency "cacerts"
  dependency "rubygems"
else
  dependency "ruby"
  dependency "rubygems"
  dependency "libffi"
end

dependency "bundler"
dependency "ohai"
dependency "appbundler"

build do
  env = with_standard_compiler_flags(with_embedded_path)

  if windows?
    # Normally we would symlink the required unix tools.
    # However with the introduction of git-cache to speed up omnibus builds,
    # we can't do that anymore since git on windows doesn't support symlinks.
    # https://groups.google.com/forum/#!topic/msysgit/arTTH5GmHRk
    # Therefore we copy the tools to the necessary places.
    # We need tar for 'knife cookbook site install' to function correctly
    {
      'tar.exe'          => 'bsdtar.exe',
      'libarchive-2.dll' => 'libarchive-2.dll',
      'libexpat-1.dll'   => 'libexpat-1.dll',
      'liblzma-1.dll'    => 'liblzma-1.dll',
      'libbz2-2.dll'     => 'libbz2-2.dll',
      'libz-1.dll'       => 'libz-1.dll',
    }.each do |target, to|
      copy "#{install_dir}/embedded/mingw/bin/#{to}", "#{install_dir}/bin/#{target}"
    end

    bundle "install --without server docgen", env: env
    gem "build chef-{windows,x86-mingw32}.gemspec", env: env
    gem "install chef*mingw32.gem" \
        " --no-ri --no-rdoc" \
        " --verbose", env: env

    block "Build Event Log Dll" do
      Dir.chdir software.project_dir do
        rake = windows_safe_path("#{install_dir}/embedded/bin/rake")
        # FIX: removed a stray trailing double-quote that was being passed to
        # the shell as part of the rake task name.
        `#{rake} -rdevkit build_eventlog` if File.exist? "#{software.project_dir}/ext/win32-eventlog"
      end
    end
  else
    # install the whole bundle first
    bundle "install --without server docgen", env: env

    # custom patch to replace Digest with Openssl
    patch source: 'digest_openssl_fix.patch'

    gem "build chef.gemspec", env: env

    # Don't use -n #{install_dir}/bin. Appbundler will take care of them later
    gem "install chef*.gem " \
        " --no-ri --no-rdoc", env: env
  end

  # Extra gems installed outside the bundle (ruby-shadow has no Windows/AIX
  # support). Block params renamed so they don't shadow the DSL's `name` and
  # `version` methods.
  auxiliary_gems = {}
  auxiliary_gems['ruby-shadow'] = '>= 0.0.0' unless aix? || windows?

  auxiliary_gems.each do |gem_name, gem_version|
    gem "install #{gem_name}" \
        " --version '#{gem_version}'" \
        " --no-ri --no-rdoc" \
        " --verbose", env: env
  end

  appbundle 'chef'
  appbundle 'ohai'

  # Clean up documentation trees to shrink the package.
  delete "#{install_dir}/embedded/docs"
  delete "#{install_dir}/embedded/share/man"
  delete "#{install_dir}/embedded/share/doc"
  delete "#{install_dir}/embedded/share/gtk-doc"
  delete "#{install_dir}/embedded/ssl/man"
  delete "#{install_dir}/embedded/man"
  delete "#{install_dir}/embedded/info"
end
|
#
# Copyright 2012-2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Omnibus software definition for curl 7.36.0, built against the embedded
# zlib and openssl.
name "curl"
default_version "7.36.0"

dependency "zlib"
dependency "openssl"

source url: "http://curl.haxx.se/download/curl-#{version}.tar.gz",
       md5: "643a7030b27449e76413d501d4b8eb57"

relative_path "curl-#{version}"

build do
  env = with_standard_compiler_flags(with_embedded_path)

  # Drop the pre-generated hugehelp source; --disable-manual below keeps it
  # out of the build entirely.
  delete "#{project_dir}/src/tool_hugehelp.c"

  # NOTE: the configure invocation is ONE string built from adjacent string
  # literals, so every fragment after the first must begin with a space.
  # FIX: "--with-zlib" was missing its leading space and was silently fused
  # onto the preceding --with-ssl argument.
  command "./configure" \
          " --prefix=#{install_dir}/embedded" \
          " --disable-manual" \
          " --disable-debug" \
          " --enable-optimize" \
          " --disable-ldap" \
          " --disable-ldaps" \
          " --disable-rtsp" \
          " --enable-proxy" \
          " --disable-dependency-tracking" \
          " --enable-ipv6" \
          " --without-libidn" \
          " --without-gnutls" \
          " --without-librtmp" \
          " --with-ssl=#{install_dir}/embedded" \
          " --with-zlib=#{install_dir}/embedded", env: env

  command "make -j #{max_build_jobs}", env: env
  command "make install", env: env
end
Use `make` instead of `command` in curl
#
# Copyright 2012-2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Omnibus software definition for curl 7.36.0, built against the embedded
# zlib and openssl; uses the `make` DSL helper so the platform's preferred
# make is selected automatically.
name "curl"
default_version "7.36.0"

dependency "zlib"
dependency "openssl"

source url: "http://curl.haxx.se/download/curl-#{version}.tar.gz",
       md5: "643a7030b27449e76413d501d4b8eb57"

relative_path "curl-#{version}"

build do
  env = with_standard_compiler_flags(with_embedded_path)

  # Drop the pre-generated hugehelp source; --disable-manual below keeps it
  # out of the build entirely.
  delete "#{project_dir}/src/tool_hugehelp.c"

  # NOTE: the configure invocation is ONE string built from adjacent string
  # literals, so every fragment after the first must begin with a space.
  # FIX: "--with-zlib" was missing its leading space and was silently fused
  # onto the preceding --with-ssl argument.
  command "./configure" \
          " --prefix=#{install_dir}/embedded" \
          " --disable-manual" \
          " --disable-debug" \
          " --enable-optimize" \
          " --disable-ldap" \
          " --disable-ldaps" \
          " --disable-rtsp" \
          " --enable-proxy" \
          " --disable-dependency-tracking" \
          " --enable-ipv6" \
          " --without-libidn" \
          " --without-gnutls" \
          " --without-librtmp" \
          " --with-ssl=#{install_dir}/embedded" \
          " --with-zlib=#{install_dir}/embedded", env: env

  make "-j #{max_build_jobs}", env: env
  make "install", env: env
end
|
Use a custom ruby software definition
While we are waiting for a decision on
https://github.com/opscode/omnibus-software/pull/124 ,
we can vendor the ruby software definition to get more reliable
Ruby source downloads in the meantime.
#
# Copyright:: Copyright (c) 2012 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
name "ruby"
version "1.9.3-p484"
dependency "zlib"
dependency "ncurses"
dependency "libedit"
dependency "openssl"
dependency "libyaml"
dependency "libiconv"
dependency "gdbm" if (platform == "mac_os_x" or platform == "freebsd" or platform == "aix")
dependency "libgcc" if (platform == "solaris2" and Omnibus.config.solaris_compiler == "gcc")
source :url => "http://cache.ruby-lang.org/pub/ruby/1.9/ruby-#{version}.tar.gz",
:md5 => '8ac0dee72fe12d75c8b2d0ef5d0c2968'
relative_path "ruby-#{version}"
env =
case platform
when "mac_os_x"
{
"CFLAGS" => "-arch x86_64 -m64 -L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include -I#{install_dir}/embedded/include/ncurses -O3 -g -pipe",
"LDFLAGS" => "-arch x86_64 -R#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include -I#{install_dir}/embedded/include/ncurses"
}
when "solaris2"
if Omnibus.config.solaris_compiler == "studio"
{
"CFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
"LDFLAGS" => "-R#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include"
}
elsif Omnibus.config.solaris_compiler == "gcc"
{
"CFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include -O3 -g -pipe",
"LDFLAGS" => "-R#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include -static-libgcc",
"LD_OPTIONS" => "-R#{install_dir}/embedded/lib"
}
else
raise "Sorry, #{Omnibus.config.solaris_compiler} is not a valid compiler selection."
end
when "aix"
{
# see http://www.ibm.com/developerworks/aix/library/au-gnu.html
#
# specifically:
#
# "To use AIX run-time linking, you should create the shared object
# using gcc -shared -Wl,-G and create executables using the library
# by adding the -Wl,-brtl option to the link line. Technically, you
# can leave off the -shared option, but it does no harm and reduces
# confusion."
#
# AIX also uses -Wl,-blibpath instead of -R or LD_RUN_PATH, but the
# option is not additive, so requires /usr/lib and /lib as well (there
# is a -bsvr4 option to allow ld to take an -R flag in addition
# to turning on -brtl, but it had other side effects I couldn't fix).
#
# If libraries linked with gcc -shared have symbol resolution failures
# then it may be useful to add -bexpfull to export all symbols.
#
# -O2 optimized away some configure test which caused ext libs to fail
#
# We also need prezl's M4 instead of picking up /usr/bin/m4 which
# barfs on ruby.
#
"CC" => "xlc -q64",
"CXX" => "xlC -q64",
"LD" => "ld -b64",
"CFLAGS" => "-q64 -O -qhot -I#{install_dir}/embedded/include",
"CXXFLAGS" => "-q64 -O -qhot -I#{install_dir}/embedded/include",
"LDFLAGS" => "-q64 -L#{install_dir}/embedded/lib -Wl,-brtl -Wl,-blibpath:#{install_dir}/embedded/lib:/usr/lib:/lib",
"OBJECT_MODE" => "64",
"ARFLAGS" => "-X64 cru",
"M4" => "/opt/freeware/bin/m4",
"warnflags" => "-qinfo=por"
}
else
{
"CFLAGS" => "-I#{install_dir}/embedded/include -O3 -g -pipe",
"LDFLAGS" => "-Wl,-rpath,#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib"
}
end
# Build phase: assemble the ./configure invocation, apply platform-specific
# patches, then compile and install with make (gmake where available).
build do
configure_command = ["./configure",
"--prefix=#{install_dir}/embedded",
"--with-out-ext=fiddle",
"--enable-shared",
"--enable-libedit",
"--with-ext=psych",
"--disable-install-doc"]
# Platform-specific patches and extra configure flags.
case platform
when "aix"
patch :source => "ruby-aix-configure.patch", :plevel => 1
patch :source => "ruby_aix_1_9_3_448_ssl_EAGAIN.patch", :plevel => 1
# --with-opt-dir causes ruby to send bogus commands to the AIX linker
when "freebsd"
configure_command << "--without-execinfo"
configure_command << "--with-opt-dir=#{install_dir}/embedded"
when "smartos"
# Opscode patch - someara@opscode.com
# GCC 4.7.0 chokes on mismatched function types between OpenSSL 1.0.1c and Ruby 1.9.3-p286
patch :source => "ruby-openssl-1.0.1c.patch", :plevel => 1
# Patches taken from RVM.
# http://bugs.ruby-lang.org/issues/5384
# https://www.illumos.org/issues/1587
# https://github.com/wayneeseguin/rvm/issues/719
patch :source => "rvm-cflags.patch", :plevel => 1
# From RVM forum
# https://github.com/wayneeseguin/rvm/commit/86766534fcc26f4582f23842a4d3789707ce6b96
configure_command << "ac_cv_func_dl_iterate_phdr=no"
configure_command << "--with-opt-dir=#{install_dir}/embedded"
else
configure_command << "--with-opt-dir=#{install_dir}/embedded"
end
# @todo expose bundle_bust() in the DSL
# Scrub Bundler/RubyGems state from the environment so the host's Ruby
# setup cannot leak into this build.
env.merge!({
"RUBYOPT" => nil,
"BUNDLE_BIN_PATH" => nil,
"BUNDLE_GEMFILE" => nil,
"GEM_PATH" => nil,
"GEM_HOME" => nil
})
# @todo: move into omnibus-ruby
# Prefer GNU make when the system provides it (required on some platforms).
has_gmake = system("gmake --version")
if has_gmake
env.merge!({'MAKE' => 'gmake'})
make_binary = 'gmake'
else
make_binary = 'make'
end
command configure_command.join(" "), :env => env
command "#{make_binary} -j #{max_build_jobs}", :env => env
command "#{make_binary} -j #{max_build_jobs} install", :env => env
end
|
#
# Copyright 2012-2016 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Omnibus software definition: builds Ruby 2.7.4 from source against the
# embedded zlib/openssl/libffi/libyaml/libiconv dependencies.
name 'ruby'
license 'BSD-2-Clause'
license_file 'BSDL'
license_file 'COPYING'
license_file 'LEGAL'
skip_transitive_dependency_licensing true
default_version '2.7.4'
# FIPS mode comes from the project's overrides hash, defaulting to off.
fips_enabled = (project.overrides[:fips] && project.overrides[:fips][:enabled]) || false
dependency 'zlib'
dependency 'openssl' unless Build::Check.use_system_ssl?
dependency 'libffi'
dependency 'libyaml'
# Needed for chef_gem installs of (e.g.) nokogiri on upgrades -
# they expect to see our libiconv instead of a system version.
dependency 'libiconv'
version('2.7.4') { source sha256: '3043099089608859fc8cce7f9fdccaa1f53a462457e3838ec3b25a7d609fbc5b' }
# Download URL embeds the MAJOR.MINOR series extracted from the version.
source url: "https://cache.ruby-lang.org/pub/ruby/#{version.match(/^(\d+\.\d+)/)[0]}/ruby-#{version}.tar.gz"
relative_path "ruby-#{version}"
env = with_standard_compiler_flags(with_embedded_path)
# Older RHEL toolchains get -O2 instead of -O3.
env['CFLAGS'] << if version.satisfies?('>= 2.3.0') &&
rhel? && platform_version.satisfies?('< 6.0')
' -O2 -g -pipe'
else
' -O3 -g -pipe'
end
build do
# Keep frame pointers so profilers/debuggers can produce full backtraces.
env['CFLAGS'] << ' -fno-omit-frame-pointer'
# disable libpath in mkmf across all platforms, it trolls omnibus and
# breaks the postgresql cookbook. i'm not sure why ruby authors decided
# this was a good idea, but it breaks our use case hard. AIX cannot even
# compile without removing it, and it breaks some native gem installs on
# other platforms. generally you need to have a condition where the
# embedded and non-embedded libs get into a fight (libiconv, openssl, etc)
# and ruby trying to set LD_LIBRARY_PATH itself gets it wrong.
if version.satisfies?('>= 2.1')
patch source: 'ruby-mkmf.patch', plevel: 1, env: env
# should intentionally break and fail to apply on 2.2, patch will need to
# be fixed.
end
# Enable custom patch created by ayufan that allows to count memory allocations
# per-thread. This is asked to be upstreamed as part of https://github.com/ruby/ruby/pull/3978
patch source: 'thread-memory-allocations-2.7.patch', plevel: 1, env: env
# Fix reserve stack segmentation fault when building on RHEL5 or below
# Currently only affects 2.1.7 and 2.2.3. This patch taken from the fix
# in Ruby trunk and expected to be included in future point releases.
# https://redmine.ruby-lang.org/issues/11602
if rhel? &&
platform_version.satisfies?('< 6') &&
(version == '2.1.7' || version == '2.2.3')
patch source: 'ruby-fix-reserve-stack-segfault.patch', plevel: 1, env: env
end
# copy_file_range() has been disabled on recent RedHat kernels:
# 1. https://gitlab.com/gitlab-org/gitlab/-/issues/218999
# 2. https://bugs.ruby-lang.org/issues/16965
# 3. https://bugzilla.redhat.com/show_bug.cgi?id=1783554
patch source: 'ruby-disable-copy-file-range.patch', plevel: 1, env: env if centos? || rhel?
configure_command = ['--with-out-ext=dbm,readline',
'--enable-shared',
'--disable-install-doc',
'--without-gmp',
'--without-gdbm',
'--without-tk',
'--disable-dtrace']
configure_command << '--with-ext=psych' if version.satisfies?('< 2.3')
configure_command << '--with-bundled-md5' if fips_enabled
configure_command << %w(host target build).map { |w| "--#{w}=#{OhaiHelper.gcc_target}" } if OhaiHelper.raspberry_pi?
configure_command << "--with-opt-dir=#{install_dir}/embedded"
configure(*configure_command, env: env)
make "-j #{workers}", env: env
make "-j #{workers} install", env: env
end
Upgrade Ruby to 2.7.5
#
# Copyright 2012-2016 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Omnibus software definition: builds Ruby 2.7.5 from source against the
# embedded zlib/openssl/libffi/libyaml/libiconv dependencies.
name 'ruby'
license 'BSD-2-Clause'
license_file 'BSDL'
license_file 'COPYING'
license_file 'LEGAL'
skip_transitive_dependency_licensing true
default_version '2.7.5'
# FIPS mode comes from the project's overrides hash, defaulting to off.
fips_enabled = (project.overrides[:fips] && project.overrides[:fips][:enabled]) || false
dependency 'zlib'
dependency 'openssl' unless Build::Check.use_system_ssl?
dependency 'libffi'
dependency 'libyaml'
# Needed for chef_gem installs of (e.g.) nokogiri on upgrades -
# they expect to see our libiconv instead of a system version.
dependency 'libiconv'
version('2.7.5') { source sha256: '2755b900a21235b443bb16dadd9032f784d4a88f143d852bc5d154f22b8781f1' }
# Download URL embeds the MAJOR.MINOR series extracted from the version.
source url: "https://cache.ruby-lang.org/pub/ruby/#{version.match(/^(\d+\.\d+)/)[0]}/ruby-#{version}.tar.gz"
relative_path "ruby-#{version}"
env = with_standard_compiler_flags(with_embedded_path)
# Older RHEL toolchains get -O2 instead of -O3.
env['CFLAGS'] << if version.satisfies?('>= 2.3.0') &&
rhel? && platform_version.satisfies?('< 6.0')
' -O2 -g -pipe'
else
' -O3 -g -pipe'
end
build do
# Keep frame pointers so profilers/debuggers can produce full backtraces.
env['CFLAGS'] << ' -fno-omit-frame-pointer'
# disable libpath in mkmf across all platforms, it trolls omnibus and
# breaks the postgresql cookbook. i'm not sure why ruby authors decided
# this was a good idea, but it breaks our use case hard. AIX cannot even
# compile without removing it, and it breaks some native gem installs on
# other platforms. generally you need to have a condition where the
# embedded and non-embedded libs get into a fight (libiconv, openssl, etc)
# and ruby trying to set LD_LIBRARY_PATH itself gets it wrong.
if version.satisfies?('>= 2.1')
patch source: 'ruby-mkmf.patch', plevel: 1, env: env
# should intentionally break and fail to apply on 2.2, patch will need to
# be fixed.
end
# Enable custom patch created by ayufan that allows to count memory allocations
# per-thread. This is asked to be upstreamed as part of https://github.com/ruby/ruby/pull/3978
patch source: 'thread-memory-allocations-2.7.patch', plevel: 1, env: env
# Fix reserve stack segmentation fault when building on RHEL5 or below
# Currently only affects 2.1.7 and 2.2.3. This patch taken from the fix
# in Ruby trunk and expected to be included in future point releases.
# https://redmine.ruby-lang.org/issues/11602
if rhel? &&
platform_version.satisfies?('< 6') &&
(version == '2.1.7' || version == '2.2.3')
patch source: 'ruby-fix-reserve-stack-segfault.patch', plevel: 1, env: env
end
# copy_file_range() has been disabled on recent RedHat kernels:
# 1. https://gitlab.com/gitlab-org/gitlab/-/issues/218999
# 2. https://bugs.ruby-lang.org/issues/16965
# 3. https://bugzilla.redhat.com/show_bug.cgi?id=1783554
patch source: 'ruby-disable-copy-file-range.patch', plevel: 1, env: env if centos? || rhel?
configure_command = ['--with-out-ext=dbm,readline',
'--enable-shared',
'--disable-install-doc',
'--without-gmp',
'--without-gdbm',
'--without-tk',
'--disable-dtrace']
configure_command << '--with-ext=psych' if version.satisfies?('< 2.3')
configure_command << '--with-bundled-md5' if fips_enabled
configure_command << %w(host target build).map { |w| "--#{w}=#{OhaiHelper.gcc_target}" } if OhaiHelper.raspberry_pi?
configure_command << "--with-opt-dir=#{install_dir}/embedded"
configure(*configure_command, env: env)
make "-j #{workers}", env: env
make "-j #{workers} install", env: env
end
|
# CocoaPods specification for SHFastEnumerationProtocols 1.0.0.
# Fix: class name typo in the description ("NSCountedset" -> "NSCountedSet").
Pod::Spec.new do |s|
  name = "SHFastEnumerationProtocols"
  url = "https://github.com/podfactory/#{name}"
  git_url = "#{url}.git"
  s.name = name
  version = "1.0.0"
  source_files = "#{name}/**/*.{h,m}"
  s.version = version
  s.summary = "NSFastEnumeration helpers and enumeration blocks through a protocol on foundation collection classes."
  s.description = <<-DESC
NSFastEnumeration helpers and enumeration blocks through a protocol on foundation collection classes.
Helpers for both keyed, index and unordered collection objects.
Converting to other collection classes through dot notation.
Block based callers for enumeration.
Tested and ready.
* NSArray and NSMutableArray
* NSOrderedSet and NSMutableOrderedSet
* NSSet, NSMutableSet and NSCountedSet
* NSHashTable
* NSDictionary and NSMutableDictionary
* NSMapTable
TODO: NSIndexSet, NSMutableIndexSet and NSEnumerator.
DESC
  s.homepage = url
  s.license = 'MIT'
  s.author = { "Seivan Heidari" => "seivan.heidari@icloud.com" }
  s.source = { :git => git_url, :tag => version}
  s.ios.deployment_target = "6.0"
  s.osx.deployment_target = "10.8"
  s.source_files = source_files
  s.requires_arc = true
end
Bump version 1.1.0
# CocoaPods specification for SHFastEnumerationProtocols 1.1.0.
# Fix: class name typo in the description ("NSCountedset" -> "NSCountedSet").
Pod::Spec.new do |s|
  name = "SHFastEnumerationProtocols"
  url = "https://github.com/podfactory/#{name}"
  git_url = "#{url}.git"
  s.name = name
  version = "1.1.0"
  source_files = "#{name}/**/*.{h,m}"
  s.version = version
  s.summary = "NSFastEnumeration helpers and enumeration blocks through a protocol on foundation collection classes."
  s.description = <<-DESC
NSFastEnumeration helpers and enumeration blocks through a protocol on foundation collection classes.
Helpers for both keyed, index and unordered collection objects.
Converting to other collection classes through dot notation.
Block based callers for enumeration.
Tested and ready.
* NSArray and NSMutableArray
* NSOrderedSet and NSMutableOrderedSet
* NSSet, NSMutableSet and NSCountedSet
* NSHashTable
* NSDictionary and NSMutableDictionary
* NSMapTable
TODO: NSIndexSet, NSMutableIndexSet and NSEnumerator.
DESC
  s.homepage = url
  s.license = 'MIT'
  s.author = { "Seivan Heidari" => "seivan.heidari@icloud.com" }
  s.source = { :git => git_url, :tag => version}
  s.ios.deployment_target = "6.0"
  s.osx.deployment_target = "10.8"
  s.source_files = source_files
  s.requires_arc = true
end
|
Add podspec file
# CocoaPods specification for SMVerticalSegmentedControl, a plain vertical
# segmented control for iOS 5.0+.
Pod::Spec.new do |s|
s.name = "SMVerticalSegmentedControl"
s.version = "0.1.1"
s.summary = "SMVerticalSegmentedControl is plain vertical segmented control"
s.homepage = "https://github.com/shkutkov/SMVerticalSegmentedControl"
s.screenshots = "https://raw.github.com/shkutkov/SMVerticalSegmentedControl/master/Screenshots/1.png", "https://raw.github.com/shkutkov/SMVerticalSegmentedControl/master/Screenshots/2.png"
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { "Michael Shkutkov" => "shkutkov@gmail.com" }
s.platform = :ios, '5.0'
# Release tags match the podspec version string.
s.source = { :git => "https://github.com/shkutkov/SMVerticalSegmentedControl.git", :tag => s.version.to_s }
s.source_files = 'SMVerticalSegmentedControl/*.{h,m}'
s.requires_arc = true
s.frameworks = 'QuartzCore', 'UIKit', 'CoreGraphics'
end
|
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "childprocess/version"

# Gem specification for childprocess: controlling external programs running
# in the background on any Ruby / OS combination.
Gem::Specification.new do |s|
  s.name        = "childprocess"
  s.version     = ChildProcess::VERSION
  s.platform    = Gem::Platform::RUBY
  s.authors     = ["Jari Bakken"]
  s.email       = ["jari.bakken@gmail.com"]
  s.homepage    = "http://github.com/jarib/childprocess"
  s.summary     = %q{This gem aims at being a simple and reliable solution for controlling external programs running in the background on any Ruby / OS combination.}
  s.description = %q{This gem aims at being a simple and reliable solution for controlling external programs running in the background on any Ruby / OS combination.}
  s.rubyforge_project = "childprocess"
  # Packaged files are whatever git tracks at build time.
  s.files         = `git ls-files`.split("\n")
  s.test_files    = `git ls-files -- spec/*`.split("\n")
  s.require_paths = ["lib"]
  # Plain requirement strings instead of single-element arrays (idiomatic;
  # equivalent for Gem::Specification).
  s.add_development_dependency "rspec", ">= 2.0.0"
  s.add_development_dependency "yard", ">= 0"
  s.add_development_dependency "rake", "~> 0.9.2"
  s.add_runtime_dependency "ffi", "~> 1.0.6"
end
Remove arrays from dep specification
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "childprocess/version"
# Gem specification for childprocess: controlling external programs running
# in the background on any Ruby / OS combination.
Gem::Specification.new do |s|
s.name = "childprocess"
s.version = ChildProcess::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Jari Bakken"]
s.email = ["jari.bakken@gmail.com"]
s.homepage = "http://github.com/jarib/childprocess"
s.summary = %q{This gem aims at being a simple and reliable solution for controlling external programs running in the background on any Ruby / OS combination.}
s.description = %q{This gem aims at being a simple and reliable solution for controlling external programs running in the background on any Ruby / OS combination.}
s.rubyforge_project = "childprocess"
# Packaged files are whatever git tracks at build time.
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- spec/*`.split("\n")
s.require_paths = ["lib"]
s.add_development_dependency "rspec", ">= 2.0.0"
s.add_development_dependency "yard", ">= 0"
s.add_development_dependency "rake", "~> 0.9.2"
s.add_runtime_dependency "ffi", "~> 1.0.6"
end
|
require 'optparse'
require 'shellwords'
require 'pathname'
# Executes shell command lines, with an optional dry-run mode that only
# prints the command instead of running it.
class ShellExecutor
  @@dry_run = false
  class << self
    # Configure dry-run mode. When +dry_run+ is truthy, run_command_line
    # prints the command without executing it.
    def setup(dry_run = false)
      @@dry_run = dry_run
    end

    # True when dry-run mode is enabled (also logs the current flag).
    def dry?
      puts "Condition statement is #{@@dry_run.to_s}"
      @@dry_run
    end

    # Runs +line+ through the shell and returns its stdout as a String.
    # In dry-run mode the command is only printed and nil is returned.
    # On command failure, logs and exits the process with the command's
    # exit status.
    def run_command_line(line)
      puts "I will perform \n <#{line}>"
      if dry?
        puts "I am on dry run!"
      else
        result = %x(#{line})
        puts "result is:" + result.to_s
        unless $?.success?
          puts "I fall down on < #{result} >\n! because of < #{$?} >"
          # BUG FIX: $? is a Process::Status, but Kernel#exit requires an
          # Integer — exit($?) raised TypeError. Use the numeric exit
          # status (or 1 when the child was killed by a signal).
          exit($?.exitstatus || 1)
        end
        result
      end
    end
  end
end
# Builds the sequence of shell commands needed to generate a key pair,
# certificate request, self-signed certificate and p12 bundle via
# openssl / ssh-keygen. It only constructs command strings; execution is
# the caller's responsibility.
class KeysGenerator
  # Filename-extension helpers (each returns a Pathname with the basename
  # of +name+ and the requested extension).
  module FileFormatsModule
    # Basename of +name+ with its directory part stripped.
    def only_name(name)
      Pathname.new(name).basename
    end

    def to_plain_text(name)
      only_name(name).sub_ext('.txt')
    end

    def to_p12(name)
      only_name(name).sub_ext('.p12')
    end

    def to_certificate_request(name)
      only_name(name).sub_ext('.csr')
    end

    def to_certificate(name, format = '.cer')
      only_name(name).sub_ext(format)
    end

    def to_pem(name)
      only_name(name).sub_ext('.pem')
    end
  end

  # Wraps a single file name and converts it between the formats above.
  class FileFormats
    class << self
      include FileFormatsModule
    end

    attr_accessor :name

    def initialize(name)
      # BUG FIX: the original assigned `self.name = @name`, which is always
      # nil at this point, so the constructor argument was discarded and
      # valid? was always false. Store the argument instead.
      self.name = name
    end

    def valid?
      !name.nil?
    end

    def to_p12
      self.class.to_p12(name)
    end

    def to_certificate_request
      self.class.to_certificate_request(name)
    end
    # Backwards-compatible alias for the original (misspelled) method name.
    alias to_certificate_requets to_certificate_request

    def to_certificate
      self.class.to_certificate(name)
    end

    def to_pem
      self.class.to_pem(name)
    end
  end

  class << self
    # Supported elliptic curves; the first entry is the default.
    def available_ec_curves
      # secp256r1 - alias?
      %w(prime256v1 secp384r1 secp512r1)
    end

    # Supported RSA key sizes; the first entry is the default.
    def available_rsa_sizes
      [2048, 3072, 4096]
    end
  end

  attr_accessor :type, :curve_name, :rsa_size, :secret

  def initialize(secret = 'secret')
    self.secret = secret
  end

  TYPES = [:ec, :rsa].freeze

  # Accepts strings or symbols in any case ("RSA", :ec, ...).
  def type=(value)
    @type = value.to_s.downcase.to_sym
  end

  def valid?
    self.class::TYPES.include?(type)
  end

  # Fills in the default curve / RSA size for the chosen key type.
  def fix
    @curve_name ||= self.class.available_ec_curves.first if type == :ec
    @rsa_size ||= self.class.available_rsa_sizes.first if type == :rsa
  end

  # Human-readable summary of the configured parameters.
  # (Returns a String now, as to_s should; previously it returned the Hash
  # itself — interpolated output is unchanged.)
  def to_s
    {
      key_type: type,
      curve_name: curve_name,
      rsa_size: rsa_size,
      rsa_private_key_passphrase: secret
    }.to_s
  end

  # Command-string builders for the openssl / ssh-keygen invocations.
  module Generating
    # Default X.509 subject fields for certificate requests.
    def default_subject
      {
        "C": "GB",
        "ST": "London",
        "L": "London",
        "O": "Global Security",
        "OU": "IT Department",
        "CN": "example.com"
      }
    end

    # "<key><delimiter><value>", or "" when +value+ is considered empty.
    def key_and_value_or_empty(key, value, delimiter, empty = ->(v) { v.nil? || v.empty? })
      { key => value }.reject { |_k, v| empty.call(v) }.collect { |k, v| "#{k}#{delimiter}#{v}" }.join('')
    end

    # "/C=GB/ST=London/..." form of a subject hash.
    def subject_value_from_subject(subject = {})
      subject.collect { |k, v| "/#{k}=#{v}" }.join('')
    end

    # '-subj "..."' argument, or "" for an empty subject.
    def subject_key_and_value_from_subject(subject = {})
      value = subject_value_from_subject(subject)
      { '-subj': value }.reject { |_k, v| v.empty? }.collect { |k, v| "#{k} \"#{v}\"" }.join('')
    end

    def tool
      %q(openssl)
    end

    # Pipes the secret into +command+'s stdin to suppress passphrase prompts.
    def suppress_prompt(command)
      %Q(echo '#{secret}' | #{command})
    end

    # Key-generation command: ssh-keygen for RSA, openssl ecparam for EC.
    def generate_key(type, name, parameters)
      case type
      when :rsa
        size = parameters[:size]
        # ssh-keygen with an empty passphrase; "y" answers the overwrite prompt.
        %Q(echo "y" | ssh-keygen -t #{type} -b #{size} -f #{name} -q -N '')
      when :ec
        curve_name = parameters[:curve_name]
        %Q(#{tool} ecparam -name #{curve_name} -genkey -noout -out #{name})
      end
    end

    # openssl command that re-writes a key file (-pubout for public keys).
    def output_key(access, generated_key_name, name)
      %Q(#{tool} #{access == 'private' ? '' : '-pubout' } -in #{generated_key_name} -out #{name} < echo "#{secret}")
    end

    def output_public_key(generated_key_name, name)
      output_key('public', generated_key_name, name)
    end

    def output_private_key(generated_key_name, name)
      output_key('private', generated_key_name, name)
    end

    # Extracts the public key from a private key PEM (e.g. `openssl ec -in
    # private.pem -pubout -out public.pem`).
    def export_public_key(type, private_key_pem, public_key_pem)
      %Q(#{tool} #{type} -in #{private_key_pem} -pubout -out #{public_key_pem})
    end

    # CSR from a private key and subject.
    def output_certificate_request(private_key_pem, certificate_request, subject = {})
      subject_key_value = subject_key_and_value_from_subject(subject)
      %Q(#{tool} req -new -key #{private_key_pem} -out #{certificate_request} #{subject_key_value})
    end

    # Self-signed certificate from a CSR and private key.
    def output_certificate(certificate_request, private_key_pem, certificate)
      %Q(#{tool} x509 -req -in #{certificate_request} -signkey #{private_key_pem} -out #{certificate})
    end

    # p12 bundle from a certificate and private key; the export password is
    # read from +password_file+ at execution time.
    def output_p12(certificate, private_key_pem, p12_name, password_file)
      password_file_content = %Q("$(cat #{password_file})")
      the_password_key_value = key_and_value_or_empty('-passout pass:', password_file_content, '')
      %Q(#{tool} pkcs12 -export -in #{certificate} -inkey #{private_key_pem} -out #{p12_name} #{the_password_key_value})
    end

    # Writes +password+ into +file+ without a trailing newline.
    def output_password(file, password = 'password')
      %Q(echo -n "#{password}" > #{file})
    end
  end

  class << self
    include Generating
  end

  # Builds the command list for the configured key type, deriving file
  # names from the type and its parameters.
  def generated_by_type(type = self.type)
    parameters = {}
    name = nil
    case self.type
    when :rsa
      parameters[:size] = rsa_size
      parameters[:rsa_private_key_passphrase] = secret
      name = "rsa-#{rsa_size}"
    when :ec
      parameters[:curve_name] = curve_name
      name = "ec-#{curve_name}"
    end
    generated(type, parameters, "#{name}-private", "#{name}-public", "#{name}", "#{name}", "#{name}-private", "#{name}-p12-password")
  end

  # Returns the ordered list of six shell commands: key generation, public
  # key export, CSR, self-signed certificate, p12 password file, p12 bundle.
  def generated(type, parameters, private_key_pem, public_key_pem, certificate_request, certificate, p12, p12_password)
    the_private_key_pem = FileFormats.to_pem private_key_pem
    the_public_key_pem = FileFormats.to_pem public_key_pem
    the_certificate_request = FileFormats.to_certificate_request certificate_request
    the_certificate = FileFormats.to_certificate certificate
    the_p12 = FileFormats.to_p12 p12
    the_p12_password = FileFormats.to_plain_text p12_password
    [
      self.class.generate_key(type, the_private_key_pem, parameters),
      self.class.export_public_key(type, the_private_key_pem, the_public_key_pem),
      self.class.output_certificate_request(the_private_key_pem, the_certificate_request, self.class.default_subject),
      self.class.output_certificate(the_certificate_request, the_private_key_pem, the_certificate),
      self.class.output_password(the_p12_password),
      self.class.output_p12(the_certificate, the_private_key_pem, the_p12, the_p12_password)
    ]
  end
end
# CLI driver: parses command-line options, configures a KeysGenerator and
# runs each generated command line through ShellExecutor.
class MainWork
class << self
# Convenience entry point: build an instance, parse +arguments+, run.
def work(arguments)
the_work = new
the_work.work(the_work.parse_options(arguments))
end
end
# Applies defaults to the parsed options hash (mutates and returns it).
# NOTE(review): sets :result_directory, but parse_options only ever fills
# :output_directory — looks like a key-name mismatch; confirm intent.
def fix_options(the_options)
options = the_options
options[:result_directory] ||= '../Tests/Resources/Certs/'
if options[:test]
options[:generated_key_name] ||= 'generated'
options[:private_key_name] ||= 'private'
options[:public_key_name] ||= 'public'
end
options
end
# Option keys that must be present for work to proceed.
def required_keys
[:key_type]
end
def valid_options?(the_options)
(required_keys - the_options.keys).empty?
end
# Validates options, configures the generator, and executes each generated
# command. Exits the process early when options or key type are invalid.
def work(options = {})
options = fix_options(options)
if options[:inspection]
puts "options are: #{options}"
end
unless valid_options? options
puts "options are not valid!"
puts "options are: #{options}"
puts "missing options: #{required_keys}"
exit(0)
end
ShellExecutor.setup options[:dry_run]
generator = KeysGenerator.new
generator.type = options[:key_type]
generator.curve_name = options[:curve_name]
generator.rsa_size = options[:rsa_size]
generator.fix
puts "generator fixed arguments: #{generator.to_s}"
unless generator.valid?
puts "generator types are: #{KeysGenerator::TYPES}. You type is: #{generator.type}"
exit(0)
end
generator.generated_by_type(generator.type).each do |command|
ShellExecutor.run_command_line command
end
#KeyParameters.new(options[:algorithm_type], options[:key_length])
# [
# key_parameters.generate_key(options[:generated_key_name]),
# key_parameters.output_private_key(options[:generated_key_name], options[:private_key_name]),
# key_parameters.output_public_key(options[:generated_key_name], options[:public_key_name])
# ].map do |command|
# key_parameters.suppress_prompt command
# end
# .each do |command|
# ShellExecutor.run_command_line command
# end
end
# Prints usage help to stdout.
# NOTE(review): the text mentions -f/-r directory flags that do not match
# the options defined in parse_options — confirm and update the text.
def help_message(options)
# %x[rdoc $0]
# not ok
puts <<-__HELP__
#{options.help}
this script will help you generate keys.
First, it takes arguments:
[needed] <-f DIRECTORY>: directory where you will gather files
[not needed] <-r DIRECTORY>: directory where files will be placed
---------------
Usage:
---------------
#{$0} -t ../Tests/Resources/Certs/
__HELP__
end
# Parses +arguments+ (destructively, via parse!) into an options hash.
def parse_options(arguments)
options = {}
OptionParser.new do |opts|
opts.banner = "Usage: #{$0} [options]"
opts.on('-o', '--output_directory DIRECTORY', 'Output Directory') {|v| options[:output_directory] = v}
opts.on('-t', '--test', 'Test option') {|v| options[:test] = v}
opts.on('-c', '--curve_name CURVE_NAME', 'Curve name') {|v| options[:curve_name] = v}
opts.on('-s', '--size SIZE', 'RSA key size') {|v| options[:rsa_size] = v}
opts.on('-k', '--key_type KEY_TYPE', 'Key Type (RSA or EC), case insensitive') {|v| options[:key_type] = v}
opts.on('-g', '--generated_key_name NAME', 'Generated key name') {|v| options[:generated_key_name] = v}
opts.on('-r', '--private_key_name NAME', 'Private Key Name') {|v| options[:private_key_name] = v}
opts.on('-u', '--public_key_name NAME', 'Public Key Name') {|v| options[:public_key_name] = v}
# opts.on('-l', '--log_level LEVEL', 'Logger level of warning') {|v| options[:log_level] = v}
# opts.on('-o', '--output_log OUTPUT', 'Logger output stream') {|v| options[:output_stream] = v}
opts.on('-d', '--dry_run', 'Dry run to see all options') {|v| options[:dry_run] = v}
opts.on('-i', '--inspection', 'Inspection of all items, like tests'){|v| options[:inspection] = v}
# help
opts.on('-h', '--help', 'Help option') { self.help_message(opts); exit()}
end.parse!(arguments)
options
end
end
# Script entry point: parse CLI arguments and run the generator.
MainWork.work(ARGV)
Remove executable bit from non-executable files
require 'optparse'
require 'shellwords'
require 'pathname'
# Executes shell command lines, with an optional dry-run mode that only
# prints the command instead of running it.
class ShellExecutor
  @@dry_run = false
  class << self
    # Configure dry-run mode. When +dry_run+ is truthy, run_command_line
    # prints the command without executing it.
    def setup(dry_run = false)
      @@dry_run = dry_run
    end

    # True when dry-run mode is enabled (also logs the current flag).
    def dry?
      puts "Condition statement is #{@@dry_run.to_s}"
      @@dry_run
    end

    # Runs +line+ through the shell and returns its stdout as a String.
    # In dry-run mode the command is only printed and nil is returned.
    # On command failure, logs and exits the process with the command's
    # exit status.
    def run_command_line(line)
      puts "I will perform \n <#{line}>"
      if dry?
        puts "I am on dry run!"
      else
        result = %x(#{line})
        puts "result is:" + result.to_s
        unless $?.success?
          puts "I fall down on < #{result} >\n! because of < #{$?} >"
          # BUG FIX: $? is a Process::Status, but Kernel#exit requires an
          # Integer — exit($?) raised TypeError. Use the numeric exit
          # status (or 1 when the child was killed by a signal).
          exit($?.exitstatus || 1)
        end
        result
      end
    end
  end
end
# Builds the sequence of shell commands needed to generate a key pair,
# certificate request, self-signed certificate and p12 bundle via
# openssl / ssh-keygen. It only constructs command strings; execution is
# the caller's responsibility.
class KeysGenerator
  # Filename-extension helpers (each returns a Pathname with the basename
  # of +name+ and the requested extension).
  module FileFormatsModule
    # Basename of +name+ with its directory part stripped.
    def only_name(name)
      Pathname.new(name).basename
    end

    def to_plain_text(name)
      only_name(name).sub_ext('.txt')
    end

    def to_p12(name)
      only_name(name).sub_ext('.p12')
    end

    def to_certificate_request(name)
      only_name(name).sub_ext('.csr')
    end

    def to_certificate(name, format = '.cer')
      only_name(name).sub_ext(format)
    end

    def to_pem(name)
      only_name(name).sub_ext('.pem')
    end
  end

  # Wraps a single file name and converts it between the formats above.
  class FileFormats
    class << self
      include FileFormatsModule
    end

    attr_accessor :name

    def initialize(name)
      # BUG FIX: the original assigned `self.name = @name`, which is always
      # nil at this point, so the constructor argument was discarded and
      # valid? was always false. Store the argument instead.
      self.name = name
    end

    def valid?
      !name.nil?
    end

    def to_p12
      self.class.to_p12(name)
    end

    def to_certificate_request
      self.class.to_certificate_request(name)
    end
    # Backwards-compatible alias for the original (misspelled) method name.
    alias to_certificate_requets to_certificate_request

    def to_certificate
      self.class.to_certificate(name)
    end

    def to_pem
      self.class.to_pem(name)
    end
  end

  class << self
    # Supported elliptic curves; the first entry is the default.
    def available_ec_curves
      # secp256r1 - alias?
      %w(prime256v1 secp384r1 secp512r1)
    end

    # Supported RSA key sizes; the first entry is the default.
    def available_rsa_sizes
      [2048, 3072, 4096]
    end
  end

  attr_accessor :type, :curve_name, :rsa_size, :secret

  def initialize(secret = 'secret')
    self.secret = secret
  end

  TYPES = [:ec, :rsa].freeze

  # Accepts strings or symbols in any case ("RSA", :ec, ...).
  def type=(value)
    @type = value.to_s.downcase.to_sym
  end

  def valid?
    self.class::TYPES.include?(type)
  end

  # Fills in the default curve / RSA size for the chosen key type.
  def fix
    @curve_name ||= self.class.available_ec_curves.first if type == :ec
    @rsa_size ||= self.class.available_rsa_sizes.first if type == :rsa
  end

  # Human-readable summary of the configured parameters.
  # (Returns a String now, as to_s should; previously it returned the Hash
  # itself — interpolated output is unchanged.)
  def to_s
    {
      key_type: type,
      curve_name: curve_name,
      rsa_size: rsa_size,
      rsa_private_key_passphrase: secret
    }.to_s
  end

  # Command-string builders for the openssl / ssh-keygen invocations.
  module Generating
    # Default X.509 subject fields for certificate requests.
    def default_subject
      {
        "C": "GB",
        "ST": "London",
        "L": "London",
        "O": "Global Security",
        "OU": "IT Department",
        "CN": "example.com"
      }
    end

    # "<key><delimiter><value>", or "" when +value+ is considered empty.
    def key_and_value_or_empty(key, value, delimiter, empty = ->(v) { v.nil? || v.empty? })
      { key => value }.reject { |_k, v| empty.call(v) }.collect { |k, v| "#{k}#{delimiter}#{v}" }.join('')
    end

    # "/C=GB/ST=London/..." form of a subject hash.
    def subject_value_from_subject(subject = {})
      subject.collect { |k, v| "/#{k}=#{v}" }.join('')
    end

    # '-subj "..."' argument, or "" for an empty subject.
    def subject_key_and_value_from_subject(subject = {})
      value = subject_value_from_subject(subject)
      { '-subj': value }.reject { |_k, v| v.empty? }.collect { |k, v| "#{k} \"#{v}\"" }.join('')
    end

    def tool
      %q(openssl)
    end

    # Pipes the secret into +command+'s stdin to suppress passphrase prompts.
    def suppress_prompt(command)
      %Q(echo '#{secret}' | #{command})
    end

    # Key-generation command: ssh-keygen for RSA, openssl ecparam for EC.
    def generate_key(type, name, parameters)
      case type
      when :rsa
        size = parameters[:size]
        # ssh-keygen with an empty passphrase; "y" answers the overwrite prompt.
        %Q(echo "y" | ssh-keygen -t #{type} -b #{size} -f #{name} -q -N '')
      when :ec
        curve_name = parameters[:curve_name]
        %Q(#{tool} ecparam -name #{curve_name} -genkey -noout -out #{name})
      end
    end

    # openssl command that re-writes a key file (-pubout for public keys).
    def output_key(access, generated_key_name, name)
      %Q(#{tool} #{access == 'private' ? '' : '-pubout' } -in #{generated_key_name} -out #{name} < echo "#{secret}")
    end

    def output_public_key(generated_key_name, name)
      output_key('public', generated_key_name, name)
    end

    def output_private_key(generated_key_name, name)
      output_key('private', generated_key_name, name)
    end

    # Extracts the public key from a private key PEM (e.g. `openssl ec -in
    # private.pem -pubout -out public.pem`).
    def export_public_key(type, private_key_pem, public_key_pem)
      %Q(#{tool} #{type} -in #{private_key_pem} -pubout -out #{public_key_pem})
    end

    # CSR from a private key and subject.
    def output_certificate_request(private_key_pem, certificate_request, subject = {})
      subject_key_value = subject_key_and_value_from_subject(subject)
      %Q(#{tool} req -new -key #{private_key_pem} -out #{certificate_request} #{subject_key_value})
    end

    # Self-signed certificate from a CSR and private key.
    def output_certificate(certificate_request, private_key_pem, certificate)
      %Q(#{tool} x509 -req -in #{certificate_request} -signkey #{private_key_pem} -out #{certificate})
    end

    # p12 bundle from a certificate and private key; the export password is
    # read from +password_file+ at execution time.
    def output_p12(certificate, private_key_pem, p12_name, password_file)
      password_file_content = %Q("$(cat #{password_file})")
      the_password_key_value = key_and_value_or_empty('-passout pass:', password_file_content, '')
      %Q(#{tool} pkcs12 -export -in #{certificate} -inkey #{private_key_pem} -out #{p12_name} #{the_password_key_value})
    end

    # Writes +password+ into +file+ without a trailing newline.
    def output_password(file, password = 'password')
      %Q(echo -n "#{password}" > #{file})
    end
  end

  class << self
    include Generating
  end

  # Builds the command list for the configured key type, deriving file
  # names from the type and its parameters.
  def generated_by_type(type = self.type)
    parameters = {}
    name = nil
    case self.type
    when :rsa
      parameters[:size] = rsa_size
      parameters[:rsa_private_key_passphrase] = secret
      name = "rsa-#{rsa_size}"
    when :ec
      parameters[:curve_name] = curve_name
      name = "ec-#{curve_name}"
    end
    generated(type, parameters, "#{name}-private", "#{name}-public", "#{name}", "#{name}", "#{name}-private", "#{name}-p12-password")
  end

  # Returns the ordered list of six shell commands: key generation, public
  # key export, CSR, self-signed certificate, p12 password file, p12 bundle.
  def generated(type, parameters, private_key_pem, public_key_pem, certificate_request, certificate, p12, p12_password)
    the_private_key_pem = FileFormats.to_pem private_key_pem
    the_public_key_pem = FileFormats.to_pem public_key_pem
    the_certificate_request = FileFormats.to_certificate_request certificate_request
    the_certificate = FileFormats.to_certificate certificate
    the_p12 = FileFormats.to_p12 p12
    the_p12_password = FileFormats.to_plain_text p12_password
    [
      self.class.generate_key(type, the_private_key_pem, parameters),
      self.class.export_public_key(type, the_private_key_pem, the_public_key_pem),
      self.class.output_certificate_request(the_private_key_pem, the_certificate_request, self.class.default_subject),
      self.class.output_certificate(the_certificate_request, the_private_key_pem, the_certificate),
      self.class.output_password(the_p12_password),
      self.class.output_p12(the_certificate, the_private_key_pem, the_p12, the_p12_password)
    ]
  end
end
# Command-line entry point: parses options, validates them and drives
# KeysGenerator to build/execute the openssl command pipeline.
class MainWork
  class << self
    # Convenience wrapper: `MainWork.work(ARGV)`.
    def work(arguments)
      the_work = new
      the_work.work(the_work.parse_options(arguments))
    end
  end

  # Fill in defaults for options the user did not supply.
  def fix_options(the_options)
    options = the_options
    options[:result_directory] ||= '../Tests/Resources/Certs/'
    if options[:test]
      options[:generated_key_name] ||= 'generated'
      options[:private_key_name] ||= 'private'
      options[:public_key_name] ||= 'public'
    end
    options
  end

  # Options that must be present for #work to proceed.
  def required_keys
    [:key_type]
  end

  # True when every required key is present in +the_options+.
  def valid_options?(the_options)
    (required_keys - the_options.keys).empty?
  end

  # Main driver.
  #
  # Fixes: error paths previously called exit(0), reporting success to the
  # calling shell on failure; the "missing options" message printed ALL
  # required keys instead of only the ones actually missing; the user-facing
  # "You type is" typo is corrected.
  def work(options = {})
    options = fix_options(options)
    puts "options are: #{options}" if options[:inspection]
    unless valid_options? options
      puts "options are not valid!"
      puts "options are: #{options}"
      puts "missing options: #{required_keys - options.keys}"
      exit(1)
    end
    ShellExecutor.setup options[:dry_run]
    generator = KeysGenerator.new
    generator.type = options[:key_type]
    generator.curve_name = options[:curve_name]
    generator.rsa_size = options[:rsa_size]
    generator.fix
    puts "generator fixed arguments: #{generator}"
    unless generator.valid?
      puts "generator types are: #{KeysGenerator::TYPES}. Your type is: #{generator.type}"
      exit(1)
    end
    generator.generated_by_type(generator.type).each do |command|
      ShellExecutor.run_command_line command
    end
  end

  # Print usage. Fix: the old text documented -f/-r directory flags the
  # parser does not define; it now matches the actual options.
  def help_message(options)
    puts <<-__HELP__
#{options.help}
this script will help you generate keys.
Required:
  <-k KEY_TYPE>: key type (RSA or EC), case insensitive
Optional:
  <-o DIRECTORY>: directory where generated files will be placed
---------------
Usage:
---------------
#{$0} -k rsa -s 2048
__HELP__
  end

  # Parse command-line +arguments+ into an options hash.
  def parse_options(arguments)
    options = {}
    OptionParser.new do |opts|
      opts.banner = "Usage: #{$0} [options]"
      opts.on('-o', '--output_directory DIRECTORY', 'Output Directory') {|v| options[:output_directory] = v}
      opts.on('-t', '--test', 'Test option') {|v| options[:test] = v}
      opts.on('-c', '--curve_name CURVE_NAME', 'Curve name') {|v| options[:curve_name] = v}
      opts.on('-s', '--size SIZE', 'RSA key size') {|v| options[:rsa_size] = v}
      opts.on('-k', '--key_type KEY_TYPE', 'Key Type (RSA or EC), case insensitive') {|v| options[:key_type] = v}
      opts.on('-g', '--generated_key_name NAME', 'Generated key name') {|v| options[:generated_key_name] = v}
      opts.on('-r', '--private_key_name NAME', 'Private Key Name') {|v| options[:private_key_name] = v}
      opts.on('-u', '--public_key_name NAME', 'Public Key Name') {|v| options[:public_key_name] = v}
      opts.on('-d', '--dry_run', 'Dry run to see all options') {|v| options[:dry_run] = v}
      opts.on('-i', '--inspection', 'Inspection of all items, like tests'){|v| options[:inspection] = v}
      # help
      opts.on('-h', '--help', 'Help option') { help_message(opts); exit() }
    end.parse!(arguments)
    options
  end
end
MainWork.work(ARGV)
|
#! /usr/bin/env ruby
# https://github.com/dinkypumpkin/get_iplayer
#
# Thin wrapper around get_iplayer for refreshing the radio cache,
# batch-downloading the configured SHOWS, and finding/getting programmes.
GET_IPLAYER = "~/Projects/get_iplayer/get_iplayer"
IPLAYER_CACHE = "~/.get_iplayer/radio.cache"
IPLAYER_HISTORY = "~/.get_iplayer/download_history"
DOWNLOAD_PATH = "~/Projects/snowball/public/misc/"

# Shows fetched by the `download` subcommand.
SHOWS = ["Pete Tong", "Annie Mac", "BBC Radio 1's Essential Mix"]

USAGE = <<END_OF_USAGE
Usage: #{__FILE__} [refresh|download|find|get] (find needs the name) (get needs the PID)"
*********** MANUAL REFRESH
#{GET_IPLAYER} --refresh --type=radio
*********** MANUAL FIND
cat ~/.get_iplayer/radio.cache | grep -i '[SHOW NAME]' | awk 'BEGIN {FS=\"|\"}{ print $4, \" \", $3, \" \", $11 }'
*********** MANUAL GET
#{GET_IPLAYER} --type=radio --pid [PID] --output #{DOWNLOAD_PATH}
END_OF_USAGE

if ARGV.length == 0
  puts USAGE
  exit
end

if ARGV[0] == "refresh"
  puts "#{'*'*80}"
  puts "*********** REFRESH (can take some time)"
  `#{GET_IPLAYER} --refresh --type=radio`
elsif ARGV[0] == "download"
  puts "#{'*'*80}"
  puts "*********** download"
  SHOWS.each do |name|
    shows = `cat #{IPLAYER_CACHE} | grep -i "#{name}"`
    unless shows.empty?
      shows.split("\n").each do |show|
        puts "#{'*'*80}"
        # Cache lines are pipe-delimited; field 3 is the PID, 2 the name,
        # 10 the description (0-based after split).
        parts = show.split('|')
        downloaded = `cat #{IPLAYER_HISTORY} | grep "#{parts[3]}"`
        if downloaded.empty?
          puts "Download #{parts[3]} - #{parts[2]} - #{parts[10]}"
          # Bug fix: the old `if parts[3] =~ /^p/` branch ran the exact same
          # command in both arms, so the dead branch was removed.
          `#{GET_IPLAYER} --type=radio --pid #{parts[3]} --output #{DOWNLOAD_PATH}`
        else
          puts "Downloaded #{parts[3]} - #{parts[2]} - #{parts[10]}"
        end
      end
    end
  end
elsif ARGV[0] == "find" && ARGV.length > 1
  puts "#{'*'*80}"
  puts "*********** find #{ARGV[1]}"
  shows = `cat #{IPLAYER_CACHE} | grep -i "#{ARGV[1]}"`
  unless shows.empty?
    shows.split("\n").each do |show|
      parts = show.split('|')
      downloaded = `cat #{IPLAYER_HISTORY} | grep "#{parts[3]}"`
      if downloaded.empty?
        puts "Download #{parts[3]} - #{parts[2]} - #{parts[10]}"
      else
        puts "Downloaded #{parts[3]} - #{parts[2]} - #{parts[10]}"
      end
    end
  end
elsif ARGV[0] == "get" && ARGV.length > 1
  puts "#{'*'*80}"
  puts "*********** get #{ARGV[1]}"
  downloaded = `cat #{IPLAYER_HISTORY} | grep "#{ARGV[1]}"`
  if downloaded.empty?
    puts "Downloading #{ARGV[1]}"
    # Bug fix: identical dead `if ARGV[1] =~ /^p/` branch removed here too.
    `#{GET_IPLAYER} --type=radio --pid #{ARGV[1]} --output #{DOWNLOAD_PATH}`
  else
    puts "Downloaded #{ARGV[1]}"
  end
else
  puts USAGE
end
debugging
#! /usr/bin/env ruby
# Thin wrapper around get_iplayer: refresh the radio cache, batch-download
# the configured SHOWS, or find/get an individual programme by name/PID.
require 'fileutils'
# https://github.com/dinkypumpkin/get_iplayer
GET_IPLAYER = "~/Projects/get_iplayer/get_iplayer"
IPLAYER_CACHE = "~/.get_iplayer/radio.cache"
IPLAYER_HISTORY = "~/.get_iplayer/download_history"
DOWNLOAD_PATH = "~/Projects/snowball/public/misc/"
# Pre-create the cache/history files and the download directory so the
# `cat ... | grep` pipelines below never fail on a missing file.
FileUtils.mkdir_p(File.dirname(File.expand_path(IPLAYER_CACHE)))
FileUtils.touch(File.expand_path(IPLAYER_CACHE))
FileUtils.mkdir_p(File.dirname(File.expand_path(IPLAYER_HISTORY)))
FileUtils.touch(File.expand_path(IPLAYER_HISTORY))
FileUtils.mkdir_p(File.dirname(File.expand_path(DOWNLOAD_PATH)))
# Shows fetched by the `download` subcommand.
SHOWS = ["Pete Tong", "Annie Mac", "BBC Radio 1's Essential Mix"]
USAGE = <<END_OF_USAGE
Usage: #{__FILE__} [refresh|download|find|get] (find needs the name) (get needs the PID)"
*********** MANUAL REFRESH
#{GET_IPLAYER} --refresh --type=radio
*********** MANUAL FIND
cat ~/.get_iplayer/radio.cache | grep -i '[SHOW NAME]' | awk 'BEGIN {FS=\"|\"}{ print $4, \" \", $3, \" \", $11 }'
*********** MANUAL GET
#{GET_IPLAYER} --type=radio --pid [PID] --output #{DOWNLOAD_PATH}
END_OF_USAGE
if ARGV.length == 0
puts USAGE
exit
end
if ARGV[0] == "refresh"
puts "#{'*'*80}"
puts "*********** REFRESH (can take some time)"
`#{GET_IPLAYER} --refresh --type=radio`
elsif ARGV[0] == "download"
puts "#{'*'*80}"
puts "*********** download"
SHOWS.each do |name|
shows = `cat #{IPLAYER_CACHE} | grep -i "#{name}"`
unless shows.empty?
shows.split("\n").each do |show|
puts "#{'*'*80}"
# Cache lines are pipe-delimited; after split, field 3 is the PID,
# field 2 the episode name and field 10 the description.
parts = show.split('|')
downloaded = `cat #{IPLAYER_HISTORY} | grep "#{parts[3]}"`
if downloaded.empty?
puts "Download #{parts[3]} - #{parts[2]} - #{parts[10]}"
cmd = "#{GET_IPLAYER} --modes=flashaaclow,rtspaaclow,wma --type=radio --pid #{parts[3]} --output #{DOWNLOAD_PATH}"
puts cmd
# Kernel.system returns true/false/nil; `p` prints it for debugging.
p Kernel.system cmd
else
puts "Downloaded #{parts[3]} - #{parts[2]} - #{parts[10]}"
end
end
end
end
elsif ARGV[0] == "find" && ARGV.length > 1
puts "#{'*'*80}"
puts "*********** find #{ARGV[1]}"
# NOTE(review): ARGV[1] is interpolated unescaped into a shell pipeline —
# quotes/metacharacters in the search term will break or inject commands.
shows = `cat #{IPLAYER_CACHE} | grep -i "#{ARGV[1]}"`
unless shows.empty?
shows.split("\n").each do |show|
parts = show.split('|')
downloaded = `cat #{IPLAYER_HISTORY} | grep "#{parts[3]}"`
if downloaded.empty?
puts "Download #{parts[3]} - #{parts[2]} - #{parts[10]}"
else
puts "Downloaded #{parts[3]} - #{parts[2]} - #{parts[10]}"
end
end
end
# puts `cat ~/.get_iplayer/radio.cache | grep '#{ARGV[1]}' | awk 'BEGIN {FS=\"|\"}{ print $4, \" \", $3, \" \", $11 }'`
elsif ARGV[0] == "get" && ARGV.length > 1
puts "#{'*'*80}"
puts "*********** get #{ARGV[1]}"
downloaded = `cat #{IPLAYER_HISTORY} | grep "#{ARGV[1]}"`
if downloaded.empty?
puts "Downloading #{ARGV[1]}"
`#{GET_IPLAYER} --type=radio --pid #{ARGV[1]} --output #{DOWNLOAD_PATH}`
else
puts "Downloaded #{ARGV[1]}"
end
else
puts USAGE
end
|
#
# Author:: Hoat Le <hoatle@teracy.com>
# Cookbook Name:: teracy-dev
# Recipe:: proxy
#
# Copyright 2013 - current, Teracy, Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of Teracy, Inc. nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
proxy_conf = node['teracy-dev']['proxy']

if proxy_conf['enabled'] == true
  certs_conf = node['teracy-dev']['proxy']['certs']
  owner = certs_conf['owner']
  group = certs_conf['group']
  mode = certs_conf['mode']
  sources = certs_conf['sources']
  destination = certs_conf['destination']

  # create the destination directory first
  directory destination do
    owner owner
    group group
    mode '0755'
    action :create
    recursive true
  end

  # then copy the cert/key files from the cookbook into the destination
  sources.each do |source|
    source_path = "default/#{source}"
    # Bug fix: `source.split('/')[2]` only worked for paths with exactly
    # three segments and returned nil otherwise, yielding a trailing-slash
    # destination path. ::File.basename handles any path depth.
    file_name = ::File.basename(source)
    destination_path = "#{destination}/#{file_name}"
    cookbook_file destination_path do
      source source_path
      owner owner
      group group
      mode mode
      action :create
    end
  end

  # start docker nginx-proxy
  # this requires that docker is available
  if node['docker']['enabled'] == true
    container_conf = node['teracy-dev']['proxy']['container']
    docker_image container_conf['repo'] do
      tag container_conf['tag']
      action :pull
      notifies :redeploy, "docker_container[#{container_conf['name']}]"
    end

    docker_container container_conf['name'] do
      repo container_conf['repo']
      tag container_conf['tag']
      volumes container_conf['volumes']
      restart_policy container_conf['restart_policy']
      port container_conf['port']
    end
  end
end
@ #253 | should support nginx-proxy and ssl out of the box: rename the key and cert files following the host name
#
# Author:: Hoat Le <hoatle@teracy.com>
# Cookbook Name:: teracy-dev
# Recipe:: proxy
#
# Copyright 2013 - current, Teracy, Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of Teracy, Inc. nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
proxy_conf = node['teracy-dev']['proxy']
if proxy_conf['enabled'] == true
certs_conf = node['teracy-dev']['proxy']['certs']
owner = certs_conf['owner']
group = certs_conf['group']
mode = certs_conf['mode']
sources = certs_conf['sources']
destination = certs_conf['destination']
# create the destination directory first
directory destination do
owner owner
group group
mode '0755'
action :create
recursive true
end
# Copy each cert/key file from the cookbook, renaming it after the node so
# nginx-proxy can match key/cert files to the host name. The extension is
# the last dot-separated segment (note: a source with no dot keeps its
# whole name as the "extension").
sources.each do |source|
source_path = "default/#{source}"
file_ext_splits = source.split('.')
file_ext = file_ext_splits[file_ext_splits.length-1]
destination_path = "#{destination}/#{node.name}.#{file_ext}"
cookbook_file destination_path do
source source_path
owner owner
group group
mode mode
action :create
end
end
# start docker nginx-proxy
# this requires that docker is available on the node
if node['docker']['enabled'] == true
container_conf = node['teracy-dev']['proxy']['container']
# Pull the image; a changed image triggers a container redeploy.
docker_image container_conf['repo'] do
tag container_conf['tag']
action :pull
notifies :redeploy, "docker_container[#{container_conf['name']}]"
end
docker_container container_conf['name'] do
repo container_conf['repo']
tag container_conf['tag']
volumes container_conf['volumes']
restart_policy container_conf['restart_policy']
port container_conf['port']
end
end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'inq/version'

Gem::Specification.new do |spec|
  spec.name = "inq"
  spec.version = Inq::VERSION
  spec.authors = ["Ellen Marie Dash"]
  spec.email = ["me@duckie.co"]

  spec.summary = %q{Quantify the health of a GitHub repository.}
  spec.homepage = "https://github.com/duckinator/inq"
  spec.license = "MIT"

  # Bug fix: also exclude bin/ and the ~11MB fixtures/ directory (VCR
  # cassettes) from the shipped gem; previously they were packaged.
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features|bin|fixtures)/}) }
  spec.bindir = "exe"
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  # Inq only supports Ruby versions under "normal maintenance".
  # This number should be updated when a Ruby version goes into security
  # maintenance.
  #
  # Ruby maintenance info: https://www.ruby-lang.org/en/downloads/branches/
  #
  # NOTE: Update Gemfile when this is updated!
  spec.required_ruby_version = "~> 2.4"

  spec.add_runtime_dependency "github_api", "~> 0.18.1"
  spec.add_runtime_dependency "okay", "~> 11.0"
  spec.add_runtime_dependency "json_pure"

  spec.add_development_dependency "bundler", "~> 2.0"
  spec.add_development_dependency "rake", "~> 12.3"
  spec.add_development_dependency "rspec", "~> 3.8"
  spec.add_development_dependency "timecop", "~> 0.9.1"
  spec.add_development_dependency "vcr", "~> 4.0"
  spec.add_development_dependency "webmock"
  # Rubocop pulls in C extensions, which we want to avoid in Windows CI.
  spec.add_development_dependency "rubocop", "~> 0.68.1" unless Gem.win_platform? && ENV["CI"]
  spec.add_development_dependency "github_changelog_generator"
end
Don't ship the 11MB fixtures/ directory lmao
# coding: utf-8
# Gem specification for inq — repository-health metrics for GitHub repos.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'inq/version'
Gem::Specification.new do |spec|
spec.name = "inq"
spec.version = Inq::VERSION
spec.authors = ["Ellen Marie Dash"]
spec.email = ["me@duckie.co"]
spec.summary = %q{Quantify the health of a GitHub repository.}
spec.homepage = "https://github.com/duckinator/inq"
spec.license = "MIT"
# Exclude tests, CI helpers, bin/ and the large fixtures/ directory from
# the packaged gem.
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features|bin|fixtures)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
# Inq only supports Ruby versions under "normal maintenance".
# This number should be updated when a Ruby version goes into security
# maintenance.
#
# Ruby maintenance info: https://www.ruby-lang.org/en/downloads/branches/
#
# NOTE: Update Gemfile when this is updated!
spec.required_ruby_version = "~> 2.4"
spec.add_runtime_dependency "github_api", "~> 0.18.1"
spec.add_runtime_dependency "okay", "~> 11.0"
spec.add_runtime_dependency "json_pure"
spec.add_development_dependency "bundler", "~> 2.0"
spec.add_development_dependency "rake", "~> 12.3"
spec.add_development_dependency "rspec", "~> 3.8"
spec.add_development_dependency "timecop", "~> 0.9.1"
spec.add_development_dependency "vcr", "~> 4.0"
spec.add_development_dependency "webmock"
# Rubocop pulls in C extensions, which we want to avoid in Windows CI.
spec.add_development_dependency "rubocop", "~> 0.68.1" unless Gem.win_platform? && ENV["CI"]
spec.add_development_dependency "github_changelog_generator"
end
|
require 'yaml'
require 'json'
require 'octokit'
require 'rest-client'
require 'sidekiq'
require 'sinatra'
require 'whedon'
set :views, Proc.new { File.join(root, "responses") }
set :gh_token, ENV["GH_TOKEN"]
set :github, Octokit::Client.new(:access_token => settings.gh_token)
set :magic_word, "bananas"
set :configs, {}
YAML.load_file("config/settings.yml").each do |nwo, config|
team_id = config["editor_team_id"]
config["editors"] = settings.github.team_members(team_id).collect { |e| e.login }.sort
settings.configs[nwo] = OpenStruct.new config
end
# Before we handle the request we extract the issue body to grab the whedon
# command (if present).
before do
if %w[heartbeat].include? request.path_info.split('/')[1]
pass
else
sleep(2) # This seems to help with auto-updating GitHub issue threads
params = JSON.parse(request.env["rack.input"].read)
# Only work with issues. Halt if there isn't an issue in the JSON
halt if params['issue'].nil?
@action = params['action']
@payload = params
if @action == 'opened'
@message = params['issue']['body']
elsif @action == 'created'
@message = params['comment']['body']
end
@sender = params['sender']['login']
@issue_id = params['issue']['number']
@nwo = params['repository']['full_name']
@config = settings.configs[@nwo]
halt unless @config # We probably want to restrict this
end
end
get '/heartbeat' do
"BOOM boom. BOOM boom. BOOM boom."
end
post '/dispatch' do
if @action == "opened"
say_hello
halt
end
robawt_respond if @message
end
def say_hello
if issue.title.match(/^\[REVIEW\]:/)
reviewer = issue.body.match(/\*\*Reviewer:\*\*\s*(@\S*|Pending)/i)[1]
respond erb :reviewer_welcome, :locals => { :reviewer => reviewer, :nwo => @nwo }
# Newly created [PRE REVIEW] issue. Time to say hello
elsif assignees.any?
respond erb :welcome, :locals => { :editor => assignees.first }
else
respond erb :welcome, :locals => { :editor => nil }
end
end
def assignees
@assignees ||= settings.github.issue(@nwo, @issue_id).assignees.collect { |a| a.login }
end
# Dispatch a "@whedon ..." command found in the comment/issue body and post
# the appropriate reply to the GitHub issue.
def robawt_respond
  puts "ACTION: #{@action}"
  puts "MESSAGE: #{@message}"
  case @message
  when /\A@whedon commands/i
    respond erb :commands
  when /\A@whedon assign (.*) as reviewer/i
    check_editor
    assign_reviewer($1)
    respond "OK, the reviewer is #{$1}"
  when /\A@whedon assign (.*) as editor/i
    check_editor
    assign_editor($1)
    respond "OK, the editor is #{$1}"
  when /\A@whedon set (.*) as archive/
    check_editor
    assign_archive($1)
  when /\A@whedon start review magic-word=(.*)|\A@whedon start review/i
    check_editor
    # TODO actually post something to the API
    word = $1
    if word && word == settings.magic_word
      review_issue_id = start_review
      respond erb :start_review, :locals => { :review_issue_id => review_issue_id, :nwo => @nwo }
    else
      respond erb :magic_word, :locals => { :magic_word => settings.magic_word }
      halt
    end
  when /\A@whedon list editors/i
    respond erb :editors, :locals => { :editors => @config.editors }
  when /\A@whedon list reviewers/i
    respond reviewers
  when /\A@whedon assignments/i
    reviewers, editors = assignments
    respond erb :assignments, :locals => { :reviewers => reviewers, :editors => editors, :all_editors => @config.editors }
  when /\A@whedon generate pdf/i
    # Bug fix: process_pdf reads @config/@nwo/@issue_id itself; the old
    # argument-passing call matched a definition that used instance
    # variables as formal parameters, which is a Ruby SyntaxError.
    respond process_pdf
  end
end
# How Whedon talks
def respond(comment)
settings.github.add_comment(@nwo, @issue_id, comment)
end
# Download and compile the PDF
# Kick off an async PDF compile job for the current issue and return the
# comment text posted back to GitHub.
#
# Bug fix: the previous definition `def process_pdf(@config, @nwo, @issue_id)`
# used instance variables as formal parameters, which is a SyntaxError in
# Ruby. The method now takes no arguments and reads the instance variables
# populated by the `before` filter directly.
def process_pdf
  WhedonWorker.perform_async(@config, @nwo, @issue_id)
  "I compiled your stinkin' PDF"
end
def assign_archive(doi_string)
doi = doi_string[/\b(10[.][0-9]{4,}(?:[.][0-9]+)*\/(?:(?!["&\'<>])\S)+)\b/]
if doi
doi_with_url = "<a href=\"http://dx.doi.org/#{doi}\" target=\"_blank\">#{doi}</a>"
new_body = issue.body.gsub(/\*\*Archive:\*\*\s*(.*|Pending)/i, "**Archive:** #{doi_with_url}")
settings.github.update_issue(@nwo, @issue_id, issue.title, new_body)
respond "OK. #{doi_with_url} is the archive."
else
respond "#{doi_string} doesn't look like an archive DOI."
end
end
def assignments
issues = settings.github.list_issues(@nwo, :state => 'open')
editors = Hash.new(0)
reviewers = Hash.new(0)
issues.each do |issue|
if issue.body.match(/\*\*Editor:\*\*\s*(@\S*|Pending)/i)
editor = issue.body.match(/\*\*Editor:\*\*\s*(@\S*|Pending)/i)[1]
editors[editor] += 1
end
if issue.body.match(/\*\*Reviewer:\*\*\s*(@\S*|Pending)/i)
reviewer = issue.body.match(/\*\*Reviewer:\*\*\s*(@\S*|Pending)/i)[1]
reviewers[reviewer] += 1
end
end
sorted_editors = editors.sort_by {|_, value| value}.to_h
sorted_reviewers = reviewers.sort_by {|_, value| value}.to_h
return sorted_reviewers, sorted_editors
end
# Returns a string response with URL to Gist of reviewers
def reviewers
"Here's the current list of reviewers: #{@config.reviewers}"
end
# Change the editor on an issue. This is a two-step process:
# 1. Change the review issue assignee
# 2. Update the editor listed at the top of the issue
def assign_editor(new_editor)
new_editor = new_editor.gsub(/^\@/, "")
new_body = issue.body.gsub(/\*\*Editor:\*\*\s*(@\S*|Pending)/i, "**Editor:** @#{new_editor}")
settings.github.update_issue(@nwo, @issue_id, issue.title, new_body, :assignees => [])
update_assigness([new_editor])
end
# Change the reviewer listed at the top of the issue
def assign_reviewer(new_reviewer)
new_reviewer = new_reviewer.gsub(/^\@/, "")
editor = issue.body.match(/\*\*Editor:\*\*\s*.@(\S*)/)[1]
new_body = issue.body.gsub(/\*\*Reviewer:\*\*\s*(@\S*|Pending)/i, "**Reviewer:** @#{new_reviewer}")
settings.github.add_collaborator(@nwo, new_reviewer)
puts "NWO: #{@nwo}"
puts "ISSUE ID: #{@issue_id}"
puts "TITLE: #{issue.title}"
puts "BODY: #{new_body}"
puts "ASSIGNEES #{[new_reviewer, editor]}"
settings.github.update_issue(@nwo, @issue_id, issue.title, new_body, :assignees => [])
update_assigness([new_reviewer, editor])
end
# Replace the assignees on the current issue by POSTing straight to the
# GitHub REST assignees endpoint (update_issue is used elsewhere to clear
# assignees first).
# NOTE(review): passing the token as an `access_token` query parameter is
# deprecated by GitHub and leaks the token into URLs/logs — prefer an
# `Authorization: token ...` request header.
# NOTE: method name has a typo ("assigness" -> "assignees") but is kept
# unchanged because callers in this file use it.
def update_assigness(assignees)
data = { "assignees" => assignees }
url = "https://api.github.com/repos/#{@nwo}/issues/#{@issue_id}/assignees?access_token=#{settings.gh_token}"
RestClient.post(url, data.to_json)
end
# Extract the editor and reviewer handles from the pre-review issue body
# and ask the journal site to open the review issue. Returns the new
# review issue id.
# NOTE(review): if either "**Editor:**"/"**Reviewer:**" marker is missing,
# `.match(...)` returns nil and `[1]` raises NoMethodError before the
# `raise` guard below ever runs — confirm whether that is intended.
def start_review
editor = issue.body.match(/\*\*Editor:\*\*\s*.@(\S*)/)[1]
reviewer = issue.body.match(/\*\*Reviewer:\*\*\s*.@(\S*)/)[1]
# Check we have an editor and a reviewer
raise unless (editor && reviewer)
url = "#{@config.site_host}/papers/api_start_review?id=#{@issue_id}&editor=#{editor}&reviewer=#{reviewer}&secret=#{@config.site_api_key}"
# TODO let's do some error handling here please
puts "POSTING TO #{url}"
response = RestClient.post(url, "")
paper = JSON.parse(response)
return paper['review_issue_id']
end
def issue
@issue ||= settings.github.issue(@nwo, @issue_id)
end
# Check that the person sending the command is an editor
# Halt the request with 403 unless the comment author is one of the
# configured editors; posts an apology comment first.
def check_editor
  return if @config.editors.include?(@sender)
  respond "I'm sorry @#{@sender}, I'm afraid I can't do that. That's something only editors are allowed to do."
  halt 403
end
# Sidekiq background job that downloads a submission and compiles its
# paper PDF via the `whedon` CLI.
class WhedonWorker
include Sidekiq::Worker
# Entry point invoked by Sidekiq with the serialized job arguments.
def perform(config, nwo, issue_id)
set_env(config, nwo)
download(issue_id)
compile(issue_id)
end
# Shell out to `whedon download` for the given issue.
# NOTE(review): issue_id is interpolated unquoted into a shell command;
# it originates from the GitHub webhook payload — confirm it is always a
# plain integer before trusting it here.
def download(issue_id)
puts "Downloading #{ENV['REVIEW_REPOSITORY']}"
`whedon download #{issue_id}`
end
# Shell out to `whedon prepare` to build the PDF.
def compile(issue_id)
puts "Compiling #{ENV['REVIEW_REPOSITORY']}"
`whedon prepare #{issue_id}`
end
# The Whedon gem expects a bunch of environment variables to be available
# and this method sets them.
def set_env(config, nwo)
ENV['REVIEW_REPOSITORY'] = nwo
ENV['DOI_PREFIX'] = "10.21105"
ENV['PAPER_REPOSITORY'] = config.papers
ENV['JOURNAL_URL'] = config.site_host
ENV['JOURNAL_NAME'] = config.site_name
end
end
Unset params
require 'yaml'
require 'json'
require 'octokit'
require 'rest-client'
require 'sidekiq'
require 'sinatra'
require 'whedon'
set :views, Proc.new { File.join(root, "responses") }
set :gh_token, ENV["GH_TOKEN"]
set :github, Octokit::Client.new(:access_token => settings.gh_token)
set :magic_word, "bananas"
set :configs, {}
YAML.load_file("config/settings.yml").each do |nwo, config|
team_id = config["editor_team_id"]
config["editors"] = settings.github.team_members(team_id).collect { |e| e.login }.sort
settings.configs[nwo] = OpenStruct.new config
end
# Before we handle the request we extract the issue body to grab the whedon
# command (if present).
before do
if %w[heartbeat].include? request.path_info.split('/')[1]
pass
else
sleep(2) # This seems to help with auto-updating GitHub issue threads
params = JSON.parse(request.env["rack.input"].read)
# Only work with issues. Halt if there isn't an issue in the JSON
halt if params['issue'].nil?
@action = params['action']
@payload = params
if @action == 'opened'
@message = params['issue']['body']
elsif @action == 'created'
@message = params['comment']['body']
end
@sender = params['sender']['login']
@issue_id = params['issue']['number']
@nwo = params['repository']['full_name']
@config = settings.configs[@nwo]
halt unless @config # We probably want to restrict this
end
end
get '/heartbeat' do
"BOOM boom. BOOM boom. BOOM boom."
end
post '/dispatch' do
if @action == "opened"
say_hello
halt
end
robawt_respond if @message
end
def say_hello
if issue.title.match(/^\[REVIEW\]:/)
reviewer = issue.body.match(/\*\*Reviewer:\*\*\s*(@\S*|Pending)/i)[1]
respond erb :reviewer_welcome, :locals => { :reviewer => reviewer, :nwo => @nwo }
# Newly created [PRE REVIEW] issue. Time to say hello
elsif assignees.any?
respond erb :welcome, :locals => { :editor => assignees.first }
else
respond erb :welcome, :locals => { :editor => nil }
end
end
def assignees
@assignees ||= settings.github.issue(@nwo, @issue_id).assignees.collect { |a| a.login }
end
def robawt_respond
puts "ACTION: #{@action}"
puts "MESSAGE: #{@message}"
case @message
when /\A@whedon commands/i
respond erb :commands
when /\A@whedon assign (.*) as reviewer/i
check_editor
assign_reviewer($1)
respond "OK, the reviewer is #{$1}"
when /\A@whedon assign (.*) as editor/i
check_editor
assign_editor($1)
respond "OK, the editor is #{$1}"
when /\A@whedon set (.*) as archive/
check_editor
assign_archive($1)
when /\A@whedon start review magic-word=(.*)|\A@whedon start review/i
check_editor
# TODO actually post something to the API
word = $1
if word && word == settings.magic_word
review_issue_id = start_review
respond erb :start_review, :locals => { :review_issue_id => review_issue_id, :nwo => @nwo }
else
respond erb :magic_word, :locals => { :magic_word => settings.magic_word }
halt
end
when /\A@whedon list editors/i
respond erb :editors, :locals => { :editors => @config.editors }
when /\A@whedon list reviewers/i
respond reviewers
when /\A@whedon assignments/i
reviewers, editors = assignments
respond erb :assignments, :locals => { :reviewers => reviewers, :editors => editors, :all_editors => @config.editors }
when /\A@whedon generate pdf/i
respond process_pdf
end
end
# How Whedon talks
def respond(comment)
settings.github.add_comment(@nwo, @issue_id, comment)
end
# Download and compile the PDF
def process_pdf
WhedonWorker.perform_async(@config, @nwo, @issue_id)
return "I compiled your stinkin' PDF"
end
# Extract a DOI from +doi_string+ and record it as the archive at the top
# of the issue body; replies with an error comment if no DOI is found.
# The regex matches the standard Crossref DOI shape (10.NNNN/suffix).
def assign_archive(doi_string)
doi = doi_string[/\b(10[.][0-9]{4,}(?:[.][0-9]+)*\/(?:(?!["&\'<>])\S)+)\b/]
if doi
doi_with_url = "<a href=\"http://dx.doi.org/#{doi}\" target=\"_blank\">#{doi}</a>"
new_body = issue.body.gsub(/\*\*Archive:\*\*\s*(.*|Pending)/i, "**Archive:** #{doi_with_url}")
settings.github.update_issue(@nwo, @issue_id, issue.title, new_body)
respond "OK. #{doi_with_url} is the archive."
else
respond "#{doi_string} doesn't look like an archive DOI."
end
end
# Tally how many open issues each editor and reviewer currently has, by
# scanning the "**Editor:**"/"**Reviewer:**" markers in open issue bodies.
# Returns [reviewers, editors], each a Hash of handle => count sorted by
# ascending count.
def assignments
issues = settings.github.list_issues(@nwo, :state => 'open')
# Hash.new(0) gives a zero default so `+= 1` works on first sight.
editors = Hash.new(0)
reviewers = Hash.new(0)
issues.each do |issue|
if issue.body.match(/\*\*Editor:\*\*\s*(@\S*|Pending)/i)
editor = issue.body.match(/\*\*Editor:\*\*\s*(@\S*|Pending)/i)[1]
editors[editor] += 1
end
if issue.body.match(/\*\*Reviewer:\*\*\s*(@\S*|Pending)/i)
reviewer = issue.body.match(/\*\*Reviewer:\*\*\s*(@\S*|Pending)/i)[1]
reviewers[reviewer] += 1
end
end
sorted_editors = editors.sort_by {|_, value| value}.to_h
sorted_reviewers = reviewers.sort_by {|_, value| value}.to_h
return sorted_reviewers, sorted_editors
end
# Returns a string response with URL to Gist of reviewers
def reviewers
"Here's the current list of reviewers: #{@config.reviewers}"
end
# Change the editor on an issue. This is a two-step process:
# 1. Change the review issue assignee
# 2. Update the editor listed at the top of the issue
def assign_editor(new_editor)
new_editor = new_editor.gsub(/^\@/, "")
new_body = issue.body.gsub(/\*\*Editor:\*\*\s*(@\S*|Pending)/i, "**Editor:** @#{new_editor}")
settings.github.update_issue(@nwo, @issue_id, issue.title, new_body, :assignees => [])
update_assigness([new_editor])
end
# Change the reviewer listed at the top of the issue
def assign_reviewer(new_reviewer)
new_reviewer = new_reviewer.gsub(/^\@/, "")
editor = issue.body.match(/\*\*Editor:\*\*\s*.@(\S*)/)[1]
new_body = issue.body.gsub(/\*\*Reviewer:\*\*\s*(@\S*|Pending)/i, "**Reviewer:** @#{new_reviewer}")
settings.github.add_collaborator(@nwo, new_reviewer)
puts "NWO: #{@nwo}"
puts "ISSUE ID: #{@issue_id}"
puts "TITLE: #{issue.title}"
puts "BODY: #{new_body}"
puts "ASSIGNEES #{[new_reviewer, editor]}"
settings.github.update_issue(@nwo, @issue_id, issue.title, new_body, :assignees => [])
update_assigness([new_reviewer, editor])
end
def update_assigness(assignees)
data = { "assignees" => assignees }
url = "https://api.github.com/repos/#{@nwo}/issues/#{@issue_id}/assignees?access_token=#{settings.gh_token}"
RestClient.post(url, data.to_json)
end
# Kicks off the review on the journal site: extracts the assigned editor
# and reviewer from the issue body, POSTs them to the site's API, and
# returns the id of the review issue the site creates.
def start_review
# NOTE(review): [1] raises NoMethodError (before the explicit raise below
# is reached) when either field is unassigned/"Pending" — confirm intended.
editor = issue.body.match(/\*\*Editor:\*\*\s*.@(\S*)/)[1]
reviewer = issue.body.match(/\*\*Reviewer:\*\*\s*.@(\S*)/)[1]
# Check we have an editor and a reviewer
raise unless (editor && reviewer)
# NOTE(review): site_api_key travels in the query string and is echoed by
# the puts below — it will end up in logs.
url = "#{@config.site_host}/papers/api_start_review?id=#{@issue_id}&editor=#{editor}&reviewer=#{reviewer}&secret=#{@config.site_api_key}"
# TODO let's do some error handling here please
puts "POSTING TO #{url}"
response = RestClient.post(url, "")
paper = JSON.parse(response)
return paper['review_issue_id']
end
# Lazily fetches and memoizes the GitHub issue this request refers to, so
# repeated accesses within one request hit the API only once.
def issue
@issue = settings.github.issue(@nwo, @issue_id) unless @issue
@issue
end
# Check that the person sending the command is an editor
# Posts a refusal comment and halts the request with HTTP 403 otherwise.
def check_editor
unless @config.editors.include?(@sender)
respond "I'm sorry @#{@sender}, I'm afraid I can't do that. That's something only editors are allowed to do."
halt 403
end
end
# Background worker that prepares a submitted paper: downloads the paper's
# repository for a review issue and compiles it with the `whedon` CLI.
class WhedonWorker
include Sidekiq::Worker
# Sidekiq entry point. config is the journal configuration object, nwo the
# "owner/name" of the review repository, issue_id the review issue number.
def perform(config, nwo, issue_id)
set_env(config, nwo)
download(issue_id)
compile(issue_id)
end
# Fetches the paper repository for the given issue via the whedon CLI.
# NOTE(review): issue_id is interpolated into a shell command — confirm it
# is always a numeric GitHub issue id and never attacker-controlled.
def download(issue_id)
puts "Downloading #{ENV['REVIEW_REPOSITORY']}"
`whedon download #{issue_id}`
end
# Compiles the paper for the given issue via the whedon CLI (same shell
# interpolation caveat as download above).
def compile(issue_id)
puts "Compiling #{ENV['REVIEW_REPOSITORY']}"
`whedon prepare #{issue_id}`
end
# The Whedon gem expects a bunch of environment variables to be available
# and this method sets them.
def set_env(config, nwo)
ENV['REVIEW_REPOSITORY'] = nwo
ENV['DOI_PREFIX'] = "10.21105"
ENV['PAPER_REPOSITORY'] = config.papers
ENV['JOURNAL_URL'] = config.site_host
ENV['JOURNAL_NAME'] = config.site_name
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{swagger}
s.version = "1.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["mdeiters"]
s.date = %q{2010-07-14}
# NOTE(review): "backround" is a typo for "background" in the published
# description; fix it in the Rakefile's Jeweler::Tasks, not in this
# generated file.
s.description = %q{Duck punch Resque to use active record for backround jobs instead of redis}
s.email = %q{mdeiters@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".document",
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"lib/redis_impersonator.rb",
"lib/resque_extension.rb",
"lib/resque_value.rb",
"lib/swagger.rb",
"spec/redis_impersonator_spec.rb",
"spec/resque_extension_spec.rb",
"spec/spec.opts",
"spec/spec_helper.rb",
"swagger.gemspec"
]
s.homepage = %q{http://github.com/mdeiters/swagger}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.6}
s.summary = %q{Everything Resque provides minus Redis}
s.test_files = [
"spec/redis_impersonator_spec.rb",
"spec/resque_extension_spec.rb",
"spec/spec_helper.rb"
]
# Jeweler boilerplate: register dependencies via the API appropriate to
# whichever RubyGems version loads this gemspec (the runtime/development
# split requires RubyGems >= 1.2 and specification_version support).
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>, [">= 1.2.9"])
s.add_runtime_dependency(%q<activerecord>, [">= 0"])
s.add_runtime_dependency(%q<resque>, ["= 1.9.7"])
else
s.add_dependency(%q<rspec>, [">= 1.2.9"])
s.add_dependency(%q<activerecord>, [">= 0"])
s.add_dependency(%q<resque>, ["= 1.9.7"])
end
else
s.add_dependency(%q<rspec>, [">= 1.2.9"])
s.add_dependency(%q<activerecord>, [">= 0"])
s.add_dependency(%q<resque>, ["= 1.9.7"])
end
end
Regenerated gemspec for version 1.1.1
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{swagger}
s.version = "1.1.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["mdeiters"]
s.date = %q{2010-07-14}
# NOTE(review): "backround" is a typo for "background" in the published
# description; fix it in the Rakefile's Jeweler::Tasks, not in this
# generated file.
s.description = %q{Duck punch Resque to use active record for backround jobs instead of redis}
s.email = %q{mdeiters@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".document",
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"lib/redis_impersonator.rb",
"lib/resque_extension.rb",
"lib/resque_value.rb",
"lib/swagger.rb",
"spec/redis_impersonator_spec.rb",
"spec/resque_extension_spec.rb",
"spec/spec.opts",
"spec/spec_helper.rb",
"swagger.gemspec"
]
s.homepage = %q{http://github.com/mdeiters/swagger}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.6}
s.summary = %q{Everything Resque provides minus Redis}
s.test_files = [
"spec/redis_impersonator_spec.rb",
"spec/resque_extension_spec.rb",
"spec/spec_helper.rb"
]
# Jeweler boilerplate: register dependencies via the API appropriate to
# whichever RubyGems version loads this gemspec (the runtime/development
# split requires RubyGems >= 1.2 and specification_version support).
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>, [">= 1.2.9"])
s.add_runtime_dependency(%q<activerecord>, [">= 0"])
s.add_runtime_dependency(%q<resque>, ["= 1.9.7"])
else
s.add_dependency(%q<rspec>, [">= 1.2.9"])
s.add_dependency(%q<activerecord>, [">= 0"])
s.add_dependency(%q<resque>, ["= 1.9.7"])
end
else
s.add_dependency(%q<rspec>, [">= 1.2.9"])
s.add_dependency(%q<activerecord>, [">= 0"])
s.add_dependency(%q<resque>, ["= 1.9.7"])
end
end
|
# -*- encoding: utf-8 -*-
# Gemspec for the LocomotiveCMS Wagon site generator.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'locomotive/wagon/version'

Gem::Specification.new do |gem|
  gem.name          = 'locomotivecms_wagon'
  gem.version       = Locomotive::Wagon::VERSION
  gem.authors       = ['Didier Lafforgue', 'Rodrigo Alvarez']
  gem.email         = ['did@locomotivecms.com', 'papipo@gmail.com']
  gem.description   = %q{The LocomotiveCMS wagon is a site generator for the LocomotiveCMS engine}
  gem.summary       = %q{The LocomotiveCMS wagon is a site generator for the LocomotiveCMS engine powered by all the efficient and modern HTML development tools (Haml, SASS, Compass, Less).}
  gem.homepage      = 'http://www.locomotivecms.com'
  gem.license       = 'MIT'

  gem.files         = `git ls-files`.split($/)
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ['lib']
  # Only the 'wagon' binary is shipped. (A previous grep-derived
  # executables assignment was dead code — it was immediately overwritten
  # by this one — so it has been removed.)
  gem.executables   = ['wagon']

  gem.add_development_dependency 'rake', '~> 10.0.4'

  gem.add_dependency 'thor',                 '~> 0.19.1'
  gem.add_dependency 'thin',                 '~> 1.6.3'
  gem.add_dependency 'rubyzip',              '~> 1.1.7'
  gem.add_dependency 'netrc',                '~> 0.10.3'
  gem.add_dependency 'locomotivecms_common', '~> 0.0.5'
  gem.add_dependency 'locomotivecms_coal',   '~> 1.0.0.rc2'
  gem.add_dependency 'locomotivecms_steam',  '~> 1.0.0.rc4'
  gem.add_dependency 'listen',               '~> 3.0.4'
  gem.add_dependency 'rack-livereload',      '~> 0.3.16'
  gem.add_dependency 'yui-compressor',       '~> 0.12.0'
  gem.add_dependency 'faker',                '~> 1.4.3'
end
use Steam 1.0.0.rc6
# -*- encoding: utf-8 -*-
# Gemspec for the LocomotiveCMS Wagon site generator.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'locomotive/wagon/version'

Gem::Specification.new do |gem|
  gem.name          = 'locomotivecms_wagon'
  gem.version       = Locomotive::Wagon::VERSION
  gem.authors       = ['Didier Lafforgue', 'Rodrigo Alvarez']
  gem.email         = ['did@locomotivecms.com', 'papipo@gmail.com']
  gem.description   = %q{The LocomotiveCMS wagon is a site generator for the LocomotiveCMS engine}
  gem.summary       = %q{The LocomotiveCMS wagon is a site generator for the LocomotiveCMS engine powered by all the efficient and modern HTML development tools (Haml, SASS, Compass, Less).}
  gem.homepage      = 'http://www.locomotivecms.com'
  gem.license       = 'MIT'

  gem.files         = `git ls-files`.split($/)
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ['lib']
  # Only the 'wagon' binary is shipped. (A previous grep-derived
  # executables assignment was dead code — it was immediately overwritten
  # by this one — so it has been removed.)
  gem.executables   = ['wagon']

  gem.add_development_dependency 'rake', '~> 10.0.4'

  gem.add_dependency 'thor',                 '~> 0.19.1'
  gem.add_dependency 'thin',                 '~> 1.6.3'
  gem.add_dependency 'rubyzip',              '~> 1.1.7'
  gem.add_dependency 'netrc',                '~> 0.10.3'
  gem.add_dependency 'locomotivecms_common', '~> 0.0.5'
  gem.add_dependency 'locomotivecms_coal',   '~> 1.0.0.rc2'
  gem.add_dependency 'locomotivecms_steam',  '~> 1.0.0.rc6'
  gem.add_dependency 'listen',               '~> 3.0.4'
  gem.add_dependency 'rack-livereload',      '~> 0.3.16'
  gem.add_dependency 'yui-compressor',       '~> 0.12.0'
  gem.add_dependency 'faker',                '~> 1.4.3'
end
|
# Source Compiler
#
# Mixin that expands "%var:..." and "%image<WxH>:..." template variables in
# a source document into localized text and <img> tags. Included into a
# class that responds to #load!, #data and #get_id (not visible here).
module Cms
module SourceCompiler
module ClassMethods
end; extend ClassMethods
# Looks up (or lazily creates) the localized text for text_id and returns
# it, prefixed with Aloha inline-editing markup in the admin view.
# NOTE(review): the `layout` parameter is accepted but never used here.
def get_text(text_id, layout, var_hash, source_id, lang_id)
tag_id = "#{source_id}-#{lang_id}-#{text_id}"
localized = SiteLocal.find_by_tag_id tag_id
if localized.nil?
# NOTE(review): `lang_name` is not defined in this method's scope — this
# raises NameError unless the including class provides it; confirm.
localized = SiteLocal.create(:tag_id => tag_id, :text => "#{lang_name}=(#{text_id})")
end
# In-memory only (not persisted): strip embedded newlines.
localized.text.gsub!("\n", '')
prepended_aloha_tags = ""
is_admin = var_hash[:admin_view?] == true
prepended_aloha_tags = ".aloha-editable.editable-long-text{'data-content_name' => '#{tag_id}'} " if is_admin
# Trick with empty text:
if localized.text.blank?
localized.update_attribute(:text, "#{lang_name}=(#{text_id})")
end
prepended_aloha_tags + localized.text
end
# Builds an <img> tag for image_id. The image file is resolved through the
# SiteLocal record keyed "<source>-<lang>-<image_id>"; image_size is a
# "WxH" string or nil.
def get_image(image_id, image_class, image_size, layout, var_hash, source_id, lang_id)
is_admin = var_hash[:admin_view?] || false
if is_admin
image_class = image_class ? "#{image_class} changeable-image" : "changeable-image"
end
class_str = image_class ? "class='#{image_class}'" : ""
image_styles_attr = " style='font-size: 10px' "
if image_size
image_width_attr = "width='#{image_size.split('x')[0]}' "
image_height_attr = "height='#{image_size.split('x')[1]}' "
image_styles_attr = " style='font-size: 10px; height:#{image_size.split('x')[1]}px!important' " if is_admin
else
# No explicit size: default placeholder dimensions.
image_width_attr = "width='100px' "
image_height_attr = "height='100px' "
end
image_size_specified = image_size ? true : false
tag_id = "#{source_id}-#{lang_id}-#{image_id}"
localized = SiteLocal.find_by_tag_id tag_id
if localized.nil?
# First render of this tag: create the record with a placeholder src.
localized = SiteLocal.create(:tag_id => tag_id, :text => "#{image_id}#{image_size}")
image_src = "#"
else
#img = Source.find_source_by_name_and_type(localized.text+".*", SourceType::IMAGE).first
img = Source.find_source_by_name_and_type(localized.text.split('/').last, SourceType::IMAGE).first
image_src = img ? img.get_image_path : "#!"
unless image_size
# Known image without an explicit size: let the browser size it.
image_width_attr = image_height_attr = ""
end
end
resulted_value = "<img id='#{tag_id}' #{class_str} src='#{image_src}' #{image_width_attr.to_s} #{image_height_attr.to_s} alt='#{image_id}#{image_size}' #{image_styles_attr} data-hardsize='#{image_size_specified}'/>"
end
#
#
#
# Replaces every "%var:<name>" occurrence in src. "text..." names resolve
# via get_text, "image..." names via get_image, anything else is looked up
# in var_hash. NOTE(review): `layout` is not defined in this method's
# scope — presumably a method on the including class; confirm.
def build_variables(src, lang_id, var_hash)
is_admin = var_hash[:admin_view?] == true
src.gsub(/%var:([\w\-\"_\'\.]+)/) { |r|
var_name = /:([\w_\-\"\'\.]+)/.match(r)[1]
@resulted_value = ""
if var_name.match(/^text/).to_s.length > 0
text_id = var_name["text".length..-1].to_s.gsub("\'", "\"")
@resulted_value = get_text(text_id, layout, var_hash, self.get_id, lang_id)
elsif var_name.match(/^image/).to_s.length > 0
image_size_attr = /:image(\w+)/.match(r) #%var:image.span3\"cubex_logo_header\"
image_class_attr = /:image\w*\.((\w|\-)+)(\"|')/.match(r)
# Offsets skip the "image", size and class prefixes to isolate the quoted id.
offset = image_size_attr.nil? ? "image".length+1 : image_size_attr[0].length+1
offset += image_class_attr.nil? ? 0 : image_class_attr[1].length+1
image_size = image_size_attr ? image_size_attr[1] : nil
image_class = image_class_attr ? image_class_attr[1] : nil
image_id = var_name[offset..-2].to_s
@resulted_value = get_image(image_id, image_class, image_size, layout, var_hash, self.get_id, lang_id)
elsif false #...var_name.match(/^%content:/).to_s.length > 0
else
@resulted_value = var_hash[var_name.to_sym]
end
@resulted_value
}
end
#
# %var:image100x100
# lang_id is not used!
#
# Replaces "%image<WxH>:<name>" occurrences with <img> tags resolved
# directly by Source name (no SiteLocal indirection).
def build_image_size_variables(src, lang_id, var_hash)
is_admin = var_hash[:admin_view?] == true
src.gsub(/%image[\w]*:[\w\-\"_\']+/) { |r|
resulted_value = ""
image_size_attr = /%image(\w+)/.match(r)
image_size = image_size_attr.nil? ? nil : image_size_attr[1].split("x")
image_name = /:([\w_\-\"\']+)/.match(r)[1]
image_source = Source.where(:type => SourceType::IMAGE, :name => image_name).first
image_size_specified = image_size ? true : false
image_styles_attr = " style='font-size: 10px' "
unless image_source.blank?
if image_size
image_width_attr = "width='#{image_size[0]}' "
image_height_attr = "height='#{image_size[1]}' "
image_styles_attr = " style='font-size: 10px; height:#{image_size[1]}px!important' " if is_admin
end
else
# specify height and width for alternative text visibility:
image_width_attr = " width='100px' "
image_height_attr = " height='30px' "
end
image_class = " class='changeable-image' " if is_admin
image_id = " id='#{image_source.get_id}' " if image_source && is_admin
# NOTE(review): this raises NoMethodError when image_source is nil — the
# unless-branch above implies that case can occur; confirm.
width, height = image_source.get_image_size
if image_size
if image_size[0] != width.to_s || image_size[1] != height.to_s
# Requested size doesn't match the stored image: fall back to '#' src.
image_source = nil
end
end
resulted_value = "<img #{image_id} src='#{image_source ? image_source.get_image_path : '#'}' #{image_class.to_s} #{image_width_attr.to_s} #{image_height_attr.to_s} alt='#{image_source ? image_source.name : image_name}#{image_size_attr[1] if image_size_attr}' #{image_styles_attr} data-hardsize='#{image_size_specified}'/>"
resulted_value
}
end
# Entry point: loads the raw source data and expands both variable
# syntaxes for the locale given in var_hash[:locale].
def build(var_hash, layout)
is_admin = var_hash[:admin_view?] == true
self.load!
src = self.data
lang_name = var_hash[:locale]
lang_id = SiteLanguage.find_by_url(lang_name).id
# Build variables
plain_src = build_variables(src, lang_id, var_hash)
# With image size:
plain_src = build_image_size_variables(plain_src, lang_id, var_hash)
return plain_src
end
end
end
fixed incorrect image name parsing
# Source Compiler
#
# Mixin that expands "%var:..." and "%image<WxH>:..." template variables in
# a source document into localized text and <img> tags. Included into a
# class that responds to #load!, #data and #get_id (not visible here).
module Cms
module SourceCompiler
module ClassMethods
end; extend ClassMethods
# Looks up (or lazily creates) the localized text for text_id and returns
# it, prefixed with Aloha inline-editing markup in the admin view.
# NOTE(review): the `layout` parameter is accepted but never used here.
def get_text(text_id, layout, var_hash, source_id, lang_id)
tag_id = "#{source_id}-#{lang_id}-#{text_id}"
localized = SiteLocal.find_by_tag_id tag_id
if localized.nil?
# NOTE(review): `lang_name` is not defined in this method's scope — this
# raises NameError unless the including class provides it; confirm.
localized = SiteLocal.create(:tag_id => tag_id, :text => "#{lang_name}=(#{text_id})")
end
# In-memory only (not persisted): strip embedded newlines.
localized.text.gsub!("\n", '')
prepended_aloha_tags = ""
is_admin = var_hash[:admin_view?] == true
prepended_aloha_tags = ".aloha-editable.editable-long-text{'data-content_name' => '#{tag_id}'} " if is_admin
# Trick with empty text:
if localized.text.blank?
localized.update_attribute(:text, "#{lang_name}=(#{text_id})")
end
prepended_aloha_tags + localized.text
end
# Builds an <img> tag for image_id. The image file is resolved through the
# SiteLocal record keyed "<source>-<lang>-<image_id>"; image_size is a
# "WxH" string or nil.
def get_image(image_id, image_class, image_size, layout, var_hash, source_id, lang_id)
is_admin = var_hash[:admin_view?] || false
if is_admin
image_class = image_class ? "#{image_class} changeable-image" : "changeable-image"
end
class_str = image_class ? "class='#{image_class}'" : ""
image_styles_attr = " style='font-size: 10px' "
if image_size
image_width_attr = "width='#{image_size.split('x')[0]}' "
image_height_attr = "height='#{image_size.split('x')[1]}' "
image_styles_attr = " style='font-size: 10px; height:#{image_size.split('x')[1]}px!important' " if is_admin
else
# No explicit size: default placeholder dimensions.
image_width_attr = "width='100px' "
image_height_attr = "height='100px' "
end
image_size_specified = image_size ? true : false
tag_id = "#{source_id}-#{lang_id}-#{image_id}"
localized = SiteLocal.find_by_tag_id tag_id
if localized.nil?
# First render of this tag: create the record with a placeholder src.
localized = SiteLocal.create(:tag_id => tag_id, :text => "#{image_id}#{image_size}")
image_src = "#"
else
#img = Source.find_source_by_name_and_type(localized.text+".*", SourceType::IMAGE).first
img = Source.find_source_by_name_and_type(localized.text.split('/').last, SourceType::IMAGE).first
image_src = img ? img.get_image_path : "#!"
unless image_size
# Known image without an explicit size: let the browser size it.
image_width_attr = image_height_attr = ""
end
end
resulted_value = "<img id='#{tag_id}' #{class_str} src='#{image_src}' #{image_width_attr.to_s} #{image_height_attr.to_s} alt='#{image_id}#{image_size}' #{image_styles_attr} data-hardsize='#{image_size_specified}'/>"
end
#
#
#
# Replaces every "%var:<name>" occurrence in src. "text..." names resolve
# via get_text, "image..." names via get_image, anything else is looked up
# in var_hash. NOTE(review): `layout` is not defined in this method's
# scope — presumably a method on the including class; confirm.
def build_variables(src, lang_id, var_hash)
is_admin = var_hash[:admin_view?] == true
src.gsub(/%var:([\w\-\"_\'\.]+)/) { |r|
var_name = /:([\w_\-\"\'\.]+)/.match(r)[1]
@resulted_value = ""
if var_name.match(/^text/).to_s.length > 0
text_id = var_name["text".length..-1].to_s.gsub("\'", "\"")
@resulted_value = get_text(text_id, layout, var_hash, self.get_id, lang_id)
elsif var_name.match(/^image/).to_s.length > 0
# Unanchored here (no leading ':') so the size/class prefixes are found
# wherever they appear in the matched token.
image_size_attr = /image(\w+)/.match(r) #%var:image.span3\"cubex_logo_header\"
image_class_attr = /image\w*\.((\w|\-)+)(\"|')/.match(r)
# Offsets skip the "image", size and class prefixes to isolate the quoted id.
offset = image_size_attr.nil? ? "image".length+1 : image_size_attr[0].length+1
offset += image_class_attr.nil? ? 0 : image_class_attr[1].length+1
image_size = image_size_attr ? image_size_attr[1] : nil
image_class = image_class_attr ? image_class_attr[1] : nil
image_id = var_name[offset..-2].to_s
@resulted_value = get_image(image_id, image_class, image_size, layout, var_hash, self.get_id, lang_id)
elsif false #...var_name.match(/^%content:/).to_s.length > 0
else
@resulted_value = var_hash[var_name.to_sym]
end
@resulted_value
}
end
#
# %var:image100x100
# lang_id is not used!
#
# Replaces "%image<WxH>:<name>" occurrences with <img> tags resolved
# directly by Source name (no SiteLocal indirection).
def build_image_size_variables(src, lang_id, var_hash)
is_admin = var_hash[:admin_view?] == true
src.gsub(/%image[\w]*:[\w\-\"_\']+/) { |r|
resulted_value = ""
image_size_attr = /%image(\w+)/.match(r)
image_size = image_size_attr.nil? ? nil : image_size_attr[1].split("x")
image_name = /:([\w_\-\"\']+)/.match(r)[1]
image_source = Source.where(:type => SourceType::IMAGE, :name => image_name).first
image_size_specified = image_size ? true : false
image_styles_attr = " style='font-size: 10px' "
unless image_source.blank?
if image_size
image_width_attr = "width='#{image_size[0]}' "
image_height_attr = "height='#{image_size[1]}' "
image_styles_attr = " style='font-size: 10px; height:#{image_size[1]}px!important' " if is_admin
end
else
# specify height and width for alternative text visibility:
image_width_attr = " width='100px' "
image_height_attr = " height='30px' "
end
image_class = " class='changeable-image' " if is_admin
image_id = " id='#{image_source.get_id}' " if image_source && is_admin
# NOTE(review): this raises NoMethodError when image_source is nil — the
# unless-branch above implies that case can occur; confirm.
width, height = image_source.get_image_size
if image_size
if image_size[0] != width.to_s || image_size[1] != height.to_s
# Requested size doesn't match the stored image: fall back to '#' src.
image_source = nil
end
end
resulted_value = "<img #{image_id} src='#{image_source ? image_source.get_image_path : '#'}' #{image_class.to_s} #{image_width_attr.to_s} #{image_height_attr.to_s} alt='#{image_source ? image_source.name : image_name}#{image_size_attr[1] if image_size_attr}' #{image_styles_attr} data-hardsize='#{image_size_specified}'/>"
resulted_value
}
end
# Entry point: loads the raw source data and expands both variable
# syntaxes for the locale given in var_hash[:locale].
def build(var_hash, layout)
is_admin = var_hash[:admin_view?] == true
self.load!
src = self.data
lang_name = var_hash[:locale]
lang_id = SiteLanguage.find_by_url(lang_name).id
# Build variables
plain_src = build_variables(src, lang_id, var_hash)
# With image size:
plain_src = build_image_size_variables(plain_src, lang_id, var_hash)
return plain_src
end
end
end
require 'debugger'
require 'execjs'
require 'selenium-webdriver'
require_relative 'selenium_adapter'

# Number of random edits to apply during one fuzzing run.
NUM_EDITS = 500
# Characters the in-page generator uses for random insertions.
ALPHABET = "abcdefghijklmnopqrstuvwxyz"

################################################################################
# Helpers for generating random edits
################################################################################

# Asks the writer editor (in the page) for a random operation.
def js_get_random_edit(driver)
  driver.execute_script "return parent.writer.getRandomOp();"
end

# Asks the in-page DocGen for a random delta against doc_delta.
# Fixed: the original had a syntax error (unbalanced paren) and referenced
# `driver` without it being in scope; driver is now an explicit parameter.
# NOTE(review): "getRadomDelta" is assumed to be the (typo'd) name of the
# JS-side function — confirm against the page's scripts.
def js_get_random_delta(driver, doc_delta, alphabet, num_edits)
  driver.execute_script("return window.DocGen.getRadomDelta", doc_delta, alphabet, num_edits)
end

################################################################################
# Helpers
################################################################################

# Raises unless the writer and reader editors agree on the document state.
def check_consistency(driver)
  writer_delta = driver.execute_script "return parent.writer.getDelta().toString();"
  reader_delta = driver.execute_script "return parent.reader.getDelta().toString();"
  raise "Writer: #{writer_delta}\nReader: #{reader_delta}" unless writer_delta == reader_delta
end

# Current document delta of the writer editor.
def js_get_doc_delta(driver)
  driver.execute_script "return parent.writer.getDelta()"
end

################################################################################
# WebDriver setup
################################################################################
puts "Usage: ruby _browserdriver_ _editor_url_" unless ARGV.length == 2
browserdriver = ARGV[0].to_sym
editor_url = ARGV[1]
driver = Selenium::WebDriver.for browserdriver
driver.manage.timeouts.implicit_wait = 10
driver.get editor_url
editors = driver.find_elements(:class, "editor-container")
writer, reader = editors
driver.switch_to.frame(driver.find_element(:tag_name, "iframe"))
writer = driver.find_element(:id, "scribe-container")
adapter = SeleniumAdapter.new driver, writer

################################################################################
# Fuzzer logic
################################################################################
doc_delta = js_get_doc_delta(driver)
NUM_EDITS.times do |i|
  # Fixed: the helper was called with one argument (its arity is now four,
  # with driver threaded through explicitly).
  delta = js_get_random_delta(driver, doc_delta, ALPHABET, 1)
  puts i if i % 10 == 0
  # Fixed: was `random_delta`, an undefined variable.
  adapter.op_to_selenium(delta)
  check_consistency(driver)
end
Forgot to remove require execjs.
require 'debugger'
require 'selenium-webdriver'
require_relative 'selenium_adapter'

# Number of random edits to apply during one fuzzing run.
NUM_EDITS = 500
# Characters the in-page generator uses for random insertions.
ALPHABET = "abcdefghijklmnopqrstuvwxyz"

################################################################################
# Helpers for generating random edits
################################################################################

# Asks the writer editor (in the page) for a random operation.
def js_get_random_edit(driver)
  driver.execute_script "return parent.writer.getRandomOp();"
end

# Asks the in-page DocGen for a random delta against doc_delta.
# Fixed: the original had a syntax error (unbalanced paren) and referenced
# `driver` without it being in scope; driver is now an explicit parameter.
# NOTE(review): "getRadomDelta" is assumed to be the (typo'd) name of the
# JS-side function — confirm against the page's scripts.
def js_get_random_delta(driver, doc_delta, alphabet, num_edits)
  driver.execute_script("return window.DocGen.getRadomDelta", doc_delta, alphabet, num_edits)
end

################################################################################
# Helpers
################################################################################

# Raises unless the writer and reader editors agree on the document state.
def check_consistency(driver)
  writer_delta = driver.execute_script "return parent.writer.getDelta().toString();"
  reader_delta = driver.execute_script "return parent.reader.getDelta().toString();"
  raise "Writer: #{writer_delta}\nReader: #{reader_delta}" unless writer_delta == reader_delta
end

# Current document delta of the writer editor.
def js_get_doc_delta(driver)
  driver.execute_script "return parent.writer.getDelta()"
end

################################################################################
# WebDriver setup
################################################################################
puts "Usage: ruby _browserdriver_ _editor_url_" unless ARGV.length == 2
browserdriver = ARGV[0].to_sym
editor_url = ARGV[1]
driver = Selenium::WebDriver.for browserdriver
driver.manage.timeouts.implicit_wait = 10
driver.get editor_url
editors = driver.find_elements(:class, "editor-container")
writer, reader = editors
driver.switch_to.frame(driver.find_element(:tag_name, "iframe"))
writer = driver.find_element(:id, "scribe-container")
adapter = SeleniumAdapter.new driver, writer

################################################################################
# Fuzzer logic
################################################################################
doc_delta = js_get_doc_delta(driver)
NUM_EDITS.times do |i|
  # Fixed: the helper was called with one argument (its arity is now four,
  # with driver threaded through explicitly).
  delta = js_get_random_delta(driver, doc_delta, ALPHABET, 1)
  puts i if i % 10 == 0
  # Fixed: was `random_delta`, an undefined variable.
  adapter.op_to_selenium(delta)
  check_consistency(driver)
end
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: trinitycrmod 0.7.11 ruby lib
Gem::Specification.new do |s|
s.name = "trinitycrmod"
s.version = "0.7.11"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Edmund Highcock"]
s.date = "2015-07-30"
s.description = "This module allows Trinity, the Multiscale Gyrokinetic Turbulent Transport solver for Fusion Reactors, to harness the power of CodeRunner, a framework for the automated running and analysis of simulations."
s.email = "edmundhighcock@sourceforge.net"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
"Gemfile",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"lib/trinitycrmod.rb",
"lib/trinitycrmod/actual_parameter_values.rb",
"lib/trinitycrmod/calib.rb",
"lib/trinitycrmod/chease.rb",
"lib/trinitycrmod/check_parameters.rb",
"lib/trinitycrmod/deleted_variables.rb",
"lib/trinitycrmod/flux_interpolator.rb",
"lib/trinitycrmod/graphs.rb",
"lib/trinitycrmod/namelists.rb",
"lib/trinitycrmod/output_files.rb",
"lib/trinitycrmod/read_netcdf.rb",
"lib/trinitycrmod/trinity.rb",
"lib/trinitycrmod/trinity_gs2.rb",
"sync_variables/helper.rb",
"sync_variables/sync_variables.rb",
"trinitycrmod.gemspec"
]
s.homepage = "http://github.com/edmundhighcock/trinitycrmod"
s.licenses = ["GPLv3"]
s.rubygems_version = "2.4.8"
s.summary = "CodeRunner module for the Trinity simulation software."
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<coderunner>, [">= 0.16.10"])
s.add_runtime_dependency(%q<text-data-tools>, [">= 1.1.6"])
s.add_runtime_dependency(%q<gs2crmod>, [">= 0.11.76"])
s.add_runtime_dependency(%q<gryfxcrmod>, [">= 0.1.9"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<bundler>, ["> 1.0.0"])
s.add_development_dependency(%q<jeweler>, [">= 2.00"])
else
s.add_dependency(%q<coderunner>, [">= 0.16.10"])
s.add_dependency(%q<text-data-tools>, [">= 1.1.6"])
s.add_dependency(%q<gs2crmod>, [">= 0.11.76"])
s.add_dependency(%q<gryfxcrmod>, [">= 0.1.9"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["> 1.0.0"])
s.add_dependency(%q<jeweler>, [">= 2.00"])
end
else
s.add_dependency(%q<coderunner>, [">= 0.16.10"])
s.add_dependency(%q<text-data-tools>, [">= 1.1.6"])
s.add_dependency(%q<gs2crmod>, [">= 0.11.76"])
s.add_dependency(%q<gryfxcrmod>, [">= 0.1.9"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["> 1.0.0"])
s.add_dependency(%q<jeweler>, [">= 2.00"])
end
end
Regenerate gemspec for version 0.8.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: trinitycrmod 0.8.0 ruby lib
Gem::Specification.new do |s|
s.name = "trinitycrmod"
s.version = "0.8.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Edmund Highcock"]
s.date = "2015-09-24"
s.description = "This module allows Trinity, the Multiscale Gyrokinetic Turbulent Transport solver for Fusion Reactors, to harness the power of CodeRunner, a framework for the automated running and analysis of simulations."
s.email = "edmundhighcock@sourceforge.net"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
"Gemfile",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"lib/trinitycrmod.rb",
"lib/trinitycrmod/actual_parameter_values.rb",
"lib/trinitycrmod/calib.rb",
"lib/trinitycrmod/chease.rb",
"lib/trinitycrmod/check_parameters.rb",
"lib/trinitycrmod/deleted_variables.rb",
"lib/trinitycrmod/ecom.rb",
"lib/trinitycrmod/flux_interpolator.rb",
"lib/trinitycrmod/graphs.rb",
"lib/trinitycrmod/namelists.rb",
"lib/trinitycrmod/output_files.rb",
"lib/trinitycrmod/read_netcdf.rb",
"lib/trinitycrmod/trinity.rb",
"lib/trinitycrmod/trinity_gs2.rb",
"sync_variables/helper.rb",
"sync_variables/sync_variables.rb",
"trinitycrmod.gemspec"
]
s.homepage = "http://github.com/edmundhighcock/trinitycrmod"
s.licenses = ["GPLv3"]
s.rubygems_version = "2.4.8"
s.summary = "CodeRunner module for the Trinity simulation software."
# Jeweler boilerplate: register dependencies via the API appropriate to
# whichever RubyGems version loads this gemspec (the runtime/development
# split requires RubyGems >= 1.2 and specification_version support).
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<coderunner>, [">= 0.16.10"])
s.add_runtime_dependency(%q<text-data-tools>, [">= 1.1.6"])
s.add_runtime_dependency(%q<gs2crmod>, [">= 0.11.76"])
s.add_runtime_dependency(%q<gryfxcrmod>, [">= 0.1.9"])
s.add_runtime_dependency(%q<cheasecrmod>, [">= 0.1.0"])
s.add_runtime_dependency(%q<ecomcrmod>, [">= 0.1.0"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<bundler>, ["> 1.0.0"])
s.add_development_dependency(%q<jeweler>, [">= 2.00"])
else
s.add_dependency(%q<coderunner>, [">= 0.16.10"])
s.add_dependency(%q<text-data-tools>, [">= 1.1.6"])
s.add_dependency(%q<gs2crmod>, [">= 0.11.76"])
s.add_dependency(%q<gryfxcrmod>, [">= 0.1.9"])
s.add_dependency(%q<cheasecrmod>, [">= 0.1.0"])
s.add_dependency(%q<ecomcrmod>, [">= 0.1.0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["> 1.0.0"])
s.add_dependency(%q<jeweler>, [">= 2.00"])
end
else
s.add_dependency(%q<coderunner>, [">= 0.16.10"])
s.add_dependency(%q<text-data-tools>, [">= 1.1.6"])
s.add_dependency(%q<gs2crmod>, [">= 0.11.76"])
s.add_dependency(%q<gryfxcrmod>, [">= 0.1.9"])
s.add_dependency(%q<cheasecrmod>, [">= 0.1.0"])
s.add_dependency(%q<ecomcrmod>, [">= 0.1.0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["> 1.0.0"])
s.add_dependency(%q<jeweler>, [">= 2.00"])
end
end
RSpec.describe Metasploit::Cache::Module::Path do
it { should be_a ActiveModel::Dirty }
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :auxiliary_ancestors,
factory: :metasploit_cache_auxiliary_ancestor,
relative_path_prefix: 'auxiliary'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :encoder_ancestors,
factory: :metasploit_cache_encoder_ancestor,
relative_path_prefix: 'encoders'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :exploit_ancestors,
factory: :metasploit_cache_exploit_ancestor,
relative_path_prefix: 'exploits'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :nop_ancestors,
factory: :metasploit_cache_nop_ancestor,
relative_path_prefix: 'nops'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :single_payload_ancestors,
factory: :metasploit_cache_payload_single_ancestor,
relative_path_prefix: 'payloads/singles'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :stage_payload_ancestors,
factory: :metasploit_cache_payload_stage_ancestor,
relative_path_prefix: 'payloads/stages'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :stager_payload_ancestors,
factory: :metasploit_cache_payload_stager_ancestor,
relative_path_prefix: 'payloads/stagers'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :post_ancestors,
factory: :metasploit_cache_post_ancestor,
relative_path_prefix: 'post'
it_should_behave_like 'Metasploit::Cache::RealPathname' do
let(:base_instance) do
FactoryGirl.build(:metasploit_cache_module_path)
end
end
context 'associations' do
it { is_expected.to have_many(:auxiliary_ancestors).class_name('Metasploit::Cache::Auxiliary::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
it { is_expected.to have_many(:encoder_ancestors).class_name('Metasploit::Cache::Encoder::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
it { is_expected.to have_many(:exploit_ancestors).class_name('Metasploit::Cache::Exploit::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
it { is_expected.to have_many(:nop_ancestors).class_name('Metasploit::Cache::Nop::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
it { is_expected.to have_many(:single_payload_ancestors).class_name('Metasploit::Cache::Payload::Single::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
it { is_expected.to have_many(:stage_payload_ancestors).class_name('Metasploit::Cache::Payload::Stage::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
it { is_expected.to have_many(:stager_payload_ancestors).class_name('Metasploit::Cache::Payload::Stager::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
it { is_expected.to have_many(:post_ancestors).class_name('Metasploit::Cache::Post::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
end
context 'callbacks' do
context 'before_validation' do
context 'nilify blanks' do
let(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
gem: '',
name: ''
)
end
it 'should have empty gem' do
expect(path.gem).not_to be_nil
expect(path.gem).to be_empty
end
it 'should have empty name' do
expect(path.name).not_to be_nil
expect(path.name).to be_empty
end
context 'after validation' do
before(:each) do
path.valid?
end
it 'does not have a gem' do
expect(path.gem).to be_nil
end
it 'does not have a name' do
expect(path.name).to be_nil
end
end
end
context '#normalize_real_path' do
let(:parent_pathname) do
Metasploit::Model::Spec.temporary_pathname.join('metasploit', 'cache', 'module', 'path')
end
let(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
real_path: symlink_pathname.to_path
)
end
let(:real_basename) do
'real'
end
let(:real_pathname) do
parent_pathname.join(real_basename)
end
let(:symlink_basename) do
'symlink'
end
let(:symlink_pathname) do
parent_pathname.join(symlink_basename)
end
before(:each) do
real_pathname.mkpath
Dir.chdir(parent_pathname.to_path) do
File.symlink(real_basename, 'symlink')
end
end
it 'should convert real_path to a real path using File#real_path' do
expected_real_path = Metasploit::Model::File.realpath(path.real_path)
expect(path.real_path).not_to eq(expected_real_path)
path.valid?
expect(path.real_path).to eq(expected_real_path)
end
end
end
end
context 'database' do
context 'columns' do
it { should have_db_column(:gem).of_type(:string).with_options(null: true) }
it { should have_db_column(:name).of_type(:string).with_options(null: true) }
it { should have_db_column(:real_path).of_type(:text).with_options(null: false) }
end
context 'indices' do
it { should have_db_index([:gem, :name]).unique(true) }
it { should have_db_index(:real_path).unique(true) }
end
end
context 'factories' do
context :metasploit_cache_module_path do
subject(:metasploit_cache_module_path) do
FactoryGirl.build(:metasploit_cache_module_path)
end
it { should be_valid }
end
context :named_metasploit_cache_module_path do
subject(:named_metasploit_cache_module_path) do
FactoryGirl.build(:named_metasploit_cache_module_path)
end
it { should be_valid }
it 'has a gem' do
expect(named_metasploit_cache_module_path.gem).not_to be_nil
end
it 'has a name' do
expect(named_metasploit_cache_module_path.name).not_to be_nil
end
end
context :unnamed_metasploit_cache_module_path do
subject(:unnamed_metasploit_cache_module_path) do
FactoryGirl.build(:unnamed_metasploit_cache_module_path)
end
it { should be_valid }
it 'does not have a gem' do
expect(unnamed_metasploit_cache_module_path.gem).to be_nil
end
it 'does not have a name' do
expect(unnamed_metasploit_cache_module_path.name).to be_nil
end
end
end
context 'validations' do
context 'directory' do
let(:error) do
'must be a directory'
end
let(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
real_path: real_path
)
end
before(:each) do
path.valid?
end
context 'with #real_path' do
context 'with directory' do
let(:real_path) do
FactoryGirl.generate :metasploit_cache_module_path_directory_real_path
end
it 'should not record error on real_path' do
path.valid?
expect(path.errors[:real_path]).not_to include(error)
end
end
context 'with file' do
let(:pathname) do
Metasploit::Model::Spec.temporary_pathname.join(
'metasploit',
'cache',
'module',
'path',
'real',
'path',
'file'
)
end
let(:real_path) do
pathname.to_path
end
before(:each) do
Metasploit::Model::Spec::PathnameCollision.check!(pathname)
pathname.parent.mkpath
pathname.open('wb') do |f|
f.puts 'A file'
end
end
it 'should record error on real_path' do
path.valid?
expect(path.errors[:real_path]).to include(error)
end
end
end
context 'without #real_path' do
let(:real_path) do
nil
end
it 'should record error on real_path' do
path.valid?
expect(path.errors[:real_path]).to include(error)
end
end
end
context 'gem and name' do
let(:gem_error) do
"can't be blank if name is present"
end
let(:name_error) do
"can't be blank if gem is present"
end
subject(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
gem: gem,
name: name
)
end
before(:each) do
path.valid?
end
context 'with gem' do
let(:gem) do
FactoryGirl.generate :metasploit_cache_module_path_gem
end
context 'with name' do
let(:name) do
FactoryGirl.generate :metasploit_cache_module_path_name
end
it 'should not record error on gem' do
expect(path.errors[:gem]).not_to include(gem_error)
end
it 'should not record error on name' do
expect(path.errors[:name]).not_to include(name_error)
end
end
context 'without name' do
let(:name) do
nil
end
it 'should not record error on gem' do
expect(path.errors[:gem]).not_to include(gem_error)
end
it 'should record error on name' do
expect(path.errors[:name]).to include(name_error)
end
end
end
context 'without gem' do
let(:gem) do
nil
end
context 'with name' do
let(:name) do
FactoryGirl.generate :metasploit_cache_module_path_name
end
it 'should record error on gem' do
expect(path.errors[:gem]).to include(gem_error)
end
it 'should not record error on name' do
expect(path.errors[:name]).not_to include(name_error)
end
end
context 'without name' do
let(:name) do
nil
end
it 'should not record error on gem' do
expect(path.errors[:gem]).not_to include(gem_error)
end
it 'should not record error on name' do
expect(path.errors[:name]).not_to include(name_error)
end
end
end
end
context 'validate unique of name scoped to gem' do
context 'with different real_paths' do
#
# lets
#
let(:duplicate) do
FactoryGirl.build(
:named_metasploit_cache_module_path,
gem: original.gem,
name: original.name
)
end
#
# let!s
#
# let! so it exists in database for duplicate to validate against
let!(:original) do
FactoryGirl.create(
:named_metasploit_cache_module_path
)
end
context 'with default validation context' do
let(:error) {
I18n.translate!('errors.messages.taken')
}
it 'validates uniqueness of name scoped to gem' do
expect(duplicate).not_to be_valid
expect(duplicate.errors[:name]).to include(error)
end
end
context 'with :add validation context' do
it 'skips validating uniqueness of name scoped to gem' do
expect(duplicate).to be_valid(:add)
end
end
end
end
context 'real_path' do
#
# lets
#
let(:duplicate) {
FactoryGirl.build(:metasploit_cache_module_path, real_path: real_path)
}
let(:real_path) do
FactoryGirl.generate :metasploit_cache_module_path_real_path
end
#
# let!s
#
let!(:original) {
FactoryGirl.create(:metasploit_cache_module_path, real_path: real_path)
}
context 'with default validation context' do
let(:error) {
I18n.translate!('errors.messages.taken')
}
it 'should validate uniqueness of real path' do
expect(duplicate).not_to be_valid
expect(duplicate.errors[:real_path]).to include(error)
end
end
context 'with :add validation context' do
it 'skips validating uniqueness of real path' do
expect(duplicate).to be_valid(:add)
end
end
end
end
context '#directory?' do
subject(:directory?) do
path.directory?
end
let(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
real_path: real_path
)
end
context 'with #real_path' do
context 'with file' do
let(:pathname) do
Metasploit::Model::Spec.temporary_pathname.join(
'metasploit',
'cache',
'module',
'path',
'real',
'path',
'file'
)
end
let(:real_path) do
pathname.to_path
end
before(:each) do
Metasploit::Model::Spec::PathnameCollision.check!(pathname)
pathname.parent.mkpath
pathname.open('wb') do |f|
f.puts 'A file'
end
end
it { is_expected.to eq(false) }
end
context 'with directory' do
let(:real_path) do
FactoryGirl.generate :metasploit_cache_module_path_directory_real_path
end
it { is_expected.to eq(true) }
end
end
context 'without #real_path' do
let(:real_path) do
nil
end
it { is_expected.to eq(false) }
end
end
context '#named?' do
subject(:named?) do
path.named?
end
let(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
gem: gem,
name: name
)
end
context 'with blank gem' do
let(:gem) do
''
end
context 'with blank name' do
let(:name) do
''
end
it { is_expected.to eq(false) }
end
context 'without blank name' do
let(:name) do
FactoryGirl.generate :metasploit_cache_module_path_name
end
it { is_expected.to eq(false) }
end
end
context 'without blank gem' do
let(:gem) do
FactoryGirl.generate :metasploit_cache_module_path_gem
end
context 'with blank name' do
let(:name) do
''
end
it { is_expected.to eq(false) }
end
context 'without blank name' do
let(:name) do
FactoryGirl.generate :metasploit_cache_module_path_name
end
it { is_expected.to eq(true) }
end
end
end
context '#name_collision' do
subject(:name_collision) do
path.name_collision
end
let!(:collision) do
FactoryGirl.create(:named_metasploit_cache_module_path)
end
let!(:other_named) do
FactoryGirl.create(:named_metasploit_cache_module_path)
end
let!(:unnamed) do
FactoryGirl.create(:unnamed_metasploit_cache_module_path)
end
before(:each) do
path.valid?
end
context 'with named' do
context 'with same (gem, name)' do
let(:path) do
FactoryGirl.build(
:named_metasploit_cache_module_path,
gem: collision.gem,
name: collision.name
)
end
it 'should return collision' do
expect(name_collision).to eq(collision)
end
end
context 'without same (gem, name)' do
let(:path) do
FactoryGirl.build(:named_metasploit_cache_module_path)
end
it { should be_nil }
end
end
context 'without named' do
let(:path) do
FactoryGirl.build(:unnamed_metasploit_cache_module_path)
end
it { should be_nil }
end
end
context '#real_path_collision' do
subject(:real_path_collision) do
path.real_path_collision
end
let!(:collision) do
FactoryGirl.create(:metasploit_cache_module_path)
end
context 'with same real_path' do
let(:path) do
FactoryGirl.build(:metasploit_cache_module_path, real_path: collision.real_path)
end
it 'should return collision' do
expect(real_path_collision).to eq(collision)
end
end
context 'without same real_path' do
let(:path) do
FactoryGirl.build(:metasploit_cache_module_path)
end
it { should be_nil }
end
end
context '#was_named?' do
subject(:was_named?) do
path.was_named?
end
let(:gem) do
FactoryGirl.generate :metasploit_cache_module_path_gem
end
let(:name) do
FactoryGirl.generate :metasploit_cache_module_path_name
end
let(:path) do
FactoryGirl.build(
:metasploit_cache_module_path
)
end
before(:each) do
path.gem = gem_was
path.name = name_was
path.changed_attributes.clear
path.gem = gem
path.name = name
end
context 'with blank gem_was' do
let(:gem_was) do
nil
end
context 'with blank name_was' do
let(:name_was) do
nil
end
it { is_expected.to eq(false) }
end
context 'without blank name_was' do
let(:name_was) do
FactoryGirl.generate :metasploit_cache_module_path_name
end
it { is_expected.to eq(false) }
end
end
context 'without blank gem_was' do
let(:gem_was) do
FactoryGirl.generate :metasploit_cache_module_path_gem
end
context 'with blank name_was' do
let(:name_was) do
nil
end
it { is_expected.to eq(false) }
end
context 'without blank name_was' do
let(:name_was) do
FactoryGirl.generate :metasploit_cache_module_path_name
end
it { is_expected.to eq(true) }
end
end
end
end
Remove extra blank line
MSP-13194
RSpec.describe Metasploit::Cache::Module::Path do
it { should be_a ActiveModel::Dirty }
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :auxiliary_ancestors,
factory: :metasploit_cache_auxiliary_ancestor,
relative_path_prefix: 'auxiliary'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :encoder_ancestors,
factory: :metasploit_cache_encoder_ancestor,
relative_path_prefix: 'encoders'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :exploit_ancestors,
factory: :metasploit_cache_exploit_ancestor,
relative_path_prefix: 'exploits'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :nop_ancestors,
factory: :metasploit_cache_nop_ancestor,
relative_path_prefix: 'nops'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :single_payload_ancestors,
factory: :metasploit_cache_payload_single_ancestor,
relative_path_prefix: 'payloads/singles'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :stage_payload_ancestors,
factory: :metasploit_cache_payload_stage_ancestor,
relative_path_prefix: 'payloads/stages'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :stager_payload_ancestors,
factory: :metasploit_cache_payload_stager_ancestor,
relative_path_prefix: 'payloads/stagers'
it_should_behave_like 'Metasploit::Cache::Module::Path::AssociationExtension',
association: :post_ancestors,
factory: :metasploit_cache_post_ancestor,
relative_path_prefix: 'post'
it_should_behave_like 'Metasploit::Cache::RealPathname' do
let(:base_instance) do
FactoryGirl.build(:metasploit_cache_module_path)
end
end
context 'associations' do
  # Every ancestor type is a has_many hanging off this path via the
  # non-conventional foreign key :parent_path_id, and is destroyed
  # together with the path record.
  it { is_expected.to have_many(:auxiliary_ancestors).class_name('Metasploit::Cache::Auxiliary::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
  it { is_expected.to have_many(:encoder_ancestors).class_name('Metasploit::Cache::Encoder::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
  it { is_expected.to have_many(:exploit_ancestors).class_name('Metasploit::Cache::Exploit::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
  it { is_expected.to have_many(:nop_ancestors).class_name('Metasploit::Cache::Nop::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
  it { is_expected.to have_many(:single_payload_ancestors).class_name('Metasploit::Cache::Payload::Single::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
  it { is_expected.to have_many(:stage_payload_ancestors).class_name('Metasploit::Cache::Payload::Stage::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
  it { is_expected.to have_many(:stager_payload_ancestors).class_name('Metasploit::Cache::Payload::Stager::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
  it { is_expected.to have_many(:post_ancestors).class_name('Metasploit::Cache::Post::Ancestor').dependent(:destroy).with_foreign_key(:parent_path_id) }
end
context 'callbacks' do
context 'before_validation' do
context 'nilify blanks' do
let(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
gem: '',
name: ''
)
end
it 'should have empty gem' do
expect(path.gem).not_to be_nil
expect(path.gem).to be_empty
end
it 'should have empty name' do
expect(path.name).not_to be_nil
expect(path.name).to be_empty
end
context 'after validation' do
before(:each) do
path.valid?
end
it 'does not have a gem' do
expect(path.gem).to be_nil
end
it 'does not have a name' do
expect(path.name).to be_nil
end
end
end
context '#normalize_real_path' do
let(:parent_pathname) do
Metasploit::Model::Spec.temporary_pathname.join('metasploit', 'cache', 'module', 'path')
end
let(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
real_path: symlink_pathname.to_path
)
end
let(:real_basename) do
'real'
end
let(:real_pathname) do
parent_pathname.join(real_basename)
end
let(:symlink_basename) do
'symlink'
end
let(:symlink_pathname) do
parent_pathname.join(symlink_basename)
end
before(:each) do
real_pathname.mkpath
Dir.chdir(parent_pathname.to_path) do
File.symlink(real_basename, 'symlink')
end
end
it 'should convert real_path to a real path using File#real_path' do
expected_real_path = Metasploit::Model::File.realpath(path.real_path)
expect(path.real_path).not_to eq(expected_real_path)
path.valid?
expect(path.real_path).to eq(expected_real_path)
end
end
end
end
context 'database' do
  context 'columns' do
    # gem and name are nullable (a path may be unnamed); real_path is required.
    it { should have_db_column(:gem).of_type(:string).with_options(null: true) }
    it { should have_db_column(:name).of_type(:string).with_options(null: true) }
    it { should have_db_column(:real_path).of_type(:text).with_options(null: false) }
  end
  context 'indices' do
    # Uniqueness of (gem, name) and of real_path is enforced at the DB level
    # in addition to the model validations exercised elsewhere in this spec.
    it { should have_db_index([:gem, :name]).unique(true) }
    it { should have_db_index(:real_path).unique(true) }
  end
end
context 'factories' do
context :metasploit_cache_module_path do
subject(:metasploit_cache_module_path) do
FactoryGirl.build(:metasploit_cache_module_path)
end
it { should be_valid }
end
context :named_metasploit_cache_module_path do
subject(:named_metasploit_cache_module_path) do
FactoryGirl.build(:named_metasploit_cache_module_path)
end
it { should be_valid }
it 'has a gem' do
expect(named_metasploit_cache_module_path.gem).not_to be_nil
end
it 'has a name' do
expect(named_metasploit_cache_module_path.name).not_to be_nil
end
end
context :unnamed_metasploit_cache_module_path do
subject(:unnamed_metasploit_cache_module_path) do
FactoryGirl.build(:unnamed_metasploit_cache_module_path)
end
it { should be_valid }
it 'does not have a gem' do
expect(unnamed_metasploit_cache_module_path.gem).to be_nil
end
it 'does not have a name' do
expect(unnamed_metasploit_cache_module_path.name).to be_nil
end
end
end
context 'validations' do
context 'directory' do
let(:error) do
'must be a directory'
end
let(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
real_path: real_path
)
end
before(:each) do
path.valid?
end
context 'with #real_path' do
context 'with directory' do
let(:real_path) do
FactoryGirl.generate :metasploit_cache_module_path_directory_real_path
end
it 'should not record error on real_path' do
path.valid?
expect(path.errors[:real_path]).not_to include(error)
end
end
context 'with file' do
let(:pathname) do
Metasploit::Model::Spec.temporary_pathname.join(
'metasploit',
'cache',
'module',
'path',
'real',
'path',
'file'
)
end
let(:real_path) do
pathname.to_path
end
before(:each) do
Metasploit::Model::Spec::PathnameCollision.check!(pathname)
pathname.parent.mkpath
pathname.open('wb') do |f|
f.puts 'A file'
end
end
it 'should record error on real_path' do
path.valid?
expect(path.errors[:real_path]).to include(error)
end
end
end
context 'without #real_path' do
let(:real_path) do
nil
end
it 'should record error on real_path' do
path.valid?
expect(path.errors[:real_path]).to include(error)
end
end
end
context 'gem and name' do
let(:gem_error) do
"can't be blank if name is present"
end
let(:name_error) do
"can't be blank if gem is present"
end
subject(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
gem: gem,
name: name
)
end
before(:each) do
path.valid?
end
context 'with gem' do
let(:gem) do
FactoryGirl.generate :metasploit_cache_module_path_gem
end
context 'with name' do
let(:name) do
FactoryGirl.generate :metasploit_cache_module_path_name
end
it 'should not record error on gem' do
expect(path.errors[:gem]).not_to include(gem_error)
end
it 'should not record error on name' do
expect(path.errors[:name]).not_to include(name_error)
end
end
context 'without name' do
let(:name) do
nil
end
it 'should not record error on gem' do
expect(path.errors[:gem]).not_to include(gem_error)
end
it 'should record error on name' do
expect(path.errors[:name]).to include(name_error)
end
end
end
context 'without gem' do
let(:gem) do
nil
end
context 'with name' do
let(:name) do
FactoryGirl.generate :metasploit_cache_module_path_name
end
it 'should record error on gem' do
expect(path.errors[:gem]).to include(gem_error)
end
it 'should not record error on name' do
expect(path.errors[:name]).not_to include(name_error)
end
end
context 'without name' do
let(:name) do
nil
end
it 'should not record error on gem' do
expect(path.errors[:gem]).not_to include(gem_error)
end
it 'should not record error on name' do
expect(path.errors[:name]).not_to include(name_error)
end
end
end
end
context 'validate unique of name scoped to gem' do
context 'with different real_paths' do
#
# lets
#
let(:duplicate) do
FactoryGirl.build(
:named_metasploit_cache_module_path,
gem: original.gem,
name: original.name
)
end
#
# let!s
#
# let! so it exists in database for duplicate to validate against
let!(:original) do
FactoryGirl.create(
:named_metasploit_cache_module_path
)
end
context 'with default validation context' do
let(:error) {
I18n.translate!('errors.messages.taken')
}
it 'validates uniqueness of name scoped to gem' do
expect(duplicate).not_to be_valid
expect(duplicate.errors[:name]).to include(error)
end
end
context 'with :add validation context' do
it 'skips validating uniqueness of name scoped to gem' do
expect(duplicate).to be_valid(:add)
end
end
end
end
context 'real_path' do
#
# lets
#
let(:duplicate) {
FactoryGirl.build(:metasploit_cache_module_path, real_path: real_path)
}
let(:real_path) do
FactoryGirl.generate :metasploit_cache_module_path_real_path
end
#
# let!s
#
let!(:original) {
FactoryGirl.create(:metasploit_cache_module_path, real_path: real_path)
}
context 'with default validation context' do
let(:error) {
I18n.translate!('errors.messages.taken')
}
it 'should validate uniqueness of real path' do
expect(duplicate).not_to be_valid
expect(duplicate.errors[:real_path]).to include(error)
end
end
context 'with :add validation context' do
it 'skips validating uniqueness of real path' do
expect(duplicate).to be_valid(:add)
end
end
end
end
context '#directory?' do
subject(:directory?) do
path.directory?
end
let(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
real_path: real_path
)
end
context 'with #real_path' do
context 'with file' do
let(:pathname) do
Metasploit::Model::Spec.temporary_pathname.join(
'metasploit',
'cache',
'module',
'path',
'real',
'path',
'file'
)
end
let(:real_path) do
pathname.to_path
end
before(:each) do
Metasploit::Model::Spec::PathnameCollision.check!(pathname)
pathname.parent.mkpath
pathname.open('wb') do |f|
f.puts 'A file'
end
end
it { is_expected.to eq(false) }
end
context 'with directory' do
let(:real_path) do
FactoryGirl.generate :metasploit_cache_module_path_directory_real_path
end
it { is_expected.to eq(true) }
end
end
context 'without #real_path' do
let(:real_path) do
nil
end
it { is_expected.to eq(false) }
end
end
context '#named?' do
subject(:named?) do
path.named?
end
let(:path) do
FactoryGirl.build(
:metasploit_cache_module_path,
gem: gem,
name: name
)
end
context 'with blank gem' do
let(:gem) do
''
end
context 'with blank name' do
let(:name) do
''
end
it { is_expected.to eq(false) }
end
context 'without blank name' do
let(:name) do
FactoryGirl.generate :metasploit_cache_module_path_name
end
it { is_expected.to eq(false) }
end
end
context 'without blank gem' do
let(:gem) do
FactoryGirl.generate :metasploit_cache_module_path_gem
end
context 'with blank name' do
let(:name) do
''
end
it { is_expected.to eq(false) }
end
context 'without blank name' do
let(:name) do
FactoryGirl.generate :metasploit_cache_module_path_name
end
it { is_expected.to eq(true) }
end
end
end
context '#name_collision' do
subject(:name_collision) do
path.name_collision
end
let!(:collision) do
FactoryGirl.create(:named_metasploit_cache_module_path)
end
let!(:other_named) do
FactoryGirl.create(:named_metasploit_cache_module_path)
end
let!(:unnamed) do
FactoryGirl.create(:unnamed_metasploit_cache_module_path)
end
before(:each) do
path.valid?
end
context 'with named' do
context 'with same (gem, name)' do
let(:path) do
FactoryGirl.build(
:named_metasploit_cache_module_path,
gem: collision.gem,
name: collision.name
)
end
it 'should return collision' do
expect(name_collision).to eq(collision)
end
end
context 'without same (gem, name)' do
let(:path) do
FactoryGirl.build(:named_metasploit_cache_module_path)
end
it { should be_nil }
end
end
context 'without named' do
let(:path) do
FactoryGirl.build(:unnamed_metasploit_cache_module_path)
end
it { should be_nil }
end
end
context '#real_path_collision' do
subject(:real_path_collision) do
path.real_path_collision
end
let!(:collision) do
FactoryGirl.create(:metasploit_cache_module_path)
end
context 'with same real_path' do
let(:path) do
FactoryGirl.build(:metasploit_cache_module_path, real_path: collision.real_path)
end
it 'should return collision' do
expect(real_path_collision).to eq(collision)
end
end
context 'without same real_path' do
let(:path) do
FactoryGirl.build(:metasploit_cache_module_path)
end
it { should be_nil }
end
end
context '#was_named?' do
  subject(:was_named?) do
    path.was_named?
  end
  # Current (post-change) gem and name values assigned in the before block.
  let(:gem) do
    FactoryGirl.generate :metasploit_cache_module_path_gem
  end
  let(:name) do
    FactoryGirl.generate :metasploit_cache_module_path_name
  end
  let(:path) do
    FactoryGirl.build(
        :metasploit_cache_module_path
    )
  end
  before(:each) do
    # Seed the "previous" values, then clear ActiveModel::Dirty tracking so
    # the subsequent assignments register as changes FROM (gem_was, name_was).
    path.gem = gem_was
    path.name = name_was
    path.changed_attributes.clear
    path.gem = gem
    path.name = name
  end
  context 'with blank gem_was' do
    let(:gem_was) do
      nil
    end
    context 'with blank name_was' do
      let(:name_was) do
        nil
      end
      it { is_expected.to eq(false) }
    end
    context 'without blank name_was' do
      let(:name_was) do
        FactoryGirl.generate :metasploit_cache_module_path_name
      end
      it { is_expected.to eq(false) }
    end
  end
  context 'without blank gem_was' do
    let(:gem_was) do
      FactoryGirl.generate :metasploit_cache_module_path_gem
    end
    context 'with blank name_was' do
      let(:name_was) do
        nil
      end
      it { is_expected.to eq(false) }
    end
    # was_named? is only true when BOTH previous gem and previous name were present.
    context 'without blank name_was' do
      let(:name_was) do
        FactoryGirl.generate :metasploit_cache_module_path_name
      end
      it { is_expected.to eq(true) }
    end
  end
end
end |
require 'spec_helper'
# Feature spec checking that the GFM (GitLab Flavored Markdown) autocomplete
# data source is only initialized on pages that actually use it.
# Runs with a JS-capable driver (js: true) so evaluate_script is available.
describe 'GFM autocomplete loading', feature: true, js: true do
  let(:user) { create(:user) }
  let(:project) { create(:project) }
  before do
    # Grant master access so both project#show and the new-issue page render.
    project.team << [user, :master]
    login_as user
    visit namespace_project_path(project.namespace, project)
  end
  it 'does not load on project#show' do
    # dataSource stays blank when autocomplete has not been set up for the page.
    expect(evaluate_script('GitLab.GfmAutoComplete.dataSource')).to eq('')
  end
  it 'loads on new issue page' do
    visit new_namespace_project_issue_path(project.namespace, project)
    expect(evaluate_script('GitLab.GfmAutoComplete.dataSource')).not_to eq('')
  end
end
Use admin user in tests
require 'spec_helper'
# Same GFM autocomplete loading check as the previous revision, but logged in
# as the admin user instead of creating and granting access to a new user.
describe 'GFM autocomplete loading', feature: true, js: true do
  let(:project) { create(:project) }
  before do
    # Admin can reach any project page without team membership setup.
    login_as :admin
    visit namespace_project_path(project.namespace, project)
  end
  it 'does not load on project#show' do
    expect(evaluate_script('GitLab.GfmAutoComplete.dataSource')).to eq('')
  end
  it 'loads on new issue page' do
    visit new_namespace_project_issue_path(project.namespace, project)
    expect(evaluate_script('GitLab.GfmAutoComplete.dataSource')).not_to eq('')
  end
end
|
# coding: UTF-8
require File.dirname(__FILE__) + "/common"

# Generates one JSON file per (autonomy, variable) pair describing the bubble
# rendered for that autonomy in a given electoral process:
#   x axis  -> margin between the two leading parties (PSOE left, PP right)
#   y axis  -> the socio-economic variable value (via get_y_coordinate)
#   radius  -> turnout (votes cast / census)
cartodb = get_cartodb_connection
psoe_id, pp_id = get_psoe_pp_id
processes = get_processes
autonomies = get_autonomies
provinces = get_provinces # NOTE(review): unused in this script — confirm it can be dropped
variables = get_variables(1)

# votes per autonomy, joined with the socio-economic variables table
query = <<-SQL
select votantes_totales, censo_total, #{AUTONOMIAS_VOTATIONS}.gadm1_cartodb_id, proceso_electoral_id, primer_partido_id, primer_partido_percent, segundo_partido_id, segundo_partido_percent,
#{variables.join(',')}
from #{AUTONOMIAS_VOTATIONS}, vars_socioeco_x_autonomia
where #{AUTONOMIAS_VOTATIONS}.gadm1_cartodb_id = vars_socioeco_x_autonomia.gadm1_cartodb_id
SQL
votes_per_autonomy = cartodb.query(query)[:rows]

# Start from a clean output tree on every run.
base_path = FileUtils.pwd
FileUtils.rm_rf("#{base_path}/../json/generated_data")
FileUtils.mkdir_p("#{base_path}/../json/generated_data")

## AUTONOMIES
#############
puts
autonomies.each do |autonomy_hash|
  variables.each do |variable|
    puts
    puts "Variable: #{variable}"
    dir_path = "#{base_path}/../json/generated_data/#{variable}/autonomies/#{autonomy_hash[:name_1]}"
    FileUtils.mkdir_p(dir_path)
    json = {}
    # The electoral process id is encoded as digits in the variable name.
    proceso_electoral_id = processes[variable.match(/\d+/)[0].to_i]
    row = votes_per_autonomy.find { |h| h[:gadm1_cartodb_id] == autonomy_hash[:cartodb_id] && h[:proceso_electoral_id] == proceso_electoral_id }
    unless row
      putc 'x' # no data for this (autonomy, process) combination
      next
    end
    putc '.'
    if row[:primer_partido_id].to_i != psoe_id && row[:primer_partido_id].to_i != pp_id
      # A party other than PSOE/PP won: centre the bubble.
      x_coordinate = 0
    else
      # Margin between first and second party, scaled onto +/-300; PSOE to the left.
      x_coordinate = ((row[:primer_partido_percent] - row[:segundo_partido_percent]).to_f * 300.0) / 100.0
      x_coordinate = x_coordinate * -1 if row[:primer_partido_id] == psoe_id
    end
    # Turnout percentage mapped onto a radius with a 20-unit minimum.
    radius = ((row[:votantes_totales].to_f / row[:censo_total].to_f) * 6000.0) / 100.0 + 20.0
    json[autonomy_hash[:name_1]] ||= {}
    json[autonomy_hash[:name_1]][:cartodb_id] = autonomy_hash[:cartodb_id]
    json[autonomy_hash[:name_1]][:x_coordinate] = x_coordinate
    json[autonomy_hash[:name_1]][:y_coordinate] = get_y_coordinate(row, variable.to_sym)
    json[autonomy_hash[:name_1]][:radius] = radius.to_i
    json[autonomy_hash[:name_1]][:parent_json_url] = nil
    json[autonomy_hash[:name_1]][:children_json_url] = []
    # File.write replaces the manual open/write/close triplet.
    File.write("#{dir_path}/#{variable}.json", json.to_json)
  end
end
Fixed autonomies JSON
# coding: UTF-8
require File.dirname(__FILE__) + "/common"

# Generates one JSON file per variable (autonomies_<variable>.json) holding the
# bubbles for ALL autonomies in that electoral process:
#   x axis  -> margin between the two leading parties (PSOE left, PP right)
#   y axis  -> the socio-economic variable value (via get_y_coordinate)
#   radius  -> turnout (votes cast / census)
cartodb = get_cartodb_connection
psoe_id, pp_id = get_psoe_pp_id
processes = get_processes
autonomies = get_autonomies
provinces = get_provinces # NOTE(review): unused in this script — confirm it can be dropped
variables = get_variables(1)

# votes per autonomy, joined with the socio-economic variables table
query = <<-SQL
select votantes_totales, censo_total, #{AUTONOMIAS_VOTATIONS}.gadm1_cartodb_id, proceso_electoral_id, primer_partido_id, primer_partido_percent, segundo_partido_id, segundo_partido_percent,
#{variables.join(',')}
from #{AUTONOMIAS_VOTATIONS}, vars_socioeco_x_autonomia
where #{AUTONOMIAS_VOTATIONS}.gadm1_cartodb_id = vars_socioeco_x_autonomia.gadm1_cartodb_id
SQL
votes_per_autonomy = cartodb.query(query)[:rows]

# Start from a clean output tree on every run.
base_path = FileUtils.pwd
FileUtils.rm_rf("#{base_path}/../json/generated_data")
FileUtils.mkdir_p("#{base_path}/../json/generated_data")

## AUTONOMIES
#############
puts
dir_path = "#{base_path}/../json/generated_data" # loop-invariant, hoisted
variables.each do |variable|
  puts
  puts "Variable: #{variable}"
  json = {}
  autonomies.each do |autonomy_hash|
    # The electoral process id is encoded as digits in the variable name.
    proceso_electoral_id = processes[variable.match(/\d+/)[0].to_i]
    row = votes_per_autonomy.find { |h| h[:gadm1_cartodb_id] == autonomy_hash[:cartodb_id] && h[:proceso_electoral_id] == proceso_electoral_id }
    unless row
      putc 'x' # no data for this (autonomy, process) combination
      next
    end
    putc '.'
    if row[:primer_partido_id].to_i != psoe_id && row[:primer_partido_id].to_i != pp_id
      # A party other than PSOE/PP won: centre the bubble.
      x_coordinate = 0
    else
      # Margin between first and second party, scaled onto +/-300; PSOE to the left.
      x_coordinate = ((row[:primer_partido_percent] - row[:segundo_partido_percent]).to_f * 300.0) / 100.0
      x_coordinate = x_coordinate * -1 if row[:primer_partido_id] == psoe_id
    end
    # Turnout percentage mapped onto a radius with a 20-unit minimum.
    radius = ((row[:votantes_totales].to_f / row[:censo_total].to_f) * 6000.0) / 100.0 + 20.0
    # Underscored name so it is usable as a JSON key / URL fragment.
    autonomy_name = autonomy_hash[:name_1].tr(' ','_')
    json[autonomy_name] ||= {}
    json[autonomy_name][:cartodb_id] = autonomy_hash[:cartodb_id]
    json[autonomy_name][:x_coordinate] = x_coordinate
    json[autonomy_name][:y_coordinate] = get_y_coordinate(row, variable.to_sym)
    json[autonomy_name][:radius] = radius.to_i
    json[autonomy_name][:color] = "#D94B5F"
    json[autonomy_name][:parent_json_url] = nil
    json[autonomy_name][:children_json_url] = []
  end
  # The original rewrote the file on every autonomy iteration; writing once
  # after the loop produces the same final content. The guard keeps the
  # original behavior of not creating a file when no rows matched at all.
  File.write("#{dir_path}/autonomies_#{variable}.json", json.to_json) unless json.empty?
end
|
require 'spec_helper'

# Integration specs for resolving ((placeholder)) values in deployment,
# runtime-config and errand manifests against the config server, including
# the SSL-trust failure mode.
describe 'using director with config server', type: :integration do
  let (:manifest_hash) { Bosh::Spec::Deployments.simple_manifest }
  let (:cloud_config) { Bosh::Spec::Deployments.simple_cloud_config }
  let(:config_server_helper) { Bosh::Spec::ConfigServerHelper.new(current_sandbox.port_provider.get_port(:config_server_port)) }

  # Typo fixed: 'cretificates' -> 'certificates'.
  context 'when config server certificates are not trusted' do
    with_reset_sandbox_before_each(config_server_enabled: true, with_config_server_trusted_certs: false)
    before do
      target_and_login
      upload_stemcell
    end
    it 'throws certificate validator error' do
      manifest_hash['jobs'].first['properties'] = {'test_property' => '((test_property))'}
      output, exit_code = deploy_from_scratch(manifest_hash: manifest_hash, cloud_config_hash: cloud_config, failure_expected: true, return_exit_code: true)
      expect(exit_code).to_not eq(0)
      expect(output).to include('Error 100: SSL certificate verification failed')
    end
  end

  context 'when config server certificates are trusted' do
    with_reset_sandbox_before_each(config_server_enabled: true)
    before do
      target_and_login
      upload_stemcell
    end

    context 'when deployment manifest has placeholders' do
      it 'raises an error when config server does not have values for placeholders' do
        manifest_hash['jobs'].first['properties'] = {'test_property' => '((test_property))'}
        output, exit_code = deploy_from_scratch(manifest_hash: manifest_hash, cloud_config_hash: cloud_config, failure_expected: true, return_exit_code: true)
        expect(exit_code).to_not eq(0)
        expect(output).to include('Failed to find keys in the config server: test_property')
      end
      it 'replaces placeholders in the manifest when config server has value for placeholders' do
        config_server_helper.put_value('test_property', 'cats are happy')
        manifest_hash['jobs'].first['properties'] = {'test_property' => '((test_property))'}
        deploy_from_scratch(manifest_hash: manifest_hash, cloud_config_hash: cloud_config)
        # Verify the rendered template, not just the deploy output.
        vm = director.vm('foobar', '0')
        template = vm.read_job_template('foobar', 'bin/foobar_ctl')
        expect(template).to include('test_property=cats are happy')
      end
    end

    context 'when runtime manifest has placeholders' do
      let(:runtime_config) { Bosh::Spec::Deployments.runtime_config_with_addon_placeholders }
      it 'replaces placeholders in the addons' do
        bosh_runner.run("upload release #{spec_asset('dummy2-release.tgz')}")
        # Both the addon's release name and its properties come from the config server.
        config_server_helper.put_value('release_name', 'dummy2')
        config_server_helper.put_value('addon_prop', 'i am Groot')
        expect(upload_runtime_config(runtime_config_hash: runtime_config)).to include("Successfully updated runtime config")
        config_server_helper.put_value('test_property', 'cats are happy')
        manifest_hash['jobs'].first['properties'] = {'test_property' => '((test_property))'}
        deploy_from_scratch(manifest_hash: manifest_hash, cloud_config_hash: cloud_config)
        vm = director.vm('foobar', '0')
        template = vm.read_job_template('dummy_with_properties', 'bin/dummy_with_properties_ctl')
        expect(template).to include("echo 'i am Groot'")
      end
    end

    context 'when running an errand that has placeholders' do
      let(:errand_manifest){ Bosh::Spec::Deployments.manifest_errand_with_placeholders }
      it 'replaces placeholder in properties' do
        config_server_helper.put_value('placeholder', 'test value')
        deploy_from_scratch(manifest_hash: errand_manifest, cloud_config_hash: cloud_config)
        errand_result = bosh_runner.run('run errand fake-errand-name --keep-alive')
        expect(errand_result).to include('test value')
      end
    end
  end
end
Links properties work with config server values
Links properties can be fetched from the config server
This commit does not include removing the resolved values
from the DB. They still exist in the spec_json row of the
instances table.
[#119700369]
https://www.pivotaltracker.com/story/show/119700369
Signed-off-by: Jamil Shamy <1a6c312d8b7ebde5d54d0b4f1a4353b7c149e3bf@pivotal.io>
require 'spec_helper'
# Integration specs for the director running against an external config server:
# certificate trust, manifest/runtime/errand placeholder resolution, and
# placeholder resolution for properties consumed through links.
describe 'using director with config server', type: :integration do
  # Builds and uploads the links release from the shared template directory.
  def upload_links_release
    FileUtils.cp_r(LINKS_RELEASE_TEMPLATE, ClientSandbox.links_release_dir, :preserve => true)
    bosh_runner.run_in_dir('create release --force', ClientSandbox.links_release_dir)
    bosh_runner.run_in_dir('upload release', ClientSandbox.links_release_dir)
  end

  let (:manifest_hash) { Bosh::Spec::Deployments.simple_manifest }
  let (:cloud_config) { Bosh::Spec::Deployments.simple_cloud_config }
  let (:config_server_helper) { Bosh::Spec::ConfigServerHelper.new(current_sandbox.port_provider.get_port(:config_server_port)) }

  context 'when config server certificates are not trusted' do
    with_reset_sandbox_before_each(config_server_enabled: true, with_config_server_trusted_certs: false)

    before do
      target_and_login
      upload_stemcell
    end

    it 'throws certificate validator error' do
      manifest_hash['jobs'].first['properties'] = {'test_property' => '((test_property))'}
      output, exit_code = deploy_from_scratch(manifest_hash: manifest_hash, cloud_config_hash: cloud_config, failure_expected: true, return_exit_code: true)
      expect(exit_code).to_not eq(0)
      expect(output).to include('Error 100: SSL certificate verification failed')
    end
  end

  context 'when config server certificates are trusted' do
    with_reset_sandbox_before_each(config_server_enabled: true)

    before do
      target_and_login
      upload_stemcell
    end

    context 'when deployment manifest has placeholders' do
      it 'raises an error when config server does not have values for placeholders' do
        manifest_hash['jobs'].first['properties'] = {'test_property' => '((test_property))'}
        output, exit_code = deploy_from_scratch(manifest_hash: manifest_hash, cloud_config_hash: cloud_config, failure_expected: true, return_exit_code: true)
        expect(exit_code).to_not eq(0)
        expect(output).to include('Failed to find keys in the config server: test_property')
      end

      it 'replaces placeholders in the manifest when config server has value for placeholders' do
        config_server_helper.put_value('test_property', 'cats are happy')
        manifest_hash['jobs'].first['properties'] = {'test_property' => '((test_property))'}
        deploy_from_scratch(manifest_hash: manifest_hash, cloud_config_hash: cloud_config)

        # Verify the rendered job template contains the resolved value.
        vm = director.vm('foobar', '0')
        template = vm.read_job_template('foobar', 'bin/foobar_ctl')
        expect(template).to include('test_property=cats are happy')
      end
    end

    context 'when runtime manifest has placeholders' do
      let(:runtime_config) { Bosh::Spec::Deployments.runtime_config_with_addon_placeholders }

      it 'replaces placeholders in the addons' do
        bosh_runner.run("upload release #{spec_asset('dummy2-release.tgz')}")
        config_server_helper.put_value('release_name', 'dummy2')
        config_server_helper.put_value('addon_prop', 'i am Groot')

        expect(upload_runtime_config(runtime_config_hash: runtime_config)).to include("Successfully updated runtime config")

        config_server_helper.put_value('test_property', 'cats are happy')
        manifest_hash['jobs'].first['properties'] = {'test_property' => '((test_property))'}
        deploy_from_scratch(manifest_hash: manifest_hash, cloud_config_hash: cloud_config)

        vm = director.vm('foobar', '0')
        template = vm.read_job_template('dummy_with_properties', 'bin/dummy_with_properties_ctl')
        expect(template).to include("echo 'i am Groot'")
      end
    end

    context 'when running an errand that has placeholders' do
      let(:errand_manifest){ Bosh::Spec::Deployments.manifest_errand_with_placeholders }

      it 'replaces placeholder in properties' do
        config_server_helper.put_value('placeholder', 'test value')
        deploy_from_scratch(manifest_hash: errand_manifest, cloud_config_hash: cloud_config)
        errand_result = bosh_runner.run('run errand fake-errand-name --keep-alive')
        expect(errand_result).to include('test value')
      end
    end
  end

  context 'when links exist' do
    with_reset_sandbox_before_each(config_server_enabled: true)

    # Cloud config with one AZ, static IPs, and an extra dynamic network so the
    # jobs below can attach to both network types.
    let(:cloud_config) do
      cloud_config_hash = Bosh::Spec::Deployments.simple_cloud_config
      cloud_config_hash['azs'] = [{ 'name' => 'z1' }]
      cloud_config_hash['networks'].first['subnets'].first['static'] = ['192.168.1.10', '192.168.1.11', '192.168.1.12', '192.168.1.13']
      cloud_config_hash['networks'].first['subnets'].first['az'] = 'z1'
      cloud_config_hash['compilation']['az'] = 'z1'
      cloud_config_hash['networks'] << {
        'name' => 'dynamic-network',
        'type' => 'dynamic',
        'subnets' => [{'az' => 'z1'}]
      }
      cloud_config_hash
    end

    # Single-instance job that both provides and consumes a link, with a
    # placeholder inside the provided property.
    let(:my_job) do
      job_spec = Bosh::Spec::Deployments.simple_job(
        name: 'my_job',
        templates: [
          {'name' => 'http_server_with_provides'},
          {'name' => 'http_proxy_with_requires'},
        ],
        instances: 1
      )
      job_spec['azs'] = ['z1']
      job_spec['properties'] = {'listen_port' => 9035, 'name_space' => {'fibonacci' => '((fibonacci_placeholder))'}}
      job_spec
    end

    let(:manifest) do
      manifest = Bosh::Spec::NetworkingManifest.deployment_manifest
      manifest['jobs'] = [my_job]
      manifest['properties'] = {'listen_port' => 9999}
      manifest
    end

    before do
      target_and_login
      upload_links_release
      upload_stemcell
      upload_cloud_config(cloud_config_hash: cloud_config)
    end

    it 'replaces the placeholder values of properties consumed through links' do
      config_server_helper.put_value('fibonacci_placeholder', 'fibonacci_value')
      deploy_simple_manifest(manifest_hash: manifest)

      link_vm = director.vm('my_job', '0')
      template = YAML.load(link_vm.read_job_template('http_proxy_with_requires', 'config/config.yml'))
      expect(template['links']['properties']['fibonacci']).to eq('fibonacci_value')
    end

    context 'when manual links are involved' do
      # Job whose consumed link is defined manually in the manifest, with
      # placeholders inside the manual link's properties.
      let (:job_with_manual_consumes_link) do
        job_spec = Bosh::Spec::Deployments.simple_job(
          name: 'property_job',
          templates: [{
            'name' => 'consumer',
            'consumes' => {
              'provider' => {
                'properties' => {'a' => '((a_placeholder))', 'b' => '((b_placeholder))', 'c' => '((c_placeholder))'},
                'instances' => [{'name' => 'external_db', 'address' => '192.168.15.4'}],
                'networks' => {'network_1' => 2, 'network_2' => 3}
              }
            }
          }],
          instances: 1,
          static_ips: ['192.168.1.10'],
          properties: {}
        )
        job_spec['azs'] = ['z1']
        job_spec['networks'] << {
          'name' => 'dynamic-network',
          'default' => ['dns', 'gateway']
        }
        job_spec
      end

      it 'resolves the properties defined inside the links section of the deployment manifest' do
        config_server_helper.put_value('a_placeholder', 'a_value')
        config_server_helper.put_value('b_placeholder', 'b_value')
        config_server_helper.put_value('c_placeholder', 'c_value')

        manifest['jobs'] = [job_with_manual_consumes_link]
        deploy_simple_manifest(manifest_hash: manifest)

        link_vm = director.vm('property_job', '0')
        template = YAML.load(link_vm.read_job_template('consumer', 'config.yml'))
        expect(template['a']).to eq('a_value')
        expect(template['b']).to eq('b_value')
        expect(template['c']).to eq('c_value')
      end
    end
  end
end
|
require 'spec_helper'
require 'pact_broker/pacticipants/repository'
require 'support/test_data_builder'
# Specs for PactBroker::Pacticipants::Repository: creation (including race
# conditions), label-based lookup, name lookup, and version listing.
module PactBroker
  module Pacticipants
    describe Repository do
      describe "#create" do
        let(:repository) { Repository.new }

        subject { repository.create(name: "Foo") }

        context "when the pacticipant does not already exist" do
          before do
            TestDataBuilder.new.create_pacticipant("Bar")
          end

          subject { repository.create(name: "Foo") }

          it "returns the new pacticipant" do
            expect(subject).to be_a(PactBroker::Domain::Pacticipant)
            expect(subject.name).to eq "Foo"
          end
        end

        context "when a race condition occurs and the pacticipant was already created by another request" do
          before do
            # Pre-create "Foo" to simulate another request winning the race.
            TestDataBuilder.new.create_pacticipant("Foo")
          end

          it "does not raise an error" do
            subject
          end

          it "returns the existing pacticipant" do
            expect(subject).to be_a(PactBroker::Domain::Pacticipant)
            expect(subject.name).to eq "Foo"
          end
        end
      end

      describe "#find" do
        before do
          # Three pacticipants; only Foo and Bar carry the "in" label.
          TestDataBuilder.new
            .create_pacticipant("Foo")
            .create_label("in")
            .create_pacticipant("Bar")
            .create_label("in")
            .create_label("blah")
            .create_pacticipant("Wiffle")
            .create_label("out")
        end

        subject { Repository.new.find label_name: "in" }

        it "returns the pacticipants with the given label" do
          expect(subject.collect(&:name)).to eq ["Bar", "Foo"]
        end
      end

      describe "#find_by_name" do
        before do
          # NOTE(review): `td` is presumably a memoized TestDataBuilder
          # provided by spec support — confirm against spec_helper.
          td.create_pacticipant("Foo Bar")
        end

        # Lookup uses lower case to exercise case-sensitivity behaviour.
        subject { Repository.new.find_by_name('foo bar') }

        context "when the name is a different case" do
          context "with case sensitivity turned on" do
            before do
              allow(PactBroker.configuration).to receive(:use_case_sensitive_resource_names).and_return(true)
            end

            it "returns nil" do
              expect(subject).to be nil
            end
          end

          context "with case sensitivity turned off" do
            before do
              allow(PactBroker.configuration).to receive(:use_case_sensitive_resource_names).and_return(false)
            end

            it "returns the pacticipant" do
              expect(subject).to_not be nil
              expect(subject.name).to eq "Foo Bar"
            end
          end

          context "with case sensitivity turned off and multiple records found", skip: DB.mysql? do
            before do
              td.create_pacticipant("Foo bar")
              allow(PactBroker.configuration).to receive(:use_case_sensitive_resource_names).and_return(false)
            end

            it "raises an error" do
              expect { subject }.to raise_error PactBroker::Error, /Found multiple pacticipants.*foo bar/
            end
          end

          context "with case sensitivity turned off no record found" do
            subject { Repository.new.find_by_name('blah') }

            it { is_expected.to be nil }
          end
        end
      end

      describe "#pacticipant_names" do
        before do
          TestDataBuilder.new
            .create_pacticipant("Plants")
            .create_pacticipant("Animals")
        end

        subject { Repository.new.pacticipant_names }

        it "returns an array of pacticipant names" do
          # Sorted alphabetically, not in insertion order.
          expect(subject).to eq ["Animals", "Plants"]
        end
      end

      describe "#find_all_pacticipant_versions_in_reverse_order" do
        before do
          TestDataBuilder.new
            .create_consumer("Foo")
            .create_consumer_version("1.2.3")
            .create_consumer_version("4.5.6")
            .create_consumer("Bar")
            .create_consumer_version("8.9.0")
        end

        subject { Repository.new.find_all_pacticipant_versions_in_reverse_order "Foo" }

        it "returns all the application versions for the given consumer" do
          expect(subject.collect(&:number)).to eq ["4.5.6", "1.2.3"]
        end
      end
    end
  end
end
test: add specs for finding a pacticipant by name with case-sensitive and case-insensitive configurations
require 'spec_helper'
require 'pact_broker/pacticipants/repository'
require 'support/test_data_builder'
# Specs for PactBroker::Pacticipants::Repository: creation (including race
# conditions), label-based lookup, case-sensitivity of name lookup (including
# underscore-vs-hyphen handling), and version listing.
module PactBroker
  module Pacticipants
    describe Repository do
      describe "#create" do
        let(:repository) { Repository.new }

        subject { repository.create(name: "Foo") }

        context "when the pacticipant does not already exist" do
          before do
            TestDataBuilder.new.create_pacticipant("Bar")
          end

          subject { repository.create(name: "Foo") }

          it "returns the new pacticipant" do
            expect(subject).to be_a(PactBroker::Domain::Pacticipant)
            expect(subject.name).to eq "Foo"
          end
        end

        context "when a race condition occurs and the pacticipant was already created by another request" do
          before do
            # Pre-create "Foo" to simulate another request winning the race.
            TestDataBuilder.new.create_pacticipant("Foo")
          end

          it "does not raise an error" do
            subject
          end

          it "returns the existing pacticipant" do
            expect(subject).to be_a(PactBroker::Domain::Pacticipant)
            expect(subject.name).to eq "Foo"
          end
        end
      end

      describe "#find" do
        before do
          # Three pacticipants; only Foo and Bar carry the "in" label.
          TestDataBuilder.new
            .create_pacticipant("Foo")
            .create_label("in")
            .create_pacticipant("Bar")
            .create_label("in")
            .create_label("blah")
            .create_pacticipant("Wiffle")
            .create_label("out")
        end

        subject { Repository.new.find label_name: "in" }

        it "returns the pacticipants with the given label" do
          expect(subject.collect(&:name)).to eq ["Bar", "Foo"]
        end
      end

      describe "#find_by_name" do
        before do
          # NOTE(review): `td` is presumably a memoized TestDataBuilder
          # provided by spec support — confirm against spec_helper.
          td.create_pacticipant("Foo-Bar")
        end

        # Lookup uses lower case to exercise case-sensitivity behaviour.
        subject { Repository.new.find_by_name('foo-bar') }

        context "when the name is a different case" do
          context "with case sensitivity turned on" do
            before do
              allow(PactBroker.configuration).to receive(:use_case_sensitive_resource_names).and_return(true)
            end

            it "returns nil" do
              expect(subject).to be nil
            end
          end

          context "with case sensitivity turned off" do
            before do
              allow(PactBroker.configuration).to receive(:use_case_sensitive_resource_names).and_return(false)
            end

            it "returns the pacticipant" do
              expect(subject).to_not be nil
              expect(subject.name).to eq "Foo-Bar"
            end
          end

          context "with case sensitivity turned off and multiple records found", skip: DB.mysql? do
            # Can't be created in MySQL - duplicate record
            before do
              td.create_pacticipant("Foo-bar")
              allow(PactBroker.configuration).to receive(:use_case_sensitive_resource_names).and_return(false)
            end

            it "raises an error" do
              expect { subject }.to raise_error PactBroker::Error, /Found multiple pacticipants.*foo-bar/
            end
          end

          # Underscore must not act as a SQL LIKE wildcard and match the hyphen.
          context "with case sensitivity turned off and searching for a name with an underscore" do
            before do
              allow(PactBroker.configuration).to receive(:use_case_sensitive_resource_names).and_return(false)
            end

            subject { Repository.new.find_by_name('foo_bar') }

            it { is_expected.to be nil }
          end

          context "with case sensitivity turned on and searching for a name with an underscore" do
            subject { Repository.new.find_by_name('foo_bar') }

            it { is_expected.to be nil }
          end

          context "with case sensitivity turned off no record found" do
            subject { Repository.new.find_by_name('blah') }

            it { is_expected.to be nil }
          end
        end
      end

      describe "#pacticipant_names" do
        before do
          TestDataBuilder.new
            .create_pacticipant("Plants")
            .create_pacticipant("Animals")
        end

        subject { Repository.new.pacticipant_names }

        it "returns an array of pacticipant names" do
          # Sorted alphabetically, not in insertion order.
          expect(subject).to eq ["Animals", "Plants"]
        end
      end

      describe "#find_all_pacticipant_versions_in_reverse_order" do
        before do
          TestDataBuilder.new
            .create_consumer("Foo")
            .create_consumer_version("1.2.3")
            .create_consumer_version("4.5.6")
            .create_consumer("Bar")
            .create_consumer_version("8.9.0")
        end

        subject { Repository.new.find_all_pacticipant_versions_in_reverse_order "Foo" }

        it "returns all the application versions for the given consumer" do
          expect(subject.collect(&:number)).to eq ["4.5.6", "1.2.3"]
        end
      end
    end
  end
end
|
require 'spec_helper'
require 'pry'
# Specs for Services::Absences: parses the absences page into structured data.
module UniaraVirtualParser
  module Services
    describe Absences do
      describe '.subject' do
        let(:subject) { described_class.absences 'batman' }

        before do
          # Serve the recorded HTML fixture instead of hitting the live site.
          stub_request(:get,"http://virtual.uniara.com.br/alunos/consultas/faltas").
            to_return(status: 200, body: uniara_virtual_fixture("absences.html").read, headers:{})
        end

        it 'fetches the content of uniara virtual and brings the array with absences' do
          # Removed leftover `binding.pry` debug breakpoint, which would halt
          # the suite (or crash it where pry is unavailable).
          expect(subject.first.absences.first.name).to eq "REDES DE COMPUTADORES"
          expect(subject.first.absences.first.frequency).to eq "80h"
          expect(subject.length).to eq 2
        end
      end
    end
  end
end
Update absences_spec.rb
require 'spec_helper'
# Specs for Services::Absences: parses the absences page into structured data.
module UniaraVirtualParser
  module Services
    describe Absences do
      describe '.subject' do
        let(:subject) { described_class.absences 'batman' }

        before do
          # Serve the recorded HTML fixture instead of hitting the live site.
          stub_request(:get,"http://virtual.uniara.com.br/alunos/consultas/faltas").
            to_return(status: 200, body: uniara_virtual_fixture("absences.html").read, headers:{})
        end

        it 'fetches the content of uniara virtual and brings the array with absences' do
          expect(subject.first.absences.first.name).to eq "REDES DE COMPUTADORES"
          expect(subject.first.absences.first.frequency).to eq "80h"
          expect(subject.length).to eq 2
        end
      end
    end
  end
end
|
#
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
# NOTE: most of the tests of this functionality are baked into the func tests for the dnf package provider
# run this test only for following platforms.
exclude_test = !(%w{rhel fedora amazon}.include?(ohai[:platform_family]) && File.exist?("/usr/bin/dnf"))

describe Chef::Provider::Package::Dnf::PythonHelper, :requires_root, external: exclude_test do
  let(:helper) { Chef::Provider::Package::Dnf::PythonHelper.instance }

  it "propagates stacktraces on stderr from the forked subprocess", :rhel do
    # Point the helper at a command that immediately raises, so the
    # subprocess backtrace on stderr must surface in the raised error.
    allow(helper).to receive(:dnf_command).and_return("ruby -e 'raise \"your hands in the air\"'")
    expect { helper.package_query(:whatprovides, "tcpdump") }.to raise_error(/your hands in the air/)
  end

  it "compares EVRAs with dots in the release correctly" do
    # el8_3.1 must sort higher than el8 even though the release contains dots.
    expect(helper.compare_versions("0:1.8.29-6.el8.x86_64", "0:1.8.29-6.el8_3.1.x86_64")).to eql(-1)
  end
end
Reset the DNF helper to greenify tests
The more stable helper now survives the functional tests, which throws this test off.
Signed-off-by: Lamont Granquist <0ab8dc438f73addc98d9ad5925ec8f2b97991703@scriptkiddie.org>
#
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
# NOTE: most of the tests of this functionality are baked into the func tests for the dnf package provider
# run this test only for following platforms.
exclude_test = !(%w{rhel fedora amazon}.include?(ohai[:platform_family]) && File.exist?("/usr/bin/dnf"))

describe Chef::Provider::Package::Dnf::PythonHelper, :requires_root, external: exclude_test do
  let(:helper) { Chef::Provider::Package::Dnf::PythonHelper.instance }

  it "propagates stacktraces on stderr from the forked subprocess", :rhel do
    # Reset the memoized singleton so helper state left over from earlier
    # test runs does not mask the stubbed dnf_command below.
    Singleton.__init__(Chef::Provider::Package::Dnf::PythonHelper)
    # Point the helper at a command that immediately raises, so the
    # subprocess backtrace on stderr must surface in the raised error.
    allow(helper).to receive(:dnf_command).and_return("ruby -e 'raise \"your hands in the air\"'")
    expect { helper.package_query(:whatprovides, "tcpdump") }.to raise_error(/your hands in the air/)
  end

  it "compares EVRAs with dots in the release correctly" do
    # el8_3.1 must sort higher than el8 even though the release contains dots.
    expect(helper.compare_versions("0:1.8.29-6.el8.x86_64", "0:1.8.29-6.el8_3.1.x86_64")).to eql(-1)
  end
end
|
require 'spec_helper'
# Specs for Request::Protocol.get: known scheme strings map to protocol
# constants; unknown strings raise KeyError.
describe Request::Protocol, '.get' do
  let(:object) { described_class }

  subject { object.get(input) }

  context 'with "http"' do
    let(:input) { 'http' }

    it { should eql(Request::Protocol::HTTP) }
  end

  context 'with "https"' do
    let(:input) { 'https' }

    it { should eql(Request::Protocol::HTTPS) }
  end

  context 'with "ftp"' do
    let(:input) { 'ftp' }

    it 'should raise error' do
      # The KeyError message format differs between MRI and JRuby, so derive
      # the expected message from a real Hash#fetch failure instead of
      # hard-coding MRI's wording ('key not found: "ftp"').
      expectation =
        begin
          {}.fetch('ftp')
        rescue KeyError => error
          error
        end
      expect { subject }.to raise_error(KeyError, expectation.message)
    end
  end
end
Fix specs to accept jruby exception message formats
require 'spec_helper'
# Specs for Request::Protocol.get: known scheme strings map to protocol
# constants; unknown strings raise KeyError.
describe Request::Protocol, '.get' do
  let(:object) { described_class }

  subject { object.get(input) }

  context 'with "http"' do
    let(:input) { 'http' }

    it { should eql(Request::Protocol::HTTP) }
  end

  context 'with "https"' do
    let(:input) { 'https' }

    it { should eql(Request::Protocol::HTTPS) }
  end

  context 'with "ftp"' do
    let(:input) { 'ftp' }

    it 'should raise error' do
      # jruby has different message format
      # Derive the expected message from a real Hash#fetch failure so the
      # assertion is portable across Ruby implementations.
      expectation =
        begin
          {}.fetch('ftp')
        rescue KeyError => error
          error
        end
      expect { subject }.to raise_error(KeyError, expectation.message)
    end
  end
end
|
#! /usr/bin/env ruby
#
# jenkins-metrics
#
# DESCRIPTION:
# This plugin extracts the metrics from a Jenkins Master with Metrics plugin installed
# Anonymous user must have Metrics permissions.
#
# OUTPUT:
# metric data
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: rest-client
# gem: socket
# gem: json
#
# USAGE:
# #YELLOW
#
# NOTES:
#
# LICENSE:
# Copyright 2015, Cornel Foltea (cornel.foltea@gmail.com)
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'sensu-plugin/metric/cli'
require 'rest-client'
require 'socket'
require 'json'
# Graphite metrics collector for a Jenkins master running the Metrics plugin.
# Fetches the JSON metrics endpoint and emits every numeric 'count'/'value'
# leaf as a dotted Graphite metric.
class JenkinsMetrics < Sensu::Plugin::Metric::CLI::Graphite
  # Top-level JSON keys that are metadata, not metric groups.
  SKIP_ROOT_KEYS = %w(version)

  # NOTE(review): both :scheme and :server declare the short flag '-s';
  # one definition shadows the other — confirm and assign distinct letters.
  option :scheme,
         description: 'Metric naming scheme',
         short: '-s SCHEME',
         long: '--scheme SCHEME',
         default: "#{Socket.gethostname}.jenkins"

  option :server,
         description: 'Jenkins Host',
         short: '-s SERVER',
         long: '--server SERVER',
         default: 'localhost'

  option :port,
         description: 'Jenkins Port',
         short: '-p PORT',
         long: '--port PORT',
         default: '8080'

  option :uri,
         description: 'Jenkins Metrics URI',
         short: '-u URI',
         long: '--uri URI',
         default: '/metrics/currentUser/metrics'

  # Fetches the metrics JSON, walks group -> metric -> field, and outputs
  # numeric 'count'/'value' fields as "<scheme>.<group>.<metric>.<field>".
  def run
    begin
      r = RestClient::Resource.new("http://#{config[:server]}:#{config[:port]}#{config[:uri]}", timeout: 5).get
      all_metrics = JSON.parse(r)
      metric_groups = all_metrics.keys - SKIP_ROOT_KEYS
      metric_groups.each do |metric_groups_key|
        all_metrics[metric_groups_key].each do |metric_key, metric_value|
          metric_value.each do |metric_hash_key, metric_hash_value|
            # Only emit numeric leaves named 'count' or 'value'.
            output([config[:scheme], metric_groups_key, metric_key, metric_hash_key].join('.'), metric_hash_value) \
              if metric_hash_value.is_a?(Numeric) && (metric_hash_key == 'count' || metric_hash_key == 'value')
          end
        end
      end
      ok
    rescue Errno::ECONNREFUSED
      critical "Jenkins is not responding"
    rescue RestClient::RequestTimeout
      critical "Jenkins Connection timed out"
    end
    # NOTE(review): this trailing ok looks redundant — ok/critical above
    # already terminate the check. Confirm before removing.
    ok
  end
end
Use single quotes, as preferred by RuboCop
#! /usr/bin/env ruby
#
# jenkins-metrics
#
# DESCRIPTION:
# This plugin extracts the metrics from a Jenkins Master with Metrics plugin installed
# Anonymous user must have Metrics permissions.
#
# OUTPUT:
# metric data
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: rest-client
# gem: socket
# gem: json
#
# USAGE:
# #YELLOW
#
# NOTES:
#
# LICENSE:
# Copyright 2015, Cornel Foltea (cornel.foltea@gmail.com)
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'sensu-plugin/metric/cli'
require 'rest-client'
require 'socket'
require 'json'
# Graphite metrics collector for a Jenkins master running the Metrics plugin.
# Fetches the JSON metrics endpoint and emits every numeric 'count'/'value'
# leaf as a dotted Graphite metric.
class JenkinsMetrics < Sensu::Plugin::Metric::CLI::Graphite
  # Top-level JSON keys that are metadata, not metric groups.
  SKIP_ROOT_KEYS = %w(version)

  # NOTE(review): both :scheme and :server declare the short flag '-s';
  # one definition shadows the other — confirm and assign distinct letters.
  option :scheme,
         description: 'Metric naming scheme',
         short: '-s SCHEME',
         long: '--scheme SCHEME',
         default: "#{Socket.gethostname}.jenkins"

  option :server,
         description: 'Jenkins Host',
         short: '-s SERVER',
         long: '--server SERVER',
         default: 'localhost'

  option :port,
         description: 'Jenkins Port',
         short: '-p PORT',
         long: '--port PORT',
         default: '8080'

  option :uri,
         description: 'Jenkins Metrics URI',
         short: '-u URI',
         long: '--uri URI',
         default: '/metrics/currentUser/metrics'

  # Fetches the metrics JSON, walks group -> metric -> field, and outputs
  # numeric 'count'/'value' fields as "<scheme>.<group>.<metric>.<field>".
  def run
    begin
      r = RestClient::Resource.new("http://#{config[:server]}:#{config[:port]}#{config[:uri]}", timeout: 5).get
      all_metrics = JSON.parse(r)
      metric_groups = all_metrics.keys - SKIP_ROOT_KEYS
      metric_groups.each do |metric_groups_key|
        all_metrics[metric_groups_key].each do |metric_key, metric_value|
          metric_value.each do |metric_hash_key, metric_hash_value|
            # Only emit numeric leaves named 'count' or 'value'.
            output([config[:scheme], metric_groups_key, metric_key, metric_hash_key].join('.'), metric_hash_value) \
              if metric_hash_value.is_a?(Numeric) && (metric_hash_key == 'count' || metric_hash_key == 'value')
          end
        end
      end
      ok
    rescue Errno::ECONNREFUSED
      critical 'Jenkins is not responding'
    rescue RestClient::RequestTimeout
      critical 'Jenkins Connection timed out'
    end
    # NOTE(review): this trailing ok looks redundant — ok/critical above
    # already terminate the check. Confirm before removing.
    ok
  end
end
|
#!/usr/bin/env ruby
#
# Pull php-fpm metrics from php-fpm status page
# ===
#
# Requires `crack` gem to parse xml.
#
# Copyright 2014 Ilari Mäkelä <ilari@i28.fi>
#
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'sensu-plugin/metric/cli'
require 'net/https'
require 'uri'
require 'crack'
# Graphite metrics collector for php-fpm: fetches the status page in XML mode,
# following up to 10 redirects, and emits the standard pool counters under the
# configured scheme prefix.
class PhpfpmMetrics < Sensu::Plugin::Metric::CLI::Graphite
  option :url,
         :short => "-u URL",
         :long => "--url URL",
         :description => "Full URL to php-fpm status page, example: http://yoursite.com/php-fpm-status"

  option :scheme,
         :description => "Metric naming scheme, text to prepend to metric",
         :short => "-s SCHEME",
         :long => "--scheme SCHEME",
         :default => "#{Socket.gethostname}.php_fpm"

  def run
    found = false
    attempts = 0
    # Follow redirects (via the Location header) for at most 10 attempts.
    until (found || attempts >= 10)
      attempts += 1
      if config[:url]
        uri = URI.parse(config[:url])
        http = Net::HTTP.new(uri.host, uri.port)
        if uri.scheme == 'https'
          http.use_ssl = true
          # Certificate verification is deliberately disabled here.
          http.verify_mode = OpenSSL::SSL::VERIFY_NONE
        end
        request = Net::HTTP::Get.new(uri.request_uri + '?xml')
        response = http.request(request)
        if response.code == "200"
          found = true
        elsif response.header['location'] != nil
          config[:url] = response.header['location']
        end
      end
    end # until
    # NOTE(review): if :url was never supplied, `response` is undefined here
    # and this raises NameError — confirm whether a `critical` guard is wanted.
    stats = Crack::XML.parse(response.body)
    # Removed unused local `path` (it was assigned but never read).
    stat = [
      'start_since',
      'accepted_conn',
      'listen_queue',
      'max_listen_queue',
      'listen_queue_len',
      'idle_processes',
      'active_processes',
      'total_processes',
      'max_active_processes',
      'max_children_reached'
    ]
    stat.each do |name|
      output "#{config[:scheme]}.#{name}", stats['status'][name]
    end
    ok
  end
end
Fixed rubocop test offences
#!/usr/bin/env ruby
#
# Pull php-fpm metrics from php-fpm status page
# ===
#
# Requires `crack` gem to parse xml.
#
# Copyright 2014 Ilari Mäkelä <ilari at i28.fi>
#
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'sensu-plugin/metric/cli'
require 'net/https'
require 'uri'
require 'crack'
# Graphite metrics collector for php-fpm: fetches the status page in XML mode,
# following up to 10 redirects, and emits the standard pool counters under the
# configured scheme prefix.
class PhpfpmMetrics < Sensu::Plugin::Metric::CLI::Graphite
  option :url,
         :short => "-u URL",
         :long => "--url URL",
         :description => "Full URL to php-fpm status page, example: http://yoursite.com/php-fpm-status"

  option :scheme,
         :description => "Metric naming scheme, text to prepend to metric",
         :short => "-s SCHEME",
         :long => "--scheme SCHEME",
         # NOTE(review): uses Socket but this file only requires net/https —
         # presumably loaded transitively; confirm or add `require 'socket'`.
         :default => "#{Socket.gethostname}.php_fpm"

  def run
    found = false
    attempts = 0
    # Follow redirects (via the Location header) for at most 10 attempts.
    until (found || attempts >= 10)
      attempts+=1
      if config[:url]
        uri = URI.parse(config[:url])
        http = Net::HTTP.new(uri.host, uri.port)
        if uri.scheme == 'https'
          http.use_ssl = true
          # Certificate verification is deliberately disabled here.
          http.verify_mode = OpenSSL::SSL::VERIFY_NONE
        end
        request = Net::HTTP::Get.new(uri.request_uri + '?xml')
        response = http.request(request)
        if response.code=="200"
          found = true
        elsif response.header['location']!=nil
          config[:url] = response.header['location']
        end
      end
    end # until
    # NOTE(review): if :url was never supplied, `response` is undefined here
    # and this raises NameError — confirm whether a `critical` guard is wanted.
    stats = Crack::XML.parse(response.body)
    stat = [
      'start_since',
      'accepted_conn',
      'listen_queue',
      'max_listen_queue',
      'listen_queue_len',
      'idle_processes',
      'active_processes',
      'total_processes',
      'max_active_processes',
      'max_children_reached'
    ]
    stat.each do |name|
      output "#{config[:scheme]}.#{name}", stats['status'][name]
    end
    ok
  end
end
|
# Memoized working directory to run the tests from
# NOTE: currently there is an issue with beaker's create_tmpdir_on helper on cygwin
# and OSX platforms: the `chown` command always fails with an error about not
# recognizing the Administrator:Administrator user/group. Also, the call to
# check user presence via `getent` also fails. Until this is fixed, we add this
# shim that delegates to a non-`chown`/non-`getent`-executing version for the
# purposes of our test setup.
#
# TODO: fix via: https://tickets.puppetlabs.com/browse/BKR-496
# Creates a temp dir on each host and returns it (see the header note above
# about why beaker's create_tmpdir_on cannot be used on cygwin/OSX).
#
# hosts       - a single host or array of hosts
# path_prefix - prefix for the created directory name
# user        - owner of the directory; defaults to the host's login user
def tmpdir_on(hosts, path_prefix = '', user = nil)
  first_host = Array(hosts).first
  # Delegate to beaker's stock helper unless we are on cygwin or OSX.
  return create_tmpdir_on(hosts, path_prefix, user) unless \
    first_host.is_cygwin? || first_host.platform =~ %r{osx}

  block_on hosts do |host|
    # use default user logged into this host
    unless user
      user = host['user']
    end

    raise 'Host platform not supported by `tmpdir_on`.' unless defined? host.tmpdir

    host.tmpdir(path_prefix)
  end
end
# Memoized shared working directory on the workstation host for this
# acceptance run (global so it survives across example groups).
def target_dir
  $target_dir ||= tmpdir_on(workstation, 'pdk_acceptance') # rubocop:disable Style/GlobalVars
end
# Returns the package installation root for the given host: the cygwin-style
# Program Files path on Windows, the standard /opt path everywhere else.
def install_dir(host)
  windows = host.platform =~ %r{windows}
  windows ? '/cygdrive/c/Program\ Files/Puppet\ Labs/DevelopmentKit' : '/opt/puppetlabs/sdk'
end
# Path to the rubygems SSL cert directory inside the Ruby vendored with the
# package (Ruby 2.1.9 — update if the vendored Ruby version changes).
def pdk_rubygems_cert_dir(host)
  "#{install_dir(host)}/private/ruby/2.1.9/lib/ruby/2.1.0/rubygems/ssl_certs"
end
# Builds the shell prefix used to run commands on the host: puts the package's
# bin/ruby/git directories on PATH and cds into the shared working directory.
# On Windows hosts the actual command must be invoked through cmd.exe.
def command_prefix(host)
  command = "PATH=#{install_dir(host)}/bin:#{install_dir(host)}/private/ruby/2.1.9/bin:#{install_dir(host)}/private/git/bin:$PATH && cd #{target_dir} &&"
  # Replaced the double-negative `unless host.platform !~ ...` with the
  # equivalent positive match for readability.
  command = "#{command} cmd.exe /C" if host.platform =~ %r{windows}
  command
end
(maint) Update install directory for package testing
# Memoized working directory to run the tests from
# NOTE: currently there is an issue with beaker's create_tmpdir_on helper on cygwin
# and OSX platforms: the `chown` command always fails with an error about not
# recognizing the Administrator:Administrator user/group. Also, the call to
# check user presence via `getent` also fails. Until this is fixed, we add this
# shim that delegates to a non-`chown`/non-`getent`-executing version for the
# purposes of our test setup.
#
# TODO: fix via: https://tickets.puppetlabs.com/browse/BKR-496
# Creates a temp dir on each host and returns it (see the header note above
# about why beaker's create_tmpdir_on cannot be used on cygwin/OSX).
#
# hosts       - a single host or array of hosts
# path_prefix - prefix for the created directory name
# user        - owner of the directory; defaults to the host's login user
def tmpdir_on(hosts, path_prefix = '', user = nil)
  first_host = Array(hosts).first
  # Delegate to beaker's stock helper unless we are on cygwin or OSX.
  return create_tmpdir_on(hosts, path_prefix, user) unless \
    first_host.is_cygwin? || first_host.platform =~ %r{osx}

  block_on hosts do |host|
    # use default user logged into this host
    unless user
      user = host['user']
    end

    raise 'Host platform not supported by `tmpdir_on`.' unless defined? host.tmpdir

    host.tmpdir(path_prefix)
  end
end
# Memoized shared working directory on the workstation host for this
# acceptance run (global so it survives across example groups).
def target_dir
  $target_dir ||= tmpdir_on(workstation, 'pdk_acceptance') # rubocop:disable Style/GlobalVars
end
# Returns the PDK installation root for the given host: the cygwin-style
# Program Files path on Windows, the standard /opt path everywhere else.
def install_dir(host)
  windows = host.platform =~ %r{windows}
  windows ? '/cygdrive/c/Program\ Files/Puppet\ Labs/DevelopmentKit' : '/opt/puppetlabs/pdk'
end
# Path to the rubygems SSL cert directory inside the Ruby vendored with the
# PDK package (Ruby 2.1.9 — update if the vendored Ruby version changes).
def pdk_rubygems_cert_dir(host)
  "#{install_dir(host)}/private/ruby/2.1.9/lib/ruby/2.1.0/rubygems/ssl_certs"
end
# Builds the shell prefix used to run commands on the host: puts the PDK's
# bin/ruby/git directories on PATH and cds into the shared working directory.
# On Windows hosts the actual command must be invoked through cmd.exe.
def command_prefix(host)
  command = "PATH=#{install_dir(host)}/bin:#{install_dir(host)}/private/ruby/2.1.9/bin:#{install_dir(host)}/private/git/bin:$PATH && cd #{target_dir} &&"
  # Replaced the double-negative `unless host.platform !~ ...` with the
  # equivalent positive match for readability.
  command = "#{command} cmd.exe /C" if host.platform =~ %r{windows}
  command
end
|
require_relative '../item.rb'
# An item that can be equipped into an entity's outfit slot; equipping applies
# its StatChange to the wearer and unequipping reverts it.
class Equippable < Item
  def initialize(params = {})
    super(params)
    @name = params[:name] || "Equippable"
    # Equippables are non-consumable unless explicitly overridden.
    if params[:consumable].nil? then @consumable = false
    else @consumable = params[:consumable] end
    @stat_change = params[:stat_change] || StatChange.new({})
    @type = :equippable
  end

  # Equips this item on entity, swapping out (and returning to inventory)
  # whatever previously occupied the same outfit slot.
  def equip(entity)
    prev_item = entity.outfit[@type]
    entity.outfit[@type] = self
    alter_stats(self, entity, true)
    if (!prev_item.nil?)
      alter_stats(prev_item, entity, false)
      entity.add_item(prev_item)
    end
    print "#{entity.name} equips #{self.name}!\n\n"
  end

  # Removes this item from entity's outfit and reverts its stat changes.
  # Bug fix: delete the slot instead of assigning nil, so the outfit hash
  # does not accumulate keys holding nil values.
  def unequip(entity)
    entity.outfit.delete(@type)
    alter_stats(self, entity, false)
  end

  def use(entity)
    print "Type 'equip #{@name}' to equip this item.\n\n"
  end

  attr_accessor :stat_change, :type
end
# Defines the stats that change when equipping the equippable item.
# Missing entries default to 0 (no stat change).
class StatChange
  attr_accessor :attack, :defense

  def initialize(params)
    @attack = params[:attack] || 0
    @defense = params[:defense] || 0
  end

  # Two stat changes are equal when both deltas match.
  def ==(rhs)
    @attack == rhs.attack && @defense == rhs.defense
  end
end
# TODO: ensure nothing goes below zero.
# Applies (equipping = true) or reverts (equipping = false) the stat
# deltas carried by item.stat_change on the entity.
def alter_stats(item, entity, equipping)
  sign = equipping ? 1 : -1
  entity.attack += sign * item.stat_change.attack
  entity.defense += sign * item.stat_change.defense
end
Fix nil value bug
require_relative '../item.rb'
# An Item that can be worn/wielded by an Entity, altering its stats
# while equipped.
class Equippable < Item
# Accepts :name, :consumable, :stat_change (StatChange) in addition to
# whatever the Item superclass reads from params.
def initialize(params = {})
super(params)
@name = params[:name] || "Equippable"
if params[:consumable].nil? then @consumable = false
else @consumable = params[:consumable] end
@stat_change = params[:stat_change] || StatChange.new({})
@type = :equippable
end
# Equips this item, applying its stat deltas; any item previously in the
# slot has its deltas reverted and is returned to the inventory.
def equip(entity)
prev_item = entity.outfit[@type]
entity.outfit[@type] = self
alter_stats(self, entity, true)
if (!prev_item.nil?)
alter_stats(prev_item, entity, false)
entity.add_item(prev_item)
end
print "#{entity.name} equips #{@name}!\n\n"
end
# Unequips this item: the slot key is deleted (not set to nil, which would
# leave nil values in the outfit hash) and the stat deltas are reverted.
def unequip(entity)
entity.outfit.delete(@type)
alter_stats(self, entity, false)
print "#{entity.name} unequips #{@name}!\n\n"
end
# Equippables are not consumed on use; direct the player to `equip`.
def use(entity)
print "Type 'equip #{@name}' to equip this item.\n\n"
end
attr_accessor :stat_change, :type
end
# Defines the stats that change when equipping the equippable item.
# Missing entries default to 0 (no stat change).
class StatChange
  attr_accessor :attack, :defense

  def initialize(params)
    @attack = params[:attack] || 0
    @defense = params[:defense] || 0
  end

  # Two stat changes are equal when both deltas match.
  def ==(rhs)
    @attack == rhs.attack && @defense == rhs.defense
  end
end
# TODO: ensure nothing goes below zero.
# Applies (equipping = true) or reverts (equipping = false) the stat
# deltas carried by item.stat_change on the entity.
def alter_stats(item, entity, equipping)
if equipping
entity.attack += item.stat_change.attack
entity.defense += item.stat_change.defense
else
entity.attack -= item.stat_change.attack
entity.defense -= item.stat_change.defense
end
end
|
#!/usr/bin/env ruby
gem 'json'
require 'json'
require 'date'
require 'rexml/document'
# Extract CFBundleShortVersionString from a plist file
def info_plist_version(path)
  doc = REXML::Document.new(File.read(path))
  key_node = doc.root.elements["//key[text()='CFBundleShortVersionString']"]
  # The version string lives in the <string> element that follows the key.
  key_node.next_element.text
end
# Version from the git tag pointing at HEAD of the checkout at `path`,
# with any leading "v" stripped (e.g. "v1.2.0" -> "1.2.0").
# NOTE(review): assumes exactly one tag points at HEAD; with several, only
# the final newline of the list is chopped — confirm against real checkouts.
def git_tag_version(path)
`git -C #{path} tag --points-at HEAD`.chop.gsub(/^v?/, '')
end
# Formats one benchmark sample as a 2-decimal string, or the footnote
# marker '¹' (not applicable) when the library has no sample.
# Uses Hash#dig so a missing test class — not just a missing lib entry —
# also yields the marker instead of raising NoMethodError on nil.
def formatted_sample(samples, test, lib)
  sample = samples.dig("#{test}Tests", "test#{lib}")
  return '¹' unless sample # n/a
  '%.2f' % sample
end
# One report-table row of formatted samples, in fixed library column order.
# (The former `|| '¹'` fallback was dead code: formatted_sample already
# returns '¹' for missing samples and never returns nil.)
def formatted_samples(samples, test)
  libs = %w{GRDB SQLite FMDB SQLiteSwift CoreData Realm}
  libs.map { |lib| formatted_sample(samples, test, lib) }
end
# Parse input
samples = JSON.parse(STDIN.read)
# Now that we have samples, we are reasonably sure that we
# have checkouts for all dependencies.

# BUILD_ROOT: ask xcodebuild for the build settings of the comparison target.
exit 1 unless `xcodebuild -showBuildSettings -project Tests/Performance/GRDBPerformance/GRDBPerformance.xcodeproj -target GRDBOSXPerformanceComparisonTests -disableAutomaticPackageResolution` =~ /BUILD_ROOT = (.*)$/
BUILD_ROOT = $1

# DERIVED_DATA: walk up from BUILD_ROOT until the SourcePackages dir appears.
tmp = BUILD_ROOT
# File.exist? replaces File.exists?, which is deprecated and removed in Ruby 3.2.
while !File.exist?(File.join(tmp, 'SourcePackages'))
  parent = File.dirname(tmp)
  exit 1 if tmp == parent # reached the filesystem root without finding it
  tmp = parent
end
DERIVED_DATA = tmp

# SPM_CHECKOUTS: where SwiftPM checked out the dependencies.
SPM_CHECKOUTS = File.join(DERIVED_DATA, 'SourcePackages', 'checkouts')
# Extract versions
grdb_version = info_plist_version('Support/Info.plist')
fmdb_version = info_plist_version("#{SPM_CHECKOUTS}/fmdb/src/fmdb/Info.plist")
sqlite_swift_version = git_tag_version("#{SPM_CHECKOUTS}/SQLite.swift")
realm_version = git_tag_version("#{SPM_CHECKOUTS}/realm-cocoa")
# Xcode version, from `xcodebuild -version` output.
`xcodebuild -version` =~ /Xcode (.*)$/; xcode_version = $1
# Hardware model name, looked up from the machine serial via Apple's support
# service (https://apple.stackexchange.com/a/98089).
# NOTE(review): `hardware` stays nil when the lookup fails — no fallback here.
`curl -s https://support-sp.apple.com/sp/product?cc=$(system_profiler SPHardwareDataType | awk '/Serial/ {print $4}' | cut -c 9-)` =~ /<configCode>(.*)<\/configCode>/; hardware = $1
# Log the collected versions to stderr so stdout stays a clean report.
STDERR.puts "GRDB #{grdb_version}"
STDERR.puts "FMDB #{fmdb_version}"
STDERR.puts "SQLite.swift #{sqlite_swift_version}"
STDERR.puts "Realm #{realm_version}"
STDERR.puts "Xcode #{xcode_version}"
STDERR.puts "Hardware #{hardware}"
# Generate
puts <<-REPORT
# Comparing the Performances of Swift SQLite libraries
*Last updated #{Date.today.strftime('%B %-d, %Y')}*
Below are performance benchmarks made on for [GRDB #{grdb_version}](https://github.com/groue/GRDB.swift), [FMDB #{fmdb_version}](https://github.com/ccgus/fmdb), and [SQLite.swift #{sqlite_swift_version}](https://github.com/stephencelis/SQLite.swift). They are compared to Core Data, [Realm #{realm_version}](https://realm.io) and the raw use of the SQLite C API from Swift.
This report was generated on a #{hardware}, with Xcode #{xcode_version}, by running the following command:
```sh
make test_performance | Tests/parsePerformanceTests.rb | Tests/generatePerformanceReport.rb
```
All tests use the default settings of each library. For each library, we:
- Build and consume database rows with raw SQL and column indexes (aiming at the best performance)
- Build and consume database rows with column names (sacrificing performance for maintainability)
- Build and consume records values to and from database rows (aiming at the shortest code from database to records)
- Build and consume records values to and from database rows, with help from the Codable standard protocol
- Build and consume records values to and from database rows, with [change tracking](https://github.com/groue/GRDB.swift/blob/master/README.md#record-comparison) (records know if they have unsaved changes)
As a bottom line, the raw SQLite C API is used as efficiently as possible, without any error checking.
| | GRDB | Raw SQLite | FMDB | SQLite.swift | Core Data | Realm |
|:-------------------------------- | ----:| ----------:| ----:| ------------:| ---------:| -----:|
| **Column indexes** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchPositionalValues').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertPositionalValues').join(" | ")} |
| **Column names** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchNamedValues').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertNamedValues').join(" | ")} |
| **Records** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchRecordStruct').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertRecordStruct').join(" | ")} |
| **Codable Records** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchRecordDecodable').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertRecordEncodable').join(" | ")} |
| **Optimized Records** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchRecordOptimized').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertRecordOptimized').join(" | ")} |
| **Records with change tracking** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchRecordClass').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertRecordClass').join(" | ")} |
¹ Not applicable
- **Column indexes**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchPositionalValuesTests.swift))
This test fetches 200000 rows of 10 ints and extracts each int given its position in the row.
It uses FMDB's `-[FMResultSet longForColumnIndex:]`, GRDB's `Row.value(atIndex:)`, and the low-level SQL API of SQLite.swift.
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertPositionalValuesTests.swift))
This test inserts 50000 rows of 10 ints, by setting query arguments given their position.
It uses FMDB's `-[FMDatabase executeUpdate:withArgumentsInArray:]` with statement caching, GRDB's `UpdateStatement.execute(arguments:Array)`, and the low-level SQL API of SQLite.swift.
- **Column names**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchNamedValuesTests.swift))
This test fetches 200000 rows of 10 ints and extracts each int given its column name.
It uses FMDB's `-[FMResultSet longForColumn:]`, GRDB's `Row.value(named:)`, and the high-level query builder of SQLite.swift.
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertNamedValuesTests.swift))
This test inserts 50000 rows of 10 ints, by setting query arguments given their argument name.
It uses FMDB's `-[FMDatabase executeUpdate:withParameterDictionary:]` with statement caching, GRDB's `UpdateStatement.execute(arguments:Dictionary)`, and the high-level query builder of SQLite.swift.
- **Records**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchRecordStructTests.swift))
This test fetches an array of 200000 record objects initiated from rows of 10 ints.
It builds records from FMDB's `-[FMResultSet resultDictionary]`, GRDB's built-in [FetchableRecord](https://github.com/groue/GRDB.swift/blob/master/README.md#fetchablerecord-protocol) protocol, and the values returned by the high-level query builder of SQLite.swift.
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertRecordStructTests.swift))
This tests inserts 50000 records with the persistence method provided by GRDB's [PersistableRecord](https://github.com/groue/GRDB.swift/blob/master/README.md#persistablerecord-protocol) protocol.
- **Codable Records**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchRecordDecodableTests.swift))
This test fetches an array of 200000 record objects initiated from rows of 10 ints.
It builds records from GRDB's built-in support for the [Decodable standard protocols](https://github.com/groue/GRDB.swift/blob/master/README.md#codable-records).
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertRecordEncodableTests.swift))
This tests inserts 50000 records with the persistence method provided by GRDB's built-in support for the [Encodable standard protocols](https://github.com/groue/GRDB.swift/blob/master/README.md#codable-records).
- **Optimized Records**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchRecordDecodableTests.swift))
This test shows how to optimize Decodable Records for fetching.
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertRecordEncodableTests.swift))
This test shows how to optimize Encodable Records for batch inserts.
- **Records with change tracking**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchRecordClassTests.swift))
This test fetches an array of 200000 record objects initiated from rows of 10 ints.
It builds records from FMDB's `-[FMResultSet resultDictionary]`, GRDB's built-in [Record](https://github.com/groue/GRDB.swift/blob/master/README.md#record-class) class.
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertRecordClassTests.swift))
This tests inserts 50000 records with the persistence method provided by GRDB's [Record](https://github.com/groue/GRDB.swift/blob/master/README.md#record-class) class.
REPORT
Performance report: fallback for hardware name
The https://apple.stackexchange.com/a/98089 technique does not work on all Macs, so fall back to the model identifier.
#!/usr/bin/env ruby
gem 'json'
require 'json'
require 'date'
require 'rexml/document'
# Extract CFBundleShortVersionString from a plist file
def info_plist_version(path)
# Locate the <key> node, then read the text of its following
# sibling element (the <string> holding the version).
REXML::Document.new(File.read(path))
.root
.elements["//key[text()='CFBundleShortVersionString']"]
.next_element
.text
end
# Version from the git tag pointing at HEAD of the checkout at `path`,
# with any leading "v" stripped (e.g. "v1.2.0" -> "1.2.0").
# NOTE(review): assumes exactly one tag points at HEAD; with several, only
# the final newline of the list is chopped — confirm against real checkouts.
def git_tag_version(path)
`git -C #{path} tag --points-at HEAD`.chop.gsub(/^v?/, '')
end
# Formats one benchmark sample as a 2-decimal string, or the footnote
# marker '¹' (not applicable) when the library has no sample.
# Uses Hash#dig so a missing test class — not just a missing lib entry —
# also yields the marker instead of raising NoMethodError on nil.
def formatted_sample(samples, test, lib)
  sample = samples.dig("#{test}Tests", "test#{lib}")
  return '¹' unless sample # n/a
  '%.2f' % sample
end
# One report-table row of formatted samples, in fixed library column order.
# (The former `|| '¹'` fallback was dead code: formatted_sample already
# returns '¹' for missing samples and never returns nil.)
def formatted_samples(samples, test)
  libs = %w{GRDB SQLite FMDB SQLiteSwift CoreData Realm}
  libs.map { |lib| formatted_sample(samples, test, lib) }
end
# Parse input
samples = JSON.parse(STDIN.read)
# Now that we have samples, we are reasonably sure that we
# have checkouts for all dependencies.

# BUILD_ROOT: ask xcodebuild for the build settings of the comparison target.
exit 1 unless `xcodebuild -showBuildSettings -project Tests/Performance/GRDBPerformance/GRDBPerformance.xcodeproj -target GRDBOSXPerformanceComparisonTests -disableAutomaticPackageResolution` =~ /BUILD_ROOT = (.*)$/
BUILD_ROOT = $1

# DERIVED_DATA: walk up from BUILD_ROOT until the SourcePackages dir appears.
tmp = BUILD_ROOT
# File.exist? replaces File.exists?, which is deprecated and removed in Ruby 3.2.
while !File.exist?(File.join(tmp, 'SourcePackages'))
  parent = File.dirname(tmp)
  exit 1 if tmp == parent # reached the filesystem root without finding it
  tmp = parent
end
DERIVED_DATA = tmp

# SPM_CHECKOUTS: where SwiftPM checked out the dependencies.
SPM_CHECKOUTS = File.join(DERIVED_DATA, 'SourcePackages', 'checkouts')
# Extract versions
GRDB_VERSION = info_plist_version('Support/Info.plist')
FMDB_VERSION = info_plist_version("#{SPM_CHECKOUTS}/fmdb/src/fmdb/Info.plist")
SQLITE_SWIFT_VERSION = git_tag_version("#{SPM_CHECKOUTS}/SQLite.swift")
REALM_VERSION = git_tag_version("#{SPM_CHECKOUTS}/realm-cocoa")
# Xcode version, from `xcodebuild -version` output.
`xcodebuild -version` =~ /Xcode (.*)$/
XCODE_VERSION = $1
# Hardware name: https://apple.stackexchange.com/a/98089
`curl -s https://support-sp.apple.com/sp/product?cc=$(
system_profiler SPHardwareDataType \
| awk '/Serial/ {print $4}' \
| cut -c 9-)` =~ /<configCode>(.*)<\/configCode>/
hardware = $1
if hardware
HARDWARE = hardware
else
# in case the previous technique does not work,
# fall back to the raw model identifier from system_profiler
HARDWARE = `system_profiler SPHardwareDataType | awk '/Model Identifier/ {print $3}'`.chomp
end
# Log the collected versions to stderr so stdout stays a clean report.
STDERR.puts "GRDB_VERSION: #{GRDB_VERSION}"
STDERR.puts "FMDB_VERSION: #{FMDB_VERSION}"
STDERR.puts "SQLITE_SWIFT_VERSION: #{SQLITE_SWIFT_VERSION}"
STDERR.puts "REALM_VERSION: #{REALM_VERSION}"
STDERR.puts "XCODE_VERSION: #{XCODE_VERSION}"
STDERR.puts "HARDWARE: #{HARDWARE}"
# Generate
puts <<-REPORT
# Comparing the Performances of Swift SQLite libraries
*Last updated #{Date.today.strftime('%B %-d, %Y')}*
Below are performance benchmarks made on for [GRDB #{GRDB_VERSION}](https://github.com/groue/GRDB.swift), [FMDB #{FMDB_VERSION}](https://github.com/ccgus/fmdb), and [SQLite.swift #{SQLITE_SWIFT_VERSION}](https://github.com/stephencelis/SQLite.swift). They are compared to Core Data, [Realm #{REALM_VERSION}](https://realm.io) and the raw use of the SQLite C API from Swift.
This report was generated on a #{HARDWARE}, with Xcode #{XCODE_VERSION}, by running the following command:
```sh
make test_performance | Tests/parsePerformanceTests.rb | Tests/generatePerformanceReport.rb
```
All tests use the default settings of each library. For each library, we:
- Build and consume database rows with raw SQL and column indexes (aiming at the best performance)
- Build and consume database rows with column names (sacrificing performance for maintainability)
- Build and consume records values to and from database rows (aiming at the shortest code from database to records)
- Build and consume records values to and from database rows, with help from the Codable standard protocol
- Build and consume records values to and from database rows, with [change tracking](https://github.com/groue/GRDB.swift/blob/master/README.md#record-comparison) (records know if they have unsaved changes)
As a bottom line, the raw SQLite C API is used as efficiently as possible, without any error checking.
| | GRDB | Raw SQLite | FMDB | SQLite.swift | Core Data | Realm |
|:-------------------------------- | ----:| ----------:| ----:| ------------:| ---------:| -----:|
| **Column indexes** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchPositionalValues').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertPositionalValues').join(" | ")} |
| **Column names** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchNamedValues').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertNamedValues').join(" | ")} |
| **Records** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchRecordStruct').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertRecordStruct').join(" | ")} |
| **Codable Records** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchRecordDecodable').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertRecordEncodable').join(" | ")} |
| **Optimized Records** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchRecordOptimized').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertRecordOptimized').join(" | ")} |
| **Records with change tracking** | | | | | | |
| Fetch | #{formatted_samples(samples, 'FetchRecordClass').join(" | ")} |
| Insert | #{formatted_samples(samples, 'InsertRecordClass').join(" | ")} |
¹ Not applicable
- **Column indexes**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchPositionalValuesTests.swift))
This test fetches 200000 rows of 10 ints and extracts each int given its position in the row.
It uses FMDB's `-[FMResultSet longForColumnIndex:]`, GRDB's `Row.value(atIndex:)`, and the low-level SQL API of SQLite.swift.
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertPositionalValuesTests.swift))
This test inserts 50000 rows of 10 ints, by setting query arguments given their position.
It uses FMDB's `-[FMDatabase executeUpdate:withArgumentsInArray:]` with statement caching, GRDB's `UpdateStatement.execute(arguments:Array)`, and the low-level SQL API of SQLite.swift.
- **Column names**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchNamedValuesTests.swift))
This test fetches 200000 rows of 10 ints and extracts each int given its column name.
It uses FMDB's `-[FMResultSet longForColumn:]`, GRDB's `Row.value(named:)`, and the high-level query builder of SQLite.swift.
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertNamedValuesTests.swift))
This test inserts 50000 rows of 10 ints, by setting query arguments given their argument name.
It uses FMDB's `-[FMDatabase executeUpdate:withParameterDictionary:]` with statement caching, GRDB's `UpdateStatement.execute(arguments:Dictionary)`, and the high-level query builder of SQLite.swift.
- **Records**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchRecordStructTests.swift))
This test fetches an array of 200000 record objects initiated from rows of 10 ints.
It builds records from FMDB's `-[FMResultSet resultDictionary]`, GRDB's built-in [FetchableRecord](https://github.com/groue/GRDB.swift/blob/master/README.md#fetchablerecord-protocol) protocol, and the values returned by the high-level query builder of SQLite.swift.
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertRecordStructTests.swift))
This tests inserts 50000 records with the persistence method provided by GRDB's [PersistableRecord](https://github.com/groue/GRDB.swift/blob/master/README.md#persistablerecord-protocol) protocol.
- **Codable Records**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchRecordDecodableTests.swift))
This test fetches an array of 200000 record objects initiated from rows of 10 ints.
It builds records from GRDB's built-in support for the [Decodable standard protocols](https://github.com/groue/GRDB.swift/blob/master/README.md#codable-records).
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertRecordEncodableTests.swift))
This tests inserts 50000 records with the persistence method provided by GRDB's built-in support for the [Encodable standard protocols](https://github.com/groue/GRDB.swift/blob/master/README.md#codable-records).
- **Optimized Records**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchRecordDecodableTests.swift))
This test shows how to optimize Decodable Records for fetching.
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertRecordEncodableTests.swift))
This test shows how to optimize Encodable Records for batch inserts.
- **Records with change tracking**:
- **Fetch** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/FetchRecordClassTests.swift))
This test fetches an array of 200000 record objects initiated from rows of 10 ints.
It builds records from FMDB's `-[FMResultSet resultDictionary]`, GRDB's built-in [Record](https://github.com/groue/GRDB.swift/blob/master/README.md#record-class) class.
- **Insert** ([source](https://github.com/groue/GRDB.swift/blob/master/Tests/Performance/GRDBPerformance/InsertRecordClassTests.swift))
This tests inserts 50000 records with the persistence method provided by GRDB's [Record](https://github.com/groue/GRDB.swift/blob/master/README.md#record-class) class.
REPORT
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{tuwien_logon}
s.version = "0.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Clemens Helm"]
s.date = %q{2010-04-21}
s.description = %q{Provides an authentication solution for ruby applications being used at the Vienna University of Technology}
s.email = %q{clemens.helm@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
# Full file manifest, generated by jeweler from the git index.
s.files = [
".document",
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"lib/tuwien_logon.rb",
"lib/tuwien_logon/authentication.rb",
"lib/tuwien_logon/configuration.rb",
"lib/tuwien_logon/user_info.rb",
"lib/tuwien_logon/user_info_request.rb",
"test/helper.rb",
"test/test_authentication.rb",
"test/test_tuwien_logon.rb",
"test/test_user_info.rb",
"test/test_user_info_request.rb",
"tuwien_logon.gemspec"
]
s.homepage = %q{http://github.com/dropswap/tuwien_logon}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.6}
s.summary = %q{Ruby client for Vienna UT authentication service}
s.test_files = [
"test/helper.rb",
"test/test_authentication.rb",
"test/test_tuwien_logon.rb",
"test/test_user_info.rb",
"test/test_user_info_request.rb"
]
# Dependency declaration style depends on the RubyGems version loading
# this spec, as generated by jeweler.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
# NOTE(review): Gem::RubyGemsVersion is a long-deprecated alias of
# Gem::VERSION and is absent from modern RubyGems — regenerate this spec
# with a current tool if it must load there.
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
else
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
end
else
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
end
end
Regenerated gemspec for version 0.3.1
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{tuwien_logon}
s.version = "0.3.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Clemens Helm"]
s.date = %q{2010-04-22}
s.description = %q{Provides an authentication solution for ruby applications being used at the Vienna University of Technology}
s.email = %q{clemens.helm@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
# Full file manifest, generated by jeweler from the git index.
s.files = [
".document",
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"lib/tuwien_logon.rb",
"lib/tuwien_logon/authentication.rb",
"lib/tuwien_logon/configuration.rb",
"lib/tuwien_logon/user_info.rb",
"lib/tuwien_logon/user_info_request.rb",
"test/helper.rb",
"test/test_authentication.rb",
"test/test_tuwien_logon.rb",
"test/test_user_info.rb",
"test/test_user_info_request.rb",
"tuwien_logon.gemspec"
]
s.homepage = %q{http://github.com/dropswap/tuwien_logon}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.6}
s.summary = %q{Ruby client for Vienna UT authentication service}
s.test_files = [
"test/helper.rb",
"test/test_authentication.rb",
"test/test_tuwien_logon.rb",
"test/test_user_info.rb",
"test/test_user_info_request.rb"
]
# Dependency declaration style depends on the RubyGems version loading
# this spec, as generated by jeweler.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
# NOTE(review): Gem::RubyGemsVersion is a long-deprecated alias of
# Gem::VERSION and is absent from modern RubyGems — regenerate this spec
# with a current tool if it must load there.
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
else
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
end
else
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
end
end
|
class WebinyDev
  # Applies the settings hash (parsed from the project's YAML config)
  # to the Vagrant `config` object.
  def WebinyDev.configure(config, settings)
    # Configure The Box
    config.vm.box = settings["box"] ||= "webiny/webiny-dev"
    config.vm.hostname = settings["hostname"] ||= "webiny-dev"

    # Allow SSH Agent Forward from The Box
    config.ssh.forward_agent = true

    # Configure A Private Network IP
    config.vm.network :private_network, ip: settings["ip"] ||= "192.168.22.22"

    # Optionally bridge a public network. Guard against a missing/empty
    # "networking" section, which previously raised NoMethodError on nil.
    networking = settings['networking']
    if networking.kind_of?(Array) && networking[0] && networking[0]['public']
      config.vm.network "public_network", type: "dhcp", bridge: settings["bridge_interface"] ||= "en0: Wi-Fi 2 (AirPort)"
    end

    # Configure A Few VirtualBox Settings
    config.vm.provider "virtualbox" do |vb|
      vb.name = settings["name"] ||= "webiny-dev"
      vb.customize ["modifyvm", :id, "--memory", settings["memory"] ||= "2048"]
      vb.customize ["modifyvm", :id, "--cpus", settings["cpus"] ||= "1"]
      vb.customize ["modifyvm", :id, "--natdnsproxy1", "on"]
      vb.customize ["modifyvm", :id, "--natdnshostresolver1", "on"]
      vb.customize ["modifyvm", :id, "--ostype", "Ubuntu_64"]
    end

    # Configure A Few VMware Settings
    ["vmware_fusion", "vmware_workstation"].each do |vmware|
      config.vm.provider vmware do |v|
        v.vmx["displayName"] = "webiny-dev"
        v.vmx["memsize"] = settings["memory"] ||= 2048
        v.vmx["numvcpus"] = settings["cpus"] ||= 1
        v.vmx["guestOS"] = "ubuntu-64"
      end
    end

    # Configure A Few Parallels Settings
    config.vm.provider "parallels" do |v|
      v.update_guest_tools = true
      v.memory = settings["memory"] ||= 2048
      v.cpus = settings["cpus"] ||= 1
    end

    # Standardize Ports Naming Schema; guarantees settings["ports"] is an Array.
    if (settings.has_key?("ports"))
      settings["ports"].each do |port|
        port["guest"] ||= port["to"]
        port["host"] ||= port["send"]
        port["protocol"] ||= "tcp"
      end
    else
      settings["ports"] = []
    end

    # Default Port Forwarding
    default_ports = {
      80 => 8000,
      443 => 44300,
      3306 => 33060,
      5432 => 54320
    }

    # Use Default Port Forwarding Unless Overridden
    default_ports.each do |guest, host|
      unless settings["ports"].any? { |mapping| mapping["guest"] == guest }
        config.vm.network "forwarded_port", guest: guest, host: host, auto_correct: true
      end
    end

    # Add Custom Ports From Configuration (the normalization above guarantees
    # the key exists, so the former has_key? re-check was redundant).
    settings["ports"].each do |port|
      config.vm.network "forwarded_port", guest: port["guest"], host: port["host"], protocol: port["protocol"], auto_correct: true
    end

    # Configure The Public Key For SSH Access
    if settings.include? 'authorize'
      # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
      if File.exist? File.expand_path(settings["authorize"])
        config.vm.provision "shell" do |s|
          s.inline = "echo $1 | grep -xq \"$1\" /home/vagrant/.ssh/authorized_keys || echo $1 | tee -a /home/vagrant/.ssh/authorized_keys"
          s.args = [File.read(File.expand_path(settings["authorize"]))]
        end
      end
    end

    # Copy The SSH Private Keys To The Box
    if settings.include? 'keys'
      settings["keys"].each do |key|
        config.vm.provision "shell" do |s|
          s.privileged = false
          s.inline = "echo \"$1\" > /home/vagrant/.ssh/$2 && chmod 600 /home/vagrant/.ssh/$2"
          s.args = [File.read(File.expand_path(key)), key.split('/').last]
        end
      end
    end

    # Register All Of The Configured Shared Folders
    if settings['folders'].kind_of?(Array)
      settings["folders"].each do |folder|
        # `folder["type"] ||= nil` was a no-op; pass the value through directly.
        config.vm.synced_folder folder["map"], folder["to"], type: folder["type"]
      end
    end

    # Update Composer On Every Provision
    config.vm.provision "shell" do |s|
      s.inline = "/usr/local/bin/composer self-update --no-progress"
    end
  end
end
Updated shared folder mount options
class WebinyDev
  # Applies the settings hash (parsed from the project's YAML config)
  # to the Vagrant `config` object.
  def WebinyDev.configure(config, settings)
    # Configure The Box
    config.vm.box = settings["box"] ||= "webiny/webiny-dev"
    config.vm.hostname = settings["hostname"] ||= "webiny-dev"

    # Allow SSH Agent Forward from The Box
    config.ssh.forward_agent = true

    # Configure A Private Network IP
    config.vm.network :private_network, ip: settings["ip"] ||= "192.168.22.22"

    # Optionally bridge a public network. Guard against a missing/empty
    # "networking" section, which previously raised NoMethodError on nil.
    networking = settings['networking']
    if networking.kind_of?(Array) && networking[0] && networking[0]['public']
      config.vm.network "public_network", type: "dhcp", bridge: settings["bridge_interface"] ||= "en0: Wi-Fi 2 (AirPort)"
    end

    # Configure A Few VirtualBox Settings
    config.vm.provider "virtualbox" do |vb|
      vb.name = settings["name"] ||= "webiny-dev"
      vb.customize ["modifyvm", :id, "--memory", settings["memory"] ||= "2048"]
      vb.customize ["modifyvm", :id, "--cpus", settings["cpus"] ||= "1"]
      vb.customize ["modifyvm", :id, "--natdnsproxy1", "on"]
      vb.customize ["modifyvm", :id, "--natdnshostresolver1", "on"]
      vb.customize ["modifyvm", :id, "--ostype", "Ubuntu_64"]
    end

    # Configure A Few VMware Settings
    ["vmware_fusion", "vmware_workstation"].each do |vmware|
      config.vm.provider vmware do |v|
        v.vmx["displayName"] = "webiny-dev"
        v.vmx["memsize"] = settings["memory"] ||= 2048
        v.vmx["numvcpus"] = settings["cpus"] ||= 1
        v.vmx["guestOS"] = "ubuntu-64"
      end
    end

    # Configure A Few Parallels Settings
    config.vm.provider "parallels" do |v|
      v.update_guest_tools = true
      v.memory = settings["memory"] ||= 2048
      v.cpus = settings["cpus"] ||= 1
    end

    # Standardize Ports Naming Schema; guarantees settings["ports"] is an Array.
    if (settings.has_key?("ports"))
      settings["ports"].each do |port|
        port["guest"] ||= port["to"]
        port["host"] ||= port["send"]
        port["protocol"] ||= "tcp"
      end
    else
      settings["ports"] = []
    end

    # Default Port Forwarding
    default_ports = {
      80 => 8000,
      443 => 44300,
      3306 => 33060,
      5432 => 54320
    }

    # Use Default Port Forwarding Unless Overridden
    default_ports.each do |guest, host|
      unless settings["ports"].any? { |mapping| mapping["guest"] == guest }
        config.vm.network "forwarded_port", guest: guest, host: host, auto_correct: true
      end
    end

    # Add Custom Ports From Configuration (the normalization above guarantees
    # the key exists, so the former has_key? re-check was redundant).
    settings["ports"].each do |port|
      config.vm.network "forwarded_port", guest: port["guest"], host: port["host"], protocol: port["protocol"], auto_correct: true
    end

    # Configure The Public Key For SSH Access
    if settings.include? 'authorize'
      # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
      if File.exist? File.expand_path(settings["authorize"])
        config.vm.provision "shell" do |s|
          s.inline = "echo $1 | grep -xq \"$1\" /home/vagrant/.ssh/authorized_keys || echo $1 | tee -a /home/vagrant/.ssh/authorized_keys"
          s.args = [File.read(File.expand_path(settings["authorize"]))]
        end
      end
    end

    # Copy The SSH Private Keys To The Box
    if settings.include? 'keys'
      settings["keys"].each do |key|
        config.vm.provision "shell" do |s|
          s.privileged = false
          s.inline = "echo \"$1\" > /home/vagrant/.ssh/$2 && chmod 600 /home/vagrant/.ssh/$2"
          s.args = [File.read(File.expand_path(key)), key.split('/').last]
        end
      end
    end

    # Register All Of The Configured Shared Folders (NFS mount options tuned
    # for performance; `folder["type"] ||= nil` was a no-op, value passed through).
    if settings['folders'].kind_of?(Array)
      settings["folders"].each do |folder|
        config.vm.synced_folder folder["map"], folder["to"], type: folder["type"], mount_options: %w{nolock,vers=3,udp,noatime,actimeo=1}
      end
    end

    # Update Composer On Every Provision
    config.vm.provision "shell" do |s|
      s.inline = "/usr/local/bin/composer self-update --no-progress"
    end
  end
end
|
module Phytosanitary
  # Represents a phytosanitary risk level and the group it belongs to.
  class Risk
    attr_reader :code
    attr_reader :group

    # Returns a Risk for the given level, or the Unknown sentinel when blank.
    def self.get(risk_level)
      return Unknown if risk_level.blank?
      new(risk_level)
    end

    def initialize(risk_level)
      @code = risk_level.to_s
      @group = Pesticide::RisksGroupAbacus.find_group_of(@code)
    end

    # Risks carried by the variant's registered pesticide agent.
    # Safe navigation keeps a nil variant on the [Unknown] path instead of
    # raising NoMethodError, as the plain `variant.france_maaid` call did.
    def self.risks_of(variant)
      return [Unknown] unless maaid = variant&.france_maaid
      risks = Pesticide::Agent.find(maaid).risks
      risks.map { |risk| get(risk) }
    end

    # Sentinel used when no risk information is available.
    Unknown = Struct.new('UnknownRisk', :code, :group)
              .new(nil, Phytosanitary::Group::Unknown)
  end
end
Resolve phytosanitary nil-variant bug
module Phytosanitary
# Represents a phytosanitary risk level and the group it belongs to.
class Risk
attr_reader :code
attr_reader :group
# Returns a Risk for the given level, or the Unknown sentinel when blank.
def self.get(risk_level)
return Unknown if risk_level.blank?
new(risk_level)
end
def initialize(risk_level)
@code = risk_level.to_s
@group = Pesticide::RisksGroupAbacus.find_group_of(@code)
end
# Risks carried by the variant's registered pesticide agent; `&.` keeps a
# nil variant on the [Unknown] path instead of raising NoMethodError.
def self.risks_of(variant)
return [Unknown] unless maaid = variant&.france_maaid
risks = Pesticide::Agent.find(maaid).risks
risks.map { |risk| get(risk) }
end
# Sentinel used when no risk information is available.
Unknown = Struct.new('UnknownRisk', :code, :group)
.new(nil, Phytosanitary::Group::Unknown)
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.