CombinedText stringlengths 4 3.42M |
|---|
module VestalVersions
  # Enables versioned ActiveRecord::Base instances to revert to a previously
  # saved version.
  module Reversion
    def self.included(base) # :nodoc:
      base.class_eval do
        include InstanceMethods
      end
    end

    # Provides the base instance methods required to revert a versioned instance.
    module InstanceMethods
      # Returns the current version number for the versioned object.
      def version
        @version ||= last_version
      end

      # Accepts a value corresponding to a specific version record, builds a
      # history of changes between that version and the current version, and
      # then iterates over that history updating the object's attributes until
      # it's reverted to its prior state.
      #
      # The single argument should adhere to one of the formats as documented
      # in the +at+ method of VestalVersions::Versions.
      #
      # After the object is reverted to the target version, it is not saved.
      # In order to save the object after the reversion, use the +revert_to!+
      # method.
      #
      # The version number of the object will reflect whatever version has
      # been reverted to, and the return value of the +revert_to+ method is
      # also the target version number.
      def revert_to(value)
        to_number = versions.number_at(value)
        changes_between(version, to_number).each do |attribute, change|
          # Use the (higher-level) attribute writer instead of write_attribute
          # so that any custom setter methods defined on the model are
          # respected during reversion.
          send("#{attribute}=", change.last)
        end
        reset_version(to_number)
      end

      # Behaves similarly to the +revert_to+ method except that it
      # automatically saves the record after the reversion. The return value
      # is the success of the save.
      def revert_to!(value)
        revert_to(value)
        reset_version if saved = save
        saved
      end

      # Returns a boolean specifying whether the object has been reverted to a
      # previous version or if the object represents the latest version in the
      # version history.
      def reverted?
        version != last_version
      end

      private

      # Returns the number of the last created version in the object's version
      # history.
      #
      # If no associated versions exist, the object is considered at version 1.
      def last_version
        @last_version ||= versions.maximum(:number) || 1
      end

      # Clears the cached version number instance variables so that they can
      # be recalculated. Useful after a new version is created.
      def reset_version(version = nil)
        @last_version = nil if version.nil?
        @version = version
      end
    end
  end
end
use (higher level) attribute writer instead of write_attribute
module VestalVersions
  # Allows versioned ActiveRecord::Base instances to be rolled back to an
  # earlier saved version.
  module Reversion
    def self.included(base) # :nodoc:
      base.send(:include, InstanceMethods)
    end

    # Instance-level behavior required to revert a versioned record.
    module InstanceMethods
      # The current version number for the versioned object.
      def version
        @version ||= last_version
      end

      # Reverts the object's attributes to the state they had at the target
      # version. The argument may be anything accepted by the +at+ method of
      # VestalVersions::Versions.
      #
      # The record is NOT saved afterwards — use +revert_to!+ for that. The
      # object's #version is updated to the target, which is also the return
      # value.
      def revert_to(value)
        target = versions.number_at(value)
        changes_between(version, target).each_pair do |name, delta|
          send(:"#{name}=", delta.last)
        end
        reset_version(target)
      end

      # Same as +revert_to+, but saves the record afterwards. Returns whether
      # the save succeeded.
      def revert_to!(value)
        revert_to(value)
        saved = save
        reset_version if saved
        saved
      end

      # True when the object currently sits at anything other than the latest
      # version in its history.
      def reverted?
        last_version != version
      end

      private

      # Number of the most recently created version; a record with no
      # versions is considered to be at version 1.
      def last_version
        @last_version ||= versions.maximum(:number) || 1
      end

      # Drops the cached version numbers so they are recomputed on next
      # access. Handy after a new version has been created.
      def reset_version(version = nil)
        @last_version = nil if version.nil?
        @version = version
      end
    end
  end
end
|
require 'stockboy/provider'
require 'stockboy/providers/imap/search_options'
require 'net/imap'
require 'mail'
module Stockboy::Providers

  # Read data from a file attachment in IMAP email
  #
  # == Job template DSL
  #
  #   provider :imap do
  #     host "imap.example.com"
  #     username "arthur@example.com"
  #     password "424242"
  #     mailbox "INBOX"
  #     subject "Daily Report"
  #     since Date.today
  #     file_name /report-[0-9]+\.csv/
  #   end
  #
  class IMAP < Stockboy::Provider

    # @!group Options

    # Host name or IP address for IMAP server connection
    #
    # @!attribute [rw] host
    # @return [String]
    # @example
    #   host "imap.example.com"
    #
    dsl_attr :host

    # User name for connection credentials
    #
    # @!attribute [rw] username
    # @return [String]
    # @example
    #   username "arthur@example.com"
    #
    dsl_attr :username

    # Password for connection credentials
    #
    # @!attribute [rw] password
    # @return [String]
    # @example
    #   password "424242"
    #
    dsl_attr :password

    # Where to look for email on the server (usually "INBOX")
    #
    # @!attribute [rw] mailbox
    # @return [String]
    # @example
    #   mailbox "INBOX"
    #
    dsl_attr :mailbox

    # Substring to find contained in matching email subject
    #
    # @!attribute [rw] subject
    # @return [String]
    # @example
    #   subject "Daily Report"
    #
    dsl_attr :subject

    # Email address of the sender
    #
    # @!attribute [rw] from
    # @return [String]
    # @example
    #   from "sender+12345@example.com"
    #
    dsl_attr :from

    # Minimum time sent for matching email
    #
    # @!attribute [rw] since
    # @return [String]
    # @example
    #   since Date.today
    #
    dsl_attr :since, alias: :newer_than

    # Key-value tokens for IMAP search options
    #
    # @!attribute [rw] search
    # @return [String]
    # @example
    #   search ['FLAGGED', 'BODY', 'Report attached']
    #
    dsl_attr :search

    # Name or pattern for matching attachment files. First matching attachment
    # is picked, or the first attachment if not specified.
    #
    # @!attribute [rw] attachment
    # @return [String, Regexp]
    # @example
    #   attachment "daily-report.csv"
    #   attachment /daily-report-[0-9]+.csv/
    #
    dsl_attr :attachment, alias: :file_name

    # @macro file_size_options
    dsl_attr :file_smaller, alias: :smaller_than
    dsl_attr :file_larger, alias: :larger_than

    # Method for choosing which email message to process from potential
    # matches. Default is last by date sent.
    #
    # @!attribute [rw] pick
    # @return [Symbol, Proc]
    # @example
    #   pick :last
    #   pick :first
    #   pick ->(list) {
    #     list.max_by { |msgid| client.fetch(msgid, 'SENTON').to_i }
    #   }
    #
    dsl_attr :pick

    # @!endgroup

    # Initialize a new IMAP reader
    #
    def initialize(opts={}, &block)
      super(opts, &block)
      @host = opts[:host]
      @username = opts[:username]
      @password = opts[:password]
      @mailbox = opts[:mailbox]
      @subject = opts[:subject]
      @from = opts[:from]
      @since = opts[:since]
      @search = opts[:search]
      @attachment = opts[:attachment]
      @file_smaller = opts[:file_smaller]
      @file_larger = opts[:file_larger]
      @pick = opts[:pick] || :last
      DSL.new(self).instance_eval(&block) if block_given?
    end

    # Yields an open, authenticated IMAP connection to the block.
    #
    # Nested calls reuse the connection already opened by the outermost call;
    # only the outermost call disconnects (in its ensure). IMAP protocol
    # errors are recorded on #errors instead of being raised.
    def client
      raise(ArgumentError, "no block given") unless block_given?
      first_connection = @open_client.nil?
      if first_connection
        @open_client = ::Net::IMAP.new(host)
        @open_client.login(username, password)
        @open_client.examine(mailbox)
      end
      yield @open_client
    rescue ::Net::IMAP::Error => e
      errors.add :response, "IMAP connection error"
    ensure
      if first_connection
        @open_client.disconnect
        @open_client = nil
      end
    end

    # Flags the matched message as deleted on the server and expunges the
    # mailbox. Raises Stockboy::OutOfSequence unless a message has already
    # been matched (via #matching_message or #data).
    def delete_data
      raise Stockboy::OutOfSequence, "must confirm #matching_message or calling #data" unless picked_matching_message?
      logger.info "Deleting message #{username}:#{host} message_uid: #{matching_message}"
      client do |imap|
        imap.uid_store(matching_message, "+FLAGS", [:Deleted])
        imap.expunge
      end
    end

    # Finds and memoizes the id of the message to process, searching with the
    # default options and choosing among matches via the #pick strategy.
    def matching_message
      return @matching_message if @matching_message
      message_ids = search(default_search_options)
      @matching_message = pick_from(message_ids) unless message_ids.empty?
    end

    # Resets memoized state so the provider can be reused for another fetch.
    def clear
      super
      @matching_message = nil
      @data_time = nil
      @data_size = nil
    end

    # Searches the mailbox, returning message ids sorted by date.
    def search(options=nil)
      client { |imap| imap.sort(['DATE'], search_keys(options), 'UTF-8') }
    end

    # Builds the IMAP search keys for a query.
    #
    # A raw Array of IMAP search keys or a raw IMAP search String is passed
    # through untouched; a Hash (or nil, which falls back to
    # #default_search_options) is translated via SearchOptions.
    def search_keys(options=nil)
      case options
      when Array, String then options
      else SearchOptions.new(options || default_search_options).to_imap
      end
    end

    private

    # The configured search criteria as a Hash.
    def default_search_options
      {subject: subject, from: from, since: since}
    end

    # Fetches the matched message, picks the first attachment accepted by
    # #validate_attachment, validates its size, and stores the decoded
    # payload in @data. Returns true when data was captured.
    def fetch_data
      client do |imap|
        return false unless matching_message
        mail = ::Mail.new(imap.fetch(matching_message, 'RFC822')[0].attr['RFC822'])
        # Renamed block parameter so it no longer shadows the outer `part`.
        if part = mail.attachments.detect { |candidate| validate_attachment(candidate) }
          validate_file(part.decoded)
          if valid?
            logger.info "Getting file from #{username}:#{host} message_uid #{matching_message}"
            @data = part.decoded
            @data_time = normalize_imap_datetime(mail.date)
            logger.info "Got file from #{username}:#{host} message_uid #{matching_message}"
          end
        end
      end
      !@data.nil?
    end

    # Connection parameters are required.
    def validate
      errors.add_on_blank [:host, :username, :password]
      errors.empty?
    end

    # Whether #matching_message has already found a message.
    def picked_matching_message?
      !!@matching_message
    end

    # Accepts an attachment when its filename matches the configured
    # #attachment (String equality or Regexp match); accepts anything when no
    # filter is set.
    def validate_attachment(part)
      case attachment
      when String
        part.filename == attachment
      when Regexp
        part.filename =~ attachment
      else
        true
      end
    end

    # If activesupport is loaded, it mucks with DateTime#to_time to return
    # self when it has a utc_offset. Handle both to always return a Time.utc.
    #
    def normalize_imap_datetime(datetime)
      datetime.respond_to?(:getutc) ?
        datetime.getutc.to_time : datetime.to_time.utc
    end

    # Records an error when no attachment matched; otherwise applies the
    # configured size limits.
    def validate_file(data_file)
      return errors.add :response, "No matching attachments" unless data_file
      validate_file_smaller(data_file)
      validate_file_larger(data_file)
    end

    # Rejects files exceeding the configured maximum size, if any.
    def validate_file_smaller(data_file)
      @data_size ||= data_file.bytesize
      if file_smaller && @data_size > file_smaller
        errors.add :response, "File size larger than #{file_smaller}"
      end
    end

    # Rejects files below the configured minimum size, if any.
    def validate_file_larger(data_file)
      @data_size ||= data_file.bytesize
      if file_larger && @data_size < file_larger
        errors.add :response, "File size smaller than #{file_larger}"
      end
    end
  end
end
Allow IMAP search directly with a string
require 'stockboy/provider'
require 'stockboy/providers/imap/search_options'
require 'net/imap'
require 'mail'

module Stockboy::Providers

  # Read data from a file attachment in IMAP email
  #
  # == Job template DSL
  #
  #   provider :imap do
  #     host "imap.example.com"
  #     username "arthur@example.com"
  #     password "424242"
  #     mailbox "INBOX"
  #     subject "Daily Report"
  #     since Date.today
  #     file_name /report-[0-9]+\.csv/
  #   end
  #
  class IMAP < Stockboy::Provider

    # @!group Options

    # Host name or IP address for IMAP server connection
    #
    # @!attribute [rw] host
    # @return [String]
    # @example
    #   host "imap.example.com"
    #
    dsl_attr :host

    # User name for connection credentials
    #
    # @!attribute [rw] username
    # @return [String]
    # @example
    #   username "arthur@example.com"
    #
    dsl_attr :username

    # Password for connection credentials
    #
    # @!attribute [rw] password
    # @return [String]
    # @example
    #   password "424242"
    #
    dsl_attr :password

    # Where to look for email on the server (usually "INBOX")
    #
    # @!attribute [rw] mailbox
    # @return [String]
    # @example
    #   mailbox "INBOX"
    #
    dsl_attr :mailbox

    # Substring to find contained in matching email subject
    #
    # @!attribute [rw] subject
    # @return [String]
    # @example
    #   subject "Daily Report"
    #
    dsl_attr :subject

    # Email address of the sender
    #
    # @!attribute [rw] from
    # @return [String]
    # @example
    #   from "sender+12345@example.com"
    #
    dsl_attr :from

    # Minimum time sent for matching email
    #
    # @!attribute [rw] since
    # @return [String]
    # @example
    #   since Date.today
    #
    dsl_attr :since, alias: :newer_than

    # Key-value tokens for IMAP search options
    #
    # @!attribute [rw] search
    # @return [String]
    # @example
    #   search ['FLAGGED', 'BODY', 'Report attached']
    #
    dsl_attr :search

    # Name or pattern for matching attachment files. First matching attachment
    # is picked, or the first attachment if not specified.
    #
    # @!attribute [rw] attachment
    # @return [String, Regexp]
    # @example
    #   attachment "daily-report.csv"
    #   attachment /daily-report-[0-9]+.csv/
    #
    dsl_attr :attachment, alias: :file_name

    # @macro file_size_options
    dsl_attr :file_smaller, alias: :smaller_than
    dsl_attr :file_larger, alias: :larger_than

    # Method for choosing which email message to process from potential
    # matches. Default is last by date sent.
    #
    # @!attribute [rw] pick
    # @return [Symbol, Proc]
    # @example
    #   pick :last
    #   pick :first
    #   pick ->(list) {
    #     list.max_by { |msgid| client.fetch(msgid, 'SENTON').to_i }
    #   }
    #
    dsl_attr :pick

    # @!endgroup

    # Initialize a new IMAP reader
    #
    def initialize(opts={}, &block)
      super(opts, &block)
      @host = opts[:host]
      @username = opts[:username]
      @password = opts[:password]
      @mailbox = opts[:mailbox]
      @subject = opts[:subject]
      @from = opts[:from]
      @since = opts[:since]
      @search = opts[:search]
      @attachment = opts[:attachment]
      @file_smaller = opts[:file_smaller]
      @file_larger = opts[:file_larger]
      @pick = opts[:pick] || :last
      DSL.new(self).instance_eval(&block) if block_given?
    end

    # Yields an open, authenticated IMAP connection to the block.
    #
    # Nested calls reuse the connection opened by the outermost call; only the
    # outermost call disconnects (in its ensure). IMAP protocol errors are
    # recorded on #errors instead of being raised.
    def client
      raise(ArgumentError, "no block given") unless block_given?
      first_connection = @open_client.nil?
      if first_connection
        @open_client = ::Net::IMAP.new(host)
        @open_client.login(username, password)
        @open_client.examine(mailbox)
      end
      yield @open_client
    rescue ::Net::IMAP::Error => e
      errors.add :response, "IMAP connection error"
    ensure
      if first_connection
        @open_client.disconnect
        @open_client = nil
      end
    end

    # Flags the matched message as deleted on the server and expunges the
    # mailbox. Raises Stockboy::OutOfSequence unless a message has already
    # been matched (via #matching_message or #data).
    def delete_data
      raise Stockboy::OutOfSequence, "must confirm #matching_message or calling #data" unless picked_matching_message?
      logger.info "Deleting message #{username}:#{host} message_uid: #{matching_message}"
      client do |imap|
        imap.uid_store(matching_message, "+FLAGS", [:Deleted])
        imap.expunge
      end
    end

    # Finds and memoizes the id of the message to process, searching with the
    # default options and choosing among matches via the #pick strategy.
    def matching_message
      return @matching_message if @matching_message
      message_ids = search(default_search_options)
      @matching_message = pick_from(message_ids) unless message_ids.empty?
    end

    # Resets memoized state so the provider can be reused for another fetch.
    def clear
      super
      @matching_message = nil
      @data_time = nil
      @data_size = nil
    end

    # Searches the mailbox, returning message ids sorted by date.
    def search(options=nil)
      client { |imap| imap.sort(['DATE'], search_keys(options), 'UTF-8') }
    end

    # Builds the IMAP search keys for a query.
    #
    # A raw Array of IMAP search keys or a raw IMAP search String is passed
    # through untouched; a Hash (or nil, which falls back to
    # #default_search_options) is translated via SearchOptions.
    def search_keys(options=nil)
      case options
      when Array, String then options
      else SearchOptions.new(options || default_search_options).to_imap
      end
    end

    private

    # The configured search criteria as a Hash.
    def default_search_options
      {subject: subject, from: from, since: since}
    end

    # Fetches the matched message, picks the first attachment accepted by
    # #validate_attachment, validates its size, and stores the decoded
    # payload in @data. Returns true when data was captured.
    def fetch_data
      client do |imap|
        return false unless matching_message
        mail = ::Mail.new(imap.fetch(matching_message, 'RFC822')[0].attr['RFC822'])
        # NOTE(review): the block parameter `part` shadows the outer `part`
        # being assigned here; behavior is correct but the name could differ.
        if part = mail.attachments.detect { |part| validate_attachment(part) }
          validate_file(part.decoded)
          if valid?
            logger.info "Getting file from #{username}:#{host} message_uid #{matching_message}"
            @data = part.decoded
            @data_time = normalize_imap_datetime(mail.date)
            logger.info "Got file from #{username}:#{host} message_uid #{matching_message}"
          end
        end
      end
      !@data.nil?
    end

    # Connection parameters are required.
    def validate
      errors.add_on_blank [:host, :username, :password]
      errors.empty?
    end

    # Whether #matching_message has already found a message.
    def picked_matching_message?
      !!@matching_message
    end

    # Accepts an attachment when its filename matches the configured
    # #attachment (String equality or Regexp match); accepts anything when no
    # filter is set.
    def validate_attachment(part)
      case attachment
      when String
        part.filename == attachment
      when Regexp
        part.filename =~ attachment
      else
        true
      end
    end

    # If activesupport is loaded, it mucks with DateTime#to_time to return
    # self when it has a utc_offset. Handle both to always return a Time.utc.
    #
    def normalize_imap_datetime(datetime)
      datetime.respond_to?(:getutc) ?
        datetime.getutc.to_time : datetime.to_time.utc
    end

    # Records an error when no attachment matched; otherwise applies the
    # configured size limits.
    def validate_file(data_file)
      return errors.add :response, "No matching attachments" unless data_file
      validate_file_smaller(data_file)
      validate_file_larger(data_file)
    end

    # Rejects files exceeding the configured maximum size, if any.
    def validate_file_smaller(data_file)
      @data_size ||= data_file.bytesize
      if file_smaller && @data_size > file_smaller
        errors.add :response, "File size larger than #{file_smaller}"
      end
    end

    # Rejects files below the configured minimum size, if any.
    def validate_file_larger(data_file)
      @data_size ||= data_file.bytesize
      if file_larger && @data_size < file_larger
        errors.add :response, "File size smaller than #{file_larger}"
      end
    end
  end
end
|
module Vic
  class Colorscheme::Highlight
    include Vic::Color

    # The highlight group name ('Normal', 'Function', ...), stored as a Symbol.
    attr_accessor :group

    # Creates an instance of Vic::Colorscheme::Highlight. Uses
    # `update_arguments!` to set the arguments.
    #
    # @param [String, Symbol] group the group name, 'Normal', 'Function', etc.
    # @param [Hash] args the arguments to set
    # @return [Vic::Colorscheme::Highlight] the new highlight
    def initialize(group, args={})
      # Convert the group name to a symbol so highlights created with a String
      # and a Symbol for the same group behave consistently.
      @group = group.to_sym
      update_arguments!(args)
    end

    # Defines the getter/setter pairs (term, term=, start, start=, ...) used
    # to read and write individual highlight arguments.
    # (The previous `self.class_eval` wrapper was redundant — the class body
    # already evaluates in this context.)
    %w{term start stop cterm ctermfg ctermbg gui guifg guibg}.each do |m|
      # Getter: the argument's value, or nil when the key is unset.
      define_method(m) do
        arg = argument_set.find_by_key(m)
        return arg.val if arg
      end
      # Setter: updates an existing argument or adds a new one.
      define_method("#{m}=") do |val|
        arg = argument_set.find_by_key(m)
        if arg
          arg.val = val
        else
          arg = Argument.new(m, val)
          argument_set.add arg
        end
        # Return self for chaining (note: the usual `x.term = v` form still
        # evaluates to `v`; chaining applies when called via `send`).
        self
      end
    end

    # Sets guifg and ctermfg simultaneously. `hex` is automatically converted
    # to the 256 color code for ctermfg.
    #
    # @param [String] hex a hexidecimal color
    def fg=(hex)
      self.guifg = hex
      self.ctermfg = Color.hex_to_256(hex)
      # Return self for chaining
      self
    end

    # Sets guibg and ctermbg simultaneously. `hex` is automatically converted
    # to the 256 color code for ctermbg.
    def bg=(hex)
      self.guibg = hex
      self.ctermbg = Color.hex_to_256(hex)
      # Return self for chaining
      self
    end

    # Updates/sets the current highlight's arguments.
    #
    # @param [Hash] args the arguments to update/set, `:guibg => '#333333'`
    # @return [Vic::Colorscheme::Highlight::ArgumentSet] the updated argument set
    def update_arguments!(args={})
      args.each {|key, val| send("#{key}=", val)}
      arguments
    end

    # Returns the set of arguments for the given highlight
    #
    # @return [Vic::Colorscheme::Highlight::ArgumentSet] the argument set
    def argument_set
      @argument_set ||= ArgumentSet.new
    end
    alias_method :arguments, :argument_set

    # Writes the highlight contents.
    #
    # @return [String] the highlight as a string
    def write
      "hi #{group} #{arguments.sort_by_key.map(&:write).compact.join(' ')}"
    end
  end
end
convert group to symbol in constructor
module Vic
  class Colorscheme::Highlight
    include Vic::Color

    # The highlight group name, held as a Symbol.
    attr_accessor :group

    # Builds a highlight for the given group ('Normal', 'Function', ...) and
    # applies the supplied arguments via `update_arguments!`.
    #
    # @param [String] group the group name
    # @param [Hash] args the arguments to apply
    # @return [Vic::Colorscheme::Highlight] the new highlight
    def initialize(group, args={})
      # Group names are normalized to symbols for consistency.
      @group = group.to_sym
      update_arguments!(args)
    end

    # Generates reader/writer pairs (term/term=, start/start=, ...) for each
    # supported highlight argument key.
    %w{term start stop cterm ctermfg ctermbg gui guifg guibg}.each do |key|
      # Reader: the stored value for this key, or nil when unset.
      define_method(key) do
        found = argument_set.find_by_key(key)
        found.val if found
      end

      # Writer: replaces the value of an existing argument, or records a new
      # Argument. Returns self so invocations via `send` can be chained.
      define_method("#{key}=") do |value|
        found = argument_set.find_by_key(key)
        if found
          found.val = value
        else
          argument_set.add Argument.new(key, value)
        end
        self
      end
    end

    # Assigns guifg and ctermfg together; `hex` is converted to the matching
    # 256-color terminal code for ctermfg. Returns self for chaining.
    #
    # @param [String] hex a hexidecimal color
    def fg=(hex)
      self.guifg = hex
      self.ctermfg = Color.hex_to_256(hex)
      self
    end

    # Assigns guibg and ctermbg together; `hex` is converted to the matching
    # 256-color terminal code for ctermbg. Returns self for chaining.
    def bg=(hex)
      self.guibg = hex
      self.ctermbg = Color.hex_to_256(hex)
      self
    end

    # Applies each key/value pair in `args` through the corresponding writer.
    #
    # @param [Hash] args e.g. `:guibg => '#333333'`
    # @return [Vic::Colorscheme::Highlight::ArgumentSet] the updated set
    def update_arguments!(args={})
      args.each_pair { |key, value| send(:"#{key}=", value) }
      arguments
    end

    # The lazily-created set of arguments for this highlight.
    #
    # @return [Vic::Colorscheme::Highlight::ArgumentSet]
    def argument_set
      @argument_set ||= ArgumentSet.new
    end
    alias_method :arguments, :argument_set

    # Renders the highlight as a Vim `hi` command string.
    #
    # @return [String]
    def write
      rendered = arguments.sort_by_key.map(&:write).compact
      "hi #{group} #{rendered.join(' ')}"
    end
  end
end
|
module StraightServer
# This module contains common features of Gateway, later to be included
# in one of the classes below.
module GatewayModule
  # Temporary fix for straight server benchmarking
  # NOTE(review): these are class variables, shared by every class that
  # includes this module (GatewayOnDB and GatewayOnConfig) — one Redis
  # connection and one websocket registry for the whole process.
  @@redis = StraightServer::Config.redis[:connection] if StraightServer::Config.redis
  @@websockets = {}

  # Queries the configured blockchain adapters for transactions on +address+.
  # (try_adapters is defined elsewhere — presumably it falls back across
  # adapters on failure; verify against the including class.)
  def fetch_transactions_for(address)
    try_adapters(@blockchain_adapters) { |b| b.fetch_transactions_for(address) }
  end

  # NOTE(review): these subclass Exception rather than StandardError, so a
  # bare `rescue` will not catch them — callers must rescue them explicitly.
  class InvalidSignature < Exception; end
  class InvalidOrderId < Exception; end
  class CallbackUrlBadResponse < Exception; end
  class WebsocketExists < Exception; end
  class WebsocketForCompletedOrder < Exception; end
  class GatewayInactive < Exception; end

  # Raised when no configured blockchain adapter name resolves to a class.
  class NoBlockchainAdapters < Exception
    def message
      "No blockchain adapters were found! StraightServer cannot query the blockchain.\n" +
      "Check your ~/.straight/config.yml file and make sure valid blockchain adapters\n" +
      "are present."
    end
  end

  # Raised when #websockets is accessed on a gateway without an id.
  class NoWebsocketsForNewGateway < Exception
    def message
      "You're trying to get access to websockets on a Gateway that hasn't been saved yet"
    end
  end

  # Raised when order counters are used with count_orders disabled in config.
  class OrderCountersDisabled < Exception
    def message
      "Please enable order counting in config file! You can do is using the following option:\n\n" +
      " count_orders: true\n\n" +
      "and don't forget to provide Redis connection info by adding this to the config file as well:\n\n" +
      " redis:\n" +
      " host: localhost\n" +
      " port: 6379\n" +
      " db: null\n"
    end
  end

  # Retry window for callback delivery: delays double starting at 5s until
  # they exceed this cap (roughly 10 attempts within an hour).
  CALLBACK_URL_ATTEMPT_TIMEFRAME = 3600 # seconds

  ############# Initializers methods ########################################################
  # We have separate methods, because with GatewayOnDB they are called from #after_initialize
  # but in GatewayOnConfig they are called from #initialize itself.
  # #########################################################################################
  #

  # Instantiates one Straight::ExchangeRate adapter per configured name,
  # re-raising unresolvable names with a clearer message.
  def initialize_exchange_rate_adapters
    @exchange_rate_adapters ||= []
    if self.exchange_rate_adapter_names
      self.exchange_rate_adapter_names.each do |adapter|
        begin
          @exchange_rate_adapters << Straight::ExchangeRate.const_get("#{adapter}Adapter").new
        rescue NameError => e
          raise NameError, "No such adapter exists: Straight::ExchangeRate::#{adapter}Adapter"
        end
      end
    end
  end

  # Resolves each configured blockchain adapter name — first under
  # Straight::Blockchain, then as a top-level constant; unresolvable names
  # are logged and skipped. Raises NoBlockchainAdapters when none resolve.
  def initialize_blockchain_adapters
    @blockchain_adapters = []
    StraightServer::Config.blockchain_adapters.each do |a|
      adapter = begin
        Straight::Blockchain.const_get("#{a}Adapter")
      rescue NameError
        begin
          Kernel.const_get(a)
        rescue NameError
          StraightServer.logger.warn("No blockchain adapter with the name #{a} was found!")
          nil
        end
      end
      @blockchain_adapters << adapter.mainnet_adapter if adapter
    end
    raise NoBlockchainAdapters if @blockchain_adapters.empty?
  end

  # Installs the single order callback: on each status change it fires the
  # HTTP callback and notifies the websocket client in a background thread.
  def initialize_callbacks
    # When the status of an order changes, we send an http request to the callback_url
    # and also notify a websocket client (if present, of course).
    @order_callbacks = [
      lambda do |order|
        StraightServer::Thread.new do
          send_callback_http_request order
          send_order_to_websocket_client order
        end
      end
    ]
  end

  # Adopts the default status-check schedule from Straight::GatewayModule.
  def initialize_status_check_schedule
    @status_check_schedule = Straight::GatewayModule::DEFAULT_STATUS_CHECK_SCHEDULE
  end
  #
  ############# END OF Initializers methods ##################################################

  # Creates a new order and saves into the DB. Checks if the MD5 hash
  # is correct first.
  #
  # Raises GatewayInactive when the gateway is switched off. When
  # #check_signature is enabled, the caller must supply a positive integer
  # :id and a :signature equal to sign_with_secret(id); otherwise
  # InvalidOrderId / InvalidSignature is raised.
  def create_order(attrs={})
    raise GatewayInactive unless self.active
    StraightServer.logger.info "Creating new order with attrs: #{attrs}"
    signature = attrs.delete(:signature)
    if !check_signature || sign_with_secret(attrs[:id]) == signature
      raise InvalidOrderId if check_signature && (attrs[:id].nil? || attrs[:id].to_i <= 0)
      order = order_for_keychain_id(
        amount: attrs[:amount],
        keychain_id: increment_last_keychain_id!,
        currency: attrs[:currency],
        btc_denomination: attrs[:btc_denomination]
      )
      order.id = attrs[:id].to_i if attrs[:id]
      order.data = attrs[:data] if attrs[:data]
      order.gateway = self
      order.save
      # Persist the incremented last_keychain_id on the gateway itself.
      self.save
      StraightServer.logger.info "Order #{order.id} created: #{order.to_h}"
      order
    else
      StraightServer.logger.warn "Invalid signature, cannot create an order for gateway (#{id})"
      raise InvalidSignature
    end
  end

  # Used to track the current keychain_id number, which is used by
  # Straight::Gateway to generate addresses from the pubkey. The number is supposed
  # to be incremented by 1. In the case of a Config file type of Gateway, the value
  # is stored in a file in the .straight directory.
  def increment_last_keychain_id!
    self.last_keychain_id += 1
    self.save
    self.last_keychain_id
  end

  # Registers a websocket to receive this order's status updates. Only one
  # socket per order is allowed, and only while the order is pending
  # (status < 2); the socket deregisters itself on close. Returns the socket.
  def add_websocket_for_order(ws, order)
    raise WebsocketExists unless websockets[order.id].nil?
    raise WebsocketForCompletedOrder unless order.status < 2
    StraightServer.logger.info "Opening ws connection for #{order.id}"
    ws.on(:close) do |event|
      websockets.delete(order.id)
      StraightServer.logger.info "Closing ws connection for #{order.id}"
    end
    websockets[order.id] = ws
    ws
  end

  # This gateway's order_id => websocket registry. Requires the gateway to
  # have been saved — its id keys into the shared @@websockets hash.
  def websockets
    raise NoWebsocketsForNewGateway unless self.id
    @@websockets[self.id]
  end

  # Pushes the order JSON to a registered websocket (if any), then closes it.
  def send_order_to_websocket_client(order)
    if ws = websockets[order.id]
      ws.send(order.to_json)
      ws.close
    end
  end

  # HMAC-SHA256 hex digest of +content+ keyed with the gateway secret,
  # applied +level+ times (level: 2 is used for callback signatures).
  def sign_with_secret(content, level: 1)
    result = content.to_s
    level.times do
      result = OpenSSL::HMAC.digest('sha256', secret, result).unpack("H*").first
    end
    result
  end

  # Keeps the per-status Redis order counters in sync when an order moves
  # between statuses, then defers to super.
  def order_status_changed(order)
    statuses = Order::STATUSES.invert
    if StraightServer::Config.count_orders
      increment_order_counter!(statuses[order.old_status], -1) if order.old_status
      increment_order_counter!(statuses[order.status])
    end
    super
  end

  # Snapshot of all per-status counters; memoized unless reload: true.
  def order_counters(reload: false)
    return @order_counters if @order_counters && !reload
    @order_counters = {
      new: get_order_counter(:new),
      unconfirmed: get_order_counter(:unconfirmed),
      paid: get_order_counter(:paid),
      underpaid: get_order_counter(:underpaid),
      overpaid: get_order_counter(:overpaid),
      expired: get_order_counter(:expired)
    }
  end

  # Reads one per-status counter from Redis as an Integer.
  def get_order_counter(counter_name)
    raise OrderCountersDisabled unless StraightServer::Config.count_orders
    # NOTE(review): `to_i` never returns nil, so the trailing `|| 0` is dead.
    @@redis.get("#{StraightServer::Config.redis[:prefix]}:gateway_#{id}:#{counter_name}_orders_counter").to_i || 0
  end

  # Adjusts one per-status counter in Redis by +by+ (may be negative).
  def increment_order_counter!(counter_name, by=1)
    raise OrderCountersDisabled unless StraightServer::Config.count_orders
    @@redis.incrby("#{StraightServer::Config.redis[:prefix]}:gateway_#{id}:#{counter_name}_orders_counter", by)
  end

  private

  # Tries to send a callback HTTP request to the resource specified
  # in the #callback_url. If it fails for any reason, it keeps trying for an hour (3600 seconds)
  # making 10 http requests, each delayed by twice the time the previous one was delayed.
  # This method is supposed to be running in a separate thread.
  def send_callback_http_request(order, delay: 5)
    return if callback_url.nil?
    StraightServer.logger.info "Attempting to send request to the callback url for order #{order.id} to #{callback_url}..."
    # Composing the request uri here
    signature = self.check_signature ? "&signature=#{sign_with_secret(order.id, level: 2)}" : ''
    data = order.data ? "&data=#{order.data}" : ''
    uri = URI.parse(callback_url + '?' + order.to_http_params + signature + data)
    begin
      response = Net::HTTP.get_response(uri)
      order.callback_response = { code: response.code, body: response.body }
      order.save
      raise CallbackUrlBadResponse unless response.code.to_i == 200
    # NOTE(review): rescuing Exception is very broad — it catches the
    # CallbackUrlBadResponse raised above (which drives the retry), but it
    # will also swallow SystemExit/SignalException.
    rescue Exception => e
      if delay < CALLBACK_URL_ATTEMPT_TIMEFRAME
        sleep(delay)
        send_callback_http_request(order, delay: delay*2)
      else
        StraightServer.logger.warn "Callback request for order #{order.id} failed, see order's #callback_response field for details"
      end
    end
    # NOTE(review): this success line is also reached after the retry chain
    # above gives up — consider returning early on final failure.
    StraightServer.logger.info "Callback request for order #{order.id} performed successfully"
  end
end
# Uses database to load and save attributes
class GatewayOnDB < Sequel::Model(:gateways)
  include Straight::GatewayModule
  include GatewayModule
  plugin :timestamps, create: :created_at, update: :updated_at
  # NOTE(review): :marshal serialization is declared twice — the second
  # `plugin :serialization, :marshal` line below appears redundant.
  plugin :serialization, :marshal, :exchange_rate_adapter_names
  plugin :serialization, :marshal
  plugin :after_initialize

  # Looks a gateway up by its public hashed id (assigned in #after_create).
  def self.find_by_hashed_id(s)
    self.where(hashed_id: s).first
  end

  # Encrypts the plaintext secret before the row is first written.
  def before_create
    super
    encrypt_secret
  end

  # Allocates this gateway's websocket registry (in the @@websockets class
  # variable shared via GatewayModule) and stores a server-secret-keyed HMAC
  # of the id as the public hashed_id.
  def after_create
    @@websockets[self.id] = {}
    update(hashed_id: OpenSSL::HMAC.digest('sha256', Config.server_secret, self.id.to_s).unpack("H*").first)
  end

  # Sequel hook: wires up the schedule, callbacks and adapters for rows
  # loaded from the DB (mirrors what GatewayOnConfig does in #initialize).
  def after_initialize
    @status_check_schedule = Straight::GatewayModule::DEFAULT_STATUS_CHECK_SCHEDULE
    @@websockets[self.id] ||= {} if self.id
    initialize_callbacks
    initialize_exchange_rate_adapters
    initialize_blockchain_adapters
    initialize_status_check_schedule
  end

  # We cannot allow to store gateway secret in a DB plaintext, this would be completely unsecure.
  # Although we use symmetrical encryption here and store the encryption key in the
  # server's in a special file (~/.straight/server_secret), which in turn can also be stolen,
  # this is still marginally better than doing nothing.
  #
  # Also, server administrators now have the freedom of developing their own strategy
  # of storing that secret - it doesn't have to be stored on the same machine.
  def secret
    decrypt_secret
  end

  def self.find_by_id(id)
    self[id]
  end

  private

  # AES-128-CBC encrypts self[:secret]. Key and IV are derived from the
  # server secret (and row id) via HMAC-SHA256; the result is stored as
  # "iv:base64(ciphertext)" and a round-trip decrypt is verified before it
  # is accepted.
  def encrypt_secret
    cipher = OpenSSL::Cipher::AES.new(128, :CBC)
    cipher.encrypt
    cipher.key = OpenSSL::HMAC.digest('sha256', 'nonce', Config.server_secret).unpack("H*").first[0,16]
    cipher.iv = iv = OpenSSL::HMAC.digest('sha256', 'nonce', "#{self.id}#{Config.server_secret}").unpack("H*").first[0,16]
    encrypted = cipher.update(self[:secret]) << cipher.final()
    base64_encrypted = Base64.strict_encode64(encrypted).encode('utf-8')
    result = "#{iv}:#{base64_encrypted}"
    # Check whether we can decrypt. It should not be possible to encrypt the
    # gateway secret unless we are sure we can decrypt it.
    if decrypt_secret(result) == self[:secret]
      self.secret = result
    else
      raise "Decrypted and original secrets don't match! Cannot proceed with writing the encrypted gateway secret."
    end
  end

  # Reverses #encrypt_secret: splits the stored "iv:base64" pair and
  # decrypts with the same derived key.
  def decrypt_secret(encrypted_field=self[:secret])
    decipher = OpenSSL::Cipher::AES.new(128, :CBC)
    iv, encrypted = encrypted_field.split(':')
    decipher.decrypt
    decipher.key = OpenSSL::HMAC.digest('sha256', 'nonce', Config.server_secret).unpack("H*").first[0,16]
    decipher.iv = iv
    decipher.update(Base64.decode64(encrypted)) + decipher.final
  end
end
# Uses a config file to load attributes and a special _last_keychain_id file
# to store last_keychain_id
# Gateway implementation backed by the server's config file rather than a DB.
# All attributes come from StraightServer::Config; the only mutable piece of
# state, last_keychain_id, is persisted to a "<name>_last_keychain_id" file
# in the .straight config directory.
class GatewayOnConfig

  include Straight::GatewayModule
  include GatewayModule

  # This is the key that allows users (those, who use the gateway,
  # online stores, for instance) to connect and create orders.
  # It is not used directly, but is mixed with all the params being sent
  # and a signed hash is calculated. Then the gateway checks whether the
  # signature is correct.
  attr_accessor :secret

  # This is used to generate the next address to accept payments
  attr_accessor :last_keychain_id

  # If set to false, doesn't require an unique id of the order along with
  # the signed hash of that id + secret to be passed into the #create_order method.
  attr_accessor :check_signature

  # A url to which the gateway will send an HTTP request with the status of the order data
  # (in JSON) when the status of the order is changed. The response should always be 200,
  # otherwise the gateway will assume something went wrong and will keep trying to send requests
  # to this url according to a specific schedule.
  attr_accessor :callback_url

  # This will be assigned the number that is the order in which this gateway follows in
  # the config file.
  attr_accessor :id

  attr_accessor :exchange_rate_adapter_names
  attr_accessor :orders_expiration_period

  # This affects whether it is possible to create a new order with the gateway.
  # If it's set to false, then it won't be possible to create a new order, but
  # it will keep checking on the existing ones.
  attr_accessor :active

  # Config-based gateways have no hashed ids, so the raw id is used as-is.
  def self.find_by_hashed_id(s)
    self.find_by_id(s)
  end

  def initialize
    initialize_callbacks
    initialize_exchange_rate_adapters
    initialize_blockchain_adapters
    initialize_status_check_schedule
  end

  # Because this is a config based gateway, we only save last_keychain_id
  # and nothing more.
  def save
    save_last_keychain_id!
  end

  # Loads last_keychain_id from a file in the .straight dir.
  # If the file doesn't exist, we create it. Later, whenever an attribute is updated,
  # we save it to the file.
  def load_last_keychain_id!
    @last_keychain_id_file ||= StraightServer::Initializer::ConfigDir.path + "/#{name}_last_keychain_id"
    # File.exist? replaces File.exists?, which was deprecated and removed in Ruby 3.2.
    if File.exist?(@last_keychain_id_file)
      self.last_keychain_id = File.read(@last_keychain_id_file).to_i
    else
      self.last_keychain_id = 0
      save
    end
  end

  # Persists the current last_keychain_id to its backing file.
  def save_last_keychain_id!
    @last_keychain_id_file ||= StraightServer::Initializer::ConfigDir.path + "/#{name}_last_keychain_id"
    File.open(@last_keychain_id_file, 'w') {|f| f.write(last_keychain_id) }
  end

  # This method is a replacement for the Sequel's model one used in DB version of the gateway
  # and it finds gateways using the index of @@gateways Array.
  def self.find_by_id(id)
    @@gateways[id.to_i-1]
  end

  # This will later be used in the #find_by_id. Because we don't use a DB,
  # the id will actually be the index of an element in this Array. Thus,
  # the order in which gateways follow in the config file is important.
  @@gateways = []

  # Create instances of Gateway by reading attributes from Config
  i = 0
  StraightServer::Config.gateways.each do |name, attrs|
    i += 1
    gateway = self.new
    gateway.pubkey = attrs['pubkey']
    gateway.confirmations_required = attrs['confirmations_required'].to_i
    gateway.order_class = attrs['order_class']
    gateway.secret = attrs['secret']
    gateway.check_signature = attrs['check_signature']
    gateway.callback_url = attrs['callback_url']
    gateway.default_currency = attrs['default_currency']
    gateway.orders_expiration_period = attrs['orders_expiration_period']
    gateway.active = attrs['active']
    gateway.name = name
    gateway.id = i
    gateway.exchange_rate_adapter_names = attrs['exchange_rate_adapters']
    gateway.initialize_exchange_rate_adapters
    gateway.load_last_keychain_id!
    @@websockets[i] = {}
    @@gateways << gateway
  end if StraightServer::Config.gateways

end
# It may not be a perfect way to implement such a thing, but it gives enough flexibility to people
# so they can simply start using a single gateway on their machines, a gateway which attributes are defined
# in a config file instead of a DB. That way they don't need special tools to access the DB and create
# a gateway, but can simply edit the config file.
# Selects the concrete Gateway implementation once, at load time, based on
# the configured source ('config' => file-backed, anything else => DB-backed).
Gateway = if StraightServer::Config.gateways_source == 'config'
  GatewayOnConfig
else
  GatewayOnDB
end
end
Fix: replace StraightServer logger with puts when reporting non-existent blockchain adapter
module StraightServer
# This module contains common features of Gateway, later to be included
# in one of the classes below.
module GatewayModule

  # Temporary fix for straight server benchmarking
  @@redis = StraightServer::Config.redis[:connection] if StraightServer::Config.redis

  # Maps gateway id => { order id => websocket connection } (see #websockets).
  @@websockets = {}

  # Fetches blockchain transactions for +address+, falling back through the
  # configured blockchain adapters (#try_adapters is provided elsewhere).
  def fetch_transactions_for(address)
    try_adapters(@blockchain_adapters) { |b| b.fetch_transactions_for(address) }
  end

  # NOTE(review): these inherit from Exception rather than StandardError, so a
  # bare `rescue` will NOT catch them — confirm this is intentional before changing.
  class InvalidSignature           < Exception; end
  class InvalidOrderId             < Exception; end
  class CallbackUrlBadResponse     < Exception; end
  class WebsocketExists            < Exception; end
  class WebsocketForCompletedOrder < Exception; end
  class GatewayInactive            < Exception; end

  # Raised by #initialize_blockchain_adapters when none of the configured
  # adapter names resolved to an adapter class.
  class NoBlockchainAdapters < Exception
    def message
      "No blockchain adapters were found! StraightServer cannot query the blockchain.\n" +
      "Check your ~/.straight/config.yml file and make sure valid blockchain adapters\n" +
      "are present."
    end
  end

  # Raised by #websockets when the gateway has no id yet.
  class NoWebsocketsForNewGateway < Exception
    def message
      "You're trying to get access to websockets on a Gateway that hasn't been saved yet"
    end
  end

  # Raised by the order-counter helpers when count_orders is off in the config.
  # NOTE(review): the message contains a typo ("You can do is using") — it is a
  # runtime string, left untouched here.
  class OrderCountersDisabled < Exception
    def message
      "Please enable order counting in config file! You can do is using the following option:\n\n" +
      " count_orders: true\n\n" +
      "and don't forget to provide Redis connection info by adding this to the config file as well:\n\n" +
      " redis:\n" +
      " host: localhost\n" +
      " port: 6379\n" +
      " db: null\n"
    end
  end

  # Total time window (seconds) during which callback delivery is retried;
  # see #send_callback_http_request.
  CALLBACK_URL_ATTEMPT_TIMEFRAME = 3600 # seconds

  ############# Initializers methods ########################################################
  # We have separate methods, because with GatewayOnDB they are called from #after_initialize
  # but in GatewayOnConfig they are called from #initialize itself.
  # #########################################################################################
  #

  # Instantiates one exchange-rate adapter per name in
  # #exchange_rate_adapter_names. Raises NameError (with a clearer message)
  # when a listed adapter class does not exist.
  def initialize_exchange_rate_adapters
    @exchange_rate_adapters ||= []
    if self.exchange_rate_adapter_names
      self.exchange_rate_adapter_names.each do |adapter|
        begin
          @exchange_rate_adapters << Straight::ExchangeRate.const_get("#{adapter}Adapter").new
        rescue NameError => e
          raise NameError, "No such adapter exists: Straight::ExchangeRate::#{adapter}Adapter"
        end
      end
    end
  end

  # Resolves each configured blockchain adapter name: first under
  # Straight::Blockchain, then as a top-level constant. Unknown names are
  # warned about and skipped; raises NoBlockchainAdapters when none resolve.
  def initialize_blockchain_adapters
    @blockchain_adapters = []
    StraightServer::Config.blockchain_adapters.each do |a|
      adapter = begin
        Straight::Blockchain.const_get("#{a}Adapter")
      rescue NameError
        begin
          Kernel.const_get(a)
        rescue NameError
          # Deliberately uses puts instead of StraightServer.logger here —
          # presumably the logger may not be available this early; TODO confirm.
          puts "WARNING: No blockchain adapter with the name #{a} was found!"
          nil
        end
      end
      @blockchain_adapters << adapter.mainnet_adapter if adapter
    end
    raise NoBlockchainAdapters if @blockchain_adapters.empty?
  end

  def initialize_callbacks
    # When the status of an order changes, we send an http request to the callback_url
    # and also notify a websocket client (if present, of course).
    @order_callbacks = [
      lambda do |order|
        StraightServer::Thread.new do
          send_callback_http_request     order
          send_order_to_websocket_client order
        end
      end
    ]
  end

  def initialize_status_check_schedule
    @status_check_schedule = Straight::GatewayModule::DEFAULT_STATUS_CHECK_SCHEDULE
  end
  #
  ############# END OF Initializers methods ##################################################

  # Creates a new order and saves into the DB. Checks if the signature
  # is correct first (when check_signature is enabled).
  #
  # @param attrs [Hash] order attributes; :signature is extracted and verified,
  #   :id / :amount / :currency / :btc_denomination / :data are used to build the order
  # @raise [GatewayInactive] when the gateway is switched off
  # @raise [InvalidOrderId]  when signatures are required and :id is missing/non-positive
  # @raise [InvalidSignature] when the supplied signature does not match
  def create_order(attrs={})
    raise GatewayInactive unless self.active
    StraightServer.logger.info "Creating new order with attrs: #{attrs}"
    signature = attrs.delete(:signature)
    if !check_signature || sign_with_secret(attrs[:id]) == signature
      raise InvalidOrderId if check_signature && (attrs[:id].nil? || attrs[:id].to_i <= 0)
      order = order_for_keychain_id(
        amount:           attrs[:amount],
        keychain_id:      increment_last_keychain_id!,
        currency:         attrs[:currency],
        btc_denomination: attrs[:btc_denomination]
      )
      order.id      = attrs[:id].to_i if attrs[:id]
      order.data    = attrs[:data]    if attrs[:data]
      order.gateway = self
      order.save
      self.save
      StraightServer.logger.info "Order #{order.id} created: #{order.to_h}"
      order
    else
      StraightServer.logger.warn "Invalid signature, cannot create an order for gateway (#{id})"
      raise InvalidSignature
    end
  end

  # Used to track the current keychain_id number, which is used by
  # Straight::Gateway to generate addresses from the pubkey. The number is supposed
  # to be incremented by 1. In the case of a Config file type of Gateway, the value
  # is stored in a file in the .straight directory.
  def increment_last_keychain_id!
    self.last_keychain_id += 1
    self.save
    self.last_keychain_id
  end

  # Registers a websocket for an order so status changes can be pushed to it.
  # Only one socket per order is allowed, and only for orders that are not
  # yet completed (status < 2).
  def add_websocket_for_order(ws, order)
    raise WebsocketExists            unless websockets[order.id].nil?
    raise WebsocketForCompletedOrder unless order.status < 2
    StraightServer.logger.info "Opening ws connection for #{order.id}"
    ws.on(:close) do |event|
      websockets.delete(order.id)
      StraightServer.logger.info "Closing ws connection for #{order.id}"
    end
    websockets[order.id] = ws
    ws
  end

  # Returns this gateway's order-id => websocket map; requires a saved gateway.
  def websockets
    raise NoWebsocketsForNewGateway unless self.id
    @@websockets[self.id]
  end

  # Pushes the order (as JSON) to its websocket, if one is registered,
  # then closes the connection.
  def send_order_to_websocket_client(order)
    if ws = websockets[order.id]
      ws.send(order.to_json)
      ws.close
    end
  end

  # Signs +content+ with the gateway secret using HMAC-SHA256 (hex-encoded),
  # applying the HMAC +level+ times (level: 2 is used for callback signatures).
  def sign_with_secret(content, level: 1)
    result = content.to_s
    level.times do
      result = OpenSSL::HMAC.digest('sha256', secret, result).unpack("H*").first
    end
    result
  end

  # Keeps the Redis order counters in sync when an order transitions between
  # statuses (decrements the old status bucket, increments the new one).
  def order_status_changed(order)
    statuses = Order::STATUSES.invert
    if StraightServer::Config.count_orders
      increment_order_counter!(statuses[order.old_status], -1) if order.old_status
      increment_order_counter!(statuses[order.status])
    end
    super
  end

  # Returns (and memoizes) a snapshot of all per-status order counters.
  # Pass reload: true to force a re-read from Redis.
  def order_counters(reload: false)
    return @order_counters if @order_counters && !reload
    @order_counters = {
      new:         get_order_counter(:new),
      unconfirmed: get_order_counter(:unconfirmed),
      paid:        get_order_counter(:paid),
      underpaid:   get_order_counter(:underpaid),
      overpaid:    get_order_counter(:overpaid),
      expired:     get_order_counter(:expired)
    }
  end

  # Reads one status counter from Redis.
  # NOTE(review): `.to_i` never returns nil, so the trailing `|| 0` is redundant.
  def get_order_counter(counter_name)
    raise OrderCountersDisabled unless StraightServer::Config.count_orders
    @@redis.get("#{StraightServer::Config.redis[:prefix]}:gateway_#{id}:#{counter_name}_orders_counter").to_i || 0
  end

  # Atomically adjusts one status counter in Redis by +by+ (may be negative).
  def increment_order_counter!(counter_name, by=1)
    raise OrderCountersDisabled unless StraightServer::Config.count_orders
    @@redis.incrby("#{StraightServer::Config.redis[:prefix]}:gateway_#{id}:#{counter_name}_orders_counter", by)
  end

  private

    # Tries to send a callback HTTP request to the resource specified
    # in the #callback_url. If it fails for any reason, it keeps trying for an hour (3600 seconds)
    # making 10 http requests, each delayed by twice the time the previous one was delayed.
    # This method is supposed to be running in a separate thread.
    #
    # NOTE(review): `rescue Exception` also swallows SignalException/SystemExit —
    # consider StandardError. Also the final "performed successfully" log line runs
    # even when all retries were exhausted (and once per recursion level) — confirm
    # whether that is intended.
    def send_callback_http_request(order, delay: 5)
      return if callback_url.nil?
      StraightServer.logger.info "Attempting to send request to the callback url for order #{order.id} to #{callback_url}..."
      # Composing the request uri here
      signature = self.check_signature ? "&signature=#{sign_with_secret(order.id, level: 2)}" : ''
      data      = order.data ? "&data=#{order.data}" : ''
      uri = URI.parse(callback_url + '?' + order.to_http_params + signature + data)
      begin
        response = Net::HTTP.get_response(uri)
        order.callback_response = { code: response.code, body: response.body }
        order.save
        raise CallbackUrlBadResponse unless response.code.to_i == 200
      rescue Exception => e
        if delay < CALLBACK_URL_ATTEMPT_TIMEFRAME
          sleep(delay)
          send_callback_http_request(order, delay: delay*2)
        else
          StraightServer.logger.warn "Callback request for order #{order.id} failed, see order's #callback_response field for details"
        end
      end
      StraightServer.logger.info "Callback request for order #{order.id} performed successfully"
    end

end
# Uses database to load and save attributes
# Gateway implementation backed by the `gateways` DB table (Sequel model).
# The secret is stored encrypted (AES-128-CBC, key derived from
# Config.server_secret) and transparently decrypted by the #secret reader.
class GatewayOnDB < Sequel::Model(:gateways)

  include Straight::GatewayModule
  include GatewayModule

  plugin :timestamps, create: :created_at, update: :updated_at
  plugin :serialization, :marshal, :exchange_rate_adapter_names
  # NOTE(review): this second, column-less :serialization call looks redundant
  # next to the one above — confirm whether it can be removed.
  plugin :serialization, :marshal
  plugin :after_initialize

  # Finds a gateway by its public hashed id (HMAC of the numeric id).
  def self.find_by_hashed_id(s)
    self.where(hashed_id: s).first
  end

  # Encrypts the plaintext secret before the row is first written.
  def before_create
    super
    encrypt_secret
  end

  # Once an id exists: register a websocket bucket for this gateway and store
  # the public hashed_id derived from the numeric id and the server secret.
  def after_create
    @@websockets[self.id] = {}
    update(hashed_id: OpenSSL::HMAC.digest('sha256', Config.server_secret, self.id.to_s).unpack("H*").first)
  end

  # Runs the shared initializers (see GatewayModule) every time a row is
  # loaded or instantiated; also ensures a websocket bucket for saved rows.
  def after_initialize
    @status_check_schedule = Straight::GatewayModule::DEFAULT_STATUS_CHECK_SCHEDULE
    @@websockets[self.id] ||= {} if self.id
    initialize_callbacks
    initialize_exchange_rate_adapters
    initialize_blockchain_adapters
    initialize_status_check_schedule
  end

  # We cannot allow to store gateway secret in a DB plaintext, this would be completely unsecure.
  # Although we use symmetrical encryption here and store the encryption key in the
  # server's in a special file (~/.straight/server_secret), which in turn can also be stolen,
  # this is still marginally better than doing nothing.
  #
  # Also, server administrators now have the freedom of developing their own strategy
  # of storing that secret - it doesn't have to be stored on the same machine.
  def secret
    decrypt_secret
  end

  def self.find_by_id(id)
    self[id]
  end

  private

    # Encrypts the plaintext secret column in place as "<iv>:<base64 ciphertext>"
    # (AES-128-CBC). Key and IV are the first 16 hex characters of HMAC-SHA256
    # digests derived from the server secret (IV additionally mixes in self.id).
    # A decrypt round-trip is verified before the value is written.
    def encrypt_secret
      cipher = OpenSSL::Cipher::AES.new(128, :CBC)
      cipher.encrypt
      cipher.key = OpenSSL::HMAC.digest('sha256', 'nonce', Config.server_secret).unpack("H*").first[0,16]
      cipher.iv  = iv = OpenSSL::HMAC.digest('sha256', 'nonce', "#{self.id}#{Config.server_secret}").unpack("H*").first[0,16]
      encrypted = cipher.update(self[:secret]) << cipher.final()
      base64_encrypted = Base64.strict_encode64(encrypted).encode('utf-8')
      result = "#{iv}:#{base64_encrypted}"
      # Check whether we can decrypt. It should not be possible to encrypt the
      # gateway secret unless we are sure we can decrypt it.
      if decrypt_secret(result) == self[:secret]
        self.secret = result
      else
        raise "Decrypted and original secrets don't match! Cannot proceed with writing the encrypted gateway secret."
      end
    end

    # Reverses #encrypt_secret: splits "<iv>:<base64>", derives the same key,
    # and returns the plaintext secret.
    def decrypt_secret(encrypted_field=self[:secret])
      decipher = OpenSSL::Cipher::AES.new(128, :CBC)
      iv, encrypted = encrypted_field.split(':')
      decipher.decrypt
      decipher.key = OpenSSL::HMAC.digest('sha256', 'nonce', Config.server_secret).unpack("H*").first[0,16]
      decipher.iv  = iv
      decipher.update(Base64.decode64(encrypted)) + decipher.final
    end

end
# Uses a config file to load attributes and a special _last_keychain_id file
# to store last_keychain_id
# Gateway implementation backed by the server's config file rather than a DB.
# All attributes come from StraightServer::Config; the only mutable piece of
# state, last_keychain_id, is persisted to a "<name>_last_keychain_id" file
# in the .straight config directory.
class GatewayOnConfig

  include Straight::GatewayModule
  include GatewayModule

  # This is the key that allows users (those, who use the gateway,
  # online stores, for instance) to connect and create orders.
  # It is not used directly, but is mixed with all the params being sent
  # and a signed hash is calculated. Then the gateway checks whether the
  # signature is correct.
  attr_accessor :secret

  # This is used to generate the next address to accept payments
  attr_accessor :last_keychain_id

  # If set to false, doesn't require an unique id of the order along with
  # the signed hash of that id + secret to be passed into the #create_order method.
  attr_accessor :check_signature

  # A url to which the gateway will send an HTTP request with the status of the order data
  # (in JSON) when the status of the order is changed. The response should always be 200,
  # otherwise the gateway will assume something went wrong and will keep trying to send requests
  # to this url according to a specific schedule.
  attr_accessor :callback_url

  # This will be assigned the number that is the order in which this gateway follows in
  # the config file.
  attr_accessor :id

  attr_accessor :exchange_rate_adapter_names
  attr_accessor :orders_expiration_period

  # This affects whether it is possible to create a new order with the gateway.
  # If it's set to false, then it won't be possible to create a new order, but
  # it will keep checking on the existing ones.
  attr_accessor :active

  # Config-based gateways have no hashed ids, so the raw id is used as-is.
  def self.find_by_hashed_id(s)
    self.find_by_id(s)
  end

  def initialize
    initialize_callbacks
    initialize_exchange_rate_adapters
    initialize_blockchain_adapters
    initialize_status_check_schedule
  end

  # Because this is a config based gateway, we only save last_keychain_id
  # and nothing more.
  def save
    save_last_keychain_id!
  end

  # Loads last_keychain_id from a file in the .straight dir.
  # If the file doesn't exist, we create it. Later, whenever an attribute is updated,
  # we save it to the file.
  def load_last_keychain_id!
    @last_keychain_id_file ||= StraightServer::Initializer::ConfigDir.path + "/#{name}_last_keychain_id"
    # File.exist? replaces File.exists?, which was deprecated and removed in Ruby 3.2.
    if File.exist?(@last_keychain_id_file)
      self.last_keychain_id = File.read(@last_keychain_id_file).to_i
    else
      self.last_keychain_id = 0
      save
    end
  end

  # Persists the current last_keychain_id to its backing file.
  def save_last_keychain_id!
    @last_keychain_id_file ||= StraightServer::Initializer::ConfigDir.path + "/#{name}_last_keychain_id"
    File.open(@last_keychain_id_file, 'w') {|f| f.write(last_keychain_id) }
  end

  # This method is a replacement for the Sequel's model one used in DB version of the gateway
  # and it finds gateways using the index of @@gateways Array.
  def self.find_by_id(id)
    @@gateways[id.to_i-1]
  end

  # This will later be used in the #find_by_id. Because we don't use a DB,
  # the id will actually be the index of an element in this Array. Thus,
  # the order in which gateways follow in the config file is important.
  @@gateways = []

  # Create instances of Gateway by reading attributes from Config
  i = 0
  StraightServer::Config.gateways.each do |name, attrs|
    i += 1
    gateway = self.new
    gateway.pubkey = attrs['pubkey']
    gateway.confirmations_required = attrs['confirmations_required'].to_i
    gateway.order_class = attrs['order_class']
    gateway.secret = attrs['secret']
    gateway.check_signature = attrs['check_signature']
    gateway.callback_url = attrs['callback_url']
    gateway.default_currency = attrs['default_currency']
    gateway.orders_expiration_period = attrs['orders_expiration_period']
    gateway.active = attrs['active']
    gateway.name = name
    gateway.id = i
    gateway.exchange_rate_adapter_names = attrs['exchange_rate_adapters']
    gateway.initialize_exchange_rate_adapters
    gateway.load_last_keychain_id!
    @@websockets[i] = {}
    @@gateways << gateway
  end if StraightServer::Config.gateways

end
# It may not be a perfect way to implement such a thing, but it gives enough flexibility to people
# so they can simply start using a single gateway on their machines, a gateway which attributes are defined
# in a config file instead of a DB. That way they don't need special tools to access the DB and create
# a gateway, but can simply edit the config file.
# Selects the concrete Gateway implementation once, at load time, based on
# the configured source ('config' => file-backed, anything else => DB-backed).
Gateway = if StraightServer::Config.gateways_source == 'config'
  GatewayOnConfig
else
  GatewayOnDB
end
end
|
# A video asset fetched from the Ooyala query API.
class Tangerine::Video < Tangerine::Base
  attr_accessor :size,
                :title,
                :thumbnail,
                :content,
                :updated_at,
                :embed_code,
                :description,
                :length,
                :status,
                :uploaded_at,
                :flight_start_time,
                :width,
                :height,
                :labels,
                :metadata,
                :stat

  # Default finder: every item of contentType 'Video'.
  finder do
    Tangerine.query('contentType' => 'Video').parsed_response['list']['item']
  end

  # def self.all
  #   Tangerine.query('contentType' => 'Video')
  # end

  # Extracts metadata and labels from the raw attributes before handing the
  # rest to the base initializer.
  def initialize(options={})
    @options = options
    add_metadata
    add_labels
    options.delete('metadata')
    options.delete('labels')
    super(options)
  end

  # Builds a player for this video, merging its embed code and dimensions
  # into the given options.
  def player(options={})
    options.merge!(:embed_code => embed_code, :width => width, :height => height)
    Tangerine::Player.new(options)
  end

  # Fetches the videos for options[:embed_code] (an Array) and returns them
  # in the same order as requested.
  def self.where(options)
    embed_codes = options[:embed_code].join(',')
    result = Tangerine.query('embedCode' => embed_codes, 'fields' => 'labels,metadata')
    items  = result.parsed_response['list']['item']
    items  = Tangerine::Base.prepare_items(items)
    # BUGFIX: the wrapped Video objects were previously discarded — the raw
    # item hashes were passed to order_videos!, which compares #embed_code.
    videos = items.collect { |item| Tangerine::Video.new(item) }
    Tangerine::Video.order_videos!(videos, options[:embed_code])
  end

  # Reorders +videos+ to match the order of +embed_codes+.
  # BUGFIX: flatten the result — select returns an Array per code, so the
  # previous version returned a nested array instead of a flat list of videos.
  def self.order_videos!(videos, embed_codes)
    ordered = []
    embed_codes.each do |code|
      ordered << videos.select { |video| video.embed_code == code }
    end
    ordered.flatten
  end

  # Plain-hash representation of all exposed attributes.
  def as_json(options = {})
    {:size => size,
     :title => title,
     :thumbnail => thumbnail,
     :content => content,
     :updated_at => updated_at,
     :embed_code => embed_code,
     :description => description,
     :length => length,
     :status => status,
     :uploaded_at => uploaded_at,
     :flight_start_time => flight_start_time,
     :width => width,
     :height => height,
     :labels => labels,
     :metadata => metadata,
     :stat => stat
    }
  end

  protected

  # Normalizes the raw 'labels' payload into self.labels.
  def add_labels
    return unless @options['labels']
    self.labels = Tangerine::Base.prepare_items(@options['labels']['label'])
  end

  # Normalizes the raw 'metadata' payload into a symbol-keyed hash.
  def add_metadata
    return unless @options['metadata']
    self.metadata = {}
    meta_data = @options['metadata']['metadataItem']
    items = Tangerine::Base.prepare_items(meta_data)
    items.each do |meta_item|
      self.metadata[meta_item['name'].to_sym] = meta_item['value']
    end
  end
end
Videos are now returned in the order they are displayed in Ooyala.
# A video asset fetched from the Ooyala query API.
class Tangerine::Video < Tangerine::Base
  attr_accessor :size,
                :title,
                :thumbnail,
                :content,
                :updated_at,
                :embed_code,
                :description,
                :length,
                :status,
                :uploaded_at,
                :flight_start_time,
                :width,
                :height,
                :labels,
                :metadata,
                :stat

  # Default finder: every item of contentType 'Video'.
  finder do
    Tangerine.query('contentType' => 'Video').parsed_response['list']['item']
  end

  # def self.all
  #   Tangerine.query('contentType' => 'Video')
  # end

  # Extracts metadata and labels from the raw attributes before handing the
  # rest to the base initializer.
  def initialize(options={})
    @options = options
    add_metadata
    add_labels
    options.delete('metadata')
    options.delete('labels')
    super(options)
  end

  # Builds a player for this video, merging its embed code and dimensions
  # into the given options.
  def player(options={})
    options.merge!(:embed_code => embed_code, :width => width, :height => height)
    Tangerine::Player.new(options)
  end

  # Fetches the videos for options[:embed_code] (an Array) and returns them
  # in the same order as requested.
  def self.where(options)
    embed_codes = options[:embed_code].join(',')
    result = Tangerine.query('embedCode' => embed_codes, 'fields' => 'labels,metadata')
    items = result.parsed_response['list']['item']
    items = Tangerine::Base.prepare_items(items)
    videos = items.collect { |item| Tangerine::Video.new(item) }
    Tangerine::Video.order_videos!(videos, options[:embed_code])
  end

  # Reorders +videos+ to match the order of +embed_codes+; returns a flat array.
  def self.order_videos!(videos, embed_codes)
    ordered = []
    embed_codes.each do |code|
      ordered << videos.select { |video| video.embed_code == code }
    end
    ordered.flatten
  end

  # Plain-hash representation of all exposed attributes.
  def as_json(options = {})
    {:size => size,
     :title => title,
     :thumbnail => thumbnail,
     :content => content,
     :updated_at => updated_at,
     :embed_code => embed_code,
     :description => description,
     :length => length,
     :status => status,
     :uploaded_at => uploaded_at,
     :flight_start_time => flight_start_time,
     :width => width,
     :height => height,
     :labels => labels,
     :metadata => metadata,
     :stat => stat
    }
  end

  protected

  # Normalizes the raw 'labels' payload into self.labels.
  def add_labels
    return unless @options['labels']
    self.labels = Tangerine::Base.prepare_items(@options['labels']['label'])
  end

  # Normalizes the raw 'metadata' payload into a symbol-keyed hash.
  def add_metadata
    return unless @options['metadata']
    self.metadata = {}
    meta_data = @options['metadata']['metadataItem']
    items = Tangerine::Base.prepare_items(meta_data)
    items.each do |meta_item|
      self.metadata[meta_item['name'].to_sym] = meta_item['value']
    end
  end
end
|
module TaskTempest
  # Periodically logs runtime statistics (task counts, thread utilization,
  # memory, open files, queue depth) for a task-processing "storm".
  class Bookkeeper
    attr_reader :logger

    # Sets one instance variable per option key — expects at least :logger,
    # :interval, :storm and :queue_factory (inferred from usage below; confirm).
    def initialize(options)
      options.each{ |k, v| instance_variable_set("@#{k}", v) }
    end

    # Accumulates finished executions; once @interval seconds have elapsed,
    # logs a stats snapshot and resets the reporting window.
    def report(executions)
      @timer ||= Time.now
      @executions ||= []
      @executions += executions
      if Time.now - @timer > @interval
        logger.info "[STATS] " + make_book.inspect
        @executions.clear
        @timer = Time.now
      end
    end

    # Builds the full statistics hash for the current window.
    def make_book
      # Do some setup.
      ObjectSpace.garbage_collect
      queue = @queue_factory.call
      book = {}
      # Reset memoized objects.
      @memory = nil
      @files = nil
      # Task success/error counts.
      book[:tasks] = {}
      book[:tasks][:counts] = task_counts
      book[:tasks][:per_thread] = tasks_per_thread
      book[:tasks][:durations] = task_durations
      book[:tasks][:throughput] = task_throughput
      # Thread (worker) info.
      book[:threads] = {}
      book[:threads][:busy] = @storm.busy_workers.length
      book[:threads][:idle] = @storm.size - book[:threads][:busy]
      book[:threads][:saturation] = (book[:threads][:busy] / @storm.size.to_f * 100).round(2)
      # Memory, Object, GC info.
      book[:memory] = {}
      # live_objects is interpreter-specific (e.g. REE); rescue nil elsewhere.
      book[:memory][:live_objects] = ObjectSpace.live_objects rescue nil
      book[:memory][:resident] = format_memory(get_memory(:resident))
      book[:memory][:virtual] = format_memory(get_memory(:virtual))
      # Open file counts.
      book[:files] = {}
      book[:files][:total_count] = get_files(:total)
      book[:files][:tcp_count] = get_files(:tcp)
      # Queue info.
      book[:queue] = {}
      book[:queue][:size] = queue.size if queue.respond_to?(:size)
      book[:queue][:backlog] = @storm.executions.inject(0){ |memo, e| memo += 1 unless e.started?; memo }
      book
    end

    # Totals for the window: { :tot, :err, :pct } where pct is the error ratio.
    # NOTE(review): #sum-with-block and #avg (below) look like project-defined
    # Enumerable extensions, not stdlib of this era — confirm.
    def task_counts
      tot = @executions.length
      err = @executions.sum{ |e| e.exception ? 1 : 0 }
      pct = begin
        if tot > 0
          (err.to_f / tot)
        else
          0.0
        end
      end
      { :tot => tot, :err => err, :pct => pct.round(2) }
    end

    # Tasks completed per minute over the current window, as "<n>/m".
    def task_throughput
      duration = Time.now - @timer
      per_sec = @executions.length.to_f / duration
      per_min = (per_sec * 60).round(2)
      "#{per_min}/m"
    end

    # Min/max/avg number of tasks handled per worker thread in the window.
    def tasks_per_thread
      counts_by_thread = @storm.threads.inject({}) do |memo, thread|
        memo[thread] = 0
        memo
      end
      @executions.each{ |e| counts_by_thread[e.thread] += 1 }
      counts = counts_by_thread.values
      { :min => counts.min, :max => counts.max, :avg => counts.avg.round(2) }
    end

    # Min/max/avg task duration (seconds) in the window, or "n/a" when empty.
    def task_durations
      durations = @executions.collect{ |execution| execution.duration }
      if durations.length > 0
        { :min => durations.min.round(3), :max => durations.max.round(3), :avg => durations.avg.round(3) }
      else
        "n/a"
      end
    end

    # Resident/virtual memory (KB, via ps) for this process; memoized per book.
    def get_memory(which)
      @memory ||= `ps -o rss= -o vsz= -p #{Process.pid}`.split.collect{ |s| s.strip } rescue [nil, nil]
      case which
      when :resident
        @memory[0].to_i
      when :virtual
        @memory[1].to_i
      end
    end

    # Open file descriptor counts (via lsof) for this process; memoized per book.
    def get_files(which)
      @files ||= begin
        output = `lsof -p #{Process.pid}` rescue ""
        output.split("\n")
      end
      case which
      when :total
        @files.length
      when :tcp
        @files.inject(0){ |memo, line| memo += 1 if line.downcase =~ /tcp/; memo }
      end
    end

    KB = 1024
    MB = KB**2

    # Pretty-prints a KB-denominated ps value as "<n>M" or "<n>K".
    def format_memory(memory)
      if memory > MB
        (memory / MB).to_s + "M"
      else
        (memory / KB).to_s + "K"
      end
    end
  end
end
Higher precision on pct stat.
module TaskTempest
  # Periodically logs runtime statistics (task counts, thread utilization,
  # memory, open files, queue depth) for a task-processing "storm".
  class Bookkeeper
    attr_reader :logger

    # Sets one instance variable per option key — expects at least :logger,
    # :interval, :storm and :queue_factory (inferred from usage below; confirm).
    def initialize(options)
      options.each{ |k, v| instance_variable_set("@#{k}", v) }
    end

    # Accumulates finished executions; once @interval seconds have elapsed,
    # logs a stats snapshot and resets the reporting window.
    def report(executions)
      @timer ||= Time.now
      @executions ||= []
      @executions += executions
      if Time.now - @timer > @interval
        logger.info "[STATS] " + make_book.inspect
        @executions.clear
        @timer = Time.now
      end
    end

    # Builds the full statistics hash for the current window.
    def make_book
      # Do some setup.
      ObjectSpace.garbage_collect
      queue = @queue_factory.call
      book = {}
      # Reset memoized objects.
      @memory = nil
      @files = nil
      # Task success/error counts.
      book[:tasks] = {}
      book[:tasks][:counts] = task_counts
      book[:tasks][:per_thread] = tasks_per_thread
      book[:tasks][:durations] = task_durations
      book[:tasks][:throughput] = task_throughput
      # Thread (worker) info.
      book[:threads] = {}
      book[:threads][:busy] = @storm.busy_workers.length
      book[:threads][:idle] = @storm.size - book[:threads][:busy]
      book[:threads][:saturation] = (book[:threads][:busy] / @storm.size.to_f * 100).round(2)
      # Memory, Object, GC info.
      book[:memory] = {}
      # live_objects is interpreter-specific (e.g. REE); rescue nil elsewhere.
      book[:memory][:live_objects] = ObjectSpace.live_objects rescue nil
      book[:memory][:resident] = format_memory(get_memory(:resident))
      book[:memory][:virtual] = format_memory(get_memory(:virtual))
      # Open file counts.
      book[:files] = {}
      book[:files][:total_count] = get_files(:total)
      book[:files][:tcp_count] = get_files(:tcp)
      # Queue info.
      book[:queue] = {}
      book[:queue][:size] = queue.size if queue.respond_to?(:size)
      book[:queue][:backlog] = @storm.executions.inject(0){ |memo, e| memo += 1 unless e.started?; memo }
      book
    end

    # Totals for the window: { :tot, :err, :pct } where pct is the error ratio
    # rounded to 3 decimal places.
    # NOTE(review): #sum-with-block and #avg (below) look like project-defined
    # Enumerable extensions, not stdlib of this era — confirm.
    def task_counts
      tot = @executions.length
      err = @executions.sum{ |e| e.exception ? 1 : 0 }
      pct = begin
        if tot > 0
          (err.to_f / tot)
        else
          0.0
        end
      end
      { :tot => tot, :err => err, :pct => pct.round(3) }
    end

    # Tasks completed per minute over the current window, as "<n>/m".
    def task_throughput
      duration = Time.now - @timer
      per_sec = @executions.length.to_f / duration
      per_min = (per_sec * 60).round(2)
      "#{per_min}/m"
    end

    # Min/max/avg number of tasks handled per worker thread in the window.
    def tasks_per_thread
      counts_by_thread = @storm.threads.inject({}) do |memo, thread|
        memo[thread] = 0
        memo
      end
      @executions.each{ |e| counts_by_thread[e.thread] += 1 }
      counts = counts_by_thread.values
      { :min => counts.min, :max => counts.max, :avg => counts.avg.round(2) }
    end

    # Min/max/avg task duration (seconds) in the window, or "n/a" when empty.
    def task_durations
      durations = @executions.collect{ |execution| execution.duration }
      if durations.length > 0
        { :min => durations.min.round(3), :max => durations.max.round(3), :avg => durations.avg.round(3) }
      else
        "n/a"
      end
    end

    # Resident/virtual memory (KB, via ps) for this process; memoized per book.
    def get_memory(which)
      @memory ||= `ps -o rss= -o vsz= -p #{Process.pid}`.split.collect{ |s| s.strip } rescue [nil, nil]
      case which
      when :resident
        @memory[0].to_i
      when :virtual
        @memory[1].to_i
      end
    end

    # Open file descriptor counts (via lsof) for this process; memoized per book.
    def get_files(which)
      @files ||= begin
        output = `lsof -p #{Process.pid}` rescue ""
        output.split("\n")
      end
      case which
      when :total
        @files.length
      when :tcp
        @files.inject(0){ |memo, line| memo += 1 if line.downcase =~ /tcp/; memo }
      end
    end

    KB = 1024
    MB = KB**2

    # Pretty-prints a KB-denominated ps value as "<n>M" or "<n>K".
    def format_memory(memory)
      if memory > MB
        (memory / MB).to_s + "M"
      else
        (memory / KB).to_s + "K"
      end
    end
  end
end
require 'thor'
# Thor helper that asks for a mountpoint and mounts the Alchemy engine in
# the host app's config/routes.rb.
class Alchemy::RoutesInjector < Thor
  include Thor::Actions
  def initialize; super; end
  no_tasks do
    # Prompts for the mountpoint (defaulting to "/") and injects the
    # `mount Alchemy::Engine` line right after the routes.draw opener.
    def inject
      @mountpoint = ask "\nWhere do you want to mount Alchemy CMS? (/)"
      @mountpoint = "/" if @mountpoint.empty?
      # Matches "<App>.routes.draw do" with an optional legacy |map| block arg.
      sentinel = /\.routes\.draw do(?:\s*\|map\|)?\s*$/
      inject_into_file "./config/routes.rb", "\n mount Alchemy::Engine => '#{@mountpoint}'\n", { :after => sentinel, :verbose => true }
    end
  end
end
namespace :alchemy do
  desc "Installs Alchemy CMS into your app."
  # Full install pipeline: create DB, copy migrations, mount the engine,
  # scaffold, migrate and seed — then print follow-up instructions.
  task :install do
    Rake::Task["db:create"].invoke
    Rake::Task["alchemy:install:migrations"].invoke
    Rake::Task["alchemy:mount"].invoke
    system("rails g alchemy:scaffold")
    Rake::Task["db:migrate"].invoke
    Rake::Task["alchemy:db:seed"].invoke
    # NOTE(review): @mountpoint is set on the RoutesInjector instance inside
    # alchemy:mount, not in this rake context — it likely interpolates as
    # empty below; confirm.
    puts <<-EOF
\\o/ Successfully installed Alchemy CMS \\o/
Now:
1. Start your Rails server:
rails server
2. Open your browser and enter the following URL:
http://localhost:3000/#{@mountpoint}
3. Follow the instructions to complete the installation!
Thank you for using Alchemy CMS!
http://alchemy-cms.com/
EOF
  end
  desc "Mounts Alchemy into your routes."
  task :mount do
    Alchemy::RoutesInjector.new.inject
  end
end
Adds an Alchemy demo kit note to the rake install task.
require 'thor'
# Thor helper that asks for a mountpoint and mounts the Alchemy engine in
# the host app's config/routes.rb.
class Alchemy::RoutesInjector < Thor
  include Thor::Actions
  def initialize; super; end
  no_tasks do
    # Prompts for the mountpoint (defaulting to "/") and injects the
    # `mount Alchemy::Engine` line right after the routes.draw opener.
    def inject
      @mountpoint = ask "\nWhere do you want to mount Alchemy CMS? (/)"
      @mountpoint = "/" if @mountpoint.empty?
      # Matches "<App>.routes.draw do" with an optional legacy |map| block arg.
      sentinel = /\.routes\.draw do(?:\s*\|map\|)?\s*$/
      inject_into_file "./config/routes.rb", "\n mount Alchemy::Engine => '#{@mountpoint}'\n", { :after => sentinel, :verbose => true }
    end
  end
end
namespace :alchemy do
  desc "Installs Alchemy CMS into your app."
  # Full install pipeline: create DB, copy migrations, mount the engine,
  # scaffold, migrate and seed — then print follow-up instructions.
  task :install do
    Rake::Task["db:create"].invoke
    Rake::Task["alchemy:install:migrations"].invoke
    Rake::Task["alchemy:mount"].invoke
    system("rails g alchemy:scaffold")
    Rake::Task["db:migrate"].invoke
    Rake::Task["alchemy:db:seed"].invoke
    # NOTE(review): @mountpoint is set on the RoutesInjector instance inside
    # alchemy:mount, not in this rake context — it likely interpolates as
    # empty below; confirm.
    puts <<-EOF
\\o/ Successfully installed Alchemy CMS \\o/
Now cd into your app folder and
1. Start your Rails server:
rails server
2. Open your browser and enter the following URL:
http://localhost:3000/#{@mountpoint}
3. Follow the instructions to complete the installation!
== First time Alchemy user?
Then we recommend to install the Alchemy demo kit.
Just add `gem "alchemy-demo_kit"` to your apps Gemfile and run `bundle install`.
Thank you for using Alchemy CMS!
http://alchemy-cms.com/
EOF
  end
  desc "Mounts Alchemy into your routes."
  task :mount do
    Alchemy::RoutesInjector.new.inject
  end
end
|
module WikiCloth
  # Buffer for {{...}} wiki constructs: template inclusions, variables and
  # parser functions (e.g. {{#if:...}}).
  class WikiBuffer::Var < WikiBuffer

    def initialize(data="",options={})
      super(data,options)
      self.buffer_type = "var"
      @in_quotes = false
    end

    # HTML inside a var buffer is left unparsed.
    def skip_html?
      true
    end

    def function_name
      @fname
    end

    # Renders the buffer. For parser functions, evaluates the built-in (or
    # delegates to the link handler). For template inclusions, fetches the
    # resource, substitutes {{{param}}} placeholders, stores the result back
    # into the buffer for re-parsing, and returns "".
    def to_s
      if self.is_function?
        ret = default_functions(function_name,params.collect { |p| p.strip })
        ret ||= @options[:link_handler].function(function_name, params.collect { |p| p.strip })
        ret.to_s
      else
        ret = @options[:link_handler].include_resource("#{params[0]}".strip,params[1..-1])
        # template params
        ret = ret.to_s.gsub(/\{\{\{\s*([A-Za-z0-9]+)\s*\}\}\}/) { |match| get_param($1) }
        # put template at beginning of buffer
        self.data = ret
        ""
      end
    end

    # Looks up a template parameter by position ("1", "2", ...) or by name.
    # A named param with a matching :name wins over a numbered match.
    def get_param(name)
      ret = nil
      # numbered params
      if name =~ /^[0-9]+$/
        ret = self.params[name.to_i].instance_of?(Hash) ? self.params[name.to_i][:value] : self.params[name.to_i]
      end
      # named params
      self.params.each do |param|
        ret = param[:value] if param[:name] == name
      end
      ret
    end

    # Built-in parser functions. Returns nil for unknown names so #to_s can
    # fall back to the link handler's function hook.
    def default_functions(name,params)
      case name
      when "#if"
        params.first.blank? ? params[2] : params[1]
      when "#switch"
        params.length.times do |i|
          temp = params[i].split("=")
          return temp[1].strip if temp[0].strip == params[0] && i != 0
        end
        return ""
      when "#expr"
        # NOTE(review): Math.eval is not Ruby stdlib — presumably defined
        # elsewhere in this project; confirm.
        Math.eval(params.first)
      when "#ifeq"
        # NOTE(review): this branch matches hex-looking strings but compares
        # them with to_i (base 10) — verify the intended semantics.
        if params[0] =~ /^[0-9A-Fa-f]+$/ && params[1] =~ /^[0-9A-Fa-f]+$/
          params[0].to_i == params[1].to_i ? params[2] : params[3]
        else
          params[0] == params[1] ? params[2] : params[3]
        end
      when "#len"
        params.first.length
      when "#sub"
        params.first[params[1].to_i,params[2].to_i]
      when "#pad"
        case params[3]
        when "right"
          params[0].ljust(params[1].to_i,params[2])
        when "center"
          params[0].center(params[1].to_i,params[2])
        else
          params[0].rjust(params[1].to_i,params[2])
        end
      end
    end

    # True when a function name was parsed (i.e. "{{#name:...}}" form).
    def is_function?
      self.function_name.nil? || self.function_name.blank? ? false : true
    end

    protected

    def function_name=(val)
      @fname = val
    end

    # Consumes one character of input, maintaining parser state; returns
    # false once the closing "}}" has been reached.
    def new_char()
      case
      when current_char == '|' && @in_quotes == false
        self.current_param = self.data
        self.data = ""
        self.params << ""
      # Start of either a function or a namespace change
      when current_char == ':' && @in_quotes == false && self.params.size <= 1
        self.function_name = self.data
        self.data = ""
      # Dealing with variable names within functions
      # and variables
      when current_char == '=' && @in_quotes == false && !is_function?
        self.current_param = self.data
        self.data = ""
        self.name_current_param()
      # End of a template, variable, or function
      when current_char == '}' && previous_char == '}'
        self.data.chop!
        self.current_param = self.data
        self.data = ""
        return false
      else
        self.data += current_char
      end
      return true
    end

  end
end
Support template params with default values
module WikiCloth
class WikiBuffer::Var < WikiBuffer
# Sets up a variable/template buffer on top of the generic WikiBuffer,
# tagging it as a "var" buffer and starting outside of any quoted text.
def initialize(data="",options={})
  super(data,options)
  self.buffer_type = "var"
  @in_quotes = false
end
# HTML inside a variable/template buffer is passed through untouched.
def skip_html?
  true
end
# Name of the parser function held by this buffer, or nil when the
# buffer is a plain template/variable reference.
def function_name
  @fname
end
# Renders the buffer. Parser functions are evaluated (built-ins first,
# then the configurable link handler); template references are resolved
# through the link handler and their {{{name|default}}} placeholders
# substituted via #get_param before being queued for further processing.
def to_s
  if self.is_function?
    ret = default_functions(function_name,params.collect { |p| p.strip })
    ret ||= @options[:link_handler].function(function_name, params.collect { |p| p.strip })
    ret.to_s
  else
    ret = @options[:link_handler].include_resource("#{params[0]}".strip,params[1..-1])
    # template params (supports {{{name}}} and {{{name|default}}})
    ret = ret.to_s.gsub(/\{\{\{\s*([A-Za-z0-9]+)+(|\|+([^}]+))\s*\}\}\}/) { |match| get_param($1.strip,$3.to_s.strip) }
    # put template at beginning of buffer
    self.data = ret
    ""
  end
end
# Looks up a template parameter by position or by name, falling back to
# +default+ when the parameter was not supplied. Named parameters take
# precedence over positional ones; the last matching name wins.
def get_param(name,default=nil)
  value = nil
  # positional lookup: an all-digit name indexes into the params array
  if name =~ /^[0-9]+$/
    entry = self.params[name.to_i]
    value = entry.instance_of?(Hash) ? entry[:value] : entry
  end
  # named lookup: scan every param; the last match overrides
  self.params.each do |entry|
    value = entry[:value] if entry[:name] == name
  end
  value.nil? ? default : value
end
# Implements the built-in parser functions ({{#if:}}, {{#switch:}}, ...).
# Returns nil for unknown names so callers can fall back to the
# configurable link handler.
def default_functions(name,params)
  case name
  when "#if"
    # params: [condition, then, else]
    params.first.blank? ? params[2] : params[1]
  when "#switch"
    # params: [tested value, "case=result", ...]; index 0 is the value
    params.length.times do |i|
      temp = params[i].split("=")
      return temp[1].strip if temp[0].strip == params[0] && i != 0
    end
    return ""
  when "#expr"
    # NOTE(review): Math.eval is not part of the Ruby stdlib; presumably
    # it is monkey-patched elsewhere in this project -- confirm.
    Math.eval(params.first)
  when "#ifeq"
    # Compare numerically only when both operands are decimal integers.
    # (Bug fix: the old pattern /^[0-9A-Fa-f]+$/ admitted hex digits but
    # converted with to_i, which parses decimal only, so "ab" and "cd"
    # both became 0 and compared equal.)
    if params[0] =~ /^[0-9]+$/ && params[1] =~ /^[0-9]+$/
      params[0].to_i == params[1].to_i ? params[2] : params[3]
    else
      params[0] == params[1] ? params[2] : params[3]
    end
  when "#len"
    params.first.length
  when "#sub"
    # substring: source, offset, length
    params.first[params[1].to_i,params[2].to_i]
  when "#pad"
    # pad params[0] to width params[1] with fill params[2]; params[3] side
    case params[3]
    when "right"
      params[0].ljust(params[1].to_i,params[2])
    when "center"
      params[0].center(params[1].to_i,params[2])
    else
      params[0].rjust(params[1].to_i,params[2])
    end
  end
end
# True when this buffer captured a parser-function name; false when the
# name is nil or blank.
def is_function?
  fname = self.function_name
  !(fname.nil? || fname.blank?)
end
protected
# Stores the parser-function name captured while scanning (see #new_char).
# Exposed read-only through #function_name.
def function_name=(val)
  @fname = val
end
# Consumes one scanner character and updates the buffer state machine.
# Returns false once the closing "}}" is seen (buffer complete), true
# while more input is expected. All delimiters are ignored in quotes.
def new_char()
  case
  # parameter separator: finish the current param, start a fresh one
  when current_char == '|' && @in_quotes == false
    self.current_param = self.data
    self.data = ""
    self.params << ""
  # Start of either a function or a namespace change
  when current_char == ':' && @in_quotes == false && self.params.size <= 1
    self.function_name = self.data
    self.data = ""
  # Dealing with variable names within functions
  # and variables
  when current_char == '=' && @in_quotes == false && !is_function?
    self.current_param = self.data
    self.data = ""
    self.name_current_param()
  # End of a template, variable, or function
  when current_char == '}' && previous_char == '}'
    # chop! removes the first '}' that was already appended to data
    self.data.chop!
    self.current_param = self.data
    self.data = ""
    return false
  else
    self.data += current_char
  end
  return true
end
end
end
|
desc "Remove system packages to update when none"
namespace :cleanup do
  task :updates => :environment do
    # Server names that still have a meaningful update file (>= 2 lines).
    servers_with_updates = []
    Dir.glob("data/update/*").each do |path|
      next unless File.readlines(path).count >= 2
      servers_with_updates << File.basename(path).gsub(/\.(apt|yum)$/, "")
    end
    # Drop upgrades whose server is gone or has no pending update file.
    Upgrade.includes(:server).each do |upgrade|
      server = upgrade.server
      upgrade.destroy if server.blank? || !server.name.in?(servers_with_updates)
    end
  end
end
Don't clean up updates at all; they should be destroyed with the server if needed
|
require 'descriptive_statistics/safe'
module XMLMunger
class StateError < StandardError; end
class ListHeuristics
# Wraps a list of values (extracted from XML) for type inference.
#
# @param list [Array] the raw values to analyze
# @raise [ArgumentError] when anything other than an Array is given
def initialize(list)
  raise ArgumentError, "Argument must be an array" unless list.is_a?(Array)
  @list = list
end
# True when the wrapped list has no elements (memoized).
def empty?
  @empty = @list.empty? if @empty.nil?
  @empty
end
# True when the wrapped list holds exactly one element (memoized).
def singleton?
  @singleton = (@list.length == 1) if @singleton.nil?
  @singleton
end
# True when the wrapped list holds more than one element (memoized).
def multiple?
  @multiple = (@list.length > 1) if @multiple.nil?
  @multiple
end
# Returns the most specific class/module shared by every element of +of+
# (defaults to the wrapped list), found by intersecting the elements'
# ancestor chains; the first entry of the intersection is the closest
# common ancestor. Results are memoized per input array.
def common_type(of = nil)
  @common_types ||= {}
  of ||= @list
  @common_types[of] ||= of.map{|x|x.class.ancestors}.reduce(:&).first
end
# Inferred data types excluded from variable extraction (see #filter_types).
def skipped_types
  @skipped_types ||= [:strings]
end
# True when the list contains two or more hashes that all share the
# exact same key set (and key order) -- i.e. rows of a uniform table.
# Note: +keys+ is assigned inline mid-expression and reused below.
def shared_key_hashes?
  @shared_key_hashes ||=
    multiple? &&
    common_type == Hash &&
    (keys = @list.first.keys) &&
    @list[1..-1].all? { |hash| hash.keys == keys }
end
# Entry point: reduces the wrapped list to a flat hash of derived
# variables. Uniform hash rows are merged column-wise and classified per
# column; any other list is classified as a whole. Returns {} for an
# empty list and {nil => value} for a single-element list.
def to_variable_hash
  return {} if empty?
  return {nil => @list.first} if singleton?
  if shared_key_hashes?
    merged = merge_hashes(@list)
    typed = classify(merged)
  else
    type, data = identity(@list)
    typed = { nil => { type: type, data: data } }
  end
  apply(typed)
end
private
# call data extraction functions
# returns a variable hash
# call data extraction functions
# returns a variable hash
#
# Dispatches each classified column to its extract_<type> method and
# flattens the results, prefixing each key with the column name (the
# nil column yields un-prefixed keys).
def apply(input)
  filter_types(input).reduce({}) do |out, (var,with)|
    func = "extract_#{with[:type]}".to_sym
    self.send(func, with[:data]).each do |key,val|
      ind = [var,key].map(&:to_s).reject{ |s| s.empty? }.join('_')
      out[ind] = val
    end
    out
  end
end
# Allow caller to ignore certain data types
# Drops entries whose inferred type should not be extracted: the
# configurable #skipped_types plus :notype and :other.
def filter_types(input)
  excluded = skipped_types + [:notype, :other]
  input.reject { |_key, info| excluded.include?(info[:type]) }
end
# merge multiple hashes with the same keys
# resulting hash values are arrays of the input values
# Merges hashes sharing one key set into a single hash whose values are
# arrays of the per-row values, preserving row order.
def merge_hashes(hashes)
  merged = {}
  hashes.first.keys.each { |key| merged[key] = [] }
  hashes.each do |hash|
    hash.each { |key, value| merged[key] << value }
  end
  merged
end
# discover type information for each
# key,value pair of the input hash
# discover type information for each
# key,value pair of the input hash
#
# Maps every column (key => list of values) to a {type:, data:} pair via
# #identity; each column gets its own memoization store.
def classify(hash)
  hash.reduce({}) do |acc, (var, vals)|
    type, data = identity(vals)
    acc[var] = {
      type: type,
      data: data
    }
    acc
  end
end
# assign the list of values into its proper type
# also return the appropriate transformation of the input list
# assign the list of values into its proper type
# also return the appropriate transformation of the input list
#
# Predicates are tried in priority order: the first that matches wins
# (e.g. an all-boolean list is :boolean even though it is also unique).
TYPES = [:boolean?, :singleton?, :days?, :numeric?, :strings?, :notype?]
def identity(vals, memo = {})
  TYPES.each do |key|
    if compute(key, vals, memo)
      # strip the trailing "?" to get the type name / transform key
      type = key[0...-1].to_sym
      val = compute(type, vals, memo)
      return type, val
    end
  end
  return :other, vals
end
# memoized computations for #identity
# memoized computations for #identity
#
# NOTE: `store[what] ||= ...` re-runs predicates that evaluate to
# false/nil, so only truthy results are effectively cached.
def compute(what, vals, store)
  store[what] ||= case what
  # ifs
  when :singleton?
    compute(:unique, vals, store).count == 1
  when :boolean?
    all_type?(vals, TrueClass, FalseClass)
  when :days?
    all_type?(vals, Date, Time)
  when :numeric?
    compute(:numeric, vals, store).all?
  when :strings?
    # Module#<= returns nil (falsy) when the types are unrelated
    common_type(vals) <= String
  when :notype?
    common_type(vals) == Object
  # thens
  when :singleton
    compute(:unique, vals, store).first
  when :unique
    vals.uniq
  when :numeric
    vals.map{ |x| to_numeric(x) }
  when :days
    # represent each value as integer days since the Unix epoch
    dates = vals.map{ |x| x.to_date }
    epoch = Date.new(1970,1,1)
    dates.map { |d| (d - epoch).to_i }
  else
    vals
  end
end
# Data Extraction Functions
# A single repeated value maps to one unnamed variable.
def extract_singleton(item)
  { nil => item }
end
# Summarizes a boolean column: :has is a bitmask (1 = saw false,
# 2 = saw true) and :vec is the count of trues minus count of falses.
def extract_boolean(vals)
  flags = 0
  balance = 0
  vals.each do |value|
    if value.is_a?(TrueClass)
      flags |= 2
      balance += 1
    elsif value.is_a?(FalseClass)
      flags |= 1
      balance -= 1
    end
  end
  {has: flags, vec: balance}
end
# Counts occurrences of each distinct string and reports them under
# generated "is_<value>" variable names.
def extract_strings(items)
  counts = Hash.new(0)
  items.each { |item| counts[item] += 1 }
  result = {}
  counts.each { |item, count| result[var_name_for_string(item)] = count }
  result
end
# Day offsets are sorted and summarized by their difference statistics.
def extract_days(days)
  difference_comps(days.sort)
end
# Summarizes numeric lists via difference statistics, except for
# apparent junk: ascending consecutive runs of >= 3 values (likely
# IDs/indices) and lists of uniformly huge values.
def extract_numeric(numbers)
  case
  when is_sequence?(numbers,3), all_large?(numbers)
    {} # do nothing; junk data
  else
    difference_comps(numbers)
  end
end
# Utility Functions
# Coerces a value to Integer when it is integral, Float otherwise, and
# nil when it is not numeric at all.
def to_numeric(anything)
  begin
    as_float = Float(anything)
  rescue
    return nil
  end
  begin
    as_int = Integer(anything)
  rescue
    as_int = as_float
  end
  as_float == as_int ? as_int : as_float
end
# True when +nums+ is exactly an ascending run of consecutive integers
# (e.g. [4,5,6]) with at least +min_length+ elements (when given).
#
# Bug fix: uses Range#first(count) instead of materializing the whole
# (min..max).to_a, so a sparse list like [0, 10**9] no longer allocates
# a billion-element array.
def is_sequence?(nums, min_length = nil)
  (min_length.nil? || nums.count >= min_length) &&
    nums == (nums.min..nums.max).first(nums.count)
end
# True when every value exceeds one million (id-like junk heuristic).
def all_large?(nums)
  nums.all? { |num| num > 1_000_000 }
end
# True when every object is an instance of at least one of +types+.
def all_type?(objects, *types)
  objects.all? do |obj|
    types.any? { |type| obj.is_a?(type) }
  end
end
# Summary statistics for an ordered list: length, min, max, and (for 2+
# elements) the min/max/median of consecutive differences. Relies on
# the descriptive_statistics gem for #median.
def difference_comps(data)
  stats = {}
  stats[:length] = data.count
  stats[:min] = data.min
  stats[:max] = data.max
  if stats[:length] > 1
    diffs = data.each_cons(2).map { |a,b| b-a }
    diffs.extend(DescriptiveStatistics)
    stats[:min_diff] = diffs.min
    stats[:max_diff] = diffs.max
    # NOTE: despite the name, :avg_diff stores the *median* difference
    stats[:avg_diff] = diffs.median
  end
  stats
end
# Builds an "is_<value>" variable name from a string (or "is_nil" for
# nil), collapsing whitespace runs to underscores and downcasing.
def var_name_for_string(key)
  return "is_nil" if key.nil?
  "is_" + key.to_s.strip.gsub(/\s+/, "_").downcase
end
end
end
Fix large-range issue: avoid materializing the full (min..max) array in is_sequence?
require 'descriptive_statistics/safe'
module XMLMunger
class StateError < StandardError; end
class ListHeuristics
# Wraps a list of values (extracted from XML) for type inference.
#
# @param list [Array] the raw values to analyze
# @raise [ArgumentError] when anything other than an Array is given
def initialize(list)
  raise ArgumentError, "Argument must be an array" unless list.is_a?(Array)
  @list = list
end
# Whether the wrapped list contains no values (memoized).
def empty?
  @empty = @list.empty? if @empty.nil?
  @empty
end
# Whether the wrapped list contains exactly one value (memoized).
def singleton?
  @singleton = (@list.length == 1) if @singleton.nil?
  @singleton
end
# Whether the wrapped list contains two or more values (memoized).
def multiple?
  @multiple = (@list.length > 1) if @multiple.nil?
  @multiple
end
# Closest class/module shared by every element of +of+ (defaults to the
# wrapped list): intersects the elements' ancestor chains and takes the
# first survivor. Memoized per input array.
def common_type(of = nil)
  @common_types ||= {}
  of ||= @list
  @common_types[of] ||= of.map{|x|x.class.ancestors}.reduce(:&).first
end
# Inferred data types excluded from variable extraction (see #filter_types).
def skipped_types
  @skipped_types ||= [:strings]
end
# True when the list is 2+ hashes that all share the exact same key set
# (and key order) -- rows of a uniform table. Note the inline
# assignment of +keys+ mid-expression, reused in the final check.
def shared_key_hashes?
  @shared_key_hashes ||=
    multiple? &&
    common_type == Hash &&
    (keys = @list.first.keys) &&
    @list[1..-1].all? { |hash| hash.keys == keys }
end
# Entry point: reduces the wrapped list to a flat hash of derived
# variables. Uniform hash rows are merged column-wise and classified
# per column; other lists are classified as a whole. {} for an empty
# list; {nil => value} for a single-element list.
def to_variable_hash
  return {} if empty?
  return {nil => @list.first} if singleton?
  if shared_key_hashes?
    merged = merge_hashes(@list)
    typed = classify(merged)
  else
    type, data = identity(@list)
    typed = { nil => { type: type, data: data } }
  end
  apply(typed)
end
private
# call data extraction functions
# returns a variable hash
# call data extraction functions
# returns a variable hash
#
# Dispatches each classified column to its extract_<type> method and
# flattens the results, prefixing keys with the column name (the nil
# column yields un-prefixed keys).
def apply(input)
  filter_types(input).reduce({}) do |out, (var,with)|
    func = "extract_#{with[:type]}".to_sym
    self.send(func, with[:data]).each do |key,val|
      ind = [var,key].map(&:to_s).reject{ |s| s.empty? }.join('_')
      out[ind] = val
    end
    out
  end
end
# Allow caller to ignore certain data types
# Removes entries whose inferred type is excluded from extraction:
# #skipped_types plus :notype and :other.
def filter_types(input)
  excluded = skipped_types + [:notype, :other]
  input.reject { |_key, info| excluded.include?(info[:type]) }
end
# merge multiple hashes with the same keys
# resulting hash values are arrays of the input values
# Merges hashes sharing one key set into a single hash whose values are
# arrays of the per-row values, preserving row order.
def merge_hashes(hashes)
  merged = {}
  hashes.first.keys.each { |key| merged[key] = [] }
  hashes.each do |hash|
    hash.each { |key, value| merged[key] << value }
  end
  merged
end
# discover type information for each
# key,value pair of the input hash
# discover type information for each
# key,value pair of the input hash
#
# Maps every column (key => list of values) to a {type:, data:} pair
# via #identity; each column gets its own memoization store.
def classify(hash)
  hash.reduce({}) do |acc, (var, vals)|
    type, data = identity(vals)
    acc[var] = {
      type: type,
      data: data
    }
    acc
  end
end
# assign the list of values into its proper type
# also return the appropriate transformation of the input list
# assign the list of values into its proper type
# also return the appropriate transformation of the input list
#
# Predicates run in priority order; the first match wins (an
# all-boolean list is :boolean even though it is also unique).
TYPES = [:boolean?, :singleton?, :days?, :numeric?, :strings?, :notype?]
def identity(vals, memo = {})
  TYPES.each do |key|
    if compute(key, vals, memo)
      # strip the trailing "?" to get the type name / transform key
      type = key[0...-1].to_sym
      val = compute(type, vals, memo)
      return type, val
    end
  end
  return :other, vals
end
# memoized computations for #identity
# memoized computations for #identity
#
# NOTE: `store[what] ||= ...` re-runs predicates that evaluate to
# false/nil, so only truthy results are effectively cached.
def compute(what, vals, store)
  store[what] ||= case what
  # ifs
  when :singleton?
    compute(:unique, vals, store).count == 1
  when :boolean?
    all_type?(vals, TrueClass, FalseClass)
  when :days?
    all_type?(vals, Date, Time)
  when :numeric?
    compute(:numeric, vals, store).all?
  when :strings?
    # Module#<= returns nil (falsy) when the types are unrelated
    common_type(vals) <= String
  when :notype?
    common_type(vals) == Object
  # thens
  when :singleton
    compute(:unique, vals, store).first
  when :unique
    vals.uniq
  when :numeric
    vals.map{ |x| to_numeric(x) }
  when :days
    # represent each value as integer days since the Unix epoch
    dates = vals.map{ |x| x.to_date }
    epoch = Date.new(1970,1,1)
    dates.map { |d| (d - epoch).to_i }
  else
    vals
  end
end
# Data Extraction Functions
# A single repeated value becomes one unnamed variable.
def extract_singleton(item)
  { nil => item }
end
# Summarizes a boolean column: :has is a bitmask (1 = saw false,
# 2 = saw true) and :vec is trues minus falses.
def extract_boolean(vals)
  flags = 0
  balance = 0
  vals.each do |value|
    if value.is_a?(TrueClass)
      flags |= 2
      balance += 1
    elsif value.is_a?(FalseClass)
      flags |= 1
      balance -= 1
    end
  end
  {has: flags, vec: balance}
end
# Counts occurrences of each distinct string under generated
# "is_<value>" variable names.
def extract_strings(items)
  counts = Hash.new(0)
  items.each { |item| counts[item] += 1 }
  result = {}
  counts.each { |item, count| result[var_name_for_string(item)] = count }
  result
end
# Day offsets are sorted and summarized by difference statistics.
def extract_days(days)
  difference_comps(days.sort)
end
# Summarizes numeric lists via difference statistics, except for
# apparent junk: ascending consecutive runs of >= 3 values (likely
# IDs/indices) and lists of uniformly huge values.
def extract_numeric(numbers)
  case
  when is_sequence?(numbers,3), all_large?(numbers)
    {} # do nothing; junk data
  else
    difference_comps(numbers)
  end
end
# Utility Functions
# Coerces a value to Integer when integral, Float otherwise, and nil
# when it is not numeric at all.
def to_numeric(anything)
  begin
    as_float = Float(anything)
  rescue
    return nil
  end
  begin
    as_int = Integer(anything)
  rescue
    as_int = as_float
  end
  as_float == as_int ? as_int : as_float
end
# True when +nums+ is exactly an ascending run of consecutive integers
# of at least +min_length+ elements (when a minimum is given). Uses
# Range#first(count) so sparse lists never materialize a huge range.
def is_sequence?(nums, min_length = nil)
  return false if min_length && nums.count < min_length
  nums == (nums.min..nums.max).first(nums.count)
end
# True when every value exceeds one million (id-like junk heuristic).
def all_large?(nums)
  nums.all? { |num| num > 1_000_000 }
end
# True when every object is an instance of at least one of +types+.
def all_type?(objects, *types)
  objects.all? do |obj|
    types.any? { |type| obj.is_a?(type) }
  end
end
# Summary statistics for an ordered list: length, min, max, and (for 2+
# elements) the min/max/median of consecutive differences. Relies on
# the descriptive_statistics gem for #median.
def difference_comps(data)
  stats = {}
  stats[:length] = data.count
  stats[:min] = data.min
  stats[:max] = data.max
  if stats[:length] > 1
    diffs = data.each_cons(2).map { |a,b| b-a }
    diffs.extend(DescriptiveStatistics)
    stats[:min_diff] = diffs.min
    stats[:max_diff] = diffs.max
    # NOTE: despite the name, :avg_diff stores the *median* difference
    stats[:avg_diff] = diffs.median
  end
  stats
end
# Builds an "is_<value>" variable name from a string (or "is_nil" for
# nil), collapsing whitespace runs to underscores and downcasing.
def var_name_for_string(key)
  return "is_nil" if key.nil?
  "is_" + key.to_s.strip.gsub(/\s+/, "_").downcase
end
end
end
|
require 'stringio'
require 'ostruct'
module YARD
module Parser
# Raised when an object is recognized but cannot be documented. This
# generally occurs when the Ruby syntax used to declare an object is
# too dynamic in nature.
class UndocumentableError < Exception; end
# Raised when the parser sees a Ruby syntax error
class ParserSyntaxError < UndocumentableError; end
# Responsible for parsing a list of files in order. The
# {#parse} method of this class can be called from the
# {SourceParser#globals} globals state list to re-enter
# parsing for the remainder of files in the list recursively.
#
# @see Processor#parse_remaining_files
class OrderedParser
# @return [Array<String>] the list of remaining files to parse
attr_accessor :files
# Creates a new OrderedParser with the global state and a list
# of files to parse.
#
# @note OrderedParser sets itself as the +ordered_parser+ key on
# global_state for later use in {Handlers::Processor}.
# @param [OpenStruct] global_state a structure containing all global
# state during parsing
# @param [Array<String>] files the list of files to parse
def initialize(global_state, files)
  @global_state = global_state
  @files = files.dup # defensive copy: #parse destructively shifts entries
  @global_state.ordered_parser = self # expose self for re-entrant parsing
end
# Parses the remainder of the {#files} list.
#
# @see Processor#parse_remaining_files
def parse
  # Destructively consume the queue so a handler that re-enters parsing
  # (via global_state.ordered_parser) never sees a file twice.
  while file = files.shift
    log.capture("Parsing #{file}") do
      SourceParser.new(SourceParser.parser_type, @global_state).parse(file)
    end
  end
end
end
# Responsible for parsing a source file into the namespace. Parsing
# also invokes handlers to process the parsed statements and generate
# any code objects that may be recognized.
#
# == Custom Parsers
# SourceParser allows custom parsers to be registered and called when
# a certain filetype is recognized. To register a parser and hook it
# up to a set of file extensions, call {register_parser_type}
#
# @see register_parser_type
# @see Handlers::Base
# @see CodeObjects::Base
class SourceParser
SHEBANG_LINE = /\A\s*#!\S+/
ENCODING_LINE = /\A(?:\s*#*!.*\r?\n)?\s*(?:#+|\/\*+|\/\/+).*coding\s*[:=]{1,2}\s*([a-z\d_\-]+)/i
# Byte order marks for various encodings
# @since 0.7.0
ENCODING_BYTE_ORDER_MARKS = {
'utf-8' => "\xEF\xBB\xBF",
# Not yet supported
#'utf-16be' => "\xFE\xFF",
#'utf-16le' => "\xFF\xFE",
#'utf-32be' => "\x00\x00\xFF\xFE",
#'utf-32le' => "\xFF\xFE",
}
class << self
# @return [Symbol] the default parser type (defaults to :ruby)
attr_reader :parser_type
# Sets the default parser type, downgrading :ruby to :ruby18 when the
# Ripper library is unavailable (see {validated_parser_type}).
def parser_type=(value)
  @parser_type = validated_parser_type(value)
end
# Parses a path or set of paths
#
# @param [String, Array<String>] paths a path, glob, or list of paths to
# parse
# @param [Array<String, Regexp>] excluded a list of excluded path matchers
# @param [Fixnum] level the logger level to use during parsing. See
# {YARD::Logger}
# @return [void]
def parse(paths = ["{lib,app}/**/*.rb", "ext/**/*.c"], excluded = [], level = log.level)
  log.debug("Parsing #{paths.inspect} with `#{parser_type}` parser")
  # Normalize exclusions to regexps (strings match case-insensitively).
  excluded = excluded.map do |path|
    case path
    when Regexp; path
    else Regexp.new(path.to_s, Regexp::IGNORECASE)
    end
  end
  # Expand directories into globs, expand globs (shortest paths first),
  # then drop non-files and anything matching an exclusion.
  files = [paths].flatten.
    map {|p| File.directory?(p) ? "#{p}/**/*.{rb,c}" : p }.
    map {|p| p.include?("*") ? Dir[p].sort_by {|f| f.length } : p }.flatten.
    reject {|p| !File.file?(p) || excluded.any? {|re| p =~ re } }
  log.enter_level(level) do
    parse_in_order(*files.uniq)
  end
end
# Parses a string +content+
#
# @param [String] content the block of code to parse
# @param [Symbol] ptype the parser type to use. See {parser_type}.
# @return the parser object that was used to parse +content+
# Parses a string +content+
#
# @param [String] content the block of code to parse
# @param [Symbol] ptype the parser type to use. See {parser_type}.
# @return the parser object that was used to parse +content+
def parse_string(content, ptype = parser_type)
  parser = new(ptype)
  parser.parse(StringIO.new(content))
end
# Tokenizes but does not parse the block of code
#
# @param [String] content the block of code to tokenize
# @param [Symbol] ptype the parser type to use. See {parser_type}.
# @return [Array] a list of tokens
# Tokenizes but does not parse the block of code
#
# @param [String] content the block of code to tokenize
# @param [Symbol] ptype the parser type to use. See {parser_type}.
# @return [Array] a list of tokens
def tokenize(content, ptype = parser_type)
  parser = new(ptype)
  parser.tokenize(content)
end
# Registers a new parser type.
#
# @example Registering a parser for "java" files
# SourceParser.register_parser_type :java, JavaParser, 'java'
# @param [Symbol] type a symbolic name for the parser type
# @param [Base] parser_klass a class that implements parsing and tokenization
# @param [Array<String>, String, Regexp] extensions a list of extensions or a
# regex to match against the file extension
# @return [void]
# @see Parser::Base
def register_parser_type(type, parser_klass, extensions = nil)
  # Module#> is true only for proper subclasses of Parser::Base
  unless Base > parser_klass
    raise ArgumentError, "expecting parser_klass to be a subclass of YARD::Parser::Base"
  end
  parser_type_extensions[type.to_sym] = extensions if extensions
  parser_types[type.to_sym] = parser_klass
end
# @return [Hash{Symbol=>Object}] a list of registered parser types
# @private
# @since 0.5.6
# NOTE(review): attr_reader is declared then immediately undef'd --
# presumably kept only so the attribute is documented; the real storage
# is a class variable shared across subclasses. Confirm before removing.
attr_reader :parser_types
undef parser_types
def parser_types; @@parser_types ||= {} end
def parser_types=(value) @@parser_types = value end
# @return [Hash] a list of registered parser type extensions
# @private
# @since 0.5.6
# NOTE(review): same attr_reader/undef pattern as parser_types above --
# attribute exists for documentation; storage is a class variable.
attr_reader :parser_type_extensions
undef parser_type_extensions
def parser_type_extensions; @@parser_type_extensions ||= {} end
def parser_type_extensions=(value) @@parser_type_extensions = value end
# Finds a parser type that is registered for the extension. If no
# type is found, the default Ruby type is returned.
#
# @return [Symbol] the parser type to be used for the extension
# @since 0.5.6
# Finds a parser type that is registered for the extension. If no
# type is found, the default Ruby type is returned.
#
# @return [Symbol] the parser type to be used for the extension
# @since 0.5.6
def parser_type_for_extension(extension)
  match = parser_type_extensions.find do |_type, patterns|
    [patterns].flatten.any? { |pattern| pattern === extension }
  end
  validated_parser_type(match ? match.first : :ruby)
end
# Returns the validated parser type. Basically, enforces that :ruby
# type is never set if the Ripper library is not available
#
# @param [Symbol] type the parser type to set
# @return [Symbol] the validated parser type
# @private
# Returns the validated parser type, forcing :ruby down to :ruby18
# whenever the Ripper library is unavailable.
#
# @param [Symbol] type the parser type to set
# @return [Symbol] the validated parser type
# @private
def validated_parser_type(type)
  return :ruby18 if type == :ruby && !defined?(::Ripper)
  type
end
# @group Parser Callbacks
# Registers a callback to be called before a list of files is parsed
# via {parse}. The block passed to this method will be called on
# subsequent parse calls.
#
# @example Installing a simple callback
# SourceParser.before_parse_list do |files, globals|
# puts "Starting to parse..."
# end
# YARD.parse('lib/**/*.rb')
# # prints "Starting to parse..."
#
# @example Setting global state
# SourceParser.before_parse_list do |files, globals|
# globals.method_count = 0
# end
# SourceParser.after_parse_list do |files, globals|
# puts "Found #{globals.method_count} methods"
# end
# class MyCountHandler < Handlers::Ruby::Base
# handles :def, :defs
# process { globals.method_count += 1 }
# end
# YARD.parse
# # Prints: "Found 37 methods"
#
# @example Using a global callback to cancel parsing
# SourceParser.before_parse_list do |files, globals|
# return false if files.include?('foo.rb')
# end
#
# YARD.parse(['foo.rb', 'bar.rb']) # callback cancels this method
# YARD.parse('bar.rb') # parses normally
#
# @yield [files, globals] the yielded block is called once before
# parsing all files
# @yieldparam [Array<String>] files the list of files that will be parsed.
# @yieldparam [OpenStruct] globals a global structure to store arbitrary
# state for post processing (see {Handlers::Processor#globals})
# @yieldreturn [Boolean] if the block returns +false+, parsing is
# cancelled.
# @return [Proc] the yielded block
# @see after_parse_list
# @see before_parse_file
# @since 0.7.0
def before_parse_list(&block)
before_parse_list_callbacks << block
end
# Registers a callback to be called after a list of files is parsed
# via {parse}. The block passed to this method will be called on
# subsequent parse calls.
#
# @example Printing results after parsing occurs
# SourceParser.after_parse_list do
# puts "Finished parsing!"
# end
# YARD.parse
# # Prints "Finished parsing!" after parsing files
# @yield [files, globals] the yielded block is called once before
# parsing all files
# @yieldparam [Array<String>] files the list of files that will be parsed.
# @yieldparam [OpenStruct] globals a global structure to store arbitrary
# state for post processing (see {Handlers::Processor#globals})
# @yieldreturn [void] the return value for the block is ignored.
# @return [Proc] the yielded block
# @see before_parse_list
# @see before_parse_file
# @since 0.7.0
def after_parse_list(&block)
after_parse_list_callbacks << block
end
# Registers a callback to be called before an individual file is parsed.
# The block passed to this method will be called on subsequent parse
# calls.
#
# To register a callback that is called before the entire list of files
# is processed, see {before_parse_list}.
#
# @example Installing a simple callback
# SourceParser.before_parse_file do |parser|
# puts "I'm parsing #{parser.file}"
# end
# YARD.parse('lib/**/*.rb')
# # prints:
# "I'm parsing lib/foo.rb"
# "I'm parsing lib/foo_bar.rb"
# "I'm parsing lib/last_file.rb"
#
# @example Cancel parsing of any test_*.rb files
# SourceParser.before_parse_file do |parser|
# return false if parser.file =~ /^test_.+\.rb$/
# end
#
# @yield [parser] the yielded block is called once before each
# file that is parsed. This might happen many times for a single
# codebase.
# @yieldparam [SourceParser] parser the parser object that will {#parse}
# the file.
# @yieldreturn [Boolean] if the block returns +false+, parsing for
# the file is cancelled.
# @return [Proc] the yielded block
# @see after_parse_file
# @see before_parse_list
# @since 0.7.0
def before_parse_file(&block)
before_parse_file_callbacks << block
end
# Registers a callback to be called after an individual file is parsed.
# The block passed to this method will be called on subsequent parse
# calls.
#
# To register a callback that is called after the entire list of files
# is processed, see {after_parse_list}.
#
# @example Printing the length of each file after it is parsed
# SourceParser.after_parse_file do |parser|
# puts "#{parser.file} is #{parser.contents.size} characters"
# end
# YARD.parse('lib/**/*.rb')
# # prints:
# "lib/foo.rb is 1240 characters"
# "lib/foo_bar.rb is 248 characters"
#
# @yield [parser] the yielded block is called once after each file
# that is parsed. This might happen many times for a single codebase.
# @yieldparam [SourceParser] parser the parser object that parsed
# the file.
# @yieldreturn [void] the return value for the block is ignored.
# @return [Proc] the yielded block
# @see before_parse_file
# @see after_parse_list
# @since 0.7.0
def after_parse_file(&block)
after_parse_file_callbacks << block
end
# @return [Array<Proc>] the list of callbacks to be called before
# parsing a list of files. Should only be used for testing.
# @since 0.7.0
def before_parse_list_callbacks
@before_parse_list_callbacks ||= []
end
# @return [Array<Proc>] the list of callbacks to be called after
# parsing a list of files. Should only be used for testing.
# @since 0.7.0
def after_parse_list_callbacks
@after_parse_list_callbacks ||= []
end
# @return [Array<Proc>] the list of callbacks to be called before
# parsing a file. Should only be used for testing.
# @since 0.7.0
def before_parse_file_callbacks
@before_parse_file_callbacks ||= []
end
# @return [Array<Proc>] the list of callbacks to be called after
# parsing a file. Should only be used for testing.
# @since 0.7.0
def after_parse_file_callbacks
@after_parse_file_callbacks ||= []
end
# @endgroup
private
# Parses a list of files in a queue.
#
# @param [Array<String>] files a list of files to queue for parsing
# @return [void]
# Parses a list of files in a queue.
#
# @param [Array<String>] files a list of files to queue for parsing
# @return [void]
def parse_in_order(*files)
  # Fresh global state shared by every file in this run.
  global_state = OpenStruct.new
  # Any before-list callback returning exactly false cancels the run.
  before_parse_list_callbacks.each do |cb|
    return if cb.call(files, global_state) == false
  end
  OrderedParser.new(global_state, files).parse
  after_parse_list_callbacks.each do |cb|
    cb.call(files, global_state)
  end
end
end
register_parser_type :ruby, Ruby::RubyParser
register_parser_type :ruby18, Ruby::Legacy::RubyParser
register_parser_type :c, C::CParser, ['c', 'cc', 'cxx', 'cpp']
self.parser_type = :ruby
# @return [String] the filename being parsed by the parser.
attr_accessor :file
# @return [Symbol] the parser type associated with the parser instance.
# This should be set by the {#initialize constructor}.
attr_reader :parser_type
# @return [OpenStruct] an open struct containing arbitrary global state
# shared between files and handlers.
# @since 0.7.0
attr_reader :globals
# @return [String] the contents of the file to be parsed
# @since 0.7.0
attr_reader :contents
# @overload initialize(parser_type = SourceParser.parser_type, globals = nil)
# Creates a new parser object for code parsing with a specific parser type.
#
# @param [Symbol] parser_type the parser type to use
# @param [OpenStruct] globals global state to be re-used across separate source files
def initialize(parser_type = SourceParser.parser_type, globals1 = nil, globals2 = nil)
  # Back-compat shim: an older signature apparently passed a boolean in
  # the second position, so a true/false value shifts globals to arg 3.
  # TODO(review): confirm which callers still rely on this.
  globals = [true, false].include?(globals1) ? globals2 : globals1
  @file = '(stdin)'
  @globals = globals || OpenStruct.new
  self.parser_type = parser_type
end
# The main parser method. This should not be called directly. Instead,
# use the class methods {parse} and {parse_string}.
#
# @param [String, #read, Object] content the source file to parse
# @return [Object, nil] the parser object used to parse the source
def parse(content = __FILE__)
  case content
  when String
    # A String argument is a filename: read it and skip re-parsing when
    # its registry checksum is unchanged.
    @file = File.cleanpath(content)
    content = convert_encoding(File.read_binary(file))
    checksum = Registry.checksum_for(content)
    return if Registry.checksums[file] == checksum
    if Registry.checksums.has_key?(file)
      log.info "File '#{file}' was modified, re-processing..."
    end
    Registry.checksums[@file] = checksum
    self.parser_type = parser_type_for_filename(file)
  else
    # IO-like objects are read directly; parser type stays as configured.
    content = content.read if content.respond_to? :read
  end
  @contents = content
  @parser = parser_class.new(content, file)
  # A before-file callback returning exactly false skips this file.
  self.class.before_parse_file_callbacks.each do |cb|
    return @parser if cb.call(self) == false
  end
  @parser.parse
  post_process
  self.class.after_parse_file_callbacks.each do |cb|
    cb.call(self)
  end
  @parser
rescue ArgumentError, NotImplementedError => e
  log.warn("Cannot parse `#{file}': #{e.message}")
  log.backtrace(e, :warn)
rescue ParserSyntaxError => e
  log.warn(e.message.capitalize)
  log.backtrace(e, :warn)
end
# Tokenizes but does not parse the block of code using the current {#parser_type}
#
# @param [String] content the block of code to tokenize
# @return [Array] a list of tokens
# Note: the parser instance is retained in @parser for later inspection.
def tokenize(content)
  @parser = parser_class.new(content, file)
  @parser.tokenize
end
private
# Searches for encoding line and forces encoding
# @since 0.5.3
# Searches for encoding line and forces encoding
# @since 0.5.3
def convert_encoding(content)
  # Pre-1.9 strings have no encoding to convert.
  return content unless content.respond_to?(:force_encoding)
  if content =~ ENCODING_LINE
    # A magic comment wins: force the declared encoding.
    content.force_encoding($1)
  else
    # Otherwise sniff a BOM (byte comparison done in binary encoding);
    # fall back to the string's original encoding when none matches.
    old_encoding = content.encoding
    content.force_encoding('binary')
    ENCODING_BYTE_ORDER_MARKS.each do |encoding, bom|
      bom.force_encoding('binary')
      if content[0,bom.size] == bom
        content.force_encoding(encoding)
        return content
      end
    end
    content.force_encoding(old_encoding)
    content
  end
end
# Runs a {Handlers::Processor} object to post process the parsed statements.
# @return [void]
# Runs a {Handlers::Processor} object to post process the parsed statements.
# @return [void]
def post_process
  return unless @parser.respond_to?(:enumerator)
  enumerator = @parser.enumerator
  return unless enumerator
  Handlers::Processor.new(self).process(enumerator)
end
# Normalizes and stores this instance's parser type (falls back to
# :ruby18 when Ripper is unavailable).
def parser_type=(value)
  @parser_type = self.class.validated_parser_type(value)
end
# Guesses the parser type to use depending on the file extension.
#
# @param [String] filename the filename to use to guess the parser type
# @return [Symbol] a parser type that matches the filename
# Guesses the parser type to use depending on the file extension.
#
# @param [String] filename the filename to use to guess the parser type
# @return [Symbol] a parser type that matches the filename
def parser_type_for_filename(filename)
  extension = (File.extname(filename)[1..-1] || "").downcase
  guessed = self.class.parser_type_for_extension(extension)
  # Never upgrade an explicit ruby18 instance to the Ripper parser.
  (parser_type == :ruby18 && guessed == :ruby) ? :ruby18 : guessed
end
# @since 0.5.6
# Resolves the registered parser class for the current parser type.
# @raise [ArgumentError] when no parser is registered for the type
# @since 0.5.6
def parser_class
  klass = self.class.parser_types[parser_type]
  # caller[1..-1] hides this helper frame from the reported backtrace
  raise ArgumentError, "invalid parser type '#{parser_type}' or unrecognized file", caller[1..-1] if !klass
  klass
end
end
end
end
Make UTF-8 the default file encoding
Follows the Ruby 2.0 convention; should not affect other versions.
Closes #607
require 'stringio'
require 'ostruct'
module YARD
module Parser
# Raised when an object is recognized but cannot be documented. This
# generally occurs when the Ruby syntax used to declare an object is
# too dynamic in nature.
class UndocumentableError < Exception; end
# Raised when the parser sees a Ruby syntax error
class ParserSyntaxError < UndocumentableError; end
# Responsible for parsing a list of files in order. The
# {#parse} method of this class can be called from the
# {SourceParser#globals} globals state list to re-enter
# parsing for the remainder of files in the list recursively.
#
# @see Processor#parse_remaining_files
class OrderedParser
# @return [Array<String>] the list of remaining files to parse
attr_accessor :files
# Creates a new OrderedParser with the global state and a list
# of files to parse.
#
# @note OrderedParser sets itself as the +ordered_parser+ key on
# global_state for later use in {Handlers::Processor}.
# @param [OpenStruct] global_state a structure containing all global
# state during parsing
# @param [Array<String>] files the list of files to parse
def initialize(global_state, files)
  @global_state = global_state
  @files = files.dup # defensive copy: #parse destructively shifts entries
  @global_state.ordered_parser = self # expose self for re-entrant parsing
end
# Parses the remainder of the {#files} list.
#
# @see Processor#parse_remaining_files
def parse
  # Destructively consume the queue so a handler that re-enters parsing
  # (via global_state.ordered_parser) never sees a file twice.
  while file = files.shift
    log.capture("Parsing #{file}") do
      SourceParser.new(SourceParser.parser_type, @global_state).parse(file)
    end
  end
end
end
# Responsible for parsing a source file into the namespace. Parsing
# also invokes handlers to process the parsed statements and generate
# any code objects that may be recognized.
#
# == Custom Parsers
# SourceParser allows custom parsers to be registered and called when
# a certain filetype is recognized. To register a parser and hook it
# up to a set of file extensions, call {register_parser_type}
#
# @see register_parser_type
# @see Handlers::Base
# @see CodeObjects::Base
class SourceParser
SHEBANG_LINE = /\A\s*#!\S+/
ENCODING_LINE = /\A(?:\s*#*!.*\r?\n)?\s*(?:#+|\/\*+|\/\/+).*coding\s*[:=]{1,2}\s*([a-z\d_\-]+)/i
# Byte order marks for various encodings
# @since 0.7.0
ENCODING_BYTE_ORDER_MARKS = {
'utf-8' => "\xEF\xBB\xBF",
# Not yet supported
#'utf-16be' => "\xFE\xFF",
#'utf-16le' => "\xFF\xFE",
#'utf-32be' => "\x00\x00\xFF\xFE",
#'utf-32le' => "\xFF\xFE",
}
class << self
# @return [Symbol] the default parser type (defaults to :ruby)
attr_reader :parser_type
# Sets the default parser type, downgrading :ruby to :ruby18 when the
# Ripper library is unavailable (see {validated_parser_type}).
def parser_type=(value)
  @parser_type = validated_parser_type(value)
end
# Parses a path or set of paths
#
# @param [String, Array<String>] paths a path, glob, or list of paths to
# parse
# @param [Array<String, Regexp>] excluded a list of excluded path matchers
# @param [Fixnum] level the logger level to use during parsing. See
# {YARD::Logger}
# @return [void]
def parse(paths = ["{lib,app}/**/*.rb", "ext/**/*.c"], excluded = [], level = log.level)
  log.debug("Parsing #{paths.inspect} with `#{parser_type}` parser")
  # Normalize exclusions to regexps (strings match case-insensitively).
  excluded = excluded.map do |path|
    case path
    when Regexp; path
    else Regexp.new(path.to_s, Regexp::IGNORECASE)
    end
  end
  # Expand directories into globs, expand globs (shortest paths first),
  # then drop non-files and anything matching an exclusion.
  files = [paths].flatten.
    map {|p| File.directory?(p) ? "#{p}/**/*.{rb,c}" : p }.
    map {|p| p.include?("*") ? Dir[p].sort_by {|f| f.length } : p }.flatten.
    reject {|p| !File.file?(p) || excluded.any? {|re| p =~ re } }
  log.enter_level(level) do
    parse_in_order(*files.uniq)
  end
end
# Parses a string +content+
#
# @param [String] content the block of code to parse
# @param [Symbol] ptype the parser type to use. See {parser_type}.
# @return the parser object that was used to parse +content+
# Parses a string +content+
#
# @param [String] content the block of code to parse
# @param [Symbol] ptype the parser type to use. See {parser_type}.
# @return the parser object that was used to parse +content+
def parse_string(content, ptype = parser_type)
  parser = new(ptype)
  parser.parse(StringIO.new(content))
end
# Tokenizes but does not parse the block of code
#
# @param [String] content the block of code to tokenize
# @param [Symbol] ptype the parser type to use. See {parser_type}.
# @return [Array] a list of tokens
def tokenize(content, ptype = parser_type)
new(ptype).tokenize(content)
end
# Registers a new parser type.
#
# @example Registering a parser for "java" files
#   SourceParser.register_parser_type :java, JavaParser, 'java'
# @param [Symbol] type a symbolic name for the parser type
# @param [Base] parser_klass a class that implements parsing and tokenization
# @param [Array<String>, String, Regexp] extensions a list of extensions or a
#   regex to match against the file extension
# @return [void]
# @see Parser::Base
def register_parser_type(type, parser_klass, extensions = nil)
  raise ArgumentError, "expecting parser_klass to be a subclass of YARD::Parser::Base" unless Base > parser_klass
  key = type.to_sym
  # Extensions are optional; a type can also be selected explicitly.
  parser_type_extensions[key] = extensions if extensions
  parser_types[key] = parser_klass
end
# @return [Hash{Symbol=>Object}] a list of registered parser types
# @private
# @since 0.5.6
attr_reader :parser_types
undef parser_types
def parser_types; @@parser_types ||= {} end
def parser_types=(value) @@parser_types = value end
# @return [Hash] a list of registered parser type extensions
# @private
# @since 0.5.6
attr_reader :parser_type_extensions
undef parser_type_extensions
def parser_type_extensions; @@parser_type_extensions ||= {} end
def parser_type_extensions=(value) @@parser_type_extensions = value end
# Finds a parser type that is registered for the extension. If no
# type is found, the default Ruby type is returned.
#
# @return [Symbol] the parser type to be used for the extension
# @since 0.5.6
def parser_type_for_extension(extension)
  # Matchers may be a String, Regexp, or Array thereof; `===` covers
  # both literal string comparison and regex matching.
  match = parser_type_extensions.detect do |_ptype, matchers|
    [matchers].flatten.any? { |matcher| matcher === extension }
  end
  validated_parser_type(match ? match.first : :ruby)
end
# Returns the validated parser type. Basically, enforces that :ruby
# type is never set if the Ripper library is not available
#
# @param [Symbol] type the parser type to set
# @return [Symbol] the validated parser type
# @private
def validated_parser_type(type)
  # Fall back to the legacy 1.8 parser when Ripper is unavailable.
  return :ruby18 if type == :ruby && !defined?(::Ripper)
  type
end
# @group Parser Callbacks
# Registers a callback to be called before a list of files is parsed
# via {parse}. The block passed to this method will be called on
# subsequent parse calls.
#
# @example Installing a simple callback
# SourceParser.before_parse_list do |files, globals|
# puts "Starting to parse..."
# end
# YARD.parse('lib/**/*.rb')
# # prints "Starting to parse..."
#
# @example Setting global state
# SourceParser.before_parse_list do |files, globals|
# globals.method_count = 0
# end
# SourceParser.after_parse_list do |files, globals|
# puts "Found #{globals.method_count} methods"
# end
# class MyCountHandler < Handlers::Ruby::Base
# handles :def, :defs
# process { globals.method_count += 1 }
# end
# YARD.parse
# # Prints: "Found 37 methods"
#
# @example Using a global callback to cancel parsing
# SourceParser.before_parse_list do |files, globals|
# return false if files.include?('foo.rb')
# end
#
# YARD.parse(['foo.rb', 'bar.rb']) # callback cancels this method
# YARD.parse('bar.rb') # parses normally
#
# @yield [files, globals] the yielded block is called once before
# parsing all files
# @yieldparam [Array<String>] files the list of files that will be parsed.
# @yieldparam [OpenStruct] globals a global structure to store arbitrary
# state for post processing (see {Handlers::Processor#globals})
# @yieldreturn [Boolean] if the block returns +false+, parsing is
# cancelled.
# @return [Proc] the yielded block
# @see after_parse_list
# @see before_parse_file
# @since 0.7.0
def before_parse_list(&block)
before_parse_list_callbacks << block
end
# Registers a callback to be called after a list of files is parsed
# via {parse}. The block passed to this method will be called on
# subsequent parse calls.
#
# @example Printing results after parsing occurs
#   SourceParser.after_parse_list do
#     puts "Finished parsing!"
#   end
#   YARD.parse
#   # Prints "Finished parsing!" after parsing files
# @yield [files, globals] the yielded block is called once after
#   parsing all files
# @yieldparam [Array<String>] files the list of files that were parsed.
# @yieldparam [OpenStruct] globals a global structure to store arbitrary
#   state for post processing (see {Handlers::Processor#globals})
# @yieldreturn [void] the return value for the block is ignored.
# @return [Proc] the yielded block
# @see before_parse_list
# @see before_parse_file
# @since 0.7.0
def after_parse_list(&block)
  after_parse_list_callbacks << block
end
# Registers a callback to be called before an individual file is parsed.
# The block passed to this method will be called on subsequent parse
# calls.
#
# To register a callback that is called before the entire list of files
# is processed, see {before_parse_list}.
#
# @example Installing a simple callback
# SourceParser.before_parse_file do |parser|
# puts "I'm parsing #{parser.file}"
# end
# YARD.parse('lib/**/*.rb')
# # prints:
# "I'm parsing lib/foo.rb"
# "I'm parsing lib/foo_bar.rb"
# "I'm parsing lib/last_file.rb"
#
# @example Cancel parsing of any test_*.rb files
# SourceParser.before_parse_file do |parser|
# return false if parser.file =~ /^test_.+\.rb$/
# end
#
# @yield [parser] the yielded block is called once before each
# file that is parsed. This might happen many times for a single
# codebase.
# @yieldparam [SourceParser] parser the parser object that will {#parse}
# the file.
# @yieldreturn [Boolean] if the block returns +false+, parsing for
# the file is cancelled.
# @return [Proc] the yielded block
# @see after_parse_file
# @see before_parse_list
# @since 0.7.0
def before_parse_file(&block)
before_parse_file_callbacks << block
end
# Registers a callback to be called after an individual file is parsed.
# The block passed to this method will be called on subsequent parse
# calls.
#
# To register a callback that is called after the entire list of files
# is processed, see {after_parse_list}.
#
# @example Printing the length of each file after it is parsed
# SourceParser.after_parse_file do |parser|
# puts "#{parser.file} is #{parser.contents.size} characters"
# end
# YARD.parse('lib/**/*.rb')
# # prints:
# "lib/foo.rb is 1240 characters"
# "lib/foo_bar.rb is 248 characters"
#
# @yield [parser] the yielded block is called once after each file
# that is parsed. This might happen many times for a single codebase.
# @yieldparam [SourceParser] parser the parser object that parsed
# the file.
# @yieldreturn [void] the return value for the block is ignored.
# @return [Proc] the yielded block
# @see before_parse_file
# @see after_parse_list
# @since 0.7.0
def after_parse_file(&block)
after_parse_file_callbacks << block
end
# @return [Array<Proc>] the list of callbacks to be called before
# parsing a list of files. Should only be used for testing.
# @since 0.7.0
def before_parse_list_callbacks
@before_parse_list_callbacks ||= []
end
# @return [Array<Proc>] the list of callbacks to be called after
# parsing a list of files. Should only be used for testing.
# @since 0.7.0
def after_parse_list_callbacks
@after_parse_list_callbacks ||= []
end
# @return [Array<Proc>] the list of callbacks to be called before
# parsing a file. Should only be used for testing.
# @since 0.7.0
def before_parse_file_callbacks
@before_parse_file_callbacks ||= []
end
# @return [Array<Proc>] the list of callbacks to be called after
# parsing a file. Should only be used for testing.
# @since 0.7.0
def after_parse_file_callbacks
@after_parse_file_callbacks ||= []
end
# @endgroup
private
# Parses a list of files in a queue.
#
# If any before_parse_list callback returns +false+, the entire run is
# cancelled and no files are parsed.
#
# @param [Array<String>] files a list of files to queue for parsing
# @return [void]
def parse_in_order(*files)
  # Shared mutable state handed to every callback and handler in this run.
  global_state = OpenStruct.new
  before_parse_list_callbacks.each do |cb|
    # Only an explicit +false+ (not nil) cancels parsing.
    return if cb.call(files, global_state) == false
  end
  OrderedParser.new(global_state, files).parse
  after_parse_list_callbacks.each do |cb|
    cb.call(files, global_state)
  end
end
end
register_parser_type :ruby, Ruby::RubyParser
register_parser_type :ruby18, Ruby::Legacy::RubyParser
register_parser_type :c, C::CParser, ['c', 'cc', 'cxx', 'cpp']
self.parser_type = :ruby
# @return [String] the filename being parsed by the parser.
attr_accessor :file
# @return [Symbol] the parser type associated with the parser instance.
# This should be set by the {#initialize constructor}.
attr_reader :parser_type
# @return [OpenStruct] an open struct containing arbitrary global state
# shared between files and handlers.
# @since 0.7.0
attr_reader :globals
# @return [String] the contents of the file to be parsed
# @since 0.7.0
attr_reader :contents
# @overload initialize(parser_type = SourceParser.parser_type, globals = nil)
#   Creates a new parser object for code parsing with a specific parser type.
#
#   @param [Symbol] parser_type the parser type to use
#   @param [OpenStruct] globals global state to be re-used across separate source files
def initialize(parser_type = SourceParser.parser_type, globals1 = nil, globals2 = nil)
  # Backwards-compatibility shim: if a Boolean arrives in the second slot
  # (an older signature), the real globals argument is in the third slot.
  # NOTE(review): presumably kept for legacy callers — confirm before removing.
  globals = [true, false].include?(globals1) ? globals2 : globals1
  @file = '(stdin)'
  @globals = globals || OpenStruct.new
  self.parser_type = parser_type
end
# The main parser method. This should not be called directly. Instead,
# use the class methods {parse} and {parse_string}.
#
# @param [String, #read, Object] content the source file to parse
# @return [Object, nil] the parser object used to parse the source
def parse(content = __FILE__)
  case content
  when String
    # A String argument is treated as a filename, not as source text.
    @file = File.cleanpath(content)
    content = convert_encoding(File.read_binary(file))
    checksum = Registry.checksum_for(content)
    # Skip files whose checksum is unchanged since the last run.
    return if Registry.checksums[file] == checksum
    if Registry.checksums.has_key?(file)
      log.info "File '#{file}' was modified, re-processing..."
    end
    Registry.checksums[@file] = checksum
    # The file's extension may select a different parser than the default.
    self.parser_type = parser_type_for_filename(file)
  else
    # IO-like objects are drained; anything else is passed through as-is.
    content = content.read if content.respond_to? :read
  end
  @contents = content
  @parser = parser_class.new(content, file)
  # A before_parse_file callback returning +false+ cancels this file only.
  self.class.before_parse_file_callbacks.each do |cb|
    return @parser if cb.call(self) == false
  end
  @parser.parse
  post_process
  self.class.after_parse_file_callbacks.each do |cb|
    cb.call(self)
  end
  @parser
rescue ArgumentError, NotImplementedError => e
  # e.g. an invalid encoding name or an unsupported parser operation.
  log.warn("Cannot parse `#{file}': #{e.message}")
  log.backtrace(e, :warn)
rescue ParserSyntaxError => e
  log.warn(e.message.capitalize)
  log.backtrace(e, :warn)
end
# Tokenizes but does not parse the block of code using the current {#parser_type}
#
# @param [String] content the block of code to tokenize
# @return [Array] a list of tokens
def tokenize(content)
@parser = parser_class.new(content, file)
@parser.tokenize
end
private
# Searches for encoding line and forces encoding
#
# Resolution order: an explicit magic "coding" comment wins, then a
# recognized byte order mark, then UTF-8 as the default.
# @since 0.5.3
def convert_encoding(content)
  # Ruby 1.8 strings have no encoding support; nothing to do there.
  return content unless content.respond_to?(:force_encoding)
  if content =~ ENCODING_LINE
    # NOTE: an unrecognized encoding name raises ArgumentError, which
    # the #parse method rescues.
    content.force_encoding($1)
  else
    content.force_encoding('binary')
    ENCODING_BYTE_ORDER_MARKS.each do |encoding, bom|
      # Compare raw bytes: both sides must be binary-encoded to match.
      bom.force_encoding('binary')
      if content[0,bom.size] == bom
        content.force_encoding(encoding)
        return content
      end
    end
    content.force_encoding('utf-8') # UTF-8 is default encoding
    content
  end
end
# Runs a {Handlers::Processor} object to post process the parsed statements.
# @return [void]
def post_process
  return unless @parser.respond_to?(:enumerator)
  statements = @parser.enumerator
  return unless statements
  Handlers::Processor.new(self).process(statements)
end

# Validates and assigns the parser type for this instance.
def parser_type=(value)
  @parser_type = self.class.validated_parser_type(value)
end

# Guesses the parser type to use depending on the file extension.
#
# @param [String] filename the filename to use to guess the parser type
# @return [Symbol] a parser type that matches the filename
def parser_type_for_filename(filename)
  extension = File.extname(filename)[1..-1].to_s.downcase
  guessed = self.class.parser_type_for_extension(extension)
  # Stay on the legacy parser when it was explicitly selected for Ruby code.
  parser_type == :ruby18 && guessed == :ruby ? :ruby18 : guessed
end

# @since 0.5.6
def parser_class
  klass = self.class.parser_types[parser_type]
  unless klass
    raise ArgumentError, "invalid parser type '#{parser_type}' or unrecognized file", caller[1..-1]
  end
  klass
end
end
end
end
|
# encoding: utf-8
namespace :cartodb do
  desc "Adapt max_import_file_size according to disk quota"
  task :setup_max_import_file_size_based_on_disk_quota => :environment do
    # Import-size tiers in bytes: 500 MB for mid-quota users, 1000 MB for big.
    mid_size = 500*1024*1024
    big_size = 1000*1024*1024
    User.all.each do |user|
      quota_in_mb = user.quota_in_bytes/1024/1024
      # Progress markers: M = mid tier set, B = big tier set, . = unchanged.
      if quota_in_mb >= 450 && quota_in_mb < 1500
        user.max_import_file_size = mid_size
        user.save
        print "M"
      elsif quota_in_mb >= 1500
        user.max_import_file_size = big_size
        user.save
        print "B"
      else
        print "."
      end
    end
    puts "\n"
  end

  desc "Adapt max_import_table_row_count according to disk quota"
  task :setup_max_import_table_row_count_based_on_disk_quota => :environment do
    # Row-count tiers: 1M rows for mid-quota users, 5M for big.
    mid_count = 1000000
    big_count = 5000000
    User.all.each do |user|
      quota_in_mb = user.quota_in_bytes/1024/1024
      if quota_in_mb >= 50 && quota_in_mb < 1000
        user.max_import_table_row_count = mid_count
        user.save
        print "M"
      elsif quota_in_mb >= 1000
        user.max_import_table_row_count = big_count
        user.save
        print "B"
      else
        print "."
      end
    end
    puts "\n"
  end

  desc "Increase limits for twitter import users"
  task :increase_limits_for_twitter_import_users => :environment do
    file_size_quota = 1500*1024*1024
    row_count_quota = 5000000
    User.where(twitter_datasource_enabled: true).each do |user|
      # Only increase, don't decrease
      user.max_import_file_size = file_size_quota if file_size_quota > user.max_import_file_size
      user.max_import_table_row_count = row_count_quota if row_count_quota > user.max_import_table_row_count
      user.save
      puts "#{user.username}"
    end
  end
end
rake ready
# encoding: utf-8
namespace :cartodb do
desc "Adapt max_import_file_size according to disk quota"
task :setup_max_import_file_size_based_on_disk_quota => :environment do
mid_size = 500*1024*1024
big_size = 1000*1024*1024
User.all.each do |user|
quota_in_mb = user.quota_in_bytes/1024/1024
if quota_in_mb >= 450 && quota_in_mb < 1500
user.max_import_file_size = mid_size
user.save
print "M"
elsif quota_in_mb >= 1500
user.max_import_file_size = big_size
user.save
print "B"
else
print "."
end
end
puts "\n"
end
desc "Adapt max_import_table_row_count according to disk quota"
task :setup_max_import_table_row_count_based_on_disk_quota => :environment do
mid_count = 1000000
big_count = 5000000
User.all.each do |user|
quota_in_mb = user.quota_in_bytes/1024/1024
if quota_in_mb >= 50 && quota_in_mb < 1000
user.max_import_table_row_count = mid_count
user.save
print "M"
elsif quota_in_mb >= 1000
user.max_import_table_row_count = big_count
user.save
print "B"
else
print "."
end
end
puts "\n"
end
desc "Increase limits for twitter import users"
task :increase_limits_for_twitter_import_users => :environment do
file_size_quota = 1500*1024*1024
row_count_quota = 5000000
User.where(twitter_datasource_enabled: true).each do |user|
# Only increase, don't decrease
user.max_import_file_size = file_size_quota if file_size_quota > user.max_import_file_size
user.max_import_table_row_count = row_count_quota if row_count_quota > user.max_import_table_row_count
user.save
puts "#{user.username}"
end
end
desc "Set custom platform limits for a user"
task :set_custom_limits_for_user, [:username, :import_file_size, :table_row_count, :concurrent_imports] => :environment do |task_name, args|
  # Validate every argument up front so the task fails fast with a clear message.
  raise "Invalid username supplied" if args[:username].nil?
  raise "Invalid import size" if args[:import_file_size].nil? || args[:import_file_size].to_i <= 0
  # Fixed typo in the error message ("tabel" -> "table").
  raise "Invalid table row count" if args[:table_row_count].nil? || args[:table_row_count].to_i <= 0
  raise "Invalid concurrent imports" if args[:concurrent_imports].nil? || args[:concurrent_imports].to_i <= 0
  user = User.where(username: args[:username]).first
  raise "User not found" if user.nil?
  user.max_import_file_size = args[:import_file_size].to_i
  user.max_import_table_row_count = args[:table_row_count].to_i
  user.max_concurrent_import_count = args[:concurrent_imports].to_i
  user.save
end
end
|
namespace :site do
desc "Set up new sites on AWS"
task :set_up_publishing => :environment do
  start_time = Time.zone.now
  # Removed unused local `target_date` (this task does no date-based work).
  Site.active.in_hour(6).each do |site|
    # Only sites that are not yet ready need first-time setup.
    unless site.ready_to_publish?
      puts "Setting up #{site.name} at #{site.host_url} for the first time..."
      site.prepare_for_publishing!
      # Deactivate sites whose setup did not complete so we stop retrying.
      unless site.ready_to_publish?
        site.active = false
        site.save
      end
    end
  end
  end_time = Time.zone.now
  elapsed = (end_time - start_time).to_i
  puts "Finished in #{elapsed} seconds."
end
desc "Publish a day of JSON and HTML for each site (once per day)"
task :publish_site_files => :environment do
  start_time = Time.zone.now
  Site.active.in_hour(8).each do |site|
    # Publish yesterday's data relative to the site's own time zone.
    target_date = site.time_zone_obj.today - 1.day
    next unless site.ready_to_publish?
    puts "Calculating the rank for all tweets..."
    site.set_tweet_ranks!(target_date)
    puts "Writing tweet summary JSON files..."
    site.delay.write_final_metrics_for(target_date)
    puts "Publishing all HTML files..."
    app = ActionDispatch::Integration::Session.new(Rails.application)
    site.html_files_to_publish_for(target_date).each do |file|
      puts " #{file[:filename]}..."
      app.get(file[:route])
      if app.response.success?
        site.s3_bucket.objects[file[:filename]].write(app.response.body)
        puts " ...written."
      else
        raise "Can't get HTML from #{file[:route]}. (#{app.response.message})"
      end
    end
    puts "Writing asset files..."
    # Use the block form of Dir.chdir so the working directory is restored
    # afterwards instead of leaking the chdir to the rest of the rake process.
    Dir.chdir(Rails.root.join('public')) do
      Dir.glob('assets/**/*.*').each do |asset_file|
        puts " #{asset_file}..."
        site.s3_bucket.objects[asset_file].write(:file => asset_file)
      end
    end
  end
  end_time = Time.zone.now
  elapsed = (end_time - start_time).to_i
  puts "Finished in #{elapsed} seconds."
end
end
Give sites another hour to finish metrics before publishing.
namespace :site do
desc "Set up new sites on AWS"
task :set_up_publishing => :environment do
start_time = Time.zone.now
Site.active.in_hour(6).each do |site|
target_date = site.time_zone_obj.today - 1.day
unless site.ready_to_publish?
puts "Setting up #{site.name} at #{site.host_url} for the first time..."
site.prepare_for_publishing!
unless site.ready_to_publish?
site.active = false
site.save
end
end
end
end_time = Time.zone.now
elapsed = (end_time - start_time).to_i
puts "Finished in #{elapsed} seconds."
end
desc "Publish a day of JSON and HTML for each site (once per day)"
task :publish_site_files => :environment do
start_time = Time.zone.now
Site.active.in_hour(9).each do |site|
target_date = site.time_zone_obj.today - 1.day
next unless site.ready_to_publish?
puts "Calculating the rank for all tweets..."
site.set_tweet_ranks!(target_date)
puts "Writing tweet summary JSON files..."
site.delay.write_final_metrics_for(target_date)
puts "Publishing all HTML files..."
app = ActionDispatch::Integration::Session.new(Rails.application)
site.html_files_to_publish_for(target_date).each do |file|
puts " #{file[:filename]}..."
app.get(file[:route])
if app.response.success?
site.s3_bucket.objects[file[:filename]].write(app.response.body)
puts " ...written."
else
raise "Can't get HTML from #{file[:route]}. (#{app.response.message})"
end
end
puts "Writing asset files..."
Dir.chdir(Rails.root.join('public'))
Dir.glob('assets/**/*.*').each do |asset_file|
puts " #{asset_file}..."
site.s3_bucket.objects[asset_file].write(:file => asset_file)
end
end
end_time = Time.zone.now
elapsed = (end_time - start_time).to_i
puts "Finished in #{elapsed} seconds."
end
end
|
require 'socket'
require 'versionomy'
namespace :test do
  desc "Run javascript tests"
  task :javascript => :environment do
    # phantom-driver.js needs at least phantomjs 1.3.0.
    phantomjs_version = Versionomy.parse(`phantomjs --version`.strip) rescue nil
    unless phantomjs_version && (phantomjs_version >= Versionomy.parse("1.3.0"))
      STDERR.puts "The version of phantomjs (v#{phantomjs_version}) is not compatible with the current phantom-driver.js."
      STDERR.puts "Please upgrade your version of phantomjs and re-run this task."
      exit 1
    end
    test_port = 3100
    pid_file = Rails.root.join('tmp', 'pids', 'javascript_tests.pid')
    # File.exist? — File.exists? was deprecated and removed in Ruby 3.2;
    # this also matches the check used when stopping the server below.
    if File.exist?(pid_file)
      STDERR.puts "It looks like the javascript test server is running with pid #{File.read(pid_file)}."
      STDERR.puts "Please kill the server, remove the pid file from #{pid_file} and re-run this task:"
      STDERR.puts " $ kill -KILL `cat #{pid_file}` && rm #{pid_file}"
      exit 1
    end
    puts "Starting the test server on port #{test_port}"
    `cd #{Rails.root} && script/rails server -p #{test_port} --daemon --environment=test --pid=#{pid_file}`
    puts "Waiting for the server to come up"
    not_connected = true
    while (not_connected) do
      begin
        # Close the probe socket immediately; we only care that connecting works.
        TCPSocket.new("127.0.0.1", test_port).close
        not_connected = false
        puts "Server is up and ready"
      rescue Errno::ECONNREFUSED
        sleep 1
      end
    end
    runner = "http://127.0.0.1:#{test_port}/test/qunit"
    phantom_driver = Rails.root.join('test', 'javascripts', 'support', 'phantom-driver.js')
    command = "phantomjs #{phantom_driver} #{runner}"
    # linux needs to run phantom through windowing server
    # apt-get install xvfb
    if RUBY_PLATFORM =~ /linux/
      command = "xvfb-run " + command
    end
    IO.popen(command) do |test|
      puts test.read
    end
    # grab the exit status of phantomjs
    # this will be the result of the tests
    # it is important to grab it before we
    # exit the server otherwise $? will be overwritten.
    test_result = $?.exitstatus
    puts "Stopping the server"
    if File.exist?(pid_file)
      `kill -KILL #{File.read(pid_file)}`
      `rm #{pid_file}`
    end
    exit test_result
  end
end
task :default => "test:javascript"
Clarify the required version of phantomjs.
require 'socket'
require 'versionomy'
namespace :test do
desc "Run javascript tests"
task :javascript => :environment do
minimum_supported_version = Versionomy.parse("1.3.0")
phantomjs_version = Versionomy.parse(`phantomjs --version`.strip) rescue nil
unless phantomjs_version && (phantomjs_version >= minimum_supported_version)
STDERR.puts "Your version of phantomjs (v#{phantomjs_version}) is not compatible with the current phantom-driver.js."
STDERR.puts "Please upgrade your version of phantomjs to at least #{minimum_supported_version} and re-run this task."
exit 1
end
test_port = 3100
pid_file = Rails.root.join('tmp', 'pids', 'javascript_tests.pid')
if File.exists?(pid_file)
STDERR.puts "It looks like the javascript test server is running with pid #{File.read(pid_file)}."
STDERR.puts "Please kill the server, remove the pid file from #{pid_file} and re-run this task:"
STDERR.puts " $ kill -KILL `cat #{pid_file}` && rm #{pid_file}"
exit 1
end
puts "Starting the test server on port #{test_port}"
`cd #{Rails.root} && script/rails server -p #{test_port} --daemon --environment=test --pid=#{pid_file}`
puts "Waiting for the server to come up"
not_connected = true
while (not_connected) do
begin
TCPSocket.new("127.0.0.1", test_port)
not_connected = false
puts "Server is up and ready"
rescue Errno::ECONNREFUSED
sleep 1
end
end
runner = "http://127.0.0.1:#{test_port}/test/qunit"
phantom_driver = Rails.root.join('test', 'javascripts', 'support', 'phantom-driver.js')
command = "phantomjs #{phantom_driver} #{runner}"
# linux needs to run phantom through windowing server
# apt-get install xvfb
if RUBY_PLATFORM =~ /linux/
command = "xvfb-run " + command
end
IO.popen(command) do |test|
puts test.read
end
# grab the exit status of phantomjs
# this will be the result of the tests
# it is important to grab it before we
# exit the server otherwise $? will be overwritten.
test_result = $?.exitstatus
puts "Stopping the server"
if File.exist?(pid_file)
`kill -KILL #{File.read(pid_file)}`
`rm #{pid_file}`
end
exit test_result
end
end
task :default => "test:javascript" |
require "rails"
module Thincloud
  module Resque
    # Public: Thincloud Resque Engine
    class Engine < ::Rails::Engine
      # convenience method for engine options / configuration
      def configuration
        Thincloud::Resque.configuration
      end

      # initialize the configuration so it is available during rails init
      ActiveSupport.on_load :before_configuration do
        app_name = Rails.application.class.name.deconstantize.underscore
        rails_env = Rails.env || "development"
        unless config.respond_to? :thincloud
          config.thincloud = ActiveSupport::OrderedOptions.new
        end
        config.thincloud.resque ||= Thincloud::Resque.configure do |c|
          c.redis_namespace = "resque:#{app_name}:#{rails_env}"
        end
      end

      rake_tasks { require "resque/tasks" }

      initializer "thincloud.resque.environment" do
        require "redis"
        require "resque"
        ::Resque.redis = ::Redis.new({
          url: configuration.redis_url,
          driver: configuration.redis_driver
        })
        ::Resque.redis.namespace = configuration.redis_namespace
      end

      initializer "thincloud.resque.server" do
        require "resque/server"
        require "resque-cleaner"
        # use http basic auth for resque-web
        ::Resque::Server.use ::Rack::Auth::Basic do |username, password|
          username == configuration.web_username &&
            password == configuration.web_password
        end
        ::Resque::Server.set :show_exceptions, true
        # set the Resque::Server sinatra app as the endpoint for this engine
        self.class.endpoint ::Resque::Server
      end

      initializer "thincloud.resque.mailer", after: "finisher_hook" do
        if configuration.mailer_environments.include?(Rails.env.to_sym)
          require "resque_mailer"
          # We manage the environments by only including Resque::Mailer for
          # explicit environments (#mailer_environments)
          ::Resque::Mailer.excluded_environments = []
          # Make sure that Resque::Mailer ends up at the correct place
          # in the inheritance chain
          ActiveSupport.on_load :action_mailer do
            def self.inherited(subclass)
              # Devise::Mailer does not play nicely with Resque::Mailer
              unless subclass.name == "Devise::Mailer"
                subclass.send :include, ::Resque::Mailer
              end
              # Fix: always call the default inherited hook, even for
              # Devise::Mailer — skipping `super` broke subclass setup
              # (e.g. "undefined method `main_app'" in mailers).
              super
            end
          end
        end
      end
    end
  end
end
Move the `super` call so it always runs in self.inherited
* This commit moves the return value of thincloud.resque.mailer#self.inherited
outside of the unless block, ensuring that the subclass will always be passed
through.
* The reason for this commit was noticed in a client application where devise is
used for automated password reset via email. Errors of the type below were
observed:
```
'undefined method `main_app' for #<Class ...>
```
require "rails"
module Thincloud
module Resque
# Public: Thincloud Resque Engine
class Engine < ::Rails::Engine
# convenience method for engine options / configuration
def configuration
Thincloud::Resque.configuration
end
# initialize the configuration so it is available during rails init
ActiveSupport.on_load :before_configuration do
app_name = Rails.application.class.name.deconstantize.underscore
rails_env = Rails.env || "development"
unless config.respond_to? :thincloud
config.thincloud = ActiveSupport::OrderedOptions.new
end
config.thincloud.resque ||= Thincloud::Resque.configure do |c|
c.redis_namespace = "resque:#{app_name}:#{rails_env}"
end
end
rake_tasks { require "resque/tasks" }
initializer "thincloud.resque.environment" do
require "redis"
require "resque"
::Resque.redis = ::Redis.new({
url: configuration.redis_url,
driver: configuration.redis_driver
})
::Resque.redis.namespace = configuration.redis_namespace
end
initializer "thincloud.resque.server" do
require "resque/server"
require "resque-cleaner"
# use http basic auth for resque-web
::Resque::Server.use ::Rack::Auth::Basic do |username, password|
username == configuration.web_username &&
password == configuration.web_password
end
::Resque::Server.set :show_exceptions, true
# set the Resque::Server sinatra app as the endpoint for this engine
self.class.endpoint ::Resque::Server
end
initializer "thincloud.resque.mailer", after: "finisher_hook" do
if configuration.mailer_environments.include?(Rails.env.to_sym)
require "resque_mailer"
# We manage the environments by only including Resque::Mailer for
# explicit environments (#mailer_environments)
::Resque::Mailer.excluded_environments = []
# Make sure that Resque::Mailer ends up at the correct place
# in the inheritance chain
ActiveSupport.on_load :action_mailer do
def self.inherited(subclass)
# Devise::Mailer does not play nicely with Resque::Mailer
unless subclass.name == "Devise::Mailer"
subclass.send :include, ::Resque::Mailer
end
super
end
end
end
end
end
end
end
|
[WIP] server
require 'sinatra/base'
module Tmbundle
  module Manager
    # Minimal HTTP API for the bundle manager.
    class Server < Sinatra::Base
      # GET /api/v1/bundles
      #
      # Responds with a YAML-encoded list of bundles, each entry carrying
      # a +name+ and the +git+ URL it can be cloned from.
      # NOTE(review): the list is hard-coded for now (work in progress).
      get '/api/v1/bundles' do
        [
          {
            name: 'elia/avian-missing',
            git: 'https://github.com/elia/avian-missing.git'
          },
          {
            name: 'elia/markdown-redcarpet',
            git: 'https://github.com/elia/markdown-redcarpet.git'
          },
        ].to_yaml
      end
    end
  end
end
|
module Tori
  module Backend
    # Stores Tori resources as plain files beneath a root directory.
    class FileSystem
      # @return [Pathname] directory all files are stored under
      attr_accessor :root

      # @param root [Pathname] storage root; created if it does not exist
      def initialize(root)
        @root = root
        FileUtils.mkdir_p @root.to_s
      end

      # Writes +resource+ under +filename+.
      #
      # @param filename [#to_s] name relative to the root
      # @param resource [String, Pathname, IO] literal content, a source
      #   file path, or a readable stream
      def write(filename, resource)
        case resource
        when String
          ::File.open(path(filename), 'w'){ |f| f.write resource }
        when Pathname
          # Copy via explicitly opened files rather than handing a path
          # string to IO.copy_stream, which can raise
          # Encoding::UndefinedConversionError on some Ruby versions.
          # see also https://bugs.ruby-lang.org/issues/11199
          ::File.open(resource) { |src|
            ::File.open(path(filename), 'w'){ |dst|
              ::IO.copy_stream src, dst
            }
          }
        else
          ::IO.copy_stream resource, path(filename)
        end
      end

      # Removes the stored file.
      def delete(filename)
        ::File.unlink path(filename)
      end

      # @return [Boolean] whether +filename+ exists under the root
      def exist?(filename)
        ::File.exist? path(filename)
      end
      alias exists? exist?

      # @return [String] the stored file's contents
      def read(filename)
        ::File.read path(filename)
      end

      # @return [Pathname] full path of +filename+ under the root
      def path(filename)
        @root.join filename.to_s
      end
    end
  end
end
Avoid Encoding::UndefinedConversionError
when running on Ruby 2.3.0dev
module Tori
  module Backend
    # File-system backed storage for Tori resources.
    class FileSystem
      attr_accessor :root

      # Creates the backend rooted at +root+, making the directory if needed.
      def initialize(root)
        @root = root
        FileUtils.mkdir_p @root.to_s
      end

      # Persists +resource+ (String content, Pathname source file, or an
      # IO-like stream) under +filename+.
      def write(filename, resource)
        destination = path(filename)
        if resource.is_a?(String)
          ::File.open(destination, 'w') { |f| f.write resource }
        elsif resource.is_a?(Pathname)
          # see also https://bugs.ruby-lang.org/issues/11199
          ::File.open(resource) do |src|
            ::File.open(destination, 'w') do |dst|
              ::IO.copy_stream(src, dst)
            end
          end
        else
          ::IO.copy_stream(resource, destination)
        end
      end

      # Deletes the stored file.
      def delete(filename)
        ::File.unlink(path(filename))
      end

      # True when +filename+ has been written under the root.
      def exist?(filename)
        ::File.exist?(path(filename))
      end
      alias exists? exist?

      # Returns the stored file's contents.
      def read(filename)
        ::File.read(path(filename))
      end

      # Full path of +filename+ under the storage root.
      def path(filename)
        @root.join(filename.to_s)
      end
    end
  end
end
|
module TransamFunding
  # Gem version string. Frozen so the shared constant cannot be mutated.
  VERSION = "0.0.52".freeze
end
Bump version
module TransamFunding
  # Gem version string. Frozen so the shared constant cannot be mutated.
  VERSION = "0.0.53".freeze
end
|
module TransamTransit
  # Gem version string (release candidate). Frozen to prevent mutation.
  VERSION = "2.12.0-rc.1".freeze
end
Update version.
module TransamTransit
  # Gem version string. Frozen to prevent mutation.
  VERSION = "2.12.0".freeze
end
|
module TransamTransit
  # Gem version string. Frozen to prevent mutation.
  VERSION = "0.1.96".freeze
end
Bump version
module TransamTransit
  # Gem version string. Frozen to prevent mutation.
  VERSION = "0.1.97".freeze
end
|
module Typus
  module Translate
    module ClassMethods
      # Declares the given attributes as translatable (delegating to
      # +translates+) and wires up nested translation attributes.
      # Guarded by #typus_translates? against repeat application.
      def typus_translate(*args)
        return if typus_translates?
        cattr_accessor :typus_translate_options
        self.typus_translate_options ||= args.extract_options!
        # Inherit the global fallback setting unless the caller set one.
        self.typus_translate_options[:fallbacks_for_empty_translations] = Typus::Translate::Configuration.config['fallbacks_for_empty_translations'] if self.typus_translate_options[:fallbacks_for_empty_translations].nil? && !Typus::Translate::Configuration.config['fallbacks_for_empty_translations'].nil?
        # Re-append the (possibly augmented) options before delegating.
        args << self.typus_translate_options
        translates *args
        accepts_nested_attributes_for :translations
        self::Translation.attr_accessible :locale
        extend TemplateMethods
      end

      # Whether typus_translate has already been applied.
      # NOTE(review): this checks +included_modules+, but typus_translate
      # uses +extend+ — verify the guard actually trips on repeat calls.
      def typus_translates?
        included_modules.include?(TemplateMethods)
      end
    end

    module TemplateMethods
      # Uses the 'translate' form template for translated attributes,
      # falling back to the default template lookup otherwise.
      def typus_template(attribute)
        if self.translated_attribute_names.include? attribute.to_sym
          'translate'
        else
          super(attribute)
        end
      end
    end
  end
end
fix support for mass assignment security
module Typus
  # Adds Globalize-style translation support to Typus-managed models.
  module Translate
    # Class-level macro mixed into models to enable translated attributes.
    module ClassMethods
      # Declares translated attributes for this model and wires up Typus
      # support, including Rails mass-assignment (whitelist) handling.
      #
      # args - translated attribute names, optionally ending with an options
      #        hash; supports :as for the mass-assignment role (default :admin).
      #
      # Idempotent: returns early when already applied.
      def typus_translate(*args)
        return if typus_translates?
        cattr_accessor :typus_translate_options
        self.typus_translate_options ||= args.extract_options!
        # Inherit the global fallbacks_for_empty_translations setting when the
        # caller did not specify one and a global value exists.
        self.typus_translate_options[:fallbacks_for_empty_translations] = Typus::Translate::Configuration.config['fallbacks_for_empty_translations'] if self.typus_translate_options[:fallbacks_for_empty_translations].nil? && !Typus::Translate::Configuration.config['fallbacks_for_empty_translations'].nil?
        # BUG FIX: dup the argument list. `fields = args` aliased the array,
        # so `args << typus_translate_options` below also appended the options
        # hash to `fields`, leaking a Hash into the attr_accessible whitelist.
        fields = args.dup
        as = self.typus_translate_options.delete(:as) || :admin
        args << self.typus_translate_options
        translates *args
        extend TemplateMethods
        # Support for Rails whitelist (mass-assignment protection) mode.
        unless accessible_attributes.empty?
          attr_accessible :translations_attributes, :as => as
          self::Translation.attr_accessible :locale, :as => as
          fields.each do |f|
            self::Translation.attr_accessible f, :as => as
          end
        end
        accepts_nested_attributes_for :translations
      end

      # Whether typus_translate has already been applied to this class.
      # NOTE(review): TemplateMethods is added via `extend` (singleton class),
      # but `included_modules` reports instance-level includes — verify this
      # guard actually fires on repeated calls.
      def typus_translates?
        included_modules.include?(TemplateMethods)
      end
    end

    # Tells Typus which admin template to render for translated attributes.
    module TemplateMethods
      def typus_template(attribute)
        if self.translated_attribute_names.include? attribute.to_sym
          'translate'
        else
          super(attribute)
        end
      end
    end
  end
end
|
module Travis
  module Build
    # Mixin for classes (via +extend+) that lets callers mark methods whose
    # falsy return value should raise instead of being returned.
    module Assertions
      # Raised when a method marked with +assert+ returns a falsy value.
      class AssertionFailed < RuntimeError
        attr_reader :object, :method

        # object - the instance whose assertion failed
        # method - the name (Symbol) of the failing method
        def initialize(object, method)
          @object = object
          @method = method
        end
      end

      # Builds an instance and mixes the assertion wrappers into its
      # singleton class so +assert+ed methods are checked per instance.
      #
      # BUG FIX: forward constructor arguments (and block) to +super+ so
      # classes whose initializer takes parameters can extend this module;
      # the argument-less `def new` raised ArgumentError for them.
      def new(*args, &block)
        super(*args, &block).tap do |instance|
          (class << instance; self; end).send(:include, assertions)
        end
      end

      # Module accumulating the generated wrapper methods; mixed into each
      # instance's singleton class by +new+.
      def assertions
        @assertions ||= Module.new
      end

      # Wraps +name+ so that a falsy return value raises AssertionFailed.
      def assert(name)
        assertions.send(:define_method, name) do |*args|
          super(*args).tap do |result|
            raise AssertionFailed.new(self, name) unless result
          end
        end
      end
    end
  end
end
Move the AssertionFailed exception out of the Assertions module, up into Travis::Build.
module Travis
  module Build
    # Raised when an assert-wrapped method returns a falsy result.
    class AssertionFailed < RuntimeError
      attr_reader :object, :method

      # object - the instance whose assertion failed
      # method - the name (Symbol) of the failing method
      def initialize(object, method)
        @object = object
        @method = method
      end
    end

    # Extend a class with this module to wrap chosen methods so that a
    # falsy return value raises AssertionFailed instead of being returned.
    module Assertions
      # Instantiates the class, then mixes the wrapper module into the new
      # object's singleton class so assertions apply per instance.
      def new(*args)
        instance = super(*args)
        instance.singleton_class.send(:include, assertions)
        instance
      end

      # Lazily-created module accumulating the wrapper methods.
      def assertions
        @assertions ||= Module.new
      end

      # Defines a wrapper for +name+ that raises on falsy results.
      def assert(name)
        assertions.send(:define_method, name) do |*wrapped_args|
          result = super(*wrapped_args)
          raise AssertionFailed.new(self, name) unless result
          result
        end
      end
    end
  end
end
|
module Travis
module Build
class Script
# rvm-based Ruby selection for build scripts.
module RVM
# rvm user-db overrides: fetch binary rubies from the travis-rubies S3
# bucket with download verification enabled (written out in #setup).
USER_DB = %w[
rvm_remote_server_url3=https://s3.amazonaws.com/travis-rubies
rvm_remote_server_path3=binary
rvm_remote_server_verify_downloads3=1
].join("\n")
# Scope the build cache per Ruby version.
def cache_slug
super << "--rvm-" << ruby_version.to_s
end
# Export TRAVIS_RUBY_VERSION into the build environment.
def export
super
set 'TRAVIS_RUBY_VERSION', config[:rvm], echo: false
end
# Install and select the requested Ruby via rvm.
def setup
super
cmd "echo '#{USER_DB}' > $rvm_path/user/db", echo: false
if ruby_version =~ /ruby-head/
# *-head rubies: wipe any cached alias/install and fetch the latest
# binary build so head builds never go stale.
fold("rvm.1") do
cmd 'echo -e "\033[33;1mSetting up latest %s\033[0m"' % ruby_version, assert: false, echo: false
cmd "rvm get stable", assert: false if ruby_version == 'jruby-head'
cmd "export ruby_alias=`rvm alias show #{ruby_version} 2>/dev/null`", assert: false
cmd "rvm alias delete #{ruby_version}", assert: false
cmd "rvm remove ${ruby_alias:-#{ruby_version}} --gems", assert: false
cmd "rvm remove #{ruby_version} --gems --fuzzy", assert: false
cmd "rvm install #{ruby_version} --binary"
end
cmd "rvm use #{ruby_version}"
else
cmd "rvm use #{ruby_version} --install --binary --fuzzy"
end
end
# Print ruby/rvm versions into the build log.
def announce
super
cmd 'ruby --version'
cmd 'rvm --version'
end
private
# Requested Ruby from the build config; rewrites "-XYmode" suffixes
# (e.g. "jruby-18mode") to rvm's "-dXY" naming — TODO confirm mapping.
def ruby_version
config[:rvm].to_s.
gsub(/-(1[89]|2[01])mode$/, '-d\1')
end
end
end
end
end
Multi-OS support — this way Mac VMs will not download a Ruby compiled for Ubuntu.
module Travis
module Build
class Script
# rvm-based Ruby selection for build scripts.
module RVM
# rvm user-db overrides: fetch binary rubies from the travis-rubies S3
# bucket. server_type3=rubies lets rvm append the OS/arch path segment,
# so e.g. Mac VMs do not download binaries compiled for Ubuntu.
USER_DB = %w[
rvm_remote_server_url3=https://s3.amazonaws.com/travis-rubies/binaries
rvm_remote_server_type3=rubies
rvm_remote_server_verify_downloads3=1
].join("\n")
# Scope the build cache per Ruby version.
def cache_slug
super << "--rvm-" << ruby_version.to_s
end
# Export TRAVIS_RUBY_VERSION into the build environment.
def export
super
set 'TRAVIS_RUBY_VERSION', config[:rvm], echo: false
end
# Install and select the requested Ruby via rvm.
def setup
super
cmd "echo '#{USER_DB}' > $rvm_path/user/db", echo: false
if ruby_version =~ /ruby-head/
# *-head rubies: wipe any cached alias/install and fetch the latest
# binary build so head builds never go stale.
fold("rvm.1") do
cmd 'echo -e "\033[33;1mSetting up latest %s\033[0m"' % ruby_version, assert: false, echo: false
cmd "rvm get stable", assert: false if ruby_version == 'jruby-head'
cmd "export ruby_alias=`rvm alias show #{ruby_version} 2>/dev/null`", assert: false
cmd "rvm alias delete #{ruby_version}", assert: false
cmd "rvm remove ${ruby_alias:-#{ruby_version}} --gems", assert: false
cmd "rvm remove #{ruby_version} --gems --fuzzy", assert: false
cmd "rvm install #{ruby_version} --binary"
end
cmd "rvm use #{ruby_version}"
else
cmd "rvm use #{ruby_version} --install --binary --fuzzy"
end
end
# Print ruby/rvm versions into the build log.
def announce
super
cmd 'ruby --version'
cmd 'rvm --version'
end
private
# Requested Ruby from the build config; rewrites "-XYmode" suffixes
# (e.g. "jruby-18mode") to rvm's "-dXY" naming — TODO confirm mapping.
def ruby_version
config[:rvm].to_s.
gsub(/-(1[89]|2[01])mode$/, '-d\1')
end
end
end
end
end
|
# Validates that a matrix is a non-empty rectangular grid of single
# uppercase English letters.
class MatrixValidator
  # matrix - an Array of rows, each row an Array of one-character Strings.
  def initialize(matrix)
    @matrix = matrix
  end

  # Runs all validations.
  #
  # Returns a Hash: { success: Boolean, messages: Array<String> }.
  # Shape and data-type checks are skipped for an empty matrix (they would
  # crash on `@matrix.first`).
  def validate
    result = { success: true, messages: [] }
    validate_not_empty(result)
    # idiom: `unless` instead of `if !`
    return result unless result[:success]
    validate_shape(result)
    validate_data_type(result)
    result
  end

  # Flags cells that are not single uppercase letters; appends one message
  # regardless of how many cells are invalid.
  def validate_data_type(result)
    single_capital_alpha_char = /[A-Z]/
    flattened_matrix = @matrix.flatten
    # multi-character cells are caught by the size check below even when
    # they contain a capital letter
    non_cap_alpha_chars = flattened_matrix.select { |cell| cell !~ single_capital_alpha_char }
    non_single_chars = flattened_matrix.select { |cell| cell.size > 1 }
    if !non_cap_alpha_chars.empty? || !non_single_chars.empty?
      result[:success] = false
      result[:messages] << 'Every member of the matrix must be a single uppercase English letter.'
    end
    result
  end

  # Flags an empty matrix (no rows, or an empty first row).
  def validate_not_empty(result)
    if @matrix.empty? || @matrix.first.empty?
      result[:success] = false
      result[:messages] << 'Matrix must not be empty.'
    end
    result
  end

  # Flags rows whose length differs from the first row's.
  def validate_shape(result)
    if @matrix.any? { |row| row.size != @matrix.first.size }
      result[:success] = false
      result[:messages] << 'All rows in the matrix must contain the same number of elements.'
    end
    # return the result hash for consistency with the other validators
    result
  end
end
Flip the negated conditional expression (`if !` becomes `unless`).
# Validates that a matrix is a non-empty rectangular grid of single
# uppercase English letters.
class MatrixValidator
  # matrix - an Array of rows, each row an Array of one-character Strings.
  def initialize(matrix)
    @matrix = matrix
  end

  # Runs all validations and returns { success: Boolean, messages: Array }.
  # Shape/data checks only run when the matrix is non-empty.
  def validate
    outcome = { success: true, messages: [] }
    validate_not_empty(outcome)
    return outcome unless outcome[:success]
    validate_shape(outcome)
    validate_data_type(outcome)
    outcome
  end

  # Flags cells that are not single uppercase letters.
  def validate_data_type(result)
    capital_letter = /[A-Z]/
    cells = @matrix.flatten
    missing_capital = cells.reject { |cell| cell =~ capital_letter }
    too_long = cells.reject { |cell| cell.size <= 1 }
    unless missing_capital.empty? && too_long.empty?
      result[:success] = false
      result[:messages] << 'Every member of the matrix must be a single uppercase English letter.'
    end
    result
  end

  # Flags an empty matrix (no rows, or an empty first row).
  def validate_not_empty(result)
    if @matrix.empty? || @matrix.first.empty?
      result[:success] = false
      result[:messages] << 'Matrix must not be empty.'
    end
    result
  end

  # Flags rows whose length differs from the first row's.
  def validate_shape(result)
    expected_size = @matrix.first.size
    if @matrix.any? { |row| row.size != expected_size }
      result[:success] = false
      result[:messages] << 'All rows in the matrix must contain the same number of elements.'
    end
  end
end
module USaidWat
module CLI
# Renders a reddit post in the default multi-line format: subreddit,
# permalink, title, age, and the external URL for link posts.
class PostFormatter < BaseFormatter
# Returns the colorized, multi-line representation of +post+.
# Uses @count to separate consecutive posts — presumably initialized
# by BaseFormatter; TODO confirm.
def format(post)
cols = tty.width
out = StringIO.new
# three blank lines between posts, none before the first
out.write("\n\n\n") unless @count == 0
out.write("#{post.subreddit}\n".color(:green))
out.write("#{post_link(post)}\n".color(:yellow))
# truncate the title to the terminal width
out.write("#{post.title.strip.unescape_html.truncate(cols)}\n".color(:magenta))
out.write("#{post_date(post)}".color(:blue))
# link posts: also show the target URL (self posts' url ends with the permalink)
out.write("\n#{post.url}") unless post.url.end_with?(post.permalink)
@count += 1
out.rewind
out.read
end
private
# Canonical post URL: permalink minus its trailing slug component.
def post_link(post)
"https://www.reddit.com#{post.permalink.split('/')[0..-2].join('/')}"
end
# Human-readable age of the post.
def post_date(post)
post.created_utc.ago
end
end
# One-line-per-post variant: "subreddit title", title truncated to fit.
class CompactPostFormatter < BaseFormatter
# Returns a single colorized line for +post+.
def format(post)
cols = tty.width
out = StringIO.new
subreddit = post.subreddit
# leave room for the subreddit name plus a separating space
cols -= subreddit.length + 1
title = post.title.strip.unescape_html.truncate(cols)
out.write(subreddit.color(:green))
out.write(" #{title}\n")
out.rewind
out.read
end
end
end
end
Don't truncate post titles
Since post parts are color-coded now, this doesn't seem to be necessary.
module USaidWat
module CLI
# Renders a reddit post in the default multi-line format: subreddit,
# permalink, full (untruncated) title, age, and the external URL for
# link posts.
class PostFormatter < BaseFormatter
# Returns the colorized, multi-line representation of +post+.
# Uses @count to separate consecutive posts — presumably initialized
# by BaseFormatter; TODO confirm.
def format(post)
cols = tty.width
out = StringIO.new
# three blank lines between posts, none before the first
out.write("\n\n\n") unless @count == 0
out.write("#{post.subreddit}\n".color(:green))
out.write("#{post_link(post)}\n".color(:yellow))
# the title is intentionally not truncated in this formatter
out.write("#{post.title.strip.unescape_html}\n".color(:magenta))
out.write("#{post_date(post)}".color(:blue))
# link posts: also show the target URL (self posts' url ends with the permalink)
out.write("\n#{post.url}") unless post.url.end_with?(post.permalink)
@count += 1
out.rewind
out.read
end
private
# Canonical post URL: permalink minus its trailing slug component.
def post_link(post)
"https://www.reddit.com#{post.permalink.split('/')[0..-2].join('/')}"
end
# Human-readable age of the post.
def post_date(post)
post.created_utc.ago
end
end
# One-line-per-post variant: "subreddit title", title truncated to fit.
class CompactPostFormatter < BaseFormatter
# Returns a single colorized line for +post+.
def format(post)
cols = tty.width
out = StringIO.new
subreddit = post.subreddit
# leave room for the subreddit name plus a separating space
cols -= subreddit.length + 1
title = post.title.strip.unescape_html.truncate(cols)
out.write(subreddit.color(:green))
out.write(" #{title}\n")
out.rewind
out.read
end
end
end
end
|
module VagrantPlugins
module Proxmox
# Plugin gem version.
VERSION = '0.0.9'
end
end
Bump version to 0.0.10.
module VagrantPlugins
module Proxmox
# Plugin gem version.
VERSION = '0.0.10'
end
end
|
module Virtus
  module InstanceMethods
    # Chains Class.new to be able to set attributes during initialization of
    # an object.
    #
    # @param [Hash] attributes
    #   the attributes hash to be set
    #
    # @return [Object]
    #
    # @api private
    def initialize(attributes = {})
      self.attributes = attributes
    end

    # Returns a value of the attribute with the given name
    #
    # @param [Symbol] name
    #   a name of an attribute
    #
    # @return [Object]
    #   a value of an attribute
    #
    # @api public
    def attribute_get(name)
      __send__(name)
    end

    # Sets a value of the attribute with the given name
    #
    # @param [Symbol] name
    #   a name of an attribute
    #
    # @param [Object] value
    #   a value to be set
    #
    # @return [Object]
    #   the value set on an object
    #
    # @api public
    def attribute_set(name, value)
      __send__("#{name}=", value)
    end

    # Mass-assign of attribute values; entries without a corresponding
    # writer method are silently skipped.
    #
    # @param [Hash] attributes
    #   a hash of attribute values to be set on an object
    #
    # @return [Hash]
    #   the attributes
    #
    # @api public
    def attributes=(attributes)
      attributes.each do |name, value|
        # CONSISTENCY: route through the existing #attribute_set accessor
        # instead of duplicating the __send__ logic inline.
        attribute_set(name, value) if respond_to?("#{name}=")
      end
    end

    # Returns a hash of all publicly accessible attributes
    #
    # @return [Hash]
    #   the attributes
    #
    # @api public
    def attributes
      attributes = {}
      self.class.attributes.each_key do |name|
        # CONSISTENCY: route through the existing #attribute_get accessor.
        attributes[name] = attribute_get(name) if respond_to?(name)
      end
      attributes
    end
  end # InstanceMethods
end # Virtus
Use the existing #attribute_get and #attribute_set methods
module Virtus
  module InstanceMethods
    # Chains Class.new so attributes can be supplied at construction time.
    #
    # @param [Hash] attributes
    #   the attributes hash to be set
    #
    # @return [Object]
    #
    # @api private
    def initialize(attributes = {})
      self.attributes = attributes
    end

    # Reads the attribute with the given name.
    #
    # @param [Symbol] name
    #   a name of an attribute
    #
    # @return [Object]
    #   the attribute's current value
    #
    # @api public
    def attribute_get(name)
      __send__(name)
    end

    # Writes the attribute with the given name.
    #
    # @param [Symbol] name
    #   a name of an attribute
    #
    # @param [Object] value
    #   a value to be set
    #
    # @return [Object]
    #   the value set on an object
    #
    # @api public
    def attribute_set(name, value)
      __send__("#{name}=", value)
    end

    # Mass-assigns attribute values, silently skipping entries that have no
    # corresponding writer method.
    #
    # @param [Hash] attributes
    #   a hash of attribute values to be set on an object
    #
    # @return [Hash]
    #   the attributes
    #
    # @api public
    def attributes=(attributes)
      attributes.each_pair do |name, value|
        writer = "#{name}="
        attribute_set(name, value) if respond_to?(writer)
      end
    end

    # Collects every publicly readable attribute into a Hash.
    #
    # @return [Hash]
    #   the attributes
    #
    # @api public
    def attributes
      self.class.attributes.each_key.each_with_object({}) do |name, result|
        result[name] = attribute_get(name) if respond_to?(name)
      end
    end
  end # InstanceMethods
end # Virtus
|
module Watir
# Gem version.
VERSION = "0.5.7"
end
Bump to version 0.5.8
module Watir
# Gem version.
VERSION = "0.5.8"
end
|
module WebDriverUtils
# Gem version and release date; guarded with `defined?` to avoid
# constant-redefinition warnings when this file is loaded twice.
VERSION = '0.0.2' unless defined? ::WebDriverUtils::VERSION
DATE = '2015-05-10' unless defined? ::WebDriverUtils::DATE
end
Bump version to 0.0.3
module WebDriverUtils
# Gem version and release date; guarded with `defined?` to avoid
# constant-redefinition warnings when this file is loaded twice.
VERSION = '0.0.3' unless defined? ::WebDriverUtils::VERSION
DATE = '2015-05-12' unless defined? ::WebDriverUtils::DATE
end
|
require 'jsonclient'
##
# Wrapper around the WeChat SNS (web OAuth2) access-token endpoints.
# Distinct from Wechat::Common::AccessToken (the basic API token).
class Wechat::SNS::AccessToken
extend Wechat::Core::Common
##
# Appendix: check whether an authorization credential (access_token) is
# still valid for the given user.
# http://mp.weixin.qq.com/wiki/9/01f711493b5a02f24b04365ac5d8fd95.html
#
# Return hash format if success:
# { errcode: 0, errmsg: 'ok' }
#
# Return hash format if failure:
# { errcode: 40003, errmsg: 'invalid openid' }
#
# NOTE(review): "opend_id" looks like a typo for "open_id"; kept to avoid
# changing the public signature.
def self.load(access_token, opend_id)
assert_present! :access_token, access_token
assert_present! :opend_id, opend_id
#raise ArgumentError.new('The access_token argument is required.') if access_token.blank?
get_json 'https://api.weixin.qq.com/sns/auth', body:
{
access_token: access_token,
openid: opend_id
}
end
# Step 3: refresh the web-authorization access_token when needed.
# http://mp.weixin.qq.com/wiki/9/01f711493b5a02f24b04365ac5d8fd95.html
#
# Return hash format if success:
# {
# access_token: <ACCESS_TOKEN>,   # web-OAuth token (not the basic API token)
# expires_in: 7200,               # token lifetime in seconds
# refresh_token: <REFRESH_TOKEN>, # used for subsequent refreshes
# openid: <OPEN_ID>,              # unique user id for this official account
# scope: <SCOPE>                  # comma-separated granted scopes
# }
#
# Return hash format if failure:
# { errcode: <ERROR_CODE>, errmsg: <ERROR_MESSAGE> }
def self.update(app_id, refresh_token)
assert_present! :app_id, app_id
assert_present! :refresh_token, refresh_token
# raise ArgumentError.new('The app_id argument is required.') if app_id.blank?
get_json 'https://api.weixin.qq.com/sns/oauth2/refresh_token', body:
{
appid: app_id,
grant_type: 'refresh_token',
refresh_token: refresh_token
}
end
# Step 2: exchange the OAuth code for a web-authorization access_token.
# http://mp.weixin.qq.com/wiki/9/01f711493b5a02f24b04365ac5d8fd95.html
#
# Return hash format if success:
# {
# access_token: <ACCESS_TOKEN>,   # web-OAuth token (not the basic API token)
# expires_in: 7200,               # token lifetime in seconds
# refresh_token: <REFRESH_TOKEN>, # used to refresh the access_token
# openid: <OPEN_ID>,              # unique user id for this official account
# scope: <SCOPE>,                 # comma-separated granted scopes
# unionid: <UNION_ID>             # only present when bound to an open-platform account
# }
#
# Return hash format if failure:
# { errcode: <ERROR_CODE>, errmsg: <ERROR_MESSAGE> }
#
# NOTE(review): unlike .load/.update, only app_id is validated here;
# app_secret and code pass through unchecked.
def self.create(app_id, app_secret, code, grant_type: 'authorization_code')
raise ArgumentError.new('The app_id argument is required.') if app_id.blank?
get_json 'https://api.weixin.qq.com/sns/oauth2/access_token', body:
{
appid: app_id,
secret: app_secret,
code: code,
grant_type: grant_type
}
end
end
Improve the AccessToken wrapper class's argument validations.
require 'jsonclient'
##
# Wrapper around the WeChat SNS (web OAuth2) access-token endpoints.
# Distinct from Wechat::Common::AccessToken (the basic API token).
class Wechat::SNS::AccessToken
  extend Wechat::Core::Common

  ##
  # Appendix: check whether an authorization credential (access_token) is
  # still valid for the given user.
  # http://mp.weixin.qq.com/wiki/9/01f711493b5a02f24b04365ac5d8fd95.html
  #
  # Returns { errcode: 0, errmsg: 'ok' } on success, or an error hash such
  # as { errcode: 40003, errmsg: 'invalid openid' } on failure.
  #
  # NOTE(review): "opend_id" looks like a typo for "open_id"; kept to avoid
  # changing the public signature.
  def self.load(access_token, opend_id)
    assert_present! :access_token, access_token
    assert_present! :opend_id, opend_id
    get_json 'https://api.weixin.qq.com/sns/auth', body:
      {
        access_token: access_token,
        openid: opend_id
      }
  end

  # Step 3: refresh the web-authorization access_token when needed.
  # http://mp.weixin.qq.com/wiki/9/01f711493b5a02f24b04365ac5d8fd95.html
  #
  # Returns on success:
  #   { access_token:, expires_in: 7200, refresh_token:, openid:, scope: }
  # Returns on failure:
  #   { errcode: <ERROR_CODE>, errmsg: <ERROR_MESSAGE> }
  def self.update(app_id, refresh_token)
    assert_present! :app_id, app_id
    assert_present! :refresh_token, refresh_token
    get_json 'https://api.weixin.qq.com/sns/oauth2/refresh_token', body:
      {
        appid: app_id,
        grant_type: 'refresh_token',
        refresh_token: refresh_token
      }
  end

  # Step 2: exchange the OAuth code for a web-authorization access_token.
  # http://mp.weixin.qq.com/wiki/9/01f711493b5a02f24b04365ac5d8fd95.html
  #
  # Returns on success:
  #   { access_token:, expires_in: 7200, refresh_token:, openid:, scope:,
  #     unionid: }  # unionid only when bound to an open-platform account
  # Returns on failure:
  #   { errcode: <ERROR_CODE>, errmsg: <ERROR_MESSAGE> }
  def self.create(app_id, app_secret, code, grant_type: 'authorization_code')
    assert_present! :app_id, app_id
    # CONSISTENCY/ROBUSTNESS: validate the remaining required arguments the
    # same way .load and .update validate theirs, instead of letting blank
    # values reach the remote API.
    assert_present! :app_secret, app_secret
    assert_present! :code, code
    get_json 'https://api.weixin.qq.com/sns/oauth2/access_token', body:
      {
        appid: app_id,
        secret: app_secret,
        code: code,
        grant_type: grant_type
      }
  end
end
|
require 'wright/resource'
require 'wright/dsl'
module Wright
class Resource
# Package resource, represents a package.
#
# @example
#   vim = Wright::Resource::Package.new('vim')
#   vim.installed_versions
#   # => []
#   vim.install
#   vim.installed_versions
#   # => ["2:7.3.547-7"]
#
#   htop = Wright::Resource::Package.new('htop')
#   htop.installed_versions
#   # => ["1.0.1-1"]
#   htop.remove
#   htop.installed_versions
#   # => []
class Package < Wright::Resource
# @return [String] the package version to install or remove
attr_accessor :version
# Initializes a Package.
#
# @param name [String] the package's name
def initialize(name)
super
# nil version = no specific version requested
@version = nil
# default action performed when the resource is run
@action = :install
end
# @return [Array<String>] the installed package versions
def installed_versions
@provider.installed_versions
end
# Installs the Package.
#
# @return [Bool] true if the package was updated and false
#   otherwise
def install
might_update_resource do
@provider.install
end
end
# Removes the Package.
#
# @return [Bool] true if the package was updated and false
#   otherwise
def remove
might_update_resource do
@provider.remove
end
end
# Convenience alias for #remove.
alias_method :uninstall, :remove
end
end
end
# Register the resource with the DSL and pick a default provider by OS
# family (explicit configuration always wins over the ||= defaults).
Wright::DSL.register_resource(Wright::Resource::Package)
Wright::Config[:resources][:package] ||= {}
case Wright::Util.os_family
when 'debian'
Wright::Config[:resources][:package][:provider] ||=
'Wright::Provider::Package::Apt'
when 'macosx'
Wright::Config[:resources][:package][:provider] ||=
'Wright::Provider::Package::Homebrew'
end
Simplify default package provider selection
require 'wright/resource'
require 'wright/dsl'
module Wright
class Resource
# Package resource, represents a package.
#
# @example
#   vim = Wright::Resource::Package.new('vim')
#   vim.installed_versions
#   # => []
#   vim.install
#   vim.installed_versions
#   # => ["2:7.3.547-7"]
#
#   htop = Wright::Resource::Package.new('htop')
#   htop.installed_versions
#   # => ["1.0.1-1"]
#   htop.remove
#   htop.installed_versions
#   # => []
class Package < Wright::Resource
# @return [String] the package version to install or remove
attr_accessor :version
# Initializes a Package.
#
# @param name [String] the package's name
def initialize(name)
super
# nil version = no specific version requested
@version = nil
# default action performed when the resource is run
@action = :install
end
# @return [Array<String>] the installed package versions
def installed_versions
@provider.installed_versions
end
# Installs the Package.
#
# @return [Bool] true if the package was updated and false
#   otherwise
def install
might_update_resource do
@provider.install
end
end
# Removes the Package.
#
# @return [Bool] true if the package was updated and false
#   otherwise
def remove
might_update_resource do
@provider.remove
end
end
# Convenience alias for #remove.
alias_method :uninstall, :remove
end
end
end
Wright::DSL.register_resource(Wright::Resource::Package)
Wright::Config[:resources][:package] ||= {}
# Default provider by OS family; explicit configuration wins (||=).
# Unknown OS families leave the provider unset (nil).
Wright::Config[:resources][:package][:provider] ||=
case Wright::Util.os_family
when 'debian'
'Wright::Provider::Package::Apt'
when 'macosx'
'Wright::Provider::Package::Homebrew'
end
|
module XcodeprojUtils
# Gem version.
VERSION = "0.2.1"
end
Update version to 0.2.2
module XcodeprojUtils
# Gem version.
VERSION = "0.2.2"
end
|
require File.dirname(__FILE__) + '/../../spec_helper'
require 'ostruct'
# Specs for OpenStruct#table, the protected accessor for the internal
# attribute/value hash.
describe "OpenStruct#table" do
before(:each) do
@os = OpenStruct.new("age" => 20, "name" => "John")
end
it "is protected" do
# map to_s so the check works whether protected_methods returns
# Strings (1.8) or Symbols (1.9)
@os.protected_methods.map {|m| m.to_s }.should include("table")
end
it "returns self's method/value table" do
# keys are symbolized regardless of the String keys given to .new
@os.send(:table).should == { :age => 20, :name => "John" }
end
end
Use have_protected_instance_method matcher for 1.9 compat.
require File.dirname(__FILE__) + '/../../spec_helper'
require 'ostruct'
# Specs for OpenStruct#table, the protected accessor for the internal
# attribute/value hash.
describe "OpenStruct#table" do
before(:each) do
@os = OpenStruct.new("age" => 20, "name" => "John")
end
it "is protected" do
# matcher handles both 1.8 (String names) and 1.9 (Symbol names)
OpenStruct.should have_protected_instance_method(:table)
end
it "returns self's method/value table" do
# keys are symbolized regardless of the String keys given to .new
@os.send(:table).should == { :age => 20, :name => "John" }
end
end
|
# Puma configuration: cluster mode with WEB_CONCURRENCY worker processes.
workers Integer(ENV['WEB_CONCURRENCY'] || 5)
# Fixed-size thread pool per worker (min == max).
threads_count = Integer(ENV['MAX_THREADS'] || 32)
threads threads_count, threads_count
# Load the application before forking workers (copy-on-write friendly).
preload_app!
rackup DefaultRackup
port ENV['PORT'] || 9292
Update web_concurrency to avoid ThreadError
# Puma configuration: cluster mode with WEB_CONCURRENCY worker processes
# (default lowered to 1 to avoid ThreadError under constrained dynos).
workers Integer(ENV['WEB_CONCURRENCY'] || 1)
# Fixed-size thread pool per worker (min == max).
threads_count = Integer(ENV['MAX_THREADS'] || 32)
threads threads_count, threads_count
# Load the application before forking workers (copy-on-write friendly).
preload_app!
rackup DefaultRackup
port ENV['PORT'] || 9292
|
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/exe_format/coff'
require 'metasm/encode'
module Metasm
class COFF
class Header
# finds good default values for header fields left unset before encoding:
# machine defaults to UNKNOWN, section count from coff.sections, timestamp
# from the current time, optional-header size from its encoded size
def set_default_values(coff, opth)
@machine ||= 'UNKNOWN'
@num_sect ||= coff.sections.length
@time ||= Time.now.to_i
@size_opthdr ||= opth.virtsize
super(coff)
end
end
class OptionalHeader
# encodes an Optional header and the directories
def encode(coff)
opth = super(coff)
DIRECTORIES[0, @numrva].each { |d|
if d = coff.directory[d]
d = d.dup
d[0] = Expression[d[0], :-, coff.label_at(coff.encoded, 0)] if d[0].kind_of?(::String)
else
d = [0, 0]
end
opth << coff.encode_word(d[0]) << coff.encode_word(d[1])
}
opth
end
# find good default values for optheader members, based on coff.sections
def set_default_values(coff)
@signature ||= 'PE'
@link_ver_maj ||= 1
@link_ver_min ||= 0
@sect_align ||= 0x1000
align = lambda { |sz| EncodedData.align_size(sz, @sect_align) }
@code_size ||= coff.sections.find_all { |s| s.characteristics.include? 'CONTAINS_CODE' }.inject(0) { |sum, s| sum + align[s.virtsize] }
@data_size ||= coff.sections.find_all { |s| s.characteristics.include? 'CONTAINS_DATA' }.inject(0) { |sum, s| sum + align[s.virtsize] }
@udata_size ||= coff.sections.find_all { |s| s.characteristics.include? 'CONTAINS_UDATA' }.inject(0) { |sum, s| sum + align[s.virtsize] }
@entrypoint = Expression[@entrypoint, :-, coff.label_at(coff.encoded, 0)] if entrypoint and not @entrypoint.kind_of?(::Integer)
tmp = coff.sections.find { |s| s.characteristics.include? 'CONTAINS_CODE' }
@base_of_code ||= (tmp ? Expression[coff.label_at(tmp.encoded, 0), :-, coff.label_at(coff.encoded, 0)] : 0)
tmp = coff.sections.find { |s| s.characteristics.include? 'CONTAINS_DATA' }
@base_of_data ||= (tmp ? Expression[coff.label_at(tmp.encoded, 0), :-, coff.label_at(coff.encoded, 0)] : 0)
@image_base ||= coff.label_at(coff.encoded, 0)
@file_align ||= 0x200
@os_ver_maj ||= 4
@subsys_maj ||= 4
@image_size ||= coff.new_label('image_size')
@headers_size ||= coff.new_label('headers_size')
@checksum ||= coff.new_label('checksum')
@subsystem ||= 'WINDOWS_GUI'
@stack_reserve||= 0x100000
@stack_commit ||= 0x1000
@heap_reserve ||= 0x100000
@heap_commit ||= 0x1000
@numrva ||= DIRECTORIES.length
super(coff)
end
end
class Section
# find good default values for section header members, defines rawaddr/rawsize as new_label for later fixup
def set_default_values(coff)
@name ||= ''
@virtsize ||= @encoded.virtsize
# virtaddr as an rva: section start label minus image start label
@virtaddr ||= Expression[coff.label_at(@encoded, 0, 'sect_start'), :-, coff.label_at(coff.encoded, 0)]
# raw file offset/size are unknown at this point: placeholder labels,
# fixed up when the file layout is finalized
@rawsize ||= coff.new_label('sect_rawsize')
@rawaddr ||= coff.new_label('sect_rawaddr')
super(coff)
end
end
class ExportDirectory
# encodes an export directory
def encode(coff)
edata = {}
%w[edata addrtable namptable ord_table libname nametable].each { |name|
edata[name] = EncodedData.new
}
label = lambda { |n| coff.label_at(edata[n], 0, n) }
rva = lambda { |n| Expression[label[n], :-, coff.label_at(coff.encoded, 0)] }
rva_end = lambda { |n| Expression[[label[n], :-, coff.label_at(coff.encoded, 0)], :+, edata[n].virtsize] }
# ordinal base: smallest number > 1 to honor ordinals, minimize gaps
olist = @exports.map { |e| e.ordinal }.compact
# start with lowest ordinal, substract all exports unused to fill ordinal sequence gaps
omin = olist.min.to_i
gaps = olist.empty? ? 0 : olist.max+1 - olist.min - olist.length
noord = @exports.length - olist.length
@ordinal_base ||= [omin - (noord - gaps), 1].max
@libname_p = rva['libname']
@num_exports = [@exports.length, @exports.map { |e| e.ordinal }.compact.max.to_i - @ordinal_base].max
@num_names = @exports.find_all { |e| e.name }.length
@func_p = rva['addrtable']
@names_p = rva['namptable']
@ord_p = rva['ord_table']
edata['edata'] << super(coff)
edata['libname'] << @libname << 0
elist = @exports.find_all { |e| e.name and not e.ordinal }.sort_by { |e| e.name }
@exports.find_all { |e| e.ordinal }.sort_by { |e| e.ordinal }.each { |e| elist.insert(e.ordinal-@ordinal_base, e) }
elist.each { |e|
if not e
# export by ordinal with gaps
# XXX test this value with the windows loader
edata['addrtable'] << coff.encode_word(0xffff_ffff)
next
end
if e.forwarder_lib
edata['addrtable'] << coff.encode_word(rva_end['nametable'])
edata['nametable'] << e.forwarder_lib << ?. <<
if not e.forwarder_name
"##{e.forwarder_ordinal}"
else
e.forwarder_name
end << 0
else
edata['addrtable'] << coff.encode_word(Expression[e.target, :-, coff.label_at(coff.encoded, 0)])
end
if e.name
edata['ord_table'] << coff.encode_half(edata['addrtable'].virtsize/4 - 1)
edata['namptable'] << coff.encode_word(rva_end['nametable'])
edata['nametable'] << e.name << 0
end
}
# sorted by alignment directives
%w[edata addrtable namptable ord_table libname nametable].inject(EncodedData.new) { |ed, name| ed << edata[name] }
end
def set_default_values(coff)
@timestamp ||= Time.now.to_i
@libname ||= 'metalib'
@ordinal_base ||= 1
super(coff)
end
end
class ImportDirectory
# encodes all import directories + iat
def self.encode(coff, ary)
edata = { 'iat' => [] }
%w[idata ilt nametable].each { |name| edata[name] = EncodedData.new }
ary.each { |i| i.encode(coff, edata) }
it = edata['idata'] <<
coff.encode_word(0) <<
coff.encode_word(0) <<
coff.encode_word(0) <<
coff.encode_word(0) <<
coff.encode_word(0) <<
edata['ilt'] <<
edata['nametable']
iat = edata['iat'] # why not fragmented ?
[it, iat]
end
# encodes an import directory + iat + names in the edata hash received as arg
def encode(coff, edata)
edata['iat'] << EncodedData.new
# edata['ilt'] = edata['iat']
label = lambda { |n| coff.label_at(edata[n], 0, n) }
rva = lambda { |n| Expression[label[n], :-, coff.label_at(coff.encoded, 0)] }
rva_end = lambda { |n| Expression[[label[n], :-, coff.label_at(coff.encoded, 0)], :+, edata[n].virtsize] }
@libname_p = rva_end['nametable']
@ilt_p = rva_end['ilt']
@iat_p ||= Expression[coff.label_at(edata['iat'].last, 0, 'iat'), :-, coff.label_at(coff.encoded, 0)]
edata['idata'] << super(coff)
edata['nametable'] << @libname << 0
ord_mask = 1 << (coff.optheader.signature == 'PE+' ? 63 : 31)
@imports.each { |i|
edata['iat'].last.add_export i.target, edata['iat'].last.virtsize if i.target
if i.ordinal
ptr = coff.encode_xword(Expression[i.ordinal, :|, ord_mask])
else
edata['nametable'].align 2
ptr = coff.encode_xword(rva_end['nametable'])
edata['nametable'] << coff.encode_half(i.hint || 0) << i.name << 0
end
edata['ilt'] << ptr
edata['iat'].last << ptr
}
edata['ilt'] << coff.encode_xword(0)
edata['iat'].last << coff.encode_xword(0)
end
end
class TLSDirectory
# Encodes the TLS directory; returns [directory, callback_list] — the
# callback list is a separate EncodedData holding the callback pointer
# array, terminated by a null xword.
def encode(coff)
cblist = EncodedData.new
@callback_p = coff.label_at(cblist, 0, 'callback_p')
@callbacks.to_a.each { |cb|
cblist << coff.encode_xword(cb)
}
# null terminator for the callback pointer array
cblist << coff.encode_xword(0)
dir = super(coff)
[dir, cblist]
end
# Defaults the TLS data VA range to empty (end == start).
def set_default_values(coff)
@start_va ||= 0
@end_va ||= @start_va
super(coff)
end
end
class RelocationTable
  # Encodes a COFF base-relocation block: fixed header (via super), the
  # total block size word (8-byte header + 2 bytes per entry), then each
  # relocation entry.
  def encode(coff)
    rel = super(coff) << coff.encode_word(8 + 2*@relocs.length)
    @relocs.each { |r| rel << r.encode(coff) }
    rel
  end

  # CONSISTENCY FIX: this was named `setup_default_values`, which the
  # encoding pipeline never calls — every sibling class in this file
  # (Header, OptionalHeader, Section, TLSDirectory, ExportDirectory)
  # implements `set_default_values(coff)`, and the `super(coff)` call
  # below would raise NoMethodError under the old name if it were ever
  # invoked.
  def set_default_values(coff)
    # @base_addr is an rva: convert a label reference into an offset from
    # the image start
    @base_addr = Expression[@base_addr, :-, coff.label_at(coff.encoded, 0)] if @base_addr.kind_of?(::String)
    # pad the relocation list to an even count so the block stays
    # word-aligned (type 0 entries are no-op padding)
    if @relocs.length % 2 != 0
      r = Relocation.new
      r.type = 0
      r.offset = 0
      @relocs << r
    end
    super(coff)
  end
end
class ResourceDirectory
# compiles ressource directories
def encode(coff, edata = nil)
if not edata
# init recursion
edata = {}
subtables = %w[table names dataentries data]
subtables.each { |n| edata[n] = EncodedData.new }
encode(coff, edata)
return subtables.inject(EncodedData.new) { |sum, n| sum << edata[n] }
end
label = lambda { |n| coff.label_at(edata[n], 0, n) }
# data 'rva' are real rvas (from start of COFF)
rva_end = lambda { |n| Expression[[label[n], :-, coff.label_at(coff.encoded, 0)], :+, edata[n].virtsize] }
# names and table 'rva' are relative to the beginning of the resource directory
off_end = lambda { |n| Expression[[label[n], :-, coff.label_at(edata['table'], 0)], :+, edata[n].virtsize] }
# build name_w if needed
@entries.each { |e| e.name_w = e.name.unpack('C*').pack('v*') if e.name and not e.name_w }
# fixup forward references to us, as subdir
edata['table'].fixup @curoff_label => edata['table'].virtsize if defined? @curoff_label
@nr_names = @entries.find_all { |e| e.name_w }.length
@nr_id = @entries.find_all { |e| e.id }.length
edata['table'] << super(coff)
# encode entries, sorted by names nocase, then id
@entries.sort_by { |e| e.name_w ? [0, e.name_w.downcase] : [1, e.id] }.each { |e|
if e.name_w
edata['table'] << coff.encode_word(Expression[off_end['names'], :|, 1 << 31])
edata['names'] << coff.encode_half(e.name_w.length/2) << e.name_w
else
edata['table'] << coff.encode_word(e.id)
end
if e.subdir
e.subdir.curoff_label = coff.new_label('rsrc_curoff')
edata['table'] << coff.encode_word(Expression[e.subdir.curoff_label, :|, 1 << 31])
else # data entry
edata['table'] << coff.encode_word(off_end['dataentries'])
edata['dataentries'] <<
coff.encode_word(rva_end['data']) <<
coff.encode_word(e.data.length) <<
coff.encode_word(e.codepage || 0) <<
coff.encode_word(e.reserved || 0)
edata['data'] << e.data
end
}
# recurse
@entries.find_all { |e| e.subdir }.each { |e| e.subdir.encode(coff, edata) }
end
end
# computes the checksum for a given COFF file
# may not work with overlapping sections
# Algorithm: sum all 16-bit words of the headers and each section's raw
# data (first cancelling out the stored checksum field), folding carries
# back into the low 16 bits, then add the total file length.
def self.checksum(str, endianness = :little)
coff = load str
coff.endianness = endianness
coff.decode_header
coff.encoded.ptr = 0
flen = 0
csum = 0
# negate old checksum (subtract both halves of the stored word)
oldcs = coff.encode_word(coff.optheader.checksum)
oldcs.ptr = 0
csum -= coff.decode_half(oldcs)
csum -= coff.decode_half(oldcs)
# checksum header
raw = coff.encoded.read(coff.optheader.headers_size)
flen += coff.optheader.headers_size
coff.sections.each { |s|
coff.encoded.ptr = s.rawaddr
raw << coff.encoded.read(s.rawsize)
flen += s.rawsize
}
# 16-bit ones'-complement-style sum with carry folding
raw.unpack(endianness == :little ? 'v*' : 'n*').each { |s|
csum += s
csum = (csum & 0xffff) + (csum >> 16) if (csum >> 16) > 0
}
csum + flen
end
# Integer-encoding helpers honoring the file's endianness.
def encode_byte(w) Expression[w].encode(:u8, @endianness, (caller if $DEBUG)) end
def encode_half(w) Expression[w].encode(:u16, @endianness, (caller if $DEBUG)) end
def encode_word(w) Expression[w].encode(:u32, @endianness, (caller if $DEBUG)) end
# xword is 64-bit for PE+ (PE32+) images, 32-bit otherwise
def encode_xword(w) Expression[w].encode((@optheader.signature == 'PE+' ? :u64 : :u32), @endianness, (caller if $DEBUG)) end
# adds a new compiler-generated section
# Adds compiler-generated section +s+ to the image.
# Small sections (< 4096 bytes virtual) are merged into an existing compatible
# section to avoid wasting alignment padding; larger ones are appended as-is.
def encode_append_section(s)
if (s.virtsize || s.encoded.virtsize) < 4096
# find section to merge with
# XXX check following sections for hardcoded base address ?
char = s.characteristics.dup
secs = @sections.dup
# do not merge non-discardable in discardable
if not char.delete 'MEM_DISCARDABLE'
secs.delete_if { |ss| ss.characteristics.include? 'MEM_DISCARDABLE' }
end
# do not merge shared w/ non-shared
if char.delete 'MEM_SHARED'
secs.delete_if { |ss| not ss.characteristics.include? 'MEM_SHARED' }
else
secs.delete_if { |ss| ss.characteristics.include? 'MEM_SHARED' }
end
# reject candidates whose size is already frozen to an Integer, or that are
# followed by a section with a hardcoded virtaddr (growing them would shift it)
secs.delete_if { |ss| ss.virtsize.kind_of?(::Integer) or ss.rawsize.kind_of?(::Integer) or secs[secs.index(ss)+1..-1].find { |ss_| ss_.virtaddr.kind_of?(::Integer) } }
# try to find superset of characteristics
if target = secs.find { |ss| (ss.characteristics & char) == char }
target.encoded.align 8
puts "PE: merging #{s.name} in #{target.name} (#{target.encoded.virtsize})" if $DEBUG
s.encoded = target.encoded << s.encoded
else
@sections << s
end
else
@sections << s
end
end
# encodes the export table as a new section, updates directory['export_table']
# Builds the export directory into a new read-only '.edata' section and
# registers its rva label + size in @directory['export_table'].
def encode_exports
exports_data = @export.encode(self)
# the directory size spans the name tables too (forwarder strings live there)
@directory['export_table'] = [label_at(exports_data, 0, 'export_table'), exports_data.virtsize]
sec = Section.new
sec.name = '.edata'
sec.encoded = exports_data
sec.characteristics = %w[MEM_READ]
encode_append_section sec
end
# encodes the import tables as a new section, updates directory['import_table'] and directory['iat']
# Builds the import tables into a new '.idata' section, updates
# @directory['import_table'] and @directory['iat'], and emits thunks in '.plt'.
# Imports whose iat_p was hardcoded (rva Integer) keep their IAT in place and
# are patched later in encode_sections_fixup; the rest go into a '.iat' section.
def encode_imports
idata, iat = ImportDirectory.encode(self, @imports)
@directory['import_table'] = [label_at(idata, 0, 'idata'), idata.virtsize]
s = Section.new
s.name = '.idata'
s.encoded = idata
s.characteristics = %w[MEM_READ MEM_WRITE MEM_DISCARDABLE]
encode_append_section s
# order the iat list by hardcoded iat_p rva so the directory range is coherent
if @imports.first and @imports.first.iat_p.kind_of? Integer
ordiat = @imports.zip(iat).sort_by { |id, it| id.iat_p.kind_of?(Integer) ? id.iat_p : 1<<65 }.map { |id, it| it }
else
ordiat = iat
end
# directory entry = [start label, total size expression first..last]
@directory['iat'] = [label_at(ordiat.first, 0, 'iat'),
Expression[label_at(ordiat.last, ordiat.last.virtsize, 'iat_end'), :-, label_at(ordiat.first, 0)]] if not ordiat.empty?
iat_s = nil
plt = Section.new
plt.name = '.plt'
plt.encoded = EncodedData.new
plt.characteristics = %w[MEM_READ MEM_EXECUTE]
@imports.zip(iat) { |id, it|
# hardcoded iat_p inside an existing section: defer patching to fixup stage
if id.iat_p.kind_of? Integer and s = @sections.find { |s_| s_.virtaddr <= id.iat_p and s_.virtaddr + (s_.virtsize || s_.encoded.virtsize) > id.iat_p }
id.iat = it # will be fixed up after encode_section
else
# XXX should not be mixed (for @directory['iat'][1])
if not iat_s
iat_s = Section.new
iat_s.name = '.iat'
iat_s.encoded = EncodedData.new
iat_s.characteristics = %w[MEM_READ MEM_WRITE]
encode_append_section iat_s
end
iat_s.encoded << it
end
# emit a callable thunk for each import that requested one
id.imports.each { |i|
if i.thunk
arch_encode_thunk(plt.encoded, i)
end
}
}
encode_append_section plt if not plt.encoded.empty?
end
# encodes a thunk to imported function
# Appends to +edata+ an architecture-specific thunk for +import+: a small stub
# labeled import.thunk that jumps through the IAT slot (import.target).
# Only Ia32/x64 is supported; PIC thunks use a shared metasm_intern_geteip
# helper on 32-bit, rip-relative addressing on 64-bit.
def arch_encode_thunk(edata, import)
case @cpu
when Ia32
# assemble a snippet in this file's namespace (so labels resolve)
shellcode = lambda { |c| Shellcode.new(@cpu).share_namespace(self).assemble(c).encoded }
if @cpu.generate_PIC
if @cpu.size == 64
edata << shellcode["#{import.thunk}: jmp [rip+#{import.target}-1f]\n1:"]
return
end
# sections starts with a helper function that returns the address of metasm_intern_geteip in eax (PIC)
if not @sections.find { |s| s.encoded and s.encoded.export['metasm_intern_geteip'] } and edata.empty?
edata << shellcode["metasm_intern_geteip: call 42f\n42:\npop eax\nsub eax, 42b-metasm_intern_geteip\nret"]
end
edata << shellcode["#{import.thunk}: call metasm_intern_geteip\njmp [eax+#{import.target}-metasm_intern_geteip]"]
else
# non-PIC: absolute indirect jump through the IAT entry
edata << shellcode["#{import.thunk}: jmp [#{import.target}]"]
end
else raise EncodeError, 'E: COFF: encode import thunk: unsupported architecture'
end
end
# Encodes the TLS directory and its callback table into a '.tls' section and
# registers the directory in @directory['tls_table'].
def encode_tls
tlsdir, callback_table = @tls.encode(self)
@directory['tls_table'] = [label_at(tlsdir, 0, 'tls_table'), tlsdir.virtsize]
sec = Section.new
sec.name = '.tls'
body = EncodedData.new
body << tlsdir
body << callback_table
sec.encoded = body
sec.characteristics = %w[MEM_READ MEM_WRITE]
encode_append_section sec
end
# encodes relocation tables in a new section .reloc, updates @directory['base_relocation_table']
# Encodes the relocation tables into a '.reloc' section and registers it in
# @directory['base_relocation_table']. An empty table is synthesized when no
# relocations exist so the directory entry is still present.
def encode_relocs
if @relocations.empty?
empty_table = RelocationTable.new
empty_table.base_addr = 0
empty_table.relocs = []
@relocations << empty_table
end
reloc_data = EncodedData.new
@relocations.each { |table| reloc_data << table.encode(self) }
@directory['base_relocation_table'] = [label_at(reloc_data, 0, 'reloc_table'), reloc_data.virtsize]
sec = Section.new
sec.name = '.reloc'
sec.encoded = reloc_data
sec.characteristics = %w[MEM_READ MEM_DISCARDABLE]
encode_append_section sec
end
# creates the @relocations from sections.encoded.reloc
# Scans every section's pending relocations and builds @relocations: one
# RelocationTable per 4096-byte page containing image-base-relative targets.
# A fake binding of all labels to "coff_start + offset" is used to detect which
# relocs resolve to image_base + constant (those need base relocations).
def create_relocation_tables
@relocations = []
# create a fake binding with all exports, to find only-image_base-dependant relocs targets
# not foolproof, but works in standard cases
startaddr = curaddr = label_at(@encoded, 0, 'coff_start')
binding = {}
@sections.each { |s|
binding.update s.encoded.binding(curaddr)
curaddr = Expression[curaddr, :+, s.encoded.virtsize]
}
# for each section.encoded, make as many RelocationTables as needed
@sections.each { |s|
# rt.base_addr temporarily holds the offset from section_start, and is fixed up to rva before '@reloc << rt'
rt = RelocationTable.new
s.encoded.reloc.each { |off, rel|
# check that the relocation looks like "program_start + integer" when bound using the fake binding
# XXX allow :i32 etc
if rel.endianness == @endianness and [:u32, :a32, :u64, :a64].include?(rel.type) and
rel.target.bind(binding).reduce.kind_of?(Expression) and
Expression[rel.target, :-, startaddr].bind(binding).reduce.kind_of?(::Integer)
# winner !
# build relocation
r = RelocationTable::Relocation.new
r.offset = off & 0xfff
r.type = { :u32 => 'HIGHLOW', :u64 => 'DIR64', :a32 => 'HIGHLOW', :a64 => 'DIR64' }[rel.type]
# check if we need to start a new relocation table
# (each table covers one 0x1000-aligned page)
if rt.base_addr and (rt.base_addr & ~0xfff) != (off & ~0xfff)
rt.base_addr = Expression[[label_at(s.encoded, 0, 'sect_start'), :-, startaddr], :+, rt.base_addr]
@relocations << rt
rt = RelocationTable.new
end
# initialize reloc table base address if needed
if not rt.base_addr
rt.base_addr = off & ~0xfff
end
(rt.relocs ||= []) << r
else
puts "W: COFF: Ignoring weird relocation #{rel.inspect} when building relocation tables" if $DEBUG
end
}
# flush the last partially-filled table for this section
if rt and rt.relocs
rt.base_addr = Expression[[label_at(s.encoded, 0, 'sect_start'), :-, startaddr], :+, rt.base_addr]
@relocations << rt
end
}
end
# Encodes the resource tree into a new read-only '.rsrc' section and
# registers it in @directory['resource_table'].
def encode_resource
rsrc = @resource.encode(self)
@directory['resource_table'] = [label_at(rsrc, 0, 'resource_table'), rsrc.virtsize]
sec = Section.new
sec.name = '.rsrc'
sec.encoded = rsrc
sec.characteristics = %w[MEM_READ]
encode_append_section sec
end
# appends the header/optheader/directories/section table to @encoded
# initializes some flags based on the target arg ('exe' / 'dll' / 'kmod' / 'obj')
# Appends the COFF header, optional header, directories and section table to
# @encoded. +target+ selects flag defaults: 'exe' / 'dll' / 'kmod' / 'obj'
# (aliases :bin/:lib/:obj/'sys'/'drv' are normalized first).
def encode_header(target = 'exe')
target = {:bin => 'exe', :lib => 'dll', :obj => 'obj', 'sys' => 'kmod', 'drv' => 'kmod'}.fetch(target, target)
@header.machine ||= case @cpu.shortname
when 'x64'; 'AMD64'
when 'ia32'; 'I386'
end
# setup header flags
tmp = %w[LINE_NUMS_STRIPPED LOCAL_SYMS_STRIPPED DEBUG_STRIPPED] +
case target
when 'exe'; %w[EXECUTABLE_IMAGE]
when 'dll'; %w[EXECUTABLE_IMAGE DLL]
when 'kmod'; %w[EXECUTABLE_IMAGE]
when 'obj'; []
end
tmp << 'x32BIT_MACHINE' # XXX
tmp << 'RELOCS_STRIPPED' # if not @directory['base_relocation_table'] # object relocs
@header.characteristics ||= tmp
@optheader.subsystem ||= case target
when 'exe', 'dll'; 'WINDOWS_GUI'
when 'kmod'; 'NATIVE'
end
# relocatable images may be rebased (ASLR)
@optheader.dll_characts = ['DYNAMIC_BASE'] if @directory['base_relocation_table']
# encode section table, add CONTAINS_* flags from other characteristics flags
s_table = EncodedData.new
@sections.each { |s|
if s.characteristics.kind_of? Array and s.characteristics.include? 'MEM_READ'
if s.characteristics.include? 'MEM_EXECUTE'
s.characteristics |= ['CONTAINS_CODE']
elsif s.encoded
# rawsize 0 means purely virtual (bss-like) data
if s.encoded.rawsize == 0
s.characteristics |= ['CONTAINS_UDATA']
else
s.characteristics |= ['CONTAINS_DATA']
end
end
end
s.rawaddr = nil if s.rawaddr.kind_of?(::Integer) # XXX allow to force rawaddr ?
s_table << s.encode(self)
}
# encode optional header
# reset size fields so set_default_values recomputes them as fixup labels
@optheader.headers_size = nil
@optheader.image_size = nil
@optheader.numrva = nil
opth = @optheader.encode(self)
# encode header
@header.num_sect = nil
@header.size_opthdr = nil
@encoded << @header.encode(self, opth) << opth << s_table
end
# append the section bodies to @encoded, and link the resulting binary
# Appends the section bodies to @encoded, computes every section's raw/virtual
# address, resolves all label fixups, patches in-place IATs, and fills in the
# PE checksum. Must run after encode_header.
def encode_sections_fixup
@encoded.align @optheader.file_align
# headers_size was a label: pin it to the current (post-header) size
if @optheader.headers_size.kind_of?(::String)
@encoded.fixup! @optheader.headers_size => @encoded.virtsize
@optheader.headers_size = @encoded.virtsize
end
baseaddr = @optheader.image_base.kind_of?(::Integer) ? @optheader.image_base : 0x400000
binding = @encoded.binding(baseaddr)
curaddr = baseaddr + @optheader.headers_size
@sections.each { |s|
# align
curaddr = EncodedData.align_size(curaddr, @optheader.sect_align)
if s.rawaddr.kind_of?(::String)
@encoded.fixup! s.rawaddr => @encoded.virtsize
s.rawaddr = @encoded.virtsize
end
# honor a hardcoded virtaddr only if it does not overlap what we emitted
if s.virtaddr.kind_of?(::Integer)
raise "E: COFF: cannot encode section #{s.name}: hardcoded address too short" if curaddr > baseaddr + s.virtaddr
curaddr = baseaddr + s.virtaddr
end
binding.update s.encoded.binding(curaddr)
curaddr += s.virtsize
pre_sz = @encoded.virtsize
# only raw (initialized) bytes hit the file; virtual tail stays implicit
@encoded << s.encoded[0, s.encoded.rawsize]
@encoded.align @optheader.file_align
if s.rawsize.kind_of?(::String)
@encoded.fixup! s.rawsize => (@encoded.virtsize - pre_sz)
s.rawsize = @encoded.virtsize - pre_sz
end
}
# not aligned ? spec says it is, visual studio does not
binding[@optheader.image_size] = curaddr - baseaddr if @optheader.image_size.kind_of?(::String)
# patch the iat where iat_p was defined
# sort to ensure a 0-terminated will not overwrite an entry
# (try to dump notepad.exe, which has a forwarder;)
@imports.find_all { |id| id.iat_p.kind_of? Integer }.sort_by { |id| id.iat_p }.each { |id|
s = sect_at_rva(id.iat_p)
@encoded[s.rawaddr + s.encoded.ptr, id.iat.virtsize] = id.iat
binding.update id.iat.binding(baseaddr + id.iat_p)
} if imports
@encoded.fill
@encoded.fixup! binding
# the checksum depends on all other bytes: compute it last, and only when
# it is the sole remaining unresolved relocation
if @optheader.checksum.kind_of?(::String) and @encoded.reloc.length == 1
# won't work if there are other unresolved relocs
checksum = self.class.checksum(@encoded.data, @endianness)
@encoded.fixup @optheader.checksum => checksum
@optheader.checksum = checksum
end
end
# encode a COFF file, building export/import/reloc tables if needed
# creates the base relocation tables (need for references to IAT not known before)
# Encodes the full COFF file and returns the raw binary String.
# Builds export/import/resource/tls tables if the corresponding data exists,
# creates base relocation tables when +want_relocs+ (default: only for dlls),
# then emits headers and section bodies.
def encode(target = 'exe', want_relocs = (target != 'exe' and target != :bin))
@encoded = EncodedData.new
label_at(@encoded, 0, 'coff_start')
# resolve references to known dll exports before building the tables
autoimport
encode_exports if export
encode_imports if imports
encode_resource if resource
encode_tls if tls
create_relocation_tables if want_relocs
encode_relocs if relocations
encode_header(target)
encode_sections_fixup
@encoded.data
end
# Initializes the assembler source state.
# @cursource starts as a magic placeholder object whose << first emits a
# '.text' parser directive (creating the default section) and then forwards
# the appended token to the real cursource; this lets users either rely on a
# default .text or open their own section first.
def parse_init
# ahem...
# a fake object, which when appended makes us parse '.text', which creates a real default section
# forwards to it this first appendage.
# allows the user to specify its own section if he wishes, and to use .text if he doesn't
if not defined? @cursource or not @cursource
@cursource = ::Object.new
class << @cursource
attr_accessor :coff
def <<(*a)
t = Preprocessor::Token.new(nil)
t.raw = '.text'
coff.parse_parser_instruction t
coff.cursource.send(:<<, *a)
end
end
@cursource.coff = self
end
@source ||= {}
super()
end
# handles compiler meta-instructions
#
# syntax:
# .section "<section name>" <perm list> <base>
# section name is a string (may be quoted)
# perms are in 'r' 'w' 'x' 'shared' 'discard', may be concatenated (in this order), may be prefixed by 'no' to remove the attribute for an existing section
# base is the token 'base', the token '=' and an immediate expression
# default sections:
# .text = .section '.text' rx
# .data = .section '.data' rw
# .rodata = .section '.rodata' r
# .bss = .section '.bss' rw
# .entrypoint | .entrypoint <label>
# defines the label as the program entrypoint
# without argument, creates a label used as entrypoint
# .libname "<name>"
# defines the string to be used as exported library name (should be the same as the file name, may omit extension)
# .export ["<exported_name>"] [<ordinal>] [<label_name>]
# exports the specified label with the specified name (label_name defaults to exported_name)
# if exported_name is an unquoted integer, the export is by ordinal. XXX if the ordinal starts with '0', the integer is interpreted as octal
# .import "<libname>" "<import_name|ordinal>" [<thunk_name>] [<label_name>]
# imports a symbol from a library
# if the thunk name is specified and not 'nil', the compiler will generate a thunk that can be called (in ia32, 'call thunk' == 'call [import_name]')
# the thunk is position-independent, and should be used instead of the indirect call form, for imported functions
# label_name is the label to attribute to the location that will receive the address of the imported symbol, defaults to import_name (iat_<import_name> if thunk == iname)
# .image_base <base>
# specifies the COFF prefered load address, base is an immediate expression
#
# Handles COFF-specific assembler directives (see the syntax comment above):
# .text/.data/.rodata/.bss, .section, .libname, .export, .import,
# .entrypoint, .image_base, .subsystem. Unknown directives go to super.
def parse_parser_instruction(instr)
# reads one (possibly quoted) string token from the lexer or raises
readstr = lambda {
@lexer.skip_space
raise instr, 'string expected' if not t = @lexer.readtok or (t.type != :string and t.type != :quoted)
t.value || t.raw
}
# asserts nothing but end-of-line remains after the directive arguments
check_eol = lambda {
@lexer.skip_space
raise instr, 'eol expected' if t = @lexer.nexttok and t.type != :eol
}
case instr.raw.downcase
when '.text', '.data', '.rodata', '.bss'
# well-known section shorthands: create the section on first use
sname = instr.raw.downcase
if not @sections.find { |s| s.name == sname }
s = Section.new
s.name = sname
s.encoded = EncodedData.new
s.characteristics = case sname
when '.text'; %w[MEM_READ MEM_EXECUTE]
when '.data', '.bss'; %w[MEM_READ MEM_WRITE]
when '.rodata'; %w[MEM_READ]
end
@sections << s
end
@cursource = @source[sname] ||= []
check_eol[] if instr.backtrace # special case for magic @cursource
when '.section'
# .section <section name|"section name"> [(no)r w x shared discard] [base=<expr>]
sname = readstr[]
if not s = @sections.find { |s_| s_.name == sname }
s = Section.new
s.name = sname
s.encoded = EncodedData.new
s.characteristics = []
@sections << s
end
loop do
@lexer.skip_space
break if not tok = @lexer.nexttok or tok.type != :string
case @lexer.readtok.raw.downcase
when /^(no)?(r)?(w)?(x)?(shared)?(discard)?$/
# permission flag group; leading 'no' removes instead of adds
ar = []
ar << 'MEM_READ' if $2
ar << 'MEM_WRITE' if $3
ar << 'MEM_EXECUTE' if $4
ar << 'MEM_SHARED' if $5
ar << 'MEM_DISCARDABLE' if $6
if $1; s.characteristics -= ar
else s.characteristics |= ar
end
when 'base'
# hardcoded base: stored relative to image_base (picked here if unset)
@lexer.skip_space
@lexer.unreadtok tok if not tok = @lexer.readtok or tok.type != :punct or tok.raw != '='
raise instr, 'invalid base' if not s.virtaddr = Expression.parse(@lexer).reduce or not s.virtaddr.kind_of?(::Integer)
if not @optheader.image_base
@optheader.image_base = (s.virtaddr-0x80) & 0xfff00000
puts "Warning: no image_base specified, using #{Expression[@optheader.image_base]}" if $VERBOSE
end
s.virtaddr -= @optheader.image_base
else raise instr, 'unknown parameter'
end
end
@cursource = @source[sname] ||= []
check_eol[]
when '.libname'
# export directory library name
# .libname <libname|"libname">
@export ||= ExportDirectory.new
@export.libname = readstr[]
check_eol[]
when '.export'
# .export <export name|ordinal|"export name"> [ordinal] [label to export if different]
@lexer.skip_space
raise instr, 'string expected' if not tok = @lexer.readtok or (tok.type != :string and tok.type != :quoted)
exportname = tok.value || tok.raw
# unquoted integer => export by ordinal
if tok.type == :string and (?0..?9).include? tok.raw[0]
exportname = Integer(exportname) rescue raise(tok, "bad ordinal value, try quotes #{' or rm leading 0' if exportname[0] == ?0}")
end
@lexer.skip_space
tok = @lexer.readtok
# optional explicit ordinal for a named export
if tok and tok.type == :string and (?0..?9).include? tok.raw[0]
(eord = Integer(tok.raw)) rescue @lexer.unreadtok(tok)
else @lexer.unreadtok(tok)
end
@lexer.skip_space
tok = @lexer.readtok
if tok and tok.type == :string
exportlabel = tok.raw
else
@lexer.unreadtok tok
end
@export ||= ExportDirectory.new
@export.exports ||= []
e = ExportDirectory::Export.new
if exportname.kind_of? Integer
e.ordinal = exportname
else
e.name = exportname
e.ordinal = eord if eord
end
e.target = exportlabel || exportname
@export.exports << e
check_eol[]
when '.import'
# .import <libname|"libname"> <imported sym|"imported sym"> [label of plt thunk|nil] [label of iat element if != symname]
libname = readstr[]
i = ImportDirectory::Import.new
@lexer.skip_space
raise instr, 'string expected' if not tok = @lexer.readtok or (tok.type != :string and tok.type != :quoted)
# unquoted integer => import by ordinal
if tok.type == :string and (?0..?9).include? tok.raw[0]
i.ordinal = Integer(tok.raw)
else
i.name = tok.value || tok.raw
end
@lexer.skip_space
if tok = @lexer.readtok and tok.type == :string
i.thunk = tok.raw if tok.raw != 'nil'
@lexer.skip_space
tok = @lexer.readtok
end
if tok and tok.type == :string
i.target = tok.raw
else
# default iat label: 'iat_<name>' when the thunk would shadow the name
i.target = ((i.thunk == i.name) ? ('iat_' + i.name) : (i.name ? i.name : (i.thunk ? 'iat_' + i.thunk : raise(instr, 'need iat label'))))
@lexer.unreadtok tok
end
raise tok, 'import target exists' if i.target != new_label(i.target)
@imports ||= []
if not id = @imports.find { |id_| id_.libname == libname }
id = ImportDirectory.new
id.libname = libname
id.imports = []
@imports << id
end
id.imports << i
check_eol[]
when '.entrypoint'
# ".entrypoint <somelabel/expression>" or ".entrypoint" (here)
@lexer.skip_space
if tok = @lexer.nexttok and tok.type == :string
raise instr, 'syntax error' if not entrypoint = Expression.parse(@lexer)
else
# bare directive: drop an anonymous label at the current source position
entrypoint = new_label('entrypoint')
@cursource << Label.new(entrypoint, instr.backtrace.dup)
end
@optheader.entrypoint = entrypoint
check_eol[]
when '.image_base'
raise instr if not base = Expression.parse(@lexer) or !(base = base.reduce).kind_of?(::Integer)
@optheader.image_base = base
check_eol[]
when '.subsystem'
@lexer.skip_space
raise instr if not tok = @lexer.readtok
@optheader.subsystem = tok.raw
check_eol[]
else super(instr)
end
end
# Assembles all pending parsed source lines into their target sections.
# Any arguments are forwarded to parse() first.
def assemble(*args)
parse(*args) unless args.empty?
@source.each { |secname, srclines|
sec = @sections.find { |candidate| candidate.name == secname }
raise "no section named #{secname} ?" unless sec
sec.encoded << assemble_sequence(srclines, @cpu)
srclines.clear
}
end
# defines __PE__
# Predefines PE-specific preprocessor macros on +l+:
# __PE__ always, __MS_X86_64_ABI__ only when the target cpu is x64.
def tune_prepro(l)
l.define_weak('__PE__', 1)
if @cpu and @cpu.shortname == 'x64'
l.define_weak('__MS_X86_64_ABI__')
end
end
# COFF-specific C parser tuning: 64-bit Windows uses the LLP64 data model
# (long stays 32-bit), so switch the parser into that mode.
def tune_cparser(cp)
super(cp)
if @cpu.size == 64
cp.llp64
end
end
# honors C attributes: export, export_as(foo), import_from(kernel32), entrypoint
# import by ordinal: extern __stdcall int anyname(int) __attribute__((import_from(ws2_32:28)));
# can alias imports with int mygpaddr_alias() attr(import_from(kernel32:GetProcAddr))
# Honors C __attribute__s on toplevel symbols:
#   export / export_as(foo)   -> add an entry to the export directory
#                                (an integer name/alias exports by ordinal)
#   import / import_from(lib) -> add an entry to the import directory;
#                                'lib:ord_or_name' imports by ordinal/alias
#   entrypoint                -> set the optional header entrypoint
# Function imports get a thunk named like the function, targeting iat_<name>.
# Raises if an imported symbol's library cannot be determined.
def read_c_attrs(cp)
cp.toplevel.symbol.each_value { |v|
next if not v.kind_of? C::Variable
if v.has_attribute 'export' or ea = v.has_attribute_var('export_as')
@export ||= ExportDirectory.new
@export.exports ||= []
e = ExportDirectory::Export.new
begin
# numeric export name/alias means export by ordinal
e.ordinal = Integer(ea || v.name)
rescue ArgumentError
e.name = ea || v.name
end
e.target = v.name
@export.exports << e
end
if v.has_attribute('import') or ln = v.has_attribute_var('import_from')
# no explicit lib: look the symbol up in the known windows dll exports
ln ||= WindowsExports::EXPORT[v.name]
# fail with a clear message instead of NoMethodError on nil below
raise "unknown library for imported #{v.name}" if not ln
i = ImportDirectory::Import.new
if ln.include? ':'
# 'lib:ordinal' or 'lib:name' form
ln, name = ln.split(':')
begin
i.ordinal = Integer(name)
rescue ArgumentError
i.name = name
end
else
i.name = v.name
end
if v.type.kind_of? C::Function
# functions are called through a generated thunk; the iat slot gets its own label
i.thunk = v.name
i.target = 'iat_'+i.thunk
else
i.target = v.name
end
@imports ||= []
if not id = @imports.find { |id_| id_.libname == ln }
id = ImportDirectory.new
id.libname = ln
id.imports = []
@imports << id
end
id.imports << i
end
if v.has_attribute 'entrypoint'
@optheader.entrypoint = v.name
end
}
end
# try to resolve automatically COFF import tables from self.sections.encoded.relocations
# and WindowsExports::EXPORT
# if the relocation target is '<symbolname>' or 'iat_<symbolname>, link to the IAT address, if it is '<symbolname> + <expr>',
# link to a thunk (plt-like)
# Automatically resolves unresolved relocation targets against the known
# windows dll export list (WindowsExports::EXPORT):
#  'sym' or 'iat_sym'  -> bind to the IAT slot of sym
#  'sym - other_label' -> treat sym as a thunk (plt-like call target)
# Symbols already exported by one of our own sections are excluded.
def autoimport
WindowsExports rescue return # autorequire
autoexports = WindowsExports::EXPORT.dup
@sections.each { |s|
next if not s.encoded
s.encoded.export.keys.each { |e| autoexports.delete e }
}
@sections.each { |s|
next if not s.encoded
s.encoded.reloc.each_value { |r|
# '+ sym' -> direct/iat reference; 'sym - label' -> thunk reference
if r.target.op == :+ and not r.target.lexpr and r.target.rexpr.kind_of?(::String)
sym = target = r.target.rexpr
sym = sym[4..-1] if sym[0, 4] == 'iat_'
elsif r.target.op == :- and r.target.rexpr.kind_of?(::String) and r.target.lexpr.kind_of?(::String)
sym = thunk = r.target.lexpr
end
next if not dll = autoexports[sym]
@imports ||= []
next if @imports.find { |id| id.imports.find { |ii| ii.name == sym } }
# reuse an existing directory for the dll (name matched case-insensitively)
if not id = @imports.find { |id_| id_.libname =~ /^#{dll}(\.dll)?$/i }
id = ImportDirectory.new
id.libname = dll
id.imports = []
@imports << id
end
if not i = id.imports.find { |i_| i_.name == sym }
i = ImportDirectory::Import.new
i.name = sym
id.imports << i
end
# don't silently overwrite a conflicting earlier resolution
if (target and i.target and (i.target != target or i.thunk == target)) or
(thunk and i.thunk and (i.thunk != thunk or i.target == thunk))
puts "autoimport: conflict for #{target} #{thunk} #{i.inspect}" if $VERBOSE
else
i.target ||= new_label(target || 'iat_' + thunk)
i.thunk ||= thunk if thunk
end
}
}
end
end
end
coffencode: raise on unknown imported C var
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/exe_format/coff'
require 'metasm/encode'
module Metasm
class COFF
class Header
# finds good default values for header
# num_sect/size_opthdr default to the actual encoded sizes; machine falls
# back to 'UNKNOWN' (callers usually set it from the cpu in encode_header)
def set_default_values(coff, opth)
@machine ||= 'UNKNOWN'
@num_sect ||= coff.sections.length
@time ||= Time.now.to_i
@size_opthdr ||= opth.virtsize
super(coff)
end
end
class OptionalHeader
# encodes an Optional header and the directories
# each directory entry is [rva, size]; label rvas are rewritten relative to
# the image start, absent directories encode as [0, 0]
def encode(coff)
opth = super(coff)
DIRECTORIES[0, @numrva].each { |d|
if d = coff.directory[d]
d = d.dup
d[0] = Expression[d[0], :-, coff.label_at(coff.encoded, 0)] if d[0].kind_of?(::String)
else
d = [0, 0]
end
opth << coff.encode_word(d[0]) << coff.encode_word(d[1])
}
opth
end
# find good default values for optheader members, based on coff.sections
# size fields that cannot be known yet (image_size, headers_size, checksum)
# default to fresh labels, fixed up later in encode_sections_fixup
def set_default_values(coff)
@signature ||= 'PE'
@link_ver_maj ||= 1
@link_ver_min ||= 0
@sect_align ||= 0x1000
align = lambda { |sz| EncodedData.align_size(sz, @sect_align) }
# code/data/udata sizes: sum of section-aligned virtual sizes by CONTAINS_* flag
@code_size ||= coff.sections.find_all { |s| s.characteristics.include? 'CONTAINS_CODE' }.inject(0) { |sum, s| sum + align[s.virtsize] }
@data_size ||= coff.sections.find_all { |s| s.characteristics.include? 'CONTAINS_DATA' }.inject(0) { |sum, s| sum + align[s.virtsize] }
@udata_size ||= coff.sections.find_all { |s| s.characteristics.include? 'CONTAINS_UDATA' }.inject(0) { |sum, s| sum + align[s.virtsize] }
@entrypoint = Expression[@entrypoint, :-, coff.label_at(coff.encoded, 0)] if entrypoint and not @entrypoint.kind_of?(::Integer)
tmp = coff.sections.find { |s| s.characteristics.include? 'CONTAINS_CODE' }
@base_of_code ||= (tmp ? Expression[coff.label_at(tmp.encoded, 0), :-, coff.label_at(coff.encoded, 0)] : 0)
tmp = coff.sections.find { |s| s.characteristics.include? 'CONTAINS_DATA' }
@base_of_data ||= (tmp ? Expression[coff.label_at(tmp.encoded, 0), :-, coff.label_at(coff.encoded, 0)] : 0)
@image_base ||= coff.label_at(coff.encoded, 0)
@file_align ||= 0x200
@os_ver_maj ||= 4
@subsys_maj ||= 4
@image_size ||= coff.new_label('image_size')
@headers_size ||= coff.new_label('headers_size')
@checksum ||= coff.new_label('checksum')
@subsystem ||= 'WINDOWS_GUI'
@stack_reserve||= 0x100000
@stack_commit ||= 0x1000
@heap_reserve ||= 0x100000
@heap_commit ||= 0x1000
@numrva ||= DIRECTORIES.length
super(coff)
end
end
class Section
# find good default values for section header members, defines rawaddr/rawsize as new_label for later fixup
# virtaddr defaults to an expression (section start - image start), resolved
# at fixup time; rawaddr/rawsize labels are pinned in encode_sections_fixup
def set_default_values(coff)
@name ||= ''
@virtsize ||= @encoded.virtsize
@virtaddr ||= Expression[coff.label_at(@encoded, 0, 'sect_start'), :-, coff.label_at(coff.encoded, 0)]
@rawsize ||= coff.new_label('sect_rawsize')
@rawaddr ||= coff.new_label('sect_rawaddr')
super(coff)
end
end
class ExportDirectory
# encodes an export directory
# layout: directory struct, address table, name-pointer table, ordinal table,
# libname string, then the name/forwarder string table
def encode(coff)
edata = {}
%w[edata addrtable namptable ord_table libname nametable].each { |name|
edata[name] = EncodedData.new
}
label = lambda { |n| coff.label_at(edata[n], 0, n) }
rva = lambda { |n| Expression[label[n], :-, coff.label_at(coff.encoded, 0)] }
rva_end = lambda { |n| Expression[[label[n], :-, coff.label_at(coff.encoded, 0)], :+, edata[n].virtsize] }
# ordinal base: smallest number > 1 to honor ordinals, minimize gaps
olist = @exports.map { |e| e.ordinal }.compact
# start with lowest ordinal, substract all exports unused to fill ordinal sequence gaps
omin = olist.min.to_i
gaps = olist.empty? ? 0 : olist.max+1 - olist.min - olist.length
noord = @exports.length - olist.length
@ordinal_base ||= [omin - (noord - gaps), 1].max
@libname_p = rva['libname']
@num_exports = [@exports.length, @exports.map { |e| e.ordinal }.compact.max.to_i - @ordinal_base].max
@num_names = @exports.find_all { |e| e.name }.length
@func_p = rva['addrtable']
@names_p = rva['namptable']
@ord_p = rva['ord_table']
edata['edata'] << super(coff)
edata['libname'] << @libname << 0
# place ordinal-pinned exports at their exact slot, fill around with named ones
elist = @exports.find_all { |e| e.name and not e.ordinal }.sort_by { |e| e.name }
@exports.find_all { |e| e.ordinal }.sort_by { |e| e.ordinal }.each { |e| elist.insert(e.ordinal-@ordinal_base, e) }
elist.each { |e|
if not e
# export by ordinal with gaps
# XXX test this value with the windows loader
edata['addrtable'] << coff.encode_word(0xffff_ffff)
next
end
if e.forwarder_lib
# forwarded export: address table points into the name table,
# holding "lib.name" or "lib.#ordinal"
edata['addrtable'] << coff.encode_word(rva_end['nametable'])
edata['nametable'] << e.forwarder_lib << ?. <<
if not e.forwarder_name
"##{e.forwarder_ordinal}"
else
e.forwarder_name
end << 0
else
edata['addrtable'] << coff.encode_word(Expression[e.target, :-, coff.label_at(coff.encoded, 0)])
end
if e.name
# ordinal table entry = index into addrtable (biased by ordinal_base at runtime)
edata['ord_table'] << coff.encode_half(edata['addrtable'].virtsize/4 - 1)
edata['namptable'] << coff.encode_word(rva_end['nametable'])
edata['nametable'] << e.name << 0
end
}
# sorted by alignment directives
%w[edata addrtable namptable ord_table libname nametable].inject(EncodedData.new) { |ed, name| ed << edata[name] }
end
def set_default_values(coff)
@timestamp ||= Time.now.to_i
@libname ||= 'metalib'
@ordinal_base ||= 1
super(coff)
end
end
class ImportDirectory
# encodes all import directories + iat
# returns [idata (directories + ILTs + name strings, null-terminated), iat list]
# the iat is kept as a separate EncodedData per directory so the caller can
# place each one independently (hardcoded iat_p vs generated '.iat' section)
def self.encode(coff, ary)
edata = { 'iat' => [] }
%w[idata ilt nametable].each { |name| edata[name] = EncodedData.new }
ary.each { |i| i.encode(coff, edata) }
# five zero words = the terminating null import directory entry
it = edata['idata'] <<
coff.encode_word(0) <<
coff.encode_word(0) <<
coff.encode_word(0) <<
coff.encode_word(0) <<
coff.encode_word(0) <<
edata['ilt'] <<
edata['nametable']
iat = edata['iat'] # why not fragmented ?
[it, iat]
end
# encodes an import directory + iat + names in the edata hash received as arg
def encode(coff, edata)
edata['iat'] << EncodedData.new
# edata['ilt'] = edata['iat']
label = lambda { |n| coff.label_at(edata[n], 0, n) }
rva = lambda { |n| Expression[label[n], :-, coff.label_at(coff.encoded, 0)] }
rva_end = lambda { |n| Expression[[label[n], :-, coff.label_at(coff.encoded, 0)], :+, edata[n].virtsize] }
@libname_p = rva_end['nametable']
@ilt_p = rva_end['ilt']
@iat_p ||= Expression[coff.label_at(edata['iat'].last, 0, 'iat'), :-, coff.label_at(coff.encoded, 0)]
edata['idata'] << super(coff)
edata['nametable'] << @libname << 0
# high bit of an ILT/IAT entry marks import-by-ordinal (bit 63 on PE+)
ord_mask = 1 << (coff.optheader.signature == 'PE+' ? 63 : 31)
@imports.each { |i|
# export the target label at the iat slot so code can reference it
edata['iat'].last.add_export i.target, edata['iat'].last.virtsize if i.target
if i.ordinal
ptr = coff.encode_xword(Expression[i.ordinal, :|, ord_mask])
else
edata['nametable'].align 2
ptr = coff.encode_xword(rva_end['nametable'])
edata['nametable'] << coff.encode_half(i.hint || 0) << i.name << 0
end
edata['ilt'] << ptr
edata['iat'].last << ptr
}
# null terminators for this directory's ILT and IAT
edata['ilt'] << coff.encode_xword(0)
edata['iat'].last << coff.encode_xword(0)
end
end
class TLSDirectory
# Encodes the TLS directory; returns [directory struct, callback table].
# The callback table is a null-terminated list of pointer-sized entries,
# labeled 'callback_p' so the directory can reference it.
def encode(coff)
cblist = EncodedData.new
@callback_p = coff.label_at(cblist, 0, 'callback_p')
@callbacks.to_a.each { |cb|
cblist << coff.encode_xword(cb)
}
# null terminator
cblist << coff.encode_xword(0)
dir = super(coff)
[dir, cblist]
end
def set_default_values(coff)
@start_va ||= 0
@end_va ||= @start_va
super(coff)
end
end
class RelocationTable
# encodes a COFF relocation table
# block size = 8 bytes of header + 2 bytes per relocation entry
def encode(coff)
rel = super(coff) << coff.encode_word(8 + 2*@relocs.length)
@relocs.each { |r| rel << r.encode(coff) }
rel
end
# NOTE(review): every sibling class here defines 'set_default_values' (invoked
# by the generic struct encode); this one is named 'setup_default_values' —
# verify it is actually called, otherwise a String base_addr is never
# converted to an rva and the padding reloc is never added.
def setup_default_values(coff)
# @base_addr is an rva
@base_addr = Expression[@base_addr, :-, coff.label_at(coff.encoded, 0)] if @base_addr.kind_of?(::String)
# align relocation table size
# pad with a type-0 (ABSOLUTE) entry to keep the block word-count even
if @relocs.length % 2 != 0
r = Relocation.new
r.type = 0
r.offset = 0
@relocs << r
end
super(coff)
end
end
class ResourceDirectory
# compiles ressource directories
# Recursive: the top-level call (edata == nil) allocates the four subtables
# (table / names / dataentries / data), recurses over the tree, and
# concatenates them in that fixed order.
def encode(coff, edata = nil)
if not edata
# init recursion
edata = {}
subtables = %w[table names dataentries data]
subtables.each { |n| edata[n] = EncodedData.new }
encode(coff, edata)
return subtables.inject(EncodedData.new) { |sum, n| sum << edata[n] }
end
label = lambda { |n| coff.label_at(edata[n], 0, n) }
# data 'rva' are real rvas (from start of COFF)
rva_end = lambda { |n| Expression[[label[n], :-, coff.label_at(coff.encoded, 0)], :+, edata[n].virtsize] }
# names and table 'rva' are relative to the beginning of the resource directory
off_end = lambda { |n| Expression[[label[n], :-, coff.label_at(edata['table'], 0)], :+, edata[n].virtsize] }
# build name_w if needed
# (UTF-16LE name built naively from the byte string — ascii only)
@entries.each { |e| e.name_w = e.name.unpack('C*').pack('v*') if e.name and not e.name_w }
# fixup forward references to us, as subdir
edata['table'].fixup @curoff_label => edata['table'].virtsize if defined? @curoff_label
@nr_names = @entries.find_all { |e| e.name_w }.length
@nr_id = @entries.find_all { |e| e.id }.length
edata['table'] << super(coff)
# encode entries, sorted by names nocase, then id
@entries.sort_by { |e| e.name_w ? [0, e.name_w.downcase] : [1, e.id] }.each { |e|
if e.name_w
# high bit set = name entry (offset points into the names table)
edata['table'] << coff.encode_word(Expression[off_end['names'], :|, 1 << 31])
edata['names'] << coff.encode_half(e.name_w.length/2) << e.name_w
else
edata['table'] << coff.encode_word(e.id)
end
if e.subdir
# forward reference: the subdir fixes this label up when it encodes itself
e.subdir.curoff_label = coff.new_label('rsrc_curoff')
edata['table'] << coff.encode_word(Expression[e.subdir.curoff_label, :|, 1 << 31])
else # data entry
edata['table'] << coff.encode_word(off_end['dataentries'])
edata['dataentries'] <<
coff.encode_word(rva_end['data']) <<
coff.encode_word(e.data.length) <<
coff.encode_word(e.codepage || 0) <<
coff.encode_word(e.reserved || 0)
edata['data'] << e.data
end
}
# recurse
@entries.find_all { |e| e.subdir }.each { |e| e.subdir.encode(coff, edata) }
end
end
# computes the checksum for a given COFF file
# may not work with overlapping sections
# Computes the PE image checksum for the raw file bytes +str+.
# 16-bit folded sum over header and section raw bytes (stored checksum field
# subtracted out first) plus the total byte count.
# NOTE(review): overlay data past the last section is not summed — confirm
# against the reference CheckSumMappedFile behaviour.
def self.checksum(str, endianness = :little)
coff = load str
coff.endianness = endianness
coff.decode_header
coff.encoded.ptr = 0
flen = 0
csum = 0
# negate old checksum
# (re-encode the stored u32, subtract its two u16 halves)
oldcs = coff.encode_word(coff.optheader.checksum)
oldcs.ptr = 0
csum -= coff.decode_half(oldcs)
csum -= coff.decode_half(oldcs)
# checksum header
raw = coff.encoded.read(coff.optheader.headers_size)
flen += coff.optheader.headers_size
coff.sections.each { |s|
coff.encoded.ptr = s.rawaddr
raw << coff.encoded.read(s.rawsize)
flen += s.rawsize
}
# sum as u16s, folding the carry back in
raw.unpack(endianness == :little ? 'v*' : 'n*').each { |s|
csum += s
csum = (csum & 0xffff) + (csum >> 16) if (csum >> 16) > 0
}
csum + flen
end
# Primitive encoders: emit +w+ as a relocatable 8/16/32-bit value in the
# file's endianness; (caller if $DEBUG) records a backtrace for bad fixups.
def encode_byte(w) Expression[w].encode(:u8, @endianness, (caller if $DEBUG)) end
def encode_half(w) Expression[w].encode(:u16, @endianness, (caller if $DEBUG)) end
def encode_word(w) Expression[w].encode(:u32, @endianness, (caller if $DEBUG)) end
# xword is pointer-sized: u64 for PE+, u32 otherwise
def encode_xword(w) Expression[w].encode((@optheader.signature == 'PE+' ? :u64 : :u32), @endianness, (caller if $DEBUG)) end
# adds a new compiler-generated section
#
# Small sections (< 4096 bytes) are merged into an existing compatible
# section to avoid wasting a full page of alignment padding; larger
# sections (or those with no compatible host) are appended as-is.
def encode_append_section(s)
  if (s.virtsize || s.encoded.virtsize) < 4096
    # find section to merge with
    # XXX check following sections for hardcoded base address ?
    char = s.characteristics.dup
    secs = @sections.dup
    # do not merge non-discardable in discardable
    if not char.delete 'MEM_DISCARDABLE'
      secs.delete_if { |ss| ss.characteristics.include? 'MEM_DISCARDABLE' }
    end
    # do not merge shared w/ non-shared
    if char.delete 'MEM_SHARED'
      secs.delete_if { |ss| not ss.characteristics.include? 'MEM_SHARED' }
    else
      secs.delete_if { |ss| ss.characteristics.include? 'MEM_SHARED' }
    end
    # reject candidates whose size is already fixed, or that are followed
    # by a section with a hardcoded virtual address (growing them would shift it)
    secs.delete_if { |ss| ss.virtsize.kind_of?(::Integer) or ss.rawsize.kind_of?(::Integer) or secs[secs.index(ss)+1..-1].find { |ss_| ss_.virtaddr.kind_of?(::Integer) } }
    # try to find superset of characteristics
    if target = secs.find { |ss| (ss.characteristics & char) == char }
      target.encoded.align 8
      puts "PE: merging #{s.name} in #{target.name} (#{target.encoded.virtsize})" if $DEBUG
      s.encoded = target.encoded << s.encoded
    else
      @sections << s
    end
  else
    @sections << s
  end
end
# Builds the export directory into a new '.edata' section and registers
# its RVA/size pair in @directory['export_table'].
def encode_exports
  table = @export.encode self
  # the directory entry must cover the full encoded data, name tables
  # included, since forwarders resolve through the name table
  @directory['export_table'] = [label_at(table, 0, 'export_table'), table.virtsize]
  sec = Section.new
  sec.name = '.edata'
  sec.characteristics = %w[MEM_READ]
  sec.encoded = table
  encode_append_section sec
end
# encodes the import tables as a new section, updates directory['import_table'] and directory['iat']
#
# Each import descriptor's IAT chunk is either patched into an existing
# section (when the descriptor carries a hardcoded iat_p rva) or emitted
# into a generated '.iat' section. Import thunks go into a '.plt' section.
def encode_imports
  idata, iat = ImportDirectory.encode(self, @imports)
  @directory['import_table'] = [label_at(idata, 0, 'idata'), idata.virtsize]
  s = Section.new
  s.name = '.idata'
  s.encoded = idata
  s.characteristics = %w[MEM_READ MEM_WRITE MEM_DISCARDABLE]
  encode_append_section s
  # the iat directory entry must describe a contiguous ascending range:
  # sort the per-descriptor chunks by their hardcoded rva when one exists
  # (descriptors without a fixed rva sort last via the huge sentinel)
  if @imports.first and @imports.first.iat_p.kind_of? Integer
    ordiat = @imports.zip(iat).sort_by { |id, it| id.iat_p.kind_of?(Integer) ? id.iat_p : 1<<65 }.map { |id, it| it }
  else
    ordiat = iat
  end
  @directory['iat'] = [label_at(ordiat.first, 0, 'iat'),
    Expression[label_at(ordiat.last, ordiat.last.virtsize, 'iat_end'), :-, label_at(ordiat.first, 0)]] if not ordiat.empty?
  iat_s = nil   # generated '.iat' section, created lazily on first need
  plt = Section.new
  plt.name = '.plt'
  plt.encoded = EncodedData.new
  plt.characteristics = %w[MEM_READ MEM_EXECUTE]
  @imports.zip(iat) { |id, it|
    # hardcoded iat_p pointing into an existing section: defer the patch
    if id.iat_p.kind_of? Integer and s = @sections.find { |s_| s_.virtaddr <= id.iat_p and s_.virtaddr + (s_.virtsize || s_.encoded.virtsize) > id.iat_p }
      id.iat = it # will be fixed up after encode_section
    else
      # XXX should not be mixed (for @directory['iat'][1])
      if not iat_s
        iat_s = Section.new
        iat_s.name = '.iat'
        iat_s.encoded = EncodedData.new
        iat_s.characteristics = %w[MEM_READ MEM_WRITE]
        encode_append_section iat_s
      end
      iat_s.encoded << it
    end
    # emit a callable thunk for every import that requested one
    id.imports.each { |i|
      if i.thunk
        arch_encode_thunk(plt.encoded, i)
      end
    }
  }
  encode_append_section plt if not plt.encoded.empty?
end
# encodes a thunk to imported function
#
# Appends, to edata, a small assembly stub labeled import.thunk that jumps
# through the IAT slot (import.target). Only implemented for Ia32-family
# CPUs; raises EncodeError otherwise.
def arch_encode_thunk(edata, import)
  case @cpu
  when Ia32
    # assembles a snippet sharing this file's label namespace
    shellcode = lambda { |c| Shellcode.new(@cpu).share_namespace(self).assemble(c).encoded }
    if @cpu.generate_PIC
      if @cpu.size == 64
        # x64: rip-relative addressing is natively position-independent
        edata << shellcode["#{import.thunk}: jmp [rip+#{import.target}-1f]\n1:"]
        return
      end
      # sections starts with a helper function that returns the address of metasm_intern_geteip in eax (PIC)
      if not @sections.find { |s| s.encoded and s.encoded.export['metasm_intern_geteip'] } and edata.empty?
        edata << shellcode["metasm_intern_geteip: call 42f\n42:\npop eax\nsub eax, 42b-metasm_intern_geteip\nret"]
      end
      # PIC thunk: compute our own address, then jump through the IAT slot
      edata << shellcode["#{import.thunk}: call metasm_intern_geteip\njmp [eax+#{import.target}-metasm_intern_geteip]"]
    else
      # non-PIC: absolute indirect jump through the IAT slot
      edata << shellcode["#{import.thunk}: jmp [#{import.target}]"]
    end
  else raise EncodeError, 'E: COFF: encode import thunk: unsupported architecture'
  end
end
# Encodes the TLS directory and its callback table into a new '.tls'
# section and registers the directory in @directory['tls_table'].
def encode_tls
  tlsdir, callbacks = @tls.encode(self)
  @directory['tls_table'] = [label_at(tlsdir, 0, 'tls_table'), tlsdir.virtsize]
  sec = Section.new
  sec.name = '.tls'
  sec.characteristics = %w[MEM_READ MEM_WRITE]
  sec.encoded = EncodedData.new << tlsdir << callbacks
  encode_append_section sec
end
# Encodes the relocation tables into a new '.reloc' section and updates
# @directory['base_relocation_table']. An empty placeholder table is
# emitted when no relocations exist, so the directory entry stays valid.
def encode_relocs
  if @relocations.empty?
    placeholder = RelocationTable.new
    placeholder.base_addr = 0
    placeholder.relocs = []
    @relocations << placeholder
  end
  table = EncodedData.new
  @relocations.each { |rt| table << rt.encode(self) }
  @directory['base_relocation_table'] = [label_at(table, 0, 'reloc_table'), table.virtsize]
  sec = Section.new
  sec.name = '.reloc'
  sec.characteristics = %w[MEM_READ MEM_DISCARDABLE]
  sec.encoded = table
  encode_append_section sec
end
# creates the @relocations from sections.encoded.reloc
#
# Scans every pending relocation; those that resolve to
# "image start + constant" under a synthetic binding are turned into PE
# base relocations, grouped into 4k-page RelocationTables.
def create_relocation_tables
  @relocations = []
  # create a fake binding with all exports, to find only-image_base-dependant relocs targets
  # not foolproof, but works in standard cases
  startaddr = curaddr = label_at(@encoded, 0, 'coff_start')
  binding = {}
  @sections.each { |s|
    binding.update s.encoded.binding(curaddr)
    curaddr = Expression[curaddr, :+, s.encoded.virtsize]
  }
  # for each section.encoded, make as many RelocationTables as needed
  @sections.each { |s|
    # rt.base_addr temporarily holds the offset from section_start, and is fixed up to rva before '@reloc << rt'
    rt = RelocationTable.new
    s.encoded.reloc.each { |off, rel|
      # check that the relocation looks like "program_start + integer" when bound using the fake binding
      # XXX allow :i32 etc
      if rel.endianness == @endianness and [:u32, :a32, :u64, :a64].include?(rel.type) and
          rel.target.bind(binding).reduce.kind_of?(Expression) and
          Expression[rel.target, :-, startaddr].bind(binding).reduce.kind_of?(::Integer)
        # winner !
        # build relocation
        r = RelocationTable::Relocation.new
        r.offset = off & 0xfff   # page-relative offset, 12 bits
        r.type = { :u32 => 'HIGHLOW', :u64 => 'DIR64', :a32 => 'HIGHLOW', :a64 => 'DIR64' }[rel.type]
        # check if we need to start a new relocation table
        # (each table covers exactly one 4k page)
        if rt.base_addr and (rt.base_addr & ~0xfff) != (off & ~0xfff)
          rt.base_addr = Expression[[label_at(s.encoded, 0, 'sect_start'), :-, startaddr], :+, rt.base_addr]
          @relocations << rt
          rt = RelocationTable.new
        end
        # initialize reloc table base address if needed
        if not rt.base_addr
          rt.base_addr = off & ~0xfff
        end
        (rt.relocs ||= []) << r
      else
        puts "W: COFF: Ignoring weird relocation #{rel.inspect} when building relocation tables" if $DEBUG
      end
    }
    # flush the last (partial) table for this section
    if rt and rt.relocs
      rt.base_addr = Expression[[label_at(s.encoded, 0, 'sect_start'), :-, startaddr], :+, rt.base_addr]
      @relocations << rt
    end
  }
end
# Encodes the resource directory into a new '.rsrc' section and registers
# it in @directory['resource_table'].
def encode_resource
  rsrc = @resource.encode self
  @directory['resource_table'] = [label_at(rsrc, 0, 'resource_table'), rsrc.virtsize]
  sec = Section.new
  sec.name = '.rsrc'
  sec.characteristics = %w[MEM_READ]
  sec.encoded = rsrc
  encode_append_section sec
end
# appends the header/optheader/directories/section table to @encoded
# initializes some flags based on the target arg ('exe' / 'dll' / 'kmod' / 'obj')
def encode_header(target = 'exe')
  # normalize symbol/string aliases to the canonical target names
  target = {:bin => 'exe', :lib => 'dll', :obj => 'obj', 'sys' => 'kmod', 'drv' => 'kmod'}.fetch(target, target)
  @header.machine ||= case @cpu.shortname
    when 'x64'; 'AMD64'
    when 'ia32'; 'I386'
    end
  # setup header flags
  tmp = %w[LINE_NUMS_STRIPPED LOCAL_SYMS_STRIPPED DEBUG_STRIPPED] +
    case target
    when 'exe'; %w[EXECUTABLE_IMAGE]
    when 'dll'; %w[EXECUTABLE_IMAGE DLL]
    when 'kmod'; %w[EXECUTABLE_IMAGE]
    when 'obj'; []
    end
  tmp << 'x32BIT_MACHINE' # XXX
  tmp << 'RELOCS_STRIPPED' # if not @directory['base_relocation_table'] # object relocs
  @header.characteristics ||= tmp
  @optheader.subsystem ||= case target
    when 'exe', 'dll'; 'WINDOWS_GUI'
    when 'kmod'; 'NATIVE'
    end
  # ASLR-compatible when base relocations are present
  @optheader.dll_characts = ['DYNAMIC_BASE'] if @directory['base_relocation_table']
  # encode section table, add CONTAINS_* flags from other characteristics flags
  s_table = EncodedData.new
  @sections.each { |s|
    if s.characteristics.kind_of? Array and s.characteristics.include? 'MEM_READ'
      if s.characteristics.include? 'MEM_EXECUTE'
        s.characteristics |= ['CONTAINS_CODE']
      elsif s.encoded
        # no raw bytes => purely uninitialized data (bss-like)
        if s.encoded.rawsize == 0
          s.characteristics |= ['CONTAINS_UDATA']
        else
          s.characteristics |= ['CONTAINS_DATA']
        end
      end
    end
    s.rawaddr = nil if s.rawaddr.kind_of?(::Integer) # XXX allow to force rawaddr ?
    s_table << s.encode(self)
  }
  # encode optional header
  # clear derived fields so they are re-emitted as labels and fixed up later
  @optheader.headers_size = nil
  @optheader.image_size = nil
  @optheader.numrva = nil
  opth = @optheader.encode(self)
  # encode header
  @header.num_sect = nil
  @header.size_opthdr = nil
  @encoded << @header.encode(self, opth) << opth << s_table
end
# append the section bodies to @encoded, and link the resulting binary
#
# Walks the sections assigning virtual and raw addresses, resolves the
# label-based header fields (headers_size, image_size, raw addr/size),
# patches deferred IAT chunks, then fixes up all relocations and the
# optional header checksum.
def encode_sections_fixup
  @encoded.align @optheader.file_align
  if @optheader.headers_size.kind_of?(::String)
    # headers_size was emitted as a label: resolve it to the current size
    @encoded.fixup! @optheader.headers_size => @encoded.virtsize
    @optheader.headers_size = @encoded.virtsize
  end
  baseaddr = @optheader.image_base.kind_of?(::Integer) ? @optheader.image_base : 0x400000
  binding = @encoded.binding(baseaddr)
  curaddr = baseaddr + @optheader.headers_size
  @sections.each { |s|
    # align
    curaddr = EncodedData.align_size(curaddr, @optheader.sect_align)
    if s.rawaddr.kind_of?(::String)
      @encoded.fixup! s.rawaddr => @encoded.virtsize
      s.rawaddr = @encoded.virtsize
    end
    if s.virtaddr.kind_of?(::Integer)
      # hardcoded virtual address: previous sections must fit below it
      raise "E: COFF: cannot encode section #{s.name}: hardcoded address too short" if curaddr > baseaddr + s.virtaddr
      curaddr = baseaddr + s.virtaddr
    end
    binding.update s.encoded.binding(curaddr)
    curaddr += s.virtsize
    pre_sz = @encoded.virtsize
    # only the initialized part of the section goes into the file
    @encoded << s.encoded[0, s.encoded.rawsize]
    @encoded.align @optheader.file_align
    if s.rawsize.kind_of?(::String)
      @encoded.fixup! s.rawsize => (@encoded.virtsize - pre_sz)
      s.rawsize = @encoded.virtsize - pre_sz
    end
  }
  # not aligned ? spec says it is, visual studio does not
  binding[@optheader.image_size] = curaddr - baseaddr if @optheader.image_size.kind_of?(::String)
  # patch the iat where iat_p was defined
  # sort to ensure a 0-terminated will not overwrite an entry
  # (try to dump notepad.exe, which has a forwarder;)
  @imports.find_all { |id| id.iat_p.kind_of? Integer }.sort_by { |id| id.iat_p }.each { |id|
    s = sect_at_rva(id.iat_p)
    # NOTE(review): relies on sect_at_rva leaving s.encoded.ptr at the rva offset — confirm
    @encoded[s.rawaddr + s.encoded.ptr, id.iat.virtsize] = id.iat
    binding.update id.iat.binding(baseaddr + id.iat_p)
  } if imports
  @encoded.fill
  @encoded.fixup! binding
  if @optheader.checksum.kind_of?(::String) and @encoded.reloc.length == 1
    # won't work if there are other unresolved relocs
    checksum = self.class.checksum(@encoded.data, @endianness)
    @encoded.fixup @optheader.checksum => checksum
    @optheader.checksum = checksum
  end
end
# encode a COFF file, building export/import/reloc tables if needed
# creates the base relocation tables (need for references to IAT not known before)
#
# Returns the final binary string. The ordering below matters: directories
# must exist before the header is encoded, and relocation tables must be
# built after all sections (including generated ones) are in place.
def encode(target = 'exe', want_relocs = (target != 'exe' and target != :bin))
  @encoded = EncodedData.new
  label_at(@encoded, 0, 'coff_start')
  autoimport
  encode_exports if export
  encode_imports if imports
  encode_resource if resource
  encode_tls if tls
  create_relocation_tables if want_relocs
  encode_relocs if relocations
  encode_header(target)
  encode_sections_fixup
  @encoded.data
end
# Initializes parsing state. Installs a placeholder @cursource whose <<
# lazily switches to a real '.text' section on the first appended token.
def parse_init
  # ahem...
  # a fake object, which when appended makes us parse '.text', which creates a real default section
  # forwards to it this first appendage.
  # allows the user to specify its own section if he wishes, and to use .text if he doesn't
  if not defined? @cursource or not @cursource
    @cursource = ::Object.new
    class << @cursource
      attr_accessor :coff
      # first append: trigger the '.text' parser directive (which replaces
      # coff.cursource with the real section source), then forward the token
      def <<(*a)
        t = Preprocessor::Token.new(nil)
        t.raw = '.text'
        coff.parse_parser_instruction t
        coff.cursource.send(:<<, *a)
      end
    end
    @cursource.coff = self
  end
  @source ||= {}
  super()
end
# handles compiler meta-instructions
#
# syntax:
# .section "<section name>" <perm list> <base>
# section name is a string (may be quoted)
# perms are in 'r' 'w' 'x' 'shared' 'discard', may be concatenated (in this order), may be prefixed by 'no' to remove the attribute for an existing section
# base is the token 'base', the token '=' and an immediate expression
# default sections:
# .text = .section '.text' rx
# .data = .section '.data' rw
# .rodata = .section '.rodata' r
# .bss = .section '.bss' rw
# .entrypoint | .entrypoint <label>
# defines the label as the program entrypoint
# without argument, creates a label used as entrypoint
# .libname "<name>"
# defines the string to be used as exported library name (should be the same as the file name, may omit extension)
# .export ["<exported_name>"] [<ordinal>] [<label_name>]
# exports the specified label with the specified name (label_name defaults to exported_name)
# if exported_name is an unquoted integer, the export is by ordinal. XXX if the ordinal starts with '0', the integer is interpreted as octal
# .import "<libname>" "<import_name|ordinal>" [<thunk_name>] [<label_name>]
# imports a symbol from a library
# if the thunk name is specified and not 'nil', the compiler will generate a thunk that can be called (in ia32, 'call thunk' == 'call [import_name]')
# the thunk is position-independent, and should be used instead of the indirect call form, for imported functions
# label_name is the label to attribute to the location that will receive the address of the imported symbol, defaults to import_name (iat_<import_name> if thunk == iname)
# .image_base <base>
# specifies the COFF prefered load address, base is an immediate expression
#
# Handles one compiler meta-instruction (see the directive syntax described
# in the comment block above). Consumes tokens from @lexer; unknown
# directives are delegated to super.
def parse_parser_instruction(instr)
  # reads one (possibly quoted) string token, raises on anything else
  readstr = lambda {
    @lexer.skip_space
    raise instr, 'string expected' if not t = @lexer.readtok or (t.type != :string and t.type != :quoted)
    t.value || t.raw
  }
  # asserts the directive line holds no further arguments
  check_eol = lambda {
    @lexer.skip_space
    raise instr, 'eol expected' if t = @lexer.nexttok and t.type != :eol
  }
  case instr.raw.downcase
  when '.text', '.data', '.rodata', '.bss'
    # shorthand section directives: create the default section on first use
    sname = instr.raw.downcase
    if not @sections.find { |s| s.name == sname }
      s = Section.new
      s.name = sname
      s.encoded = EncodedData.new
      s.characteristics = case sname
        when '.text'; %w[MEM_READ MEM_EXECUTE]
        when '.data', '.bss'; %w[MEM_READ MEM_WRITE]
        when '.rodata'; %w[MEM_READ]
        end
      @sections << s
    end
    @cursource = @source[sname] ||= []
    check_eol[] if instr.backtrace # special case for magic @cursource
  when '.section'
    # .section <section name|"section name"> [(no)r w x shared discard] [base=<expr>]
    sname = readstr[]
    if not s = @sections.find { |s_| s_.name == sname }
      s = Section.new
      s.name = sname
      s.encoded = EncodedData.new
      s.characteristics = []
      @sections << s
    end
    loop do
      @lexer.skip_space
      break if not tok = @lexer.nexttok or tok.type != :string
      case @lexer.readtok.raw.downcase
      when /^(no)?(r)?(w)?(x)?(shared)?(discard)?$/
        # permission flag group; 'no' prefix removes the listed flags
        ar = []
        ar << 'MEM_READ' if $2
        ar << 'MEM_WRITE' if $3
        ar << 'MEM_EXECUTE' if $4
        ar << 'MEM_SHARED' if $5
        ar << 'MEM_DISCARDABLE' if $6
        if $1; s.characteristics -= ar
        else s.characteristics |= ar
        end
      when 'base'
        @lexer.skip_space
        @lexer.unreadtok tok if not tok = @lexer.readtok or tok.type != :punct or tok.raw != '='
        raise instr, 'invalid base' if not s.virtaddr = Expression.parse(@lexer).reduce or not s.virtaddr.kind_of?(::Integer)
        if not @optheader.image_base
          # derive a plausible load base from the hardcoded section address
          @optheader.image_base = (s.virtaddr-0x80) & 0xfff00000
          puts "Warning: no image_base specified, using #{Expression[@optheader.image_base]}" if $VERBOSE
        end
        # store the section address relative to the image base (an rva)
        s.virtaddr -= @optheader.image_base
      else raise instr, 'unknown parameter'
      end
    end
    @cursource = @source[sname] ||= []
    check_eol[]
  when '.libname'
    # export directory library name
    # .libname <libname|"libname">
    @export ||= ExportDirectory.new
    @export.libname = readstr[]
    check_eol[]
  when '.export'
    # .export <export name|ordinal|"export name"> [ordinal] [label to export if different]
    @lexer.skip_space
    raise instr, 'string expected' if not tok = @lexer.readtok or (tok.type != :string and tok.type != :quoted)
    exportname = tok.value || tok.raw
    # an unquoted token starting with a digit means export by ordinal
    if tok.type == :string and (?0..?9).include? tok.raw[0]
      exportname = Integer(exportname) rescue raise(tok, "bad ordinal value, try quotes #{' or rm leading 0' if exportname[0] == ?0}")
    end
    # optional explicit ordinal
    @lexer.skip_space
    tok = @lexer.readtok
    if tok and tok.type == :string and (?0..?9).include? tok.raw[0]
      (eord = Integer(tok.raw)) rescue @lexer.unreadtok(tok)
    else @lexer.unreadtok(tok)
    end
    # optional label name, when it differs from the exported name
    @lexer.skip_space
    tok = @lexer.readtok
    if tok and tok.type == :string
      exportlabel = tok.raw
    else
      @lexer.unreadtok tok
    end
    @export ||= ExportDirectory.new
    @export.exports ||= []
    e = ExportDirectory::Export.new
    if exportname.kind_of? Integer
      e.ordinal = exportname
    else
      e.name = exportname
      e.ordinal = eord if eord
    end
    e.target = exportlabel || exportname
    @export.exports << e
    check_eol[]
  when '.import'
    # .import <libname|"libname"> <imported sym|"imported sym"> [label of plt thunk|nil] [label of iat element if != symname]
    libname = readstr[]
    i = ImportDirectory::Import.new
    @lexer.skip_space
    raise instr, 'string expected' if not tok = @lexer.readtok or (tok.type != :string and tok.type != :quoted)
    # digit-leading unquoted token means import by ordinal
    if tok.type == :string and (?0..?9).include? tok.raw[0]
      i.ordinal = Integer(tok.raw)
    else
      i.name = tok.value || tok.raw
    end
    # optional thunk label ('nil' disables thunk generation)
    @lexer.skip_space
    if tok = @lexer.readtok and tok.type == :string
      i.thunk = tok.raw if tok.raw != 'nil'
      @lexer.skip_space
      tok = @lexer.readtok
    end
    # optional iat slot label; otherwise derive one from name/thunk
    if tok and tok.type == :string
      i.target = tok.raw
    else
      i.target = ((i.thunk == i.name) ? ('iat_' + i.name) : (i.name ? i.name : (i.thunk ? 'iat_' + i.thunk : raise(instr, 'need iat label'))))
      @lexer.unreadtok tok
    end
    # new_label returns the name unchanged only when it is still unused
    raise tok, 'import target exists' if i.target != new_label(i.target)
    @imports ||= []
    if not id = @imports.find { |id_| id_.libname == libname }
      id = ImportDirectory.new
      id.libname = libname
      id.imports = []
      @imports << id
    end
    id.imports << i
    check_eol[]
  when '.entrypoint'
    # ".entrypoint <somelabel/expression>" or ".entrypoint" (here)
    @lexer.skip_space
    if tok = @lexer.nexttok and tok.type == :string
      raise instr, 'syntax error' if not entrypoint = Expression.parse(@lexer)
    else
      # bare form: drop an anonymous label at the current source position
      entrypoint = new_label('entrypoint')
      @cursource << Label.new(entrypoint, instr.backtrace.dup)
    end
    @optheader.entrypoint = entrypoint
    check_eol[]
  when '.image_base'
    raise instr if not base = Expression.parse(@lexer) or !(base = base.reduce).kind_of?(::Integer)
    @optheader.image_base = base
    check_eol[]
  when '.subsystem'
    @lexer.skip_space
    raise instr if not tok = @lexer.readtok
    @optheader.subsystem = tok.raw
    check_eol[]
  else super(instr)
  end
end
# Assembles every pending source statement list into its target section's
# EncodedData, then clears the pending lists. Any arguments are parsed first.
def assemble(*args)
  parse(*args) unless args.empty?
  @source.each { |secname, stmts|
    sec = @sections.find { |cand| cand.name == secname }
    raise "no section named #{secname} ?" unless sec
    sec.encoded << assemble_sequence(stmts, @cpu)
    stmts.clear
  }
end
# Predefines PE-specific preprocessor macros: __PE__ always, and
# __MS_X86_64_ABI__ when the target CPU is x64.
def tune_prepro(pp)
  pp.define_weak('__PE__', 1)
  if @cpu and @cpu.shortname == 'x64'
    pp.define_weak('__MS_X86_64_ABI__')
  end
end
# Configures the C parser; 64-bit Windows uses the LLP64 data model.
def tune_cparser(parser)
  super(parser)
  parser.llp64 if @cpu.size == 64
end
# honors C attributes: export, export_as(foo), import_from(kernel32), entrypoint
# import by ordinal: extern __stdcall int anyname(int) __attribute__((import_from(ws2_32:28)));
# can alias imports with int mygpaddr_alias() attr(import_from(kernel32:GetProcAddr))
def read_c_attrs(cp)
  cp.toplevel.symbol.each_value { |v|
    next if not v.kind_of? C::Variable
    if v.has_attribute 'export' or ea = v.has_attribute_var('export_as')
      @export ||= ExportDirectory.new
      @export.exports ||= []
      e = ExportDirectory::Export.new
      begin
        # a numeric export name means export by ordinal
        e.ordinal = Integer(ea || v.name)
      rescue ArgumentError
        e.name = ea || v.name
      end
      e.target = v.name
      @export.exports << e
    end
    if v.has_attribute('import') or ln = v.has_attribute_var('import_from')
      # bare 'import': look the symbol up in the standard Windows export list
      ln ||= WindowsExports::EXPORT[v.name]
      raise "unknown library for #{v.name}" if not ln
      i = ImportDirectory::Import.new
      if ln.include? ':'
        # 'lib:sym' / 'lib:ordinal' form
        ln, name = ln.split(':')
        begin
          i.ordinal = Integer(name)
        rescue ArgumentError
          i.name = name
        end
      else
        i.name = v.name
      end
      if v.type.kind_of? C::Function
        # functions get a callable thunk; the iat slot gets a derived label
        i.thunk = v.name
        i.target = 'iat_'+i.thunk
      else
        i.target = v.name
      end
      @imports ||= []
      if not id = @imports.find { |id_| id_.libname == ln }
        id = ImportDirectory.new
        id.libname = ln
        id.imports = []
        @imports << id
      end
      id.imports << i
    end
    if v.has_attribute 'entrypoint'
      @optheader.entrypoint = v.name
    end
  }
end
# try to resolve automatically COFF import tables from self.sections.encoded.relocations
# and WindowsExports::EXPORT
# if the relocation target is '<symbolname>' or 'iat_<symbolname>, link to the IAT address, if it is '<symbolname> + <expr>',
# link to a thunk (plt-like)
def autoimport
  WindowsExports rescue return # autorequire
  # symbols defined locally in any section must not be auto-imported
  autoexports = WindowsExports::EXPORT.dup
  @sections.each { |s|
    next if not s.encoded
    s.encoded.export.keys.each { |e| autoexports.delete e }
  }
  @sections.each { |s|
    next if not s.encoded
    s.encoded.reloc.each_value { |r|
      # '0 + "sym"' => direct reference to the IAT slot
      if r.target.op == :+ and not r.target.lexpr and r.target.rexpr.kind_of?(::String)
        sym = target = r.target.rexpr
        sym = sym[4..-1] if sym[0, 4] == 'iat_'
      # '"sym" - "label"' => pc-relative reference, needs a call thunk
      elsif r.target.op == :- and r.target.rexpr.kind_of?(::String) and r.target.lexpr.kind_of?(::String)
        sym = thunk = r.target.lexpr
      end
      next if not dll = autoexports[sym]
      @imports ||= []
      next if @imports.find { |id| id.imports.find { |ii| ii.name == sym } }
      if not id = @imports.find { |id_| id_.libname =~ /^#{dll}(\.dll)?$/i }
        id = ImportDirectory.new
        id.libname = dll
        id.imports = []
        @imports << id
      end
      if not i = id.imports.find { |i_| i_.name == sym }
        i = ImportDirectory::Import.new
        i.name = sym
        id.imports << i
      end
      # refuse to silently rebind an import already wired differently
      if (target and i.target and (i.target != target or i.thunk == target)) or
          (thunk and i.thunk and (i.thunk != thunk or i.target == thunk))
        puts "autoimport: conflict for #{target} #{thunk} #{i.inspect}" if $VERBOSE
      else
        i.target ||= new_label(target || 'iat_' + thunk)
        i.thunk ||= thunk if thunk
      end
    }
  }
end
end
end
|
require 'rubygems'
require 'bundler/setup'
require 'set'
require 'boxcar_api'
require 'gmail'
require 'active_support/all'
# Seconds to sleep between mailbox polls. ENV values arrive as Strings,
# and Kernel#sleep raises TypeError on a String, so coerce to Integer.
POLL_INTERVAL = (ENV['POLL_INTERVAL'] || 30).to_i
# Boxcar notifications are delivered to this account's address.
NOTIFY_ACCOUNT = ENV['GMAIL_USERNAME'] || ENV['GMAIL_USERNAME_0']
# Builds a { username => password } hash from environment variables.
#
# With ACCOUNT_COUNT unset or 1, uses GMAIL_USERNAME / GMAIL_PASSWORD.
# With ACCOUNT_COUNT = n > 1, uses GMAIL_USERNAME_i / GMAIL_PASSWORD_i
# for i in 0...n.
def get_account_info
  accounts = {}
  count = ENV['ACCOUNT_COUNT'] || 1
  count = count.to_i
  if count == 1
    # fixed: was misspelled 'acounts', which raised NameError at runtime
    accounts[ENV['GMAIL_USERNAME']] = ENV['GMAIL_PASSWORD']
  else
    count.times do |i|
      accounts[ENV["GMAIL_USERNAME_#{i}"]] = ENV["GMAIL_PASSWORD_#{i}"]
    end
  end
  accounts
end
# Main polling loop: for each configured account, fetch unread mail and
# forward each message to Boxcar, forever.
username_password_hash = get_account_info
unless username_password_hash.length > 0
  puts 'GMAIL_USERNAME and GMAIL_PASSWORD env variables are not set'
  exit
end
boxcar_key = ENV['BOXCAR_KEY']
boxcar_secret = ENV['BOXCAR_SECRET']
unless boxcar_key.present? && boxcar_secret.present?
  puts 'BOXCAR_KEY and BOXCAR_SECRET env variables are not set'
  exit
end
while true
  username_password_hash.each_pair do |username, password|
    puts "Checking #{username} gmail"
    gmail = Gmail.new(username, password)
    emails = gmail.inbox.emails(:unread)
    emails.each do |e|
      from = e.from.first.name
      msg = e.subject
      puts " #{from}: #{msg}"
      # NOTE(review): a new Provider per message — could be hoisted out of the loop
      provider = BoxcarAPI::Provider.new(boxcar_key, boxcar_secret)
      provider.notify(NOTIFY_ACCOUNT, msg,
        :from_screen_name => from,
        :from_remote_service_id => e.uid)
    end
    gmail.logout
    puts 'Done checking, sleeping now'
    $stdout.flush
  end
  # NOTE(review): if ENV['POLL_INTERVAL'] is set, POLL_INTERVAL is a String
  # and sleep raises TypeError — the constant should be coerced with to_i
  sleep POLL_INTERVAL
end
Fix: spell 'accounts' correctly in get_account_info
require 'rubygems'
require 'bundler/setup'
require 'set'
require 'boxcar_api'
require 'gmail'
require 'active_support/all'
# Seconds to sleep between mailbox polls. ENV values arrive as Strings,
# and Kernel#sleep raises TypeError on a String, so coerce to Integer.
POLL_INTERVAL = (ENV['POLL_INTERVAL'] || 30).to_i
# Boxcar notifications are delivered to this account's address.
NOTIFY_ACCOUNT = ENV['GMAIL_USERNAME'] || ENV['GMAIL_USERNAME_0']
# Reads Gmail credentials from the environment and returns them as a
# { username => password } hash. ACCOUNT_COUNT selects between the single
# GMAIL_USERNAME/GMAIL_PASSWORD pair and the indexed _0.._{n-1} variants.
def get_account_info
  count = (ENV['ACCOUNT_COUNT'] || 1).to_i
  if count == 1
    { ENV['GMAIL_USERNAME'] => ENV['GMAIL_PASSWORD'] }
  else
    (0...count).each_with_object({}) do |i, accounts|
      accounts[ENV["GMAIL_USERNAME_#{i}"]] = ENV["GMAIL_PASSWORD_#{i}"]
    end
  end
end
# Main polling loop: for each configured account, fetch unread mail and
# forward each message to Boxcar, forever.
username_password_hash = get_account_info
unless username_password_hash.length > 0
  puts 'GMAIL_USERNAME and GMAIL_PASSWORD env variables are not set'
  exit
end
boxcar_key = ENV['BOXCAR_KEY']
boxcar_secret = ENV['BOXCAR_SECRET']
unless boxcar_key.present? && boxcar_secret.present?
  puts 'BOXCAR_KEY and BOXCAR_SECRET env variables are not set'
  exit
end
while true
  username_password_hash.each_pair do |username, password|
    puts "Checking #{username} gmail"
    gmail = Gmail.new(username, password)
    emails = gmail.inbox.emails(:unread)
    emails.each do |e|
      from = e.from.first.name
      msg = e.subject
      puts " #{from}: #{msg}"
      # NOTE(review): a new Provider per message — could be hoisted out of the loop
      provider = BoxcarAPI::Provider.new(boxcar_key, boxcar_secret)
      provider.notify(NOTIFY_ACCOUNT, msg,
        :from_screen_name => from,
        :from_remote_service_id => e.uid)
    end
    gmail.logout
    puts 'Done checking, sleeping now'
    $stdout.flush
  end
  # NOTE(review): if ENV['POLL_INTERVAL'] is set, POLL_INTERVAL is a String
  # and sleep raises TypeError — the constant should be coerced with to_i
  sleep POLL_INTERVAL
end
|
68ebdf19-2d48-11e5-806f-7831c1c36510
68f1d95e-2d48-11e5-b1ad-7831c1c36510
68f1d95e-2d48-11e5-b1ad-7831c1c36510 |
4aed9ceb-2d48-11e5-bef3-7831c1c36510
4af32ff5-2d48-11e5-bdd2-7831c1c36510
4af32ff5-2d48-11e5-bdd2-7831c1c36510 |
57f64042-2d48-11e5-b36d-7831c1c36510
57fc347a-2d48-11e5-8e6d-7831c1c36510
57fc347a-2d48-11e5-8e6d-7831c1c36510 |
require 'sinatra'
require 'json'
require 'logging'
require 'rbconfig'
require 'open-uri'
require_relative './apps/bootstrap.rb'
require_relative './Models/models.rb'
require_relative './Models/test.rb'
require_relative './Models/configuration.rb'
require_relative './Models/testlocation.rb'
require_relative './Models/results.rb'
class PerfApp < Sinatra::Base
# Console logging setup: colorized level scheme for STDOUT output.
Logging.color_scheme( 'bright',
  :levels => {
    :info => :green,
    :warn => :yellow,
    :error => :red,
    :fatal => [:white, :on_red]
  },
  :date => :blue,
  :logger => :cyan,
  :message => :magenta
)
logger = Logging.logger(STDOUT)
logger.level = :debug
# NOTE(review): class-body local; no use visible in this view — confirm it is needed
results = nil
# In your main application file
configure do
  set :views, "#{File.dirname(__FILE__)}/views"
  set :public_dir, "#{File.dirname(__FILE__)}/public"
  enable :show_exceptions if development? #or test?
  # remember the active environment; routes pass it to application classes
  set :deployment, environment
  Apps::Bootstrap.main_config(environment)
end
configure :development do
  set :show_exceptions, :after_handler
end
# Respond with the raised ArgumentError's message as the body.
error ArgumentError do
  request.env['sinatra.error'].message
end
# Raised by open-uri when a backend HTTP request fails.
error OpenURI::HTTPError do
  "Unable to gather required data. There's a misconfiguration to connect to backend services."
end
# Landing page: lists every registered application.
get '/' do
  erb :index, :locals => {
    :data => Apps::Bootstrap.application_list
  }
end
# Application overview page; 404 when the application id is unknown.
get '/:application' do |application|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    erb :app_index, :locals => {
      :data => {
        :name => new_app.config['application']['name'],
        :description => new_app.config['application']['description'],
        :title => new_app.config['application']['name'],
        :application => app[:id]
      }
    }
  else
    status 404
  end
end
# Lists the sub-applications configured for one application.
get '/:application/applications' do |application|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    erb :applications, :locals => {
      :application_list => new_app.config['application']['sub_apps'],
      :title => new_app.config['application']['name'],
      :application => app[:id]
    }
  else
    status 404
  end
end
# Sub-application detail view: requests/responses, configs and test files.
get '/:application/applications/:name' do |application, name|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      erb :app_detail, :locals => {
        :application => app[:id],
        :sub_app_id => name.to_sym,
        :title => new_app.config['application']['name'],
        :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
        :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
        :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name)
      }
    else
      status 404
    end
  else
    status 404
  end
end
# Editable variant of the detail view, with test-type and runner choices.
get '/:application/applications/:name/update' do |application, name|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      erb :app_detail_update, :locals => {
        :application => app[:id],
        :sub_app_id => name.to_sym,
        :title => new_app.config['application']['name'],
        :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
        :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
        :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
        :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
        :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
      }
    else
      status 404
    end
  else
    status 404
  end
end
# Stores a new request/response pair for a sub-application and re-renders
# the update view. 400 on missing params, 404 on unknown ids or bad data.
post '/:application/applications/:name/add_request_response' do |application, name|
  halt 400, "No request specified" unless params['request']
  halt 400, "No response specified" unless params['response']
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        Models::Test.new(new_app.db).add_request_response(application, name, new_app.storage_info, params['request'], params['response'])
        status 201
        erb :app_detail_update, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
          :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
        }
      rescue ArgumentError => e
        halt 404, 'invalid request response'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# Updates an existing request/response pair for a sub-application.
# Both params must be present and carry the 'request_id' of the record.
post '/:application/applications/:name/update_request_response' do |application, name|
  # fixed: the original used `||`, which never validated request_id when the
  # param was present and raised NoMethodError on nil[] when it was absent
  halt 400, "No request specified" unless params['request'] && params['request']['request_id']
  halt 400, "No response specified" unless params['response'] && params['response']['request_id']
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        Models::Test.new(new_app.db).update_request_response(application, name, new_app.storage_info, params['request'], params['response'])
        status 200
      rescue ArgumentError => e
        halt 404, 'invalid request response'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# Deletes one or more request/response pairs (comma-separated ids in the
# path) and re-renders the update view.
delete '/:application/applications/:name/remove_request_response/:requests_to_delete' do |application, name,requests_to_delete|
  request_ids = requests_to_delete.split(',')
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        Models::Test.new(new_app.db).remove_request_response(application, name, new_app.storage_info, request_ids)
        status 200
        erb :app_detail_update, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
          :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
        }
      rescue ArgumentError => e
        halt 404, 'invalid request ids specified'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# Uploads a configuration file (multipart form field 'upload_config')
# for a sub-application and re-renders the update view.
post '/:application/applications/:name/upload_config' do |application, name|
  config_name = params['upload_config'][:filename]
  config_body = params['upload_config'][:tempfile]
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        Models::Configuration.new(new_app.db, new_app.fs_ip).add_config(application, name, new_app.storage_info, config_name, config_body)
        status 201
        erb :app_detail_update, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
          :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
        }
      rescue ArgumentError => e
        halt 404, 'invalid config file specified'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# Uploads a test script for a sub app. Reads the multipart upload from
# params['upload_test_file'] plus runner/type selectors, stores it via
# Models::TestLocationFactory#add_test_file, and answers 201 with the
# refreshed :app_detail_update fragment.
post '/:application/applications/:name/upload_test_file' do |application, name|
  test_file_name = params['upload_test_file'][:filename]
  test_file_body = params['upload_test_file'][:tempfile]
  test_file_runner = params['test_file_runner']
  test_file_type = params['test_file_type']
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).add_test_file(application, name, new_app.storage_info, test_file_name, test_file_body, test_file_runner, test_file_type)
        status 201
        erb :app_detail_update, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
          :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
        }
      rescue ArgumentError => e
        # FIX: this route handles test files, but the message said
        # 'invalid config file specified' (copy-paste from upload_config).
        halt 404, 'invalid test file specified'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# Removes the configuration files selected in params['remove_config']
# from a sub app and re-renders the app detail fragment.
post '/:application/applications/:name/remove_config' do |application, name|
  selected_configs = params['remove_config']
  app_entry = Apps::Bootstrap.application_list.find { |entry| entry[:id] == application }
  halt 404, 'invalid application specified' unless app_entry
  instance = app_entry[:klass].new(settings.deployment)
  matching_sub = instance.config['application']['sub_apps'].find { |sa| sa['id'] == name }
  halt 404, 'invalid sub app specified' unless matching_sub
  begin
    selected_configs.each do |config_name|
      Models::Configuration.new(instance.db, instance.fs_ip).remove_config(application, name, instance.storage_info, config_name)
    end
    status 200
    erb :app_detail_update, :locals => {
      :application => app_entry[:id],
      :sub_app_id => name.to_sym,
      :title => instance.config['application']['name'],
      :request_response_list => Models::Test.new(instance.db).get_setup_requests_by_name(app_entry[:id], name),
      :config_list => Models::Configuration.new(instance.db, instance.fs_ip).get_by_name(app_entry[:id], name),
      :test_location_list => Models::TestLocationFactory.new(instance.db, instance.fs_ip).get_by_name(app_entry[:id], name),
      :test_type_list => Apps::Bootstrap.test_list.map { |k, v| {:id => k, :name => v["name"] } },
      :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
    }
  rescue ArgumentError
    halt 404, 'invalid config file specified'
  end
end
# Removes the test scripts selected in params['remove_test_file'] from a
# sub app and re-renders the app detail fragment.
post '/:application/applications/:name/remove_test_file' do |application, name|
  test_file_list = params['remove_test_file']
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        test_file_list.each do |test_file|
          Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).remove_test_file(application, name, new_app.storage_info, test_file)
        end
        status 200
        erb :app_detail_update, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
          :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
        }
      rescue ArgumentError => e
        # FIX: this route handles test files, but the message said
        # 'invalid config file specified' (copy-paste from remove_config).
        halt 404, 'invalid test file specified'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# Downloads the stored test script for a sub app as a file attachment
# named "test_file". 404s with a message when the file or sub app is
# missing (ArgumentError from the model layer).
get '/:application/applications/:name/test_download/:file_name' do |application, name, file_name|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        downloaded_file = Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_id(application, name, file_name).download
        attachment "test_file"
        # FIX: was `content_type = 'Application/octet-stream'`, which only
        # assigned a throwaway local variable and never set the header; call
        # Sinatra's content_type helper with the canonical lowercase type.
        content_type 'application/octet-stream'
        body downloaded_file
      rescue ArgumentError => e
        status 404
        body e.message
      end
    else
      status 404
      body "No test script exists for #{application}/#{name}"
    end
  else
    status 404
    body "No test script exists for #{application}/#{name}"
  end
end
# Results landing page for one application: lists its sub apps.
get '/:application/results' do |application|
  entry = Apps::Bootstrap.application_list.find { |candidate| candidate[:id] == application }
  if entry.nil?
    status 404
  else
    loaded = entry[:klass].new(settings.deployment)
    erb :results, :locals => {
      :application_list => loaded.config['application']['sub_apps'],
      :title => loaded.config['application']['name'],
      :application => entry[:id]
    }
  end
end
# Results listing for one sub app: shows the available test types.
get '/:application/results/:name' do |application, name|
  entry = Apps::Bootstrap.application_list.find { |candidate| candidate[:id] == application }
  if entry.nil?
    status 404
  else
    loaded = entry[:klass].new(settings.deployment)
    chosen = loaded.config['application']['sub_apps'].find { |sa| sa['id'] == name }
    if chosen.nil?
      status 404
    else
      erb :results_list, :locals => {
        :application => entry[:id],
        :sub_app_id => name.to_sym,
        :title => loaded.config['application']['name'],
        :load_test_list => Apps::Bootstrap.test_list
      }
    end
  end
end
# Summary view of all past results for one test type of a sub app,
# sorted by start time, with the loaded plugins offered alongside.
get '/:application/results/:name/:test' do |application, name, test|
  entry = Apps::Bootstrap.application_list.find { |candidate| candidate[:id] == application }
  if entry && Apps::Bootstrap.test_list.keys.include?(test)
    loaded = entry[:klass].new(settings.deployment)
    chosen = loaded.config['application']['sub_apps'].find { |sa| sa['id'] == name }
    if chosen
      past = Results::PastSummaryResults.new(application, name,
        loaded.config['application']['type'].to_sym, test.chomp('_test'),
        loaded.db, loaded.fs_ip, nil, logger)
      # One entry per loaded plugin: its class name plus its display names.
      plugin_entries = loaded.load_plugins.map do |plugin|
        {
          :id => plugin.to_s,
          :data => plugin.show_plugin_names.map { |pn| {:id => pn[:id], :name => pn[:name] } }
        }
      end
      erb past.summary_view, :locals => {
        :application => entry[:id],
        :sub_app_id => name.to_sym,
        :title => loaded.config['application']['name'],
        :result_set_list => past.past_summary_results.test_results(loaded.db, loaded.fs_ip, past.test_list).sort_by { |r| r.start.to_s },
        :plugin_list => plugin_entries,
        :test_type => test
      }
    else
      status 404
    end
  else
    status 404
  end
end
# JSON time-series of one summary metric across every past result of a
# test: { metric => [[start, value], ...] }. 404s (JSON body) on bad
# application, sub app, or metric name.
get '/:application/results/:name/:test/metric/:metric' do |application, name, test, metric|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      # FIX: was `metric_results = {} unless metric_results` — the guard was
      # a no-op because the local is always fresh for each request.
      metric_results = { metric.to_sym => [] }
      results = Results::PastSummaryResults.new(application, name,
        new_app.config['application']['type'].to_sym, test.chomp('_test'),
        new_app.db, new_app.fs_ip, nil, logger)
      result_set_list = results.past_summary_results.test_results(new_app.db, new_app.fs_ip, results.test_list)
      # respond_to? guards the subsequent send on the caller-supplied metric
      # name (also rejects an empty result list, since nil responds to neither).
      if result_set_list[0].respond_to?(metric.to_sym)
        result_set_list.each { |result| metric_results[metric.to_sym] << [result.start, result.send(metric.to_sym).to_f] }
      else
        halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid metric specified'}.to_json
      end
    else
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid sub app specified'}.to_json
    end
  else
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid application specified'}.to_json
  end
  content_type :json
  body metric_results.to_json
end
# JSON metric values for a single past result id:
# { metric => [...] }, or a JSON 404 when empty/invalid.
get '/:application/results/:name/:test/metric/:metric/id/:id' do |application, name, test, metric, id|
  entry = Apps::Bootstrap.application_list.find { |candidate| candidate[:id] == application }
  unless entry && Apps::Bootstrap.test_list.keys.include?(test)
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid application specified'}.to_json
  end
  loaded = entry[:klass].new(settings.deployment)
  selected_sub = loaded.config['application']['sub_apps'].find { |sa| sa['id'] == name }
  unless selected_sub
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid sub app specified'}.to_json
  end
  past = Results::PastSummaryResults.new(application, name,
    loaded.config['application']['type'].to_sym, test.chomp('_test'),
    loaded.db, loaded.fs_ip, nil, logger)
  detailed = past.past_summary_results.detailed_results(loaded.db, loaded.fs_ip, past.test_list, id)
  series = past.past_summary_results.metric_results(detailed, metric)
  if series and series.length > 0
    content_type :json
    body({ metric.to_sym => series }.to_json)
  else
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'The metric data is empty'}.to_json
  end
end
# Detailed view of one past test result, including the request/response
# pairs, configuration, and test script captured with that run.
get '/:application/results/:name/:test/id/:id' do |application, name, test, id|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      app_type = new_app.config['application']['type'].to_sym
      results = Results::PastSummaryResults.new(application, name,
        app_type, test.chomp('_test'),
        new_app.db, new_app.fs_ip, nil, logger)
      detailed_results = results.past_summary_results.detailed_results(new_app.db, new_app.fs_ip, results.test_list, id)
      if detailed_results and detailed_results.length > 0
        # FIX: removed dead locals `test_id = id` / `test_type = test` —
        # the erb locals below use `id` and `test` directly.
        erb results.detailed_view, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :result_set_list => detailed_results,
          :test_id => id,
          :test_type => test,
          :request_response_list => Models::Test.new(new_app.db).get_result_requests(app_type, application, name, test.chomp('_test'), id),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_result(app_type, application, name, test.chomp('_test'), id),
          :test_location => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_result(app_type, application, name, test.chomp('_test'), id)
        }
      else
        status 404
      end
    else
      status 404
    end
  else
    status 404
  end
end
# Renders a plugin's data view for one test result. Resolves the plugin
# class by name, pulls its summary and detailed data, unions the summary
# headers across guids, orders detailed content by date, and picks the
# template via PluginModule::PluginView.retrieve_view.
get '/:application/results/:name/:test/id/:id/plugin/:plugin/:option' do |application, name, test, id, plugin, option|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      plugin_instance = new_app.load_plugins.find {|p| p.to_s == plugin }
      halt 404, "no plugin #{plugin} found" unless plugin_instance
      summary_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_summary_data(application, name, test, option, id, {:application_type => new_app.config['application']['type'].to_sym})
      detailed_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_detailed_data(application, name, test, option, id, {:application_type => new_app.config['application']['type'].to_sym})
      summary_headers = nil
      summary_header_descriptions = nil
      #TODO: detailed data is not required for all plugin types. have a condition to check if not available and then don't process it
      begin
        if summary_plugin_data && !summary_plugin_data.empty?
          if new_app.config['application']['type'].to_sym == :comparison && summary_plugin_data[:plugin_type] == :time_series
            # Union headers across each guid's first metric; the last
            # description seen wins (break exits after the first metric).
            summary_plugin_data[:id_results].each do |guid_results|
              guid_results[:results].each do |metric, metric_data|
                summary_headers = summary_headers ? (metric_data[:headers] | summary_headers) : metric_data[:headers]
                summary_header_descriptions = metric_data[:description]
                break
              end
            end
            if detailed_plugin_data
              detailed_plugin_result = []
              #TODO: this might be very time_series centric. Maybe should be moved out of here
              detailed_plugin_data[:id_results].each do |guid_results|
                detailed_guid_results = {}
                guid_results[:results].each do |key, value|
                  detailed_guid_results[key] = {}
                  detailed_guid_results[key][:headers] = value[:headers]
                  detailed_guid_results[key][:content] = {}
                  detailed_guid_results[key][:description] = value[:description]
                  value[:content].each do |instance, data|
                    detailed_guid_results[key][:content][instance] =
                      plugin_instance.new(new_app.db, new_app.fs_ip).order_by_date(data)
                  end
                end
                detailed_plugin_result << {:id => guid_results[:id], :results => detailed_guid_results}
              end
            end
          else
            if detailed_plugin_data
              detailed_plugin_result = {}
              detailed_plugin_data.each do |key, value|
                detailed_plugin_result[key] = {}
                detailed_plugin_result[key][:headers] = value[:headers]
                detailed_plugin_result[key][:content] = {}
                detailed_plugin_result[key][:description] = value[:description]
                value[:content].each do |instance, data|
                  detailed_plugin_result[key][:content][instance] =
                    plugin_instance.new(new_app.db, new_app.fs_ip).order_by_date(data)
                end
              end
            end
          end
          # Comparison apps carry plugin_type at the top level; otherwise
          # take it from the first metric entry.
          plugin_type = new_app.config['application']['type'].to_sym == :comparison ? summary_plugin_data[:plugin_type] : summary_plugin_data.map{|k,v|
            v[:plugin_type]
          }.first
          erb PluginModule::PluginView.retrieve_view(plugin_type,new_app.config['application']['type'].to_sym), :locals => {
            :application => app[:id],
            :sub_app_id => name.to_sym,
            :title => new_app.config['application']['name'],
            :summary_plugin_data => summary_plugin_data,
            :summary_headers => summary_headers,
            :summary_header_descriptions => summary_header_descriptions,
            :detailed_plugin_data => detailed_plugin_result,
            :detailed_unordered_plugin_data => detailed_plugin_data,
            :test_id => id,
            :test_type => test,
            :plugin_name => plugin,
            :option => option
          }
        else
          halt 404, "no metric data found for #{application}/#{name}/#{test}/#{id}/#{plugin}/#{option}"
        end
      rescue StandardError => e
        # FIX: was `rescue Exception` + `p e`, which swallowed every error
        # (including SystemExit/SignalException) and answered 200 with an
        # empty body. Rescue StandardError, log it, and answer 500.
        # (halt uses throw, so the halts above are not caught here.)
        logger.error(e)
        halt 500, "error rendering plugin view for #{plugin}/#{option}"
      end
    else
      halt 404, "No sub application for #{name} found"
    end
  else
    halt 404, "No application by name of #{application}/#{test} found"
  end
end
# JSON metric values for a '+'-joined set of result ids, always using the
# :comparison result type. Mirrors the single-id metric route.
get '/:application/results/:name/:test/metric/:metric/compare/:ids' do |application, name, test, metric, ids|
  #get result files from file under files/apps/:name/results/:test/:date/:metric
  entry = Apps::Bootstrap.application_list.find { |candidate| candidate[:id] == application }
  unless entry && Apps::Bootstrap.test_list.keys.include?(test)
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid application specified'}.to_json
  end
  loaded = entry[:klass].new(settings.deployment)
  selected_sub = loaded.config['application']['sub_apps'].find { |sa| sa['id'] == name }
  unless selected_sub
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid sub app specified'}.to_json
  end
  past = Results::PastSummaryResults.new(application, name,
    :comparison, test.chomp('_test'),
    loaded.db, loaded.fs_ip, nil, logger)
  detailed = past.past_summary_results.detailed_results(loaded.db, loaded.fs_ip, past.test_list, ids)
  series = past.past_summary_results.metric_results(detailed, metric)
  if series and series.length > 0
    content_type :json
    body({ metric.to_sym => series }.to_json)
  else
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'The metric data is empty'}.to_json
  end
end
# Downloads the test script captured with a specific past result as an
# attachment named "test_file".
get '/:application/results/:name/:test/:id/test_download/:file_name' do |application, name, test, id,file_name|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        downloaded_file = Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_result_by_id(
          application, name, test.chomp('_test'), id).download
        attachment "test_file"
        # FIX: was `content_type = 'Application/octet-stream'`, which only
        # assigned a throwaway local and never set the header; call
        # Sinatra's content_type helper with the canonical lowercase type.
        content_type 'application/octet-stream'
        body downloaded_file
      rescue
        halt 404, "No test script exists for #{application}/#{name}/#{id}"
      end
    else
      halt 404, "No test script exists for #{application}/#{name}"
    end
  else
    halt 404, "No test script exists for #{application}/#{name}"
  end
end
# Comparison page for a set of past results chosen on the summary view.
# params[:compare] holds the selected result ids; results whose id is not
# in that list are filtered out of the rendered set.
post '/:application/results/:name/:test' do |application, name, test|
  halt 400, "No tests were specified for comparison" unless params[:compare]
  comparison_id_list = params[:compare]
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      results = Results::PastSummaryResults.new(application, name,
        new_app.config['application']['type'].to_sym, test.chomp('_test'),
        new_app.db, new_app.fs_ip, nil, logger)
      result_set_list = results.past_summary_results.test_results(new_app.db, new_app.fs_ip, results.test_list)
      # One entry per loaded plugin: class name plus its display names.
      plugins = []
      new_app.load_plugins.each do |p|
        plugins << {:id => p.to_s, :data => p.show_plugin_names.map do |id|
          {:id => id[:id], :name => id[:name] }
        end
        }
      end
      erb :results_app_test_compare, :locals => {
        :application => app[:id],
        :sub_app_id => name.to_sym,
        :title => new_app.config['application']['name'],
        :result_set_list => result_set_list.find_all {|result| comparison_id_list.include?(result.id) },
        :plugin_list => plugins,
        :test_type => test,
        :compare_guids => params[:compare]
      }
    else
      halt 404, "No sub application for #{name} found"
    end
  else
    halt 404, "No application by name of #{application}/#{test} found"
  end
end
# JSON endpoint backing the comparison charts: returns detailed plugin
# data for each requested result id that actually has content.
# params[:compare] is a comma-separated id list; params[:plugin] is
# "<plugin-class>|||<option>".
post '/:application/results/:name/:test/compare-plugin/metric' do |application, name, test|
  halt 400, "No tests were specified for comparison" unless params[:compare]
  halt 400, "No plugin was specified for comparison" unless params[:plugin]
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      comparison_id_list = params[:compare].split(',')
      plugin_id = params[:plugin]
      # plugin id is "<class>|||<option>".
      plugin_id_data = plugin_id.split('|||')
      plugin = plugin_id_data[0]
      option = plugin_id_data[1]
      plugin_instance = new_app.load_plugins.find {|p| p.to_s == plugin }
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => "No plugin by name of #{plugin} found"}.to_json unless plugin_instance
      detailed_plugin_data_list = []
      valid_comparison_id_list = []
      # Keep only ids whose plugin data has non-empty content.
      comparison_id_list.each do |id|
        detailed_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_detailed_data(application, name, test, option, id)
        if detailed_plugin_data && !detailed_plugin_data.empty? && detailed_plugin_data[option.to_sym][:content].length > 0
          detailed_plugin_data_list << {
            :id => id,
            :data => detailed_plugin_data
          }
          valid_comparison_id_list << id
        end
      end
      if detailed_plugin_data_list and valid_comparison_id_list.length > 0
        content_type :json
        body detailed_plugin_data_list.to_json
      else
        halt 404, {'Content-Type' => 'application/json'}, {'fail' => "no data for #{plugin_id} found"}.to_json
      end
    else
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => "No sub application for #{name} found"}.to_json
    end
  else
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => "No application by name of #{application}/#{test} found"}.to_json
  end
end
# Renders the comparison view for a plugin across several result ids.
# params[:compare] is a '+'-joined id list; params[:plugin_id] is
# "<plugin-class>|||<option>". Comparison-type applications fetch all ids
# in one show_summary_data call; other types fetch per id.
post '/:application/results/:name/:test/compare-plugin' do |application, name, test|
  halt 400, "No tests were specified for comparison" unless params[:compare]
  halt 400, "No plugin was specified for comparison" unless params[:plugin_id]
  plugin_id = params[:plugin_id]
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      #get plugin_summary_results for each data set
      plugin_id_data = plugin_id.split('|||')
      plugin = plugin_id_data[0]
      option = plugin_id_data[1]
      plugin_instance = new_app.load_plugins.find {|p| p.to_s == plugin }
      halt 404, "No plugin by name of #{plugin} found" unless plugin_instance
      summary_plugin_data_list = []
      valid_comparison_id_list = []
      summary_headers = nil
      summary_header_descriptions = nil
      if new_app.config['application']['type'].to_sym == :comparison
        # Comparison apps: one call returns results for every id at once.
        summary_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_summary_data(application, name, test, option, params[:compare], {:application_type => new_app.config['application']['type'].to_sym})
        if summary_plugin_data && !summary_plugin_data.empty?
          summary_plugin_data[:id_results].each do |guid_results|
            # Union headers across each guid's first metric; break keeps
            # only the first metric's headers/description per guid.
            guid_results[:results].each do |metric, metric_data|
              summary_headers = summary_headers ? (metric_data[:headers] | summary_headers) : metric_data[:headers]
              summary_header_descriptions = metric_data[:description]
              break
            end if guid_results[:results]
            valid_comparison_id_list << guid_results[:id]
          end
          summary_plugin_data_list = summary_plugin_data[:id_results]
          plugin_type = summary_plugin_data[:plugin_type]
          # NOTE(review): requested ids are appended on top of the ids
          # already collected above, so valid_comparison_id_list can hold
          # duplicates — confirm this is intended.
          params[:compare].split('+').each {|id| valid_comparison_id_list << id }
        else
          halt 404, "No data for #{plugin_id} found"
        end
      else
        # Non-comparison apps: fetch summary data one id at a time and keep
        # only the ids with non-empty content.
        params[:compare].split('+').each do |id|
          summary_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_summary_data(application, name, test, option, id)
          if summary_plugin_data && !summary_plugin_data.empty? && summary_plugin_data[option.to_sym][:content].length > 0
            summary_plugin_data_list << {
              :id => id,
              :results => summary_plugin_data
            }
            valid_comparison_id_list << id
            plugin_type = summary_plugin_data.map{|k,v|
              v[:plugin_type]
            }.first
            summary_plugin_data.each do |key, content|
              summary_headers = summary_headers ? (content[:headers] | summary_headers) : content[:headers]
              summary_header_descriptions = content[:description]
              break
            end
          end
        end
      end
      #detailed_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_detailed_data(application, name, test, option, id)
      if valid_comparison_id_list.length > 0 && !summary_plugin_data_list.empty?
        erb PluginModule::PluginView.retrieve_compare_view(
          plugin_type,
          new_app.config['application']['type'].to_sym
        ), :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :summary_plugin_data_list => summary_plugin_data_list,
          :test_type => test,
          :summary_headers => summary_headers,
          :summary_header_descriptions => summary_header_descriptions,
          :compare_guids => valid_comparison_id_list,
          :plugin_name => plugin,
          :plugin_id => plugin_id,
          :option => option
        }
      else
        halt 404, "No data for #{plugin_id} found"
      end
    else
      halt 404, "No sub application for #{name} found"
    end
  else
    halt 404, "No application by name of #{application}/#{test} found"
  end
end
# Tests landing page for one application: lists its sub apps.
get '/:application/tests' do |application|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    # FIX: was `app[:klass].new` with no argument — every other route
    # constructs the app with the configured deployment environment.
    new_app = app[:klass].new(settings.deployment)
    erb :tests, :locals => {
      :application_list => new_app.config['application']['sub_apps'],
      :title => new_app.config['application']['name'],
      :application => app[:id]
    }
  else
    status 404
  end
end
# Tests listing for one sub app: shows the available test types.
get '/:application/tests/:name' do |application, name|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    # FIX: was `app[:klass].new` with no argument — every other route
    # constructs the app with the configured deployment environment.
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      erb :tests_list, :locals => {
        :application => app[:id],
        :sub_app_id => name.to_sym,
        :title => new_app.config['application']['name'],
        :load_test_list => Apps::Bootstrap.test_list
      }
    else
      status 404
    end
  else
    status 404
  end
end
=begin
TODO: loop through the loaded plugins here when building the test detail view below
=end
# Live test detail page for one test type of a sub app.
get '/:application/tests/:name/:test' do |application, name, test|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    # FIX: was `app[:klass].new` with no argument — every other route
    # constructs the app with the configured deployment environment.
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      # FIX: start_running_results was invoked twice (once for :results,
      # once for :result_set_list); call it once and reuse the object so
      # both locals refer to the same running-results instance.
      running = Results::LiveSummaryResults.start_running_results(name, test.chomp('_test'))
      erb :tests_app_test_detail, :locals => {
        :application => app[:id],
        :sub_app_id => name.to_sym,
        :title => new_app.config['application']['name'],
        :results => running,
        :result_set_list => running.summary_results
      }
    else
      status 404
    end
  else
    status 404
  end
end
# JSON feed of a live test's full summary series for one metric:
# { :results => [[date, value], ...], :ended => bool }.
# NOTE(review): unlike the namespaced routes above, this uses the
# bootstrap_config / load_test_list helpers and symbol test keys — looks
# like an older code path; confirm it is still reachable.
get '/tests/:name/:test/metric/:metric' do |name, test, metric|
  #parse from summary file and jmx file
  app = bootstrap_config['applications'].find { |k,v| k['id'] == name }
  if app and load_test_list.keys.include?(test.to_sym)
    temp_results = []
    live_results = Results::LiveSummaryResults.running_tests[name][test.chomp('_test')]
    # Collect [date, value] pairs when the metric exists on the results.
    if live_results && live_results.summary_results[0].respond_to?(metric.to_sym)
      live_results.summary_results.each { |result| temp_results << [result.date, result.send(metric.to_sym).to_f] }
    end
    content_type :json
    # NOTE(review): live_results can still be nil here (the guard above only
    # skips the collection loop), so .test_ended may raise — confirm. Also,
    # the local `response` shadows Sinatra's response helper in this block.
    response = { :results => temp_results, :ended => live_results.test_ended}
  else
    status 404
    response = { :results => [], :ended => true}
  end
  body response.to_json
end
# JSON feed of only the NEW summary values since the last poll for a
# still-running test (new_summary_values vs. the full series above).
# NOTE(review): reads app[:results] off the bootstrap_config entry — a
# different storage location than LiveSummaryResults.running_tests used
# by the sibling route; confirm which is current.
get '/tests/:name/:test/metric/:metric/live' do |name, test, metric|
  #get last values from summary file and jmx file
  app = bootstrap_config['applications'].find { |k,v| k['id'] == name }
  if app and load_test_list.keys.include?(test.to_sym) and !app[:results].test_ended
    temp_results = []
    if app[:results] && app[:results].summary_results[0].respond_to?(metric.to_sym)
      app[:results].new_summary_values.each { |result| temp_results << [result.date, result.send(metric.to_sym).to_f] }
    end
    content_type :json
    # NOTE(review): the local `response` shadows Sinatra's response helper.
    response = { :results => temp_results, :ended => app[:results].test_ended}
  else
    status 404
    response = { :results => [], :ended => true}
  end
  body response.to_json
end
# Starts a test recording run for a sub app. The JSON body must contain
# "name", "length", and "runner"; responds with the recorder's guid JSON.
post '/:application/applications/:name/:test/start' do |application, name, test|
=begin
POST /atom_hopper/applications/main/load/start -d '{"length":60, "description": "this is a description of the test", "flavor_type": "performance", "release": 1.6}'
=end
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and (Apps::Bootstrap.test_list.keys.include?(test) or Apps::Bootstrap.test_list.keys.include?("#{test}_test"))
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      request.body.rewind
      json_data = JSON.parse(request.body.read)
      content_type :json
      halt 400, { "fail" => "required keys are missing"}.to_json unless json_data.has_key?("name") and json_data.has_key?("length") and json_data.has_key?("runner")
      guid_response = new_app.start_test_recording(application, name, test.chomp('_test'), json_data)
      halt 400, {'Content-Type' => 'application/json'}, {'fail' => "test for #{application}/#{name}/#{test} already started"}.to_json if guid_response.length == 0
      halt 400, {'Content-Type' => 'application/json'}, guid_response if JSON.parse(guid_response).has_key?("fail")
      body guid_response
    else
      # FIX: previously fell through with no response (an empty 200);
      # mirror the /stop route's JSON 404 handling.
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid sub app specified'}.to_json
    end
  else
    # FIX: previously fell through with no response (an empty 200);
    # mirror the /stop route's JSON 404 handling.
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid application specified'}.to_json
  end
end
# Stops a test recording run. The JSON body must contain "guid" and a
# "servers" hash with a "results" entry (see the sample payload below);
# responds with the recorder's stop response as JSON.
post '/:application/applications/:name/:test/stop' do |application, name, test|
=begin
post /atom_hopper/application/main/load/stop
{
'guid':'1234-6382-2938-2938-2933',
'servers':{
'config':{
'server':'<server>',
'path':'<file path>'
},
'results':{
'server':'<server>',
'path':'<file path>'
}
}
}
=end
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  # Accept the test name with or without the '_test' suffix.
  if app and (Apps::Bootstrap.test_list.keys.include?(test) or Apps::Bootstrap.test_list.keys.include?("#{test}_test"))
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      request.body.rewind
      json_data = JSON.parse(request.body.read)
      content_type :json
      halt 400, { "fail" => "required keys are missing"}.to_json unless json_data.has_key?("guid") and json_data.has_key?("servers") and json_data["servers"].has_key?("results")
      stop_response = new_app.stop_test_recording(application, name, test.chomp('_test'), json_data)
      halt 400, {'Content-Type' => 'application/json'}, stop_response.to_json if stop_response.has_key?("fail")
      body stop_response.to_json
    else
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid sub app specified'}.to_json
    end
  else
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid application specified'}.to_json
  end
end
end
#PerfApp.new
# update master with merge  (stray VCS merge note; commented out so the file parses)
require 'sinatra'
require 'json'
require 'logging'
require 'rbconfig'
require 'open-uri'
require_relative './apps/bootstrap.rb'
require_relative './Models/models.rb'
require_relative './Models/test.rb'
require_relative './Models/configuration.rb'
require_relative './Models/testlocation.rb'
require_relative './Models/results.rb'
class PerfApp < Sinatra::Base
# Colorized logging scheme used by the STDOUT logger configured below.
Logging.color_scheme( 'bright',
  :levels => {
    :info => :green,
    :warn => :yellow,
    :error => :red,
    :fatal => [:white, :on_red]
  },
  :date => :blue,
  :logger => :cyan,
  :message => :magenta
)
# NOTE: `logger` and `results` are class-body locals captured lexically by
# the route blocks defined later in this class body — do not rename them.
logger = Logging.logger(STDOUT)
logger.level = :debug
results = nil
# In your main application file
configure do
  set :views, "#{File.dirname(__FILE__)}/views"
  set :public_dir, "#{File.dirname(__FILE__)}/public"
  enable :show_exceptions if development? #or test?
  set :deployment, environment
  # Load the per-environment application configuration once at boot.
  Apps::Bootstrap.main_config(environment)
end
configure :development do
  set :show_exceptions, :after_handler
end
# Render the raw message for ArgumentErrors raised inside routes.
error ArgumentError do
  request.env['sinatra.error'].message
end
error OpenURI::HTTPError do
  "Unable to gather required data. There's a misconfiguration to connect to backend services."
end
# Landing page: renders the index view with every registered application.
get '/' do
  locals = { data: Apps::Bootstrap.application_list }
  erb :index, locals: locals
end
# Application landing page; name/description come from the app's own config.
get '/:application' do |application|
  app = Apps::Bootstrap.application_list.find { |a| a[:id] == application }
  if app.nil?
    status 404
  else
    instance = app[:klass].new(settings.deployment)
    app_config = instance.config['application']
    erb :app_index, locals: {
      data: {
        name: app_config['name'],
        description: app_config['description'],
        title: app_config['name'],
        application: app[:id]
      }
    }
  end
end
# List the sub-applications configured under one application.
get '/:application/applications' do |application|
  app = Apps::Bootstrap.application_list.find { |a| a[:id] == application }
  if app.nil?
    status 404
  else
    instance = app[:klass].new(settings.deployment)
    app_config = instance.config['application']
    erb :applications, locals: {
      application_list: app_config['sub_apps'],
      title: app_config['name'],
      application: app[:id]
    }
  end
end
# GET /:application/applications/:name
# Detail view of one sub-application: its stored request/response pairs,
# configuration files and test-script locations.
get '/:application/applications/:name' do |application, name|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      erb :app_detail, :locals => {
        :application => app[:id],
        :sub_app_id => name.to_sym,
        :title => new_app.config['application']['name'],
        :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
        :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
        :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name)
      }
    else
      status 404
    end
  else
    status 404
  end
end
# GET /:application/applications/:name/update
# Edit form for a sub-application: same data as the detail view plus the
# available test types and runners for the upload widgets.
get '/:application/applications/:name/update' do |application, name|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      erb :app_detail_update, :locals => {
        :application => app[:id],
        :sub_app_id => name.to_sym,
        :title => new_app.config['application']['name'],
        :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
        :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
        :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
        :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
        :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
      }
    else
      status 404
    end
  else
    status 404
  end
end
# POST /:application/applications/:name/add_request_response
# Persist a new request/response pair for a sub-application, then re-render
# the update view with the refreshed lists.
post '/:application/applications/:name/add_request_response' do |application, name|
  halt 400, "No request specified" unless params['request']
  halt 400, "No response specified" unless params['response']
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        Models::Test.new(new_app.db).add_request_response(application, name, new_app.storage_info, params['request'], params['response'])
        status 201
        erb :app_detail_update, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
          :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
        }
      # The model signals a malformed pair with ArgumentError.
      rescue ArgumentError => e
        halt 404, 'invalid request response'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# POST /:application/applications/:name/update_request_response
# Update an existing request/response pair. Both params must be present and
# must each carry the 'request_id' of the record being updated.
post '/:application/applications/:name/update_request_response' do |application, name|
  # Bug fix: the original used `||`, which (a) never rejected a param that
  # was present but missing its 'request_id', and (b) raised NoMethodError
  # on nil when the param was absent. Both the param and its id must exist.
  halt 400, "No request specified" unless params['request'] && params['request']['request_id']
  halt 400, "No response specified" unless params['response'] && params['response']['request_id']
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        Models::Test.new(new_app.db).update_request_response(application, name, new_app.storage_info, params['request'], params['response'])
        status 200
      # The model signals a malformed pair with ArgumentError.
      rescue ArgumentError => e
        halt 404, 'invalid request response'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# DELETE /:application/applications/:name/remove_request_response/:requests_to_delete
# Remove one or more request/response pairs; ids arrive comma-separated in
# the final URL segment. Re-renders the update view on success.
delete '/:application/applications/:name/remove_request_response/:requests_to_delete' do |application, name,requests_to_delete|
  request_ids = requests_to_delete.split(',')
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        Models::Test.new(new_app.db).remove_request_response(application, name, new_app.storage_info, request_ids)
        status 200
        erb :app_detail_update, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
          :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
        }
      rescue ArgumentError => e
        halt 404, 'invalid request ids specified'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# POST /:application/applications/:name/upload_config
# Store a configuration file received as a multipart upload
# (params['upload_config'] is Rack's {:filename, :tempfile, ...} hash).
post '/:application/applications/:name/upload_config' do |application, name|
  config_name = params['upload_config'][:filename]
  config_body = params['upload_config'][:tempfile]
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        Models::Configuration.new(new_app.db, new_app.fs_ip).add_config(application, name, new_app.storage_info, config_name, config_body)
        status 201
        erb :app_detail_update, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
          :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
        }
      rescue ArgumentError => e
        halt 404, 'invalid config file specified'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# POST /:application/applications/:name/upload_test_file
# Store an uploaded test script (multipart upload) together with the runner
# and test type it belongs to, then re-render the update view.
post '/:application/applications/:name/upload_test_file' do |application, name|
  test_file_name = params['upload_test_file'][:filename]
  test_file_body = params['upload_test_file'][:tempfile]
  test_file_runner = params['test_file_runner']
  test_file_type = params['test_file_type']
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).add_test_file(application, name, new_app.storage_info, test_file_name, test_file_body, test_file_runner, test_file_type)
        status 201
        erb :app_detail_update, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
          :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
        }
      rescue ArgumentError => e
        # Bug fix: error message copy-pasted from the config-upload route
        # wrongly said "invalid config file specified".
        halt 404, 'invalid test file specified'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# POST /:application/applications/:name/remove_config
# Delete every configuration file named in params['remove_config'] (an
# array of file names), then re-render the update view.
post '/:application/applications/:name/remove_config' do |application, name|
  config_list = params['remove_config']
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        config_list.each do |config_name|
          Models::Configuration.new(new_app.db, new_app.fs_ip).remove_config(application, name, new_app.storage_info, config_name)
        end
        status 200
        erb :app_detail_update, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
          :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
        }
      rescue ArgumentError => e
        halt 404, 'invalid config file specified'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# POST /:application/applications/:name/remove_test_file
# Delete every test script named in params['remove_test_file'] (an array of
# file names), then re-render the update view.
post '/:application/applications/:name/remove_test_file' do |application, name|
  test_file_list = params['remove_test_file']
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        test_file_list.each do |test_file|
          Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).remove_test_file(application, name, new_app.storage_info, test_file)
        end
        status 200
        erb :app_detail_update, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :request_response_list => Models::Test.new(new_app.db).get_setup_requests_by_name(app[:id], name),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_location_list => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_name(app[:id], name),
          :test_type_list => Apps::Bootstrap.test_list.map {|k, v| {:id => k, :name => v["name"] } },
          :runner_list => Apps::Bootstrap.runner_list.map { |k, _| k }
        }
      rescue ArgumentError => e
        # Bug fix: error message copy-pasted from the config route wrongly
        # said "invalid config file specified".
        halt 404, 'invalid test file specified'
      end
    else
      halt 404, 'invalid sub app specified'
    end
  else
    halt 404, 'invalid application specified'
  end
end
# GET /:application/applications/:name/test_download/:file_name
# Stream a stored test script back to the client as a file attachment.
get '/:application/applications/:name/test_download/:file_name' do |application, name, file_name|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        downloaded_file = Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_by_id(application, name, file_name).download
        attachment "test_file"
        # Bug fix: the original wrote `content_type = '...'`, which only
        # assigned a local variable and never set the response header; call
        # the Sinatra helper instead.
        content_type 'application/octet-stream'
        body downloaded_file
      # The model raises ArgumentError when the file id is unknown.
      rescue ArgumentError => e
        status 404
        body e.message
      end
    else
      status 404
      body "No test script exists for #{application}/#{name}"
    end
  else
    status 404
    body "No test script exists for #{application}/#{name}"
  end
end
# Results index: list the sub-applications whose results can be browsed.
get '/:application/results' do |application|
  app = Apps::Bootstrap.application_list.find { |a| a[:id] == application }
  if app.nil?
    status 404
  else
    instance = app[:klass].new(settings.deployment)
    app_config = instance.config['application']
    erb :results, locals: {
      application_list: app_config['sub_apps'],
      title: app_config['name'],
      application: app[:id]
    }
  end
end
# Per-sub-app results index listing the available test types.
get '/:application/results/:name' do |application, name|
  app = Apps::Bootstrap.application_list.find { |a| a[:id] == application }
  if app.nil?
    status 404
  else
    instance = app[:klass].new(settings.deployment)
    target = instance.config['application']['sub_apps'].find { |sa| sa['id'] == name }
    if target.nil?
      status 404
    else
      erb :results_list, locals: {
        application: app[:id],
        sub_app_id: name.to_sym,
        title: instance.config['application']['name'],
        load_test_list: Apps::Bootstrap.test_list
      }
    end
  end
end
# GET /:application/results/:name/:test
# Summary view of all past results for one test type, including the list of
# plugins able to render additional metrics.
get '/:application/results/:name/:test' do |application, name, test|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      results = Results::PastSummaryResults.new(application, name,
        new_app.config['application']['type'].to_sym, test.chomp('_test'),
        new_app.db, new_app.fs_ip, nil, logger)
      # Build {:id, :data} entries describing each plugin's display names.
      plugins = []
      new_app.load_plugins.each do |p|
        plugins << {:id => p.to_s, :data => p.show_plugin_names.map do |id|
          {:id => id[:id], :name => id[:name] }
        end
        }
      end
      erb results.summary_view, :locals => {
        :application => app[:id],
        :sub_app_id => name.to_sym,
        :title => new_app.config['application']['name'],
        :result_set_list => results.past_summary_results.test_results(new_app.db, new_app.fs_ip, results.test_list).sort_by {|r| r.start.to_s},
        :plugin_list => plugins,
        :test_type => test
      }
    else
      status 404
    end
  else
    status 404
  end
end
# GET /:application/results/:name/:test/metric/:metric
# JSON endpoint: [start_time, value] pairs of one summary metric across all
# past runs of the given test.
get '/:application/results/:name/:test/metric/:metric' do |application, name, test, metric|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      # The modifier-if works because the assignment defines the local at
      # parse time (it reads as nil on first evaluation).
      metric_results = {} unless metric_results
      metric_results[metric.to_sym] = []
      results = Results::PastSummaryResults.new(application, name,
        new_app.config['application']['type'].to_sym, test.chomp('_test'),
        new_app.db, new_app.fs_ip, nil, logger)
      result_set_list = results.past_summary_results.test_results(new_app.db, new_app.fs_ip, results.test_list)
      # Only expose metric names that exist as readers on result objects.
      if result_set_list[0].respond_to?(metric.to_sym)
        result_set_list.each { |result| metric_results[metric.to_sym] << [result.start, result.send(metric.to_sym).to_f] }
      else
        halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid metric specified'}.to_json
      end
    else
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid sub app specified'}.to_json
    end
  else
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid application specified'}.to_json
  end
  # halt above aborts the request, so these lines run only on success.
  content_type :json
  body metric_results.to_json
end
# GET /:application/results/:name/:test/metric/:metric/id/:id
# JSON endpoint: values of one metric from the detailed results of a single
# test run identified by :id.
get '/:application/results/:name/:test/metric/:metric/id/:id' do |application, name, test, metric, id|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      results = Results::PastSummaryResults.new(application, name,
        new_app.config['application']['type'].to_sym, test.chomp('_test'),
        new_app.db, new_app.fs_ip, nil, logger)
      detailed_results = results.past_summary_results.detailed_results(new_app.db, new_app.fs_ip, results.test_list, id)
      metric_results = results.past_summary_results.metric_results(detailed_results, metric)
      if metric_results and metric_results.length > 0
        content_type :json
        json_results = { metric.to_sym => metric_results }
        body json_results.to_json
      else
        halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'The metric data is empty'}.to_json
      end
    else
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid sub app specified'}.to_json
    end
  else
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid application specified'}.to_json
  end
end
# GET /:application/results/:name/:test/id/:id
# Detailed view of one past test run, including the request/response pairs,
# configuration files and test script recorded with that run.
get '/:application/results/:name/:test/id/:id' do |application, name, test, id|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      app_type = new_app.config['application']['type'].to_sym
      results = Results::PastSummaryResults.new(application, name,
        app_type, test.chomp('_test'),
        new_app.db, new_app.fs_ip, nil, logger)
      detailed_results = results.past_summary_results.detailed_results(new_app.db, new_app.fs_ip, results.test_list, id)
      if detailed_results and detailed_results.length > 0
        test_id = id
        test_type = test
        erb results.detailed_view, :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :result_set_list => detailed_results,
          :test_id => id,
          :test_type => test,
          :request_response_list => Models::Test.new(new_app.db).get_result_requests(app_type, application, name, test.chomp('_test'), id),
          :config_list => Models::Configuration.new(new_app.db, new_app.fs_ip).get_result(app_type, application, name, test.chomp('_test'), id),
          :test_location => Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_result(app_type, application, name, test.chomp('_test'), id)
        }
      else
        status 404
      end
    else
      status 404
    end
  else
    status 404
  end
end
# GET /:application/results/:name/:test/id/:id/plugin/:plugin/:option
# Render plugin-generated summary and detailed data for one test run.
# Merge conflict resolved in favour of the HEAD branch: it guards on summary
# data only (detailed data is optional for some plugin types), restricts the
# comparison path to :time_series plugins, and supplies the `begin` that the
# rescue at the bottom of this route requires.
get '/:application/results/:name/:test/id/:id/plugin/:plugin/:option' do |application, name, test, id, plugin, option|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      plugin_instance = new_app.load_plugins.find {|p| p.to_s == plugin }
      halt 404, "no plugin #{plugin} found" unless plugin_instance
      summary_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_summary_data(application, name, test, option, id, {:application_type => new_app.config['application']['type'].to_sym})
      detailed_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_detailed_data(application, name, test, option, id, {:application_type => new_app.config['application']['type'].to_sym})
      summary_headers = nil
      summary_header_descriptions = nil
      #TODO: detailed data is not required for all plugin types. have a condition to check if not available and then don't process it
      begin
        if summary_plugin_data && !summary_plugin_data.empty?
          if new_app.config['application']['type'].to_sym == :comparison && summary_plugin_data[:plugin_type] == :time_series
            # Union the headers across every guid's first metric entry.
            summary_plugin_data[:id_results].each do |guid_results|
              guid_results[:results].each do |metric, metric_data|
                summary_headers = summary_headers ? (metric_data[:headers] | summary_headers) : metric_data[:headers]
                summary_header_descriptions = metric_data[:description]
                break
              end
            end
            if detailed_plugin_data
              detailed_plugin_result = []
              #TODO: this might be very time_series centric. Maybe should be moved out of here
              detailed_plugin_data[:id_results].each do |guid_results|
                detailed_guid_results = {}
                guid_results[:results].each do |key, value|
                  detailed_guid_results[key] = {}
                  detailed_guid_results[key][:headers] = value[:headers]
                  detailed_guid_results[key][:content] = {}
                  detailed_guid_results[key][:description] = value[:description]
                  value[:content].each do |instance, data|
                    detailed_guid_results[key][:content][instance] =
                      plugin_instance.new(new_app.db, new_app.fs_ip).order_by_date(data)
                  end
                end
                detailed_plugin_result << {:id => guid_results[:id], :results => detailed_guid_results}
              end
            end
          else
            if detailed_plugin_data
              detailed_plugin_result = {}
              detailed_plugin_data.each do |key, value|
                detailed_plugin_result[key] = {}
                detailed_plugin_result[key][:headers] = value[:headers]
                detailed_plugin_result[key][:content] = {}
                detailed_plugin_result[key][:description] = value[:description]
                value[:content].each do |instance, data|
                  detailed_plugin_result[key][:content][instance] =
                    plugin_instance.new(new_app.db, new_app.fs_ip).order_by_date(data)
                end
              end
            end
          end
          plugin_type = new_app.config['application']['type'].to_sym == :comparison ? summary_plugin_data[:plugin_type] : summary_plugin_data.map{|k,v|
            v[:plugin_type]
          }.first
          erb PluginModule::PluginView.retrieve_view(plugin_type,new_app.config['application']['type'].to_sym), :locals => {
            :application => app[:id],
            :sub_app_id => name.to_sym,
            :title => new_app.config['application']['name'],
            :summary_plugin_data => summary_plugin_data,
            :summary_headers => summary_headers,
            :summary_header_descriptions => summary_header_descriptions,
            :detailed_plugin_data => detailed_plugin_result,
            :detailed_unordered_plugin_data => detailed_plugin_data,
            :test_id => id,
            :test_type => test,
            :plugin_name => plugin,
            :option => option
          }
        else
          halt 404, "no metric data found for #{application}/#{name}/#{test}/#{id}/#{plugin}/#{option}"
        end
      rescue Exception => e
        # NOTE(review): rescuing Exception swallows even SystemExit/signals
        # and only prints the error — consider StandardError plus a 500.
        p e
      end
    else
      halt 404, "No sub application for #{name} found"
    end
  else
    halt 404, "No application by name of #{application}/#{test} found"
  end
end
# GET /:application/results/:name/:test/metric/:metric/compare/:ids
# JSON endpoint: one metric extracted from the detailed results of several
# runs at once (comparison mode; :ids carries multiple run ids).
get '/:application/results/:name/:test/metric/:metric/compare/:ids' do |application, name, test, metric, ids|
  #get result files from file under files/apps/:name/results/:test/:date/:metric
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      results = Results::PastSummaryResults.new(application, name,
        :comparison, test.chomp('_test'),
        new_app.db, new_app.fs_ip, nil, logger)
      detailed_results = results.past_summary_results.detailed_results(new_app.db, new_app.fs_ip, results.test_list, ids)
      metric_results = results.past_summary_results.metric_results(detailed_results, metric)
      if metric_results and metric_results.length > 0
        content_type :json
        json_results = { metric.to_sym => metric_results }
        body json_results.to_json
      else
        halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'The metric data is empty'}.to_json
      end
    else
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid sub app specified'}.to_json
    end
  else
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid application specified'}.to_json
  end
end
# GET /:application/results/:name/:test/:id/test_download/:file_name
# Download the test script that produced a given result set.
get '/:application/results/:name/:test/:id/test_download/:file_name' do |application, name, test, id,file_name|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      begin
        downloaded_file = Models::TestLocationFactory.new(new_app.db, new_app.fs_ip).get_result_by_id(
          application, name, test.chomp('_test'), id).download
        attachment "test_file"
        # Bug fix: the original wrote `content_type = '...'`, which only
        # assigned a local variable and never set the response header; call
        # the Sinatra helper instead.
        content_type 'application/octet-stream'
        body downloaded_file
      rescue
        halt 404, "No test script exists for #{application}/#{name}/#{id}"
      end
    else
      halt 404, "No test script exists for #{application}/#{name}"
    end
  else
    halt 404, "No test script exists for #{application}/#{name}"
  end
end
# POST /:application/results/:name/:test
# Render a side-by-side comparison of the result sets whose ids were posted
# in params[:compare].
post '/:application/results/:name/:test' do |application, name, test|
  halt 400, "No tests were specified for comparison" unless params[:compare]
  comparison_id_list = params[:compare]
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      results = Results::PastSummaryResults.new(application, name,
        new_app.config['application']['type'].to_sym, test.chomp('_test'),
        new_app.db, new_app.fs_ip, nil, logger)
      result_set_list = results.past_summary_results.test_results(new_app.db, new_app.fs_ip, results.test_list)
      # Build {:id, :data} entries describing each plugin's display names.
      plugins = []
      new_app.load_plugins.each do |p|
        plugins << {:id => p.to_s, :data => p.show_plugin_names.map do |id|
          {:id => id[:id], :name => id[:name] }
        end
        }
      end
      erb :results_app_test_compare, :locals => {
        :application => app[:id],
        :sub_app_id => name.to_sym,
        :title => new_app.config['application']['name'],
        :result_set_list => result_set_list.find_all {|result| comparison_id_list.include?(result.id) },
        :plugin_list => plugins,
        :test_type => test,
        :compare_guids => params[:compare]
      }
    else
      halt 404, "No sub application for #{name} found"
    end
  else
    halt 404, "No application by name of #{application}/#{test} found"
  end
end
# POST /:application/results/:name/:test/compare-plugin/metric
# JSON endpoint: detailed plugin data for each comparison id (comma
# separated in params[:compare]); params[:plugin] is "<plugin>|||<option>".
post '/:application/results/:name/:test/compare-plugin/metric' do |application, name, test|
  halt 400, "No tests were specified for comparison" unless params[:compare]
  halt 400, "No plugin was specified for comparison" unless params[:plugin]
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      comparison_id_list = params[:compare].split(',')
      plugin_id = params[:plugin]
      plugin_id_data = plugin_id.split('|||')
      plugin = plugin_id_data[0]
      option = plugin_id_data[1]
      plugin_instance = new_app.load_plugins.find {|p| p.to_s == plugin }
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => "No plugin by name of #{plugin} found"}.to_json unless plugin_instance
      detailed_plugin_data_list = []
      valid_comparison_id_list = []
      # Keep only the ids that actually have content for this option.
      comparison_id_list.each do |id|
        detailed_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_detailed_data(application, name, test, option, id)
        if detailed_plugin_data && !detailed_plugin_data.empty? && detailed_plugin_data[option.to_sym][:content].length > 0
          detailed_plugin_data_list << {
            :id => id,
            :data => detailed_plugin_data
          }
          valid_comparison_id_list << id
        end
      end
      if detailed_plugin_data_list and valid_comparison_id_list.length > 0
        content_type :json
        body detailed_plugin_data_list.to_json
      else
        halt 404, {'Content-Type' => 'application/json'}, {'fail' => "no data for #{plugin_id} found"}.to_json
      end
    else
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => "No sub application for #{name} found"}.to_json
    end
  else
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => "No application by name of #{application}/#{test} found"}.to_json
  end
end
# POST /:application/results/:name/:test/compare-plugin
# Render a comparison view of one plugin's summary data across several test
# runs (run ids arrive '+'-separated in params[:compare]).
# Merge conflicts resolved in favour of the HEAD branch throughout: it
# nil-guards guid_results[:results], unions headers across result sets, and
# requires a non-empty summary data list before rendering.
post '/:application/results/:name/:test/compare-plugin' do |application, name, test|
  halt 400, "No tests were specified for comparison" unless params[:compare]
  halt 400, "No plugin was specified for comparison" unless params[:plugin_id]
  plugin_id = params[:plugin_id]
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and Apps::Bootstrap.test_list.keys.include?(test)
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      #get plugin_summary_results for each data set
      # plugin_id is encoded as "<plugin>|||<option>".
      plugin_id_data = plugin_id.split('|||')
      plugin = plugin_id_data[0]
      option = plugin_id_data[1]
      plugin_instance = new_app.load_plugins.find {|p| p.to_s == plugin }
      halt 404, "No plugin by name of #{plugin} found" unless plugin_instance
      summary_plugin_data_list = []
      valid_comparison_id_list = []
      summary_headers = nil
      summary_header_descriptions = nil
      if new_app.config['application']['type'].to_sym == :comparison
        summary_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_summary_data(application, name, test, option, params[:compare], {:application_type => new_app.config['application']['type'].to_sym})
        if summary_plugin_data && !summary_plugin_data.empty?
          summary_plugin_data[:id_results].each do |guid_results|
            guid_results[:results].each do |metric, metric_data|
              summary_headers = summary_headers ? (metric_data[:headers] | summary_headers) : metric_data[:headers]
              summary_header_descriptions = metric_data[:description]
              break
            end if guid_results[:results]
            valid_comparison_id_list << guid_results[:id]
          end
          summary_plugin_data_list = summary_plugin_data[:id_results]
          plugin_type = summary_plugin_data[:plugin_type]
          params[:compare].split('+').each {|id| valid_comparison_id_list << id }
        else
          halt 404, "No data for #{plugin_id} found"
        end
      else
        params[:compare].split('+').each do |id|
          summary_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_summary_data(application, name, test, option, id)
          if summary_plugin_data && !summary_plugin_data.empty? && summary_plugin_data[option.to_sym][:content].length > 0
            summary_plugin_data_list << {
              :id => id,
              :results => summary_plugin_data
            }
            valid_comparison_id_list << id
            plugin_type = summary_plugin_data.map{|k,v|
              v[:plugin_type]
            }.first
            summary_plugin_data.each do |key, content|
              summary_headers = summary_headers ? (content[:headers] | summary_headers) : content[:headers]
              summary_header_descriptions = content[:description]
              break
            end
          end
        end
      end
      #detailed_plugin_data = plugin_instance.new(new_app.db, new_app.fs_ip).show_detailed_data(application, name, test, option, id)
      if valid_comparison_id_list.length > 0 && !summary_plugin_data_list.empty?
        erb PluginModule::PluginView.retrieve_compare_view(
          plugin_type,
          new_app.config['application']['type'].to_sym
        ), :locals => {
          :application => app[:id],
          :sub_app_id => name.to_sym,
          :title => new_app.config['application']['name'],
          :summary_plugin_data_list => summary_plugin_data_list,
          :test_type => test,
          :summary_headers => summary_headers,
          :summary_header_descriptions => summary_header_descriptions,
          :compare_guids => valid_comparison_id_list,
          :plugin_name => plugin,
          :plugin_id => plugin_id,
          :option => option
        }
      else
        halt 404, "No data for #{plugin_id} found"
      end
    else
      halt 404, "No sub application for #{name} found"
    end
  else
    halt 404, "No application by name of #{application}/#{test} found"
  end
end
# GET /:application/tests
# Renders the test index page for a registered application.
# Responds 404 when the application id is not in the bootstrap list.
get '/:application/tests' do |application|
  app = Apps::Bootstrap.application_list.find { |entry| entry[:id] == application }
  if app.nil?
    status 404
  else
    instance = app[:klass].new
    app_config = instance.config['application']
    erb :tests, :locals => {
      :application_list => app_config['sub_apps'],
      :title => app_config['name'],
      :application => app[:id]
    }
  end
end
# GET /:application/tests/:name
# Renders the list of runnable tests for one sub-application.
# Responds 404 when either the application or the sub-application
# id is unknown.
get '/:application/tests/:name' do |application, name|
  app = Apps::Bootstrap.application_list.find { |entry| entry[:id] == application }
  if app.nil?
    status 404
  else
    instance = app[:klass].new
    sub_app = instance.config['application']['sub_apps'].find { |sa| sa['id'] == name }
    if sub_app.nil?
      status 404
    else
      erb :tests_list, :locals => {
        :application => app[:id],
        :sub_app_id => name.to_sym,
        :title => instance.config['application']['name'],
        :load_test_list => Apps::Bootstrap.test_list
      }
    end
  end
end
=begin
TODO: iterate over all registered plugins here instead of rendering only
the live summary results for the requested test.
=end
# GET /:application/tests/:name/:test
# Live detail view for a running test.
#
# Fix: the live-results object was fetched TWICE (once per :locals key)
# via Results::LiveSummaryResults.start_running_results — a method whose
# name implies it mutates tracking state, so the double call was both
# wasteful and potentially harmful. Fetch once and reuse.
get '/:application/tests/:name/:test' do |application, name, test|
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app
    new_app = app[:klass].new
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      # Single fetch; reused for both the handle and the summary list.
      live_results = Results::LiveSummaryResults.start_running_results(name, test.chomp('_test'))
      erb :tests_app_test_detail, :locals => {
        :application => app[:id],
        :sub_app_id => name.to_sym,
        :title => new_app.config['application']['name'],
        :results => live_results,
        :result_set_list => live_results.summary_results
      }
    else
      status 404
    end
  else
    status 404
  end
end
# GET /tests/:name/:test/metric/:metric
# Returns the full time series for one summary metric as JSON:
#   { "results": [[date, value], ...], "ended": bool }
#
# Fix: live_results can be nil when nothing is being tracked for this
# name/test pair, but the old code still called live_results.test_ended
# (NoMethodError on nil). running_tests[name] can likewise be nil for an
# unknown name. Treat "no live results" as an ended, empty series.
get '/tests/:name/:test/metric/:metric' do |name, test, metric|
  # Parse from summary file and jmx file.
  app = bootstrap_config['applications'].find { |k,v| k['id'] == name }
  if app and load_test_list.keys.include?(test.to_sym)
    temp_results = []
    live_results = (Results::LiveSummaryResults.running_tests[name] || {})[test.chomp('_test')]
    if live_results && live_results.summary_results[0].respond_to?(metric.to_sym)
      live_results.summary_results.each { |result| temp_results << [result.date, result.send(metric.to_sym).to_f] }
    end
    content_type :json
    response = { :results => temp_results, :ended => live_results ? live_results.test_ended : true }
  else
    status 404
    response = { :results => [], :ended => true}
  end
  body response.to_json
end
# GET /tests/:name/:test/metric/:metric/live
# Returns only the summary values appended since the last poll, for
# incremental chart updates.
#
# Fix: the old condition dereferenced app[:results].test_ended without a
# nil check — NoMethodError whenever the app entry has no results object
# attached. Guard it and fall through to the 404/empty response instead.
get '/tests/:name/:test/metric/:metric/live' do |name, test, metric|
  # Get last values from summary file and jmx file.
  app = bootstrap_config['applications'].find { |k,v| k['id'] == name }
  if app && load_test_list.keys.include?(test.to_sym) && app[:results] && !app[:results].test_ended
    temp_results = []
    if app[:results].summary_results[0].respond_to?(metric.to_sym)
      app[:results].new_summary_values.each { |result| temp_results << [result.date, result.send(metric.to_sym).to_f] }
    end
    content_type :json
    response = { :results => temp_results, :ended => app[:results].test_ended}
  else
    status 404
    response = { :results => [], :ended => true}
  end
  body response.to_json
end
# POST /:application/applications/:name/:test/start
# Starts a test recording for application/sub-app/test.
#
# Fix: unlike the sibling stop route, this handler had no else branches —
# an unknown application, test type, or sub-application fell through and
# silently returned an empty 200 body. Now answers 404 with the same JSON
# error shape the stop route uses.
post '/:application/applications/:name/:test/start' do |application, name, test|
=begin
POST /atom_hopper/applications/main/load/start -d '{"length":60, "description": "this is a description of the test", "flavor_type": "performance", "release": 1.6}'
=end
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  if app and (Apps::Bootstrap.test_list.keys.include?(test) or Apps::Bootstrap.test_list.keys.include?("#{test}_test"))
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      request.body.rewind
      json_data = JSON.parse(request.body.read)
      content_type :json
      halt 400, { "fail" => "required keys are missing"}.to_json unless json_data.has_key?("name") and json_data.has_key?("length") and json_data.has_key?("runner")
      guid_response = new_app.start_test_recording(application, name, test.chomp('_test'), json_data)
      halt 400, {'Content-Type' => 'application/json'}, {'fail' => "test for #{application}/#{name}/#{test} already started"}.to_json if guid_response.length == 0
      halt 400, {'Content-Type' => 'application/json'}, guid_response if JSON.parse(guid_response).has_key?("fail")
      body guid_response
    else
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid sub app specified'}.to_json
    end
  else
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid application specified'}.to_json
  end
end
# POST /:application/applications/:name/:test/stop
# Stops a previously started test recording. The JSON payload must carry
# the recording guid plus a servers.results location describing where the
# raw result files live (see the example payload below).
post '/:application/applications/:name/:test/stop' do |application, name, test|
=begin
post /atom_hopper/application/main/load/stop
{
'guid':'1234-6382-2938-2938-2933',
'servers':{
'config':{
'server':'<server>',
'path':'<file path>'
},
'results':{
'server':'<server>',
'path':'<file path>'
}
}
}
=end
  app = Apps::Bootstrap.application_list.find {|a| a[:id] == application}
  # Accept both "load" and "load_test" style test identifiers.
  if app and (Apps::Bootstrap.test_list.keys.include?(test) or Apps::Bootstrap.test_list.keys.include?("#{test}_test"))
    new_app = app[:klass].new(settings.deployment)
    sub_app = new_app.config['application']['sub_apps'].find do |sa|
      sa['id'] == name
    end
    if sub_app
      request.body.rewind
      json_data = JSON.parse(request.body.read)
      content_type :json
      # Validate required keys before touching the recorder.
      halt 400, { "fail" => "required keys are missing"}.to_json unless json_data.has_key?("guid") and json_data.has_key?("servers") and json_data["servers"].has_key?("results")
      stop_response = new_app.stop_test_recording(application, name, test.chomp('_test'), json_data)
      # The recorder signals failure in-band via a "fail" key.
      halt 400, {'Content-Type' => 'application/json'}, stop_response.to_json if stop_response.has_key?("fail")
      body stop_response.to_json
    else
      halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid sub app specified'}.to_json
    end
  else
    halt 404, {'Content-Type' => 'application/json'}, {'fail' => 'invalid application specified'}.to_json
  end
end
end
#PerfApp.new
|
require_relative 'library'
require 'ncurses'
require 'oj'
require 'json'
include Ncurses
# Persist the initial game state to sample.json so a later session can
# restore the exact same world: seed, bunker count, item/terrain codes,
# beacons, bunkers and all actors.
def save_state(seed,total_bunkers,items,walkable,all_beacons,all_bunkers,actors)
  # One literal instead of seven merge! calls; Ruby hashes keep
  # insertion order, so the serialized key order is unchanged.
  state = {
    "seed" => seed,
    "total_bunkers" => total_bunkers,
    "items" => items,
    "walkable" => walkable,
    "beacons" => all_beacons,
    "bunkers" => all_bunkers,
    "actors" => actors
  }
  # Serialize with Oj; the block form of File.open closes the handle.
  File.open('sample.json', 'w') do |file|
    file.puts Oj::dump state
  end
end
# Draw a progress message plus a 7-segment loading bar on the standard
# screen, then pause briefly so the user can read it.
#
# message - String shown at row 3.
# bars    - Integer 0..7; number of filled '=' segments in the bar.
def scr_message(message,bars)
  loading = "[" + "=" * bars + " " * (7 - bars) + "]"
  Ncurses.mvwaddstr(stdscr, 3, 3, "#{message}")
  Ncurses.mvwaddstr(stdscr, 4, 4, "#{loading}")
  Ncurses.refresh
  # Was Ncurses.napms(0500): the leading zero makes 0500 an OCTAL
  # literal (= 320 ms). The intent was clearly a half-second pause.
  Ncurses.napms(500)
end
# Blank out the message row written by scr_message by overwriting row 3
# with spaces. The loading bar on row 4 is simply redrawn by the next
# scr_message call rather than cleared here.
def scr_clear
  Ncurses.mvwaddstr(stdscr, 3, 3, "                         ")
  Ncurses.refresh
end
=begin
inhospitableLog = File.open("inhospitableLog.txt", "w")
inhospitableLog.puts "#{Time.now} - Game Launched"
inhospitableLog.close
=end
#################################################################################
# Initialize #
#################################################################################
# --- Terminal / Ncurses bootstrap --------------------------------------
# @new selects the world-setup path below: 0 = generate a new world,
# 1 = restore from sample.json (flipped by main_menu in ui.rb).
@new = 0
Ncurses.initscr # Start Ncurses
Ncurses.noecho # Do not show keyboard input at cursor location
Ncurses.start_color
Ncurses.curs_set(0) # Disable blinking cursor
Ncurses.cbreak # Only accept a single character of input
Ncurses.stdscr # Initialize Standard Screen, which uses dimensions of current Terminal window. Invoke with stdscr
Ncurses.keypad(stdscr,true) # Use expanded keyboard characters
Ncurses.init_pair(1, COLOR_BLACK, COLOR_WHITE)
game_initialized = 0
main_menu(game_initialized, stdscr)
# Splash shown while the world is built below.
Ncurses.mvwaddstr(stdscr, 2, 2, "Generating World")
Ncurses.mvwaddstr(stdscr, 3, 3, "Please wait...")
Ncurses.mvwaddstr(stdscr, 4, 4, "[       ]")
Ncurses.refresh
# -----------------------------------------------------------------------
# World setup. Two paths that end with the same state: windows created,
# map generated, `player` and `actors` populated, loop variables set.
#   @new == 1 : restore a saved world from sample.json
#   else      : generate a fresh world from a fixed seed and save it
# -----------------------------------------------------------------------
if @new == 1 # Set to 1 when loading variables, located in ui.rb on line 44
  # Load JSON File
  scr_message("Loading Saved Data",1)
  json = File.read('sample.json')
  everything = JSON.parse(json)
  scr_clear
  # Instantiate Windows
  # For each window, define lines,cols variables and work with those instead of direct numbers
  # Demo game uses 4 windows: game_window (aka game map), Viewport (aka what the player sees), console_window and side hud_window.
  # Screen and window variables
  scr_message("Prepare Window Variables",2)
  standard_screen_columns = [] # Standard Screen column aka y
  standard_screen_lines = [] # Standard Screen lines aka x
  Ncurses.getmaxyx(stdscr,standard_screen_columns,standard_screen_lines) # Get Max Y,X for standard screen, place them in arrays. getmaxyx outputs to arrays.
  game_window_lines = 200
  game_window_columns = 200
  viewport_window_lines = 25
  viewport_window_columns = 25
  hud_window_lines = viewport_window_lines
  hud_window_columns = 15
  console_window_lines = 3
  console_window_columns = viewport_window_columns + hud_window_columns
  bunker_area_with_space = (viewport_window_lines * viewport_window_columns * 10) + 11 # 11 x 11 is the area of the demo bunker
  scr_clear
  # Load JSON Data
  scr_message("Loading Game Variables",3)
  total_bunkers = everything["total_bunkers"].to_i
  seed = everything["seed"].to_i
  # NOTE(review): actors_from_json is never used — actors are rebuilt
  # below directly from `everything["actors"]`.
  actors_from_json = everything["actors"]
  actors = []
  items = everything["items"]
  all_beacons = []
  all_bunkers = everything["bunkers"]
  walkable = everything["walkable"]
  scr_clear
  # Game Loop Variables
  scr_message("Setting Loop Variables",4)
  direction_steps = 0
  counter = 0
  dice_roll = false
  hunger_count = 0
  direction_steps = rand(10..25) # Meander long distances
  player_visible = 1
  scr_clear
  # Create game windows, then generate the world
  scr_message("Creating Game Windows",5)
  game_window = Ncurses.newwin(game_window_lines, game_window_columns, 0, 0)
  viewport_window = Ncurses.derwin(game_window,viewport_window_lines, viewport_window_columns, 0, 0) # Must not exceed size of terminal or else crash
  console_window = Ncurses.newwin(console_window_lines, console_window_columns, viewport_window_lines, 0)
  hud_window = Ncurses.newwin(hud_window_lines, hud_window_columns, 0, viewport_window_lines)
  scr_clear
  scr_message("Generating Map",6)
  generate_map(game_window,total_bunkers,all_beacons,all_bunkers,actors,seed)
  scr_clear
  scr_message("Generate Actors",7)
  # Rebuild the player (saved as actors[0]) and every other actor
  # straight from the parsed JSON, then redraw them on the map.
  player = Character.new(symb: everything["actors"][0]["symb"],symbcode: everything["actors"][0]["symbcode"],color: everything["actors"][0]["color"],xlines: everything["actors"][0]["xlines"],ycols: everything["actors"][0]["ycols"],blocked: everything["actors"][0]["blocked"],hp: everything["actors"][0]["hp"],hunger: everything["actors"][0]["hunger"],inventory: everything["actors"][0]["inventory"])
  actors << player
  player.draw(game_window)
  everything["actors"].drop(1).each do |k|
    actors << Character.new(symb: k["symb"],symbcode: k["symbcode"],color: k["color"],xlines: k["xlines"],ycols: k["ycols"],blocked: k["blocked"],hp: k["hp"],hunger: k["hunger"],inventory: k["inventory"]) # Instantiate characters from Json
    draw_to_map(game_window,k)
  end
  everything["beacons"].each do |b|
    all_beacons << Beacon.new(symb: b["symb"], xlines: b["xlines"], ycols: b["ycols"], message: b["message"])
    draw_to_map(game_window,b)
  end
  scr_clear
else
  # ---- New game: generate everything from a fixed seed ----
  # Instantiate Windows
  # For each window, define lines,cols variables and work with those instead of direct numbers
  # Demo game uses 4 windows: game_window (aka game map), Viewport (aka what the player sees), console_window and side hud_window.
  # Screen and window variables
  scr_message("Prepare Window Variables",2)
  seed = 12345
  #seed = {"seed" => 12345}
  standard_screen_columns = [] # Standard Screen column aka y
  standard_screen_lines = [] # Standard Screen lines aka x
  Ncurses.getmaxyx(stdscr,standard_screen_columns,standard_screen_lines) # Get Max Y,X for standard screen, place them in arrays. getmaxyx outputs to arrays.
  game_window_lines = 200
  game_window_columns = 200
  viewport_window_lines = 25
  viewport_window_columns = 25
  hud_window_lines = viewport_window_lines
  hud_window_columns = 15
  console_window_lines = 3
  console_window_columns = viewport_window_columns + hud_window_columns
  bunker_area_with_space = (viewport_window_lines * viewport_window_columns * 10) + 11 # 11 x 11 is the area of the demo bunker
  #total_bunkers = {"total_bunkers" => ((game_window_lines * game_window_columns) / bunker_area_with_space)}
  scr_clear
  # Define Actors, Items, Terrain, Bunkers and Beacons
=begin
  #total_bunkers = {"total_bunkers" => ((game_window_lines * game_window_columns) / bunker_area_with_space)}
  everything = {}
  everything.merge!(seed)
  actors = {"actors" => []}
  everything.merge!(actors)
  items = {"items" => [42,102,109]}
  everything.merge!(items)
  walkable = {"walkable" => [32,88,126,288,382]} # ' ', '~', 'X' #somehow 288 became space, 382 is colored ~
  everything.merge!(walkable)
  all_beacons = {"beacons" => []}
  all_bunkers = {"bunkers" => []}
  everything.merge!(total_bunkers)
  everything.merge!(all_beacons)
  everything.merge!(all_bunkers)
=end
  scr_message("Loading Game Variables",3)
  total_bunkers = ((game_window_lines * game_window_columns) / bunker_area_with_space)
  actors = []
  items = [42,102,109] # presumably '*', 'f', 'm' glyph codes — see the f/m key handlers
  walkable = [32,88,126,288,382] # ' ', 'X', '~' plus color-pair variants per the dead block above
  all_beacons = []
  all_bunkers = []
  scr_clear
  # Game Loop Variables
  scr_message("Setting Loop Variables",4)
  direction_steps = 0
  counter = 0
  dice_roll = false
  hunger_count = 0
  #counter = 0 #wander counter for monster
  direction_steps = rand(10..25) # Meander long distances
  player_visible = 1
  scr_clear
  # Create game windows, then generate the world
  scr_message("Creating Game Windows",5)
  game_window = Ncurses.newwin(game_window_lines, game_window_columns, 0, 0)
  viewport_window = Ncurses.derwin(game_window,viewport_window_lines, viewport_window_columns, 0, 0) # Must not exceed size of terminal or else crash
  console_window = Ncurses.newwin(console_window_lines, console_window_columns, viewport_window_lines, 0)
  hud_window = Ncurses.newwin(hud_window_lines, hud_window_columns, 0, viewport_window_lines)
  scr_clear
  # Create Player Actor
  scr_message("Generate Actors",6)
  game_window_max_lines = []
  game_window_max_columns = []
  Ncurses.getmaxyx(game_window,game_window_max_columns,game_window_max_lines) # Get Max Y,X of game_window
  player_start_lines = (game_window_max_lines[0] / 4)
  player_start_columns = (game_window_max_columns[0] / 4)
  player = Character.new(symb: '@', symbcode: 64, xlines: player_start_lines, ycols: player_start_columns, hp: 9, color: 2)
  actors << player
  scr_clear
  scr_message("Generating Map",7)
  generate_map(game_window,total_bunkers,all_beacons,all_bunkers,actors,seed)
  # Place all Actors from array
  spiral(game_window,10,player,walkable) # Find legal starting position for player
  actors.each { |actor| actor.draw(game_window)} # Add all actors to the map
  # Persist the freshly generated world so it can be reloaded later.
  save_state(seed,total_bunkers,items,walkable,all_beacons,all_bunkers,actors)
  scr_clear
=begin
  # Save a copy of the initial Game State. (Superseded by the
  # save_state call above; kept for reference.)
  # Prepare data for JSON
  all_the_data = {}
  seed_json = {"seed" => 12345}
  total_bunkers_json = {"total_bunkers" => total_bunkers}
  items_json = {"items" => items}
  walkable_json = {"walkable" => walkable}
  all_beacons_json = {"beacons" => all_beacons}
  bunkers_json = {"bunkers" => all_bunkers}
  actors_json = {"actors" => actors}
  all_the_data.merge!(seed_json)
  all_the_data.merge!(total_bunkers_json)
  all_the_data.merge!(items_json)
  all_the_data.merge!(walkable_json)
  all_the_data.merge!(all_beacons_json)
  all_the_data.merge!(bunkers_json)
  all_the_data.merge!(actors_json)
  # Save data to JSON
  File.open('sample.json', 'w') do |f|
    f.puts Oj::dump all_the_data
  end
=end
end
# Post-setup shared by both world-setup branches: draw the HUD and
# console chrome once and center the viewport on the player.
menu_active = 0
game_initialized = 1
# Set up hud_window and console_window
borders(console_window) # Add borders to the console_window
Ncurses.wrefresh(console_window) # Refresh console_window window with message
hud_on(hud_window,player)
center(viewport_window,game_window,player.xlines,player.ycols) # Center map on player
Ncurses.wrefresh(viewport_window)
#################################################################################
#                                  Game Loop                                    #
#################################################################################
# Main loop: one iteration per keypress. Exits when the player dies,
# starves, or holds a token from every bunker.
while player.hp > 0 && player.hunger > 0 && player.inventory["Token"] < total_bunkers # While Player hit points and hunger are above 0, and tokens are less than total, keep playing
  if menu_active == 1
    main_menu(game_initialized, game_window)
    menu_active = 0
    Ncurses.mvwaddstr(stdscr, 2, 2, "Returning to game...")
    Ncurses.refresh
    Ncurses.napms(1000)
  end
  # Redraw HUD / console / viewport each turn.
  hud_on(hud_window,player)
  borders(console_window)
  Ncurses.wrefresh(hud_window)
  Ncurses.wrefresh(console_window)
  Ncurses.wrefresh(viewport_window) # Fixed Monster location
  #temp_hash = {"seed" => "#{seed}"}
  #File.open("game.json", "w") do |f|
  # f.puts temp_hash.to_json
  #end
  #inhospitableLog = File.open("inhospitableLog.txt", "w")
  #actors.each { |a| inhospitableLog.puts "#{a}.to_yaml" }
  #inhospitableLog.close
  # Blocking keyboard read; numeric cases are ASCII codes (119 = 'w', ...).
  input = Ncurses.getch
  case input
  when KEY_UP, 119 # Move Up
    check_space(game_window,hud_window,-1,0,player,walkable,items,actors)
    center(viewport_window,game_window,player.xlines,player.ycols)
  when KEY_DOWN, 115 # Move Down
    check_space(game_window,hud_window,1,0,player,walkable,items,actors)
    center(viewport_window,game_window,player.xlines,player.ycols)
  when KEY_RIGHT, 100 # Move Right
    check_space(game_window,hud_window,0,1,player,walkable,items,actors)
    center(viewport_window,game_window,player.xlines,player.ycols)
  when KEY_LEFT, 97 # Move Left
    check_space(game_window,hud_window,0,-1,player,walkable,items,actors)
    center(viewport_window,game_window,player.xlines,player.ycols)
  when 32 # Spacebar, dont move
    center(viewport_window,game_window,player.xlines,player.ycols)
  when 104 # h - toggle the player_visible flag used by the monster AI below
    if player_visible == 1
      player_visible = 0
    elsif player_visible == 0
      player_visible = 1
    end
  when 114 # r - radio: relay the nearest beacon's transmission if in range
    the_beacon = get_distance_all_beacons(player,all_beacons)
    if get_distance(player,the_beacon) < 101
      message(console_window,"Radio: #{static(the_beacon, transmission(game_window,the_beacon,player))}")
    else
      message(console_window,"..zz..zZ..Zzz..")
    end
  when 102 # f - eat food (if any) to restore one hunger point
    food = player.inventory["Food"]
    if food > 0
      update_inventory(hud_window, 102, player, -1)
      player.hunger += 1
      Ncurses.mvwaddstr(hud_window, 4, 1, "Hunger: #{player.hunger}")
      Ncurses.wrefresh(hud_window)
    else
      message(console_window, "You have no food to eat.")
    end
  when 109 # m - use a medkit (if any) to restore one hit point
    medkit = player.inventory["Medkit"]
    if medkit > 0
      player.hp += 1
      update_inventory(hud_window, 109, player, -1)
      Ncurses.mvwaddstr(hud_window, 3, 1, "HP: #{player.hp}")
      Ncurses.wrefresh(hud_window)
    else
      message(console_window, "You have no medkits.")
    end
  when 27 # ESC - Main Menu
    menu_active = 1
  when 49 # 1 - Save Game
    # NOTE(review): `everything` is only assigned in the load-saved-game
    # branch of the setup above; pressing '1' in a brand-new game raises
    # NameError. save_state(...) looks like the intended call — confirm.
    #temp_hash = {"seed" => "#{seed}"}
    File.open("game.json", "w") do |f|
      f.puts everything.to_json
    end
  when KEY_F2, 113, 81 # Quit Game with F2, q or Q
    break
  else
    Ncurses.flash # Flash screen if undefined input selected
    message(console_window,"Move not valid") # Display ascii decimal number of selected input
    Ncurses.wrefresh(console_window)
  end
  if menu_active == 0
    # Monsters Move
    # NOTE(review): Array#except is not core Ruby — presumably defined in
    # 'library' (ActiveSupport-style helper); verify it skips the player.
    actors.except(player).each do |rawr|
      if rawr.hp <= 0
        Ncurses.mvwaddstr(game_window, rawr.xlines, rawr.ycols, "X") # Turn into dead body
        Ncurses.wrefresh(viewport_window)
      else
        distance_from_player = [(player.xlines - rawr.xlines).abs,(player.ycols - rawr.ycols).abs] # Get positive value of distance between monster and player
        if player_visible == 1 and ((distance_from_player[0] < (viewport_window_lines / 5) and distance_from_player[1] < viewport_window_columns / 5)) # if the monster is visible, chase player
          mode_hunt2(game_window,hud_window, rawr, player, walkable, items, actors)
        else # If player is not visible, wander around
          mode_wander2(game_window,hud_window, rawr, player, walkable, items, actors)
        end
      end
    end
    # Starvation: hunger drops one point roughly every 101 turns.
    if hunger_count <= 100
      hunger_count += 1
    else
      player.hunger -= 1
      hunger_count = 0
      message(console_window,"Your stomach growls")
      Ncurses.mvwaddstr(hud_window, 4, 1, "Hunger: #{player.hunger}")
      Ncurses.wrefresh(hud_window)
    end
  end
end
# End Screen
if player.hp == 0 || player.hunger == 0 || player.inventory["Token"] == 2
# Starved or died
if player.hp == 0 || player.hunger == 0
Ncurses.clear
Ncurses.mvwaddstr(stdscr, standard_screen_columns[0] / 2, standard_screen_lines[0] / 2, "You have died in the cold wastes.")
Ncurses.mvwaddstr(stdscr, (standard_screen_columns[0] / 2) + 1, standard_screen_lines[0] / 2, "Abiit nemine salutato.")
Ncurses.mvwaddstr(stdscr, (standard_screen_columns[0] / 2) + 2, standard_screen_lines[0] / 2, "Press any key to quit")
Ncurses.wrefresh(stdscr)
Ncurses.napms(1000)
input = Ncurses.getch
Ncurses.endwin
Ncurses.clear
exit
end
# Collected all the tokens
if player.inventory["Token"] == 2 # Change this to reflect total tokens
Ncurses.clear
Ncurses.mvwaddstr(stdscr, standard_screen_columns[0] / 2, standard_screen_lines[0] / 2, "You collected all the tokens.")
Ncurses.mvwaddstr(stdscr, (standard_screen_columns[0] / 2) + 1, standard_screen_lines[0] / 2, "You have been rescued!")
Ncurses.mvwaddstr(stdscr, (standard_screen_columns[0] / 2) + 2, standard_screen_lines[0] / 2, "Press 'q' to quit")
Ncurses.wrefresh(stdscr)
Ncurses.napms(1000)
input = Ncurses.getch
Ncurses.endwin
Ncurses.clear
exit
end
end
Ncurses.clear
Ncurses.mvwaddstr(stdscr, standard_screen_columns[0] / 2, standard_screen_lines[0] / 2, "Good Bye!")
Ncurses.wrefresh(stdscr)
Ncurses.napms(1000)
Ncurses.endwin
removed message comments
require_relative 'library'
require 'ncurses'
require 'oj'
require 'json'
include Ncurses
# Persist the initial game state to sample.json so a later session can
# restore the exact same world: seed, bunker count, item/terrain codes,
# beacons, bunkers and all actors.
def save_state(seed,total_bunkers,items,walkable,all_beacons,all_bunkers,actors)
  # One literal instead of seven merge! calls; Ruby hashes keep
  # insertion order, so the serialized key order is unchanged.
  state = {
    "seed" => seed,
    "total_bunkers" => total_bunkers,
    "items" => items,
    "walkable" => walkable,
    "beacons" => all_beacons,
    "bunkers" => all_bunkers,
    "actors" => actors
  }
  # Serialize with Oj; the block form of File.open closes the handle.
  File.open('sample.json', 'w') do |file|
    file.puts Oj::dump state
  end
end
# Draw a progress message plus a 7-segment loading bar on the standard
# screen, then pause briefly so the user can read it.
#
# message - String shown at row 3.
# bars    - Integer 0..7; number of filled '=' segments in the bar.
def scr_message(message,bars)
  loading = "[" + "=" * bars + " " * (7 - bars) + "]"
  Ncurses.mvwaddstr(stdscr, 3, 3, "#{message}")
  Ncurses.mvwaddstr(stdscr, 4, 4, "#{loading}")
  Ncurses.refresh
  # Was Ncurses.napms(0500): the leading zero makes 0500 an OCTAL
  # literal (= 320 ms). The intent was clearly a half-second pause.
  Ncurses.napms(500)
end
# Blank out the message row written by scr_message by overwriting row 3
# with spaces. The loading bar on row 4 is simply redrawn by the next
# scr_message call rather than cleared here.
def scr_clear
  Ncurses.mvwaddstr(stdscr, 3, 3, "                         ")
  Ncurses.refresh
end
=begin
inhospitableLog = File.open("inhospitableLog.txt", "w")
inhospitableLog.puts "#{Time.now} - Game Launched"
inhospitableLog.close
=end
#################################################################################
# Initialize #
#################################################################################
@new = 0
Ncurses.initscr # Start Ncurses
Ncurses.noecho # Do not show keyboard input at cursor location
Ncurses.start_color
Ncurses.curs_set(0) # Disable blinking cursor
Ncurses.cbreak # Only accept a single character of input
Ncurses.stdscr # Initialize Standard Screen, which uses dimensions of current Terminal window. Invoke with stdscr
Ncurses.keypad(stdscr,true) # Use expanded keyboard characters
Ncurses.init_pair(1, COLOR_BLACK, COLOR_WHITE)
game_initialized = 0
main_menu(game_initialized, stdscr)
Ncurses.mvwaddstr(stdscr, 2, 2, "Generating World")
Ncurses.mvwaddstr(stdscr, 3, 3, "Please wait...")
Ncurses.mvwaddstr(stdscr, 4, 4, "[ ]")
Ncurses.refresh
if @new == 1 # Set to 1 when loading variables, located in ui.rb on line 44
# Load JSON File
#Ncurses.mvwaddstr(stdscr, 3, 3, "Loading Saved Data")
#Ncurses.mvwaddstr(stdscr, 4, 4, "[= ]")
#Ncurses.refresh
scr_message("Loading Saved Data",1)
json = File.read('sample.json')
everything = JSON.parse(json)
scr_clear
# Instantiate Windows
# For each window, define lines,cols variables and work with those instead of direct numbers
# Demo game uses 4 windows: game_window (aka game map), Viewport (aka what the player sees), console_window and side hud_window.
# Screen and window variables
scr_message("Prepare Window Variables",2)
standard_screen_columns = [] # Standard Screen column aka y
standard_screen_lines = [] # Standard Screen lines aka x
Ncurses.getmaxyx(stdscr,standard_screen_columns,standard_screen_lines) # Get Max Y,X for standard screen, place them in arrays. getmaxyx outputs to arrays.
game_window_lines = 200
game_window_columns = 200
viewport_window_lines = 25
viewport_window_columns = 25
hud_window_lines = viewport_window_lines
hud_window_columns = 15
console_window_lines = 3
console_window_columns = viewport_window_columns + hud_window_columns
bunker_area_with_space = (viewport_window_lines * viewport_window_columns * 10) + 11 # 11 x 11 is the area of the demo bunker
scr_clear
# Load JSON Data
scr_message("Loading Game Variables",3)
total_bunkers = everything["total_bunkers"].to_i
seed = everything["seed"].to_i
actors_from_json = everything["actors"]
actors = []
items = everything["items"]
all_beacons = []
all_bunkers = everything["bunkers"]
walkable = everything["walkable"]
scr_clear
# Game Loop Variables
scr_message("Setting Loop Variables",4)
direction_steps = 0
counter = 0
dice_roll = false
hunger_count = 0
direction_steps = rand(10..25) # Meander long distances
player_visible = 1
scr_clear
# Create game windows, then generate the world
scr_message("Creating Game Windows",5)
game_window = Ncurses.newwin(game_window_lines, game_window_columns, 0, 0)
viewport_window = Ncurses.derwin(game_window,viewport_window_lines, viewport_window_columns, 0, 0) # Must not exceed size of terminal or else crash
console_window = Ncurses.newwin(console_window_lines, console_window_columns, viewport_window_lines, 0)
hud_window = Ncurses.newwin(hud_window_lines, hud_window_columns, 0, viewport_window_lines)
scr_clear
scr_message("Generating Map",6)
generate_map(game_window,total_bunkers,all_beacons,all_bunkers,actors,seed)
scr_clear
scr_message("Generate Actors",7)
player = Character.new(symb: everything["actors"][0]["symb"],symbcode: everything["actors"][0]["symbcode"],color: everything["actors"][0]["color"],xlines: everything["actors"][0]["xlines"],ycols: everything["actors"][0]["ycols"],blocked: everything["actors"][0]["blocked"],hp: everything["actors"][0]["hp"],hunger: everything["actors"][0]["hunger"],inventory: everything["actors"][0]["inventory"])
actors << player
player.draw(game_window)
everything["actors"].drop(1).each do |k|
actors << Character.new(symb: k["symb"],symbcode: k["symbcode"],color: k["color"],xlines: k["xlines"],ycols: k["ycols"],blocked: k["blocked"],hp: k["hp"],hunger: k["hunger"],inventory: k["inventory"]) # Instantiate characters from Json
draw_to_map(game_window,k)
end
everything["beacons"].each do |b|
all_beacons << Beacon.new(symb: b["symb"], xlines: b["xlines"], ycols: b["ycols"], message: b["message"])
draw_to_map(game_window,b)
end
scr_clear
else
# Instantiate Windows
# For each window, define lines,cols variables and work with those instead of direct numbers
# Demo game uses 4 windows: game_window (aka game map), Viewport (aka what the player sees), console_window and side hud_window.
# Screen and window variables
scr_message("Prepare Window Variables",2)
seed = 12345
standard_screen_columns = [] # Standard Screen column aka y
standard_screen_lines = [] # Standard Screen lines aka x
Ncurses.getmaxyx(stdscr,standard_screen_columns,standard_screen_lines) # Get Max Y,X for standard screen, place them in arrays. getmaxyx outputs to arrays.
game_window_lines = 200
game_window_columns = 200
viewport_window_lines = 25
viewport_window_columns = 25
hud_window_lines = viewport_window_lines
hud_window_columns = 15
console_window_lines = 3
console_window_columns = viewport_window_columns + hud_window_columns
bunker_area_with_space = (viewport_window_lines * viewport_window_columns * 10) + 11 # 11 x 11 is the area of the demo bunker
scr_clear
# Define Actors, Items, Terrain, Bunkers and Beacons
scr_message("Loading Game Variables",3)
total_bunkers = ((game_window_lines * game_window_columns) / bunker_area_with_space)
actors = []
items = [42,102,109]
walkable = [32,88,126,288,382]
all_beacons = []
all_bunkers = []
scr_clear
# Game Loop Variables
scr_message("Setting Loop Variables",4)
direction_steps = 0
counter = 0
dice_roll = false
hunger_count = 0
#counter = 0 #wander counter for monster
direction_steps = rand(10..25) # Meander long distances
player_visible = 1
scr_clear
# Create game windows, then generate the world
scr_message("Creating Game Windows",5)
game_window = Ncurses.newwin(game_window_lines, game_window_columns, 0, 0)
viewport_window = Ncurses.derwin(game_window,viewport_window_lines, viewport_window_columns, 0, 0) # Must not exceed size of terminal or else crash
console_window = Ncurses.newwin(console_window_lines, console_window_columns, viewport_window_lines, 0)
hud_window = Ncurses.newwin(hud_window_lines, hud_window_columns, 0, viewport_window_lines)
scr_clear
# Create Player Actor
scr_message("Generate Actors",6)
game_window_max_lines = []
game_window_max_columns = []
Ncurses.getmaxyx(game_window,game_window_max_columns,game_window_max_lines) # Get Max Y,X of game_window
player_start_lines = (game_window_max_lines[0] / 4)
player_start_columns = (game_window_max_columns[0] / 4)
player = Character.new(symb: '@', symbcode: 64, xlines: player_start_lines, ycols: player_start_columns, hp: 9, color: 2)
actors << player
scr_clear
scr_message("Generating Map",7)
generate_map(game_window,total_bunkers,all_beacons,all_bunkers,actors,seed)
# Place all Actors from array
spiral(game_window,10,player,walkable) # Find legal starting position for player
actors.each { |actor| actor.draw(game_window)} # Add all actors to the map
save_state(seed,total_bunkers,items,walkable,all_beacons,all_bunkers,actors)
scr_clear
end
# --- Session flags ------------------------------------------------------
# menu_active: 1 while the ESC menu should be shown next iteration;
# game_initialized lets main_menu offer a "return to game" option.
menu_active = 0
game_initialized = 1
# Set up hud_window and console_window
borders(console_window) # Add borders to the console_window
Ncurses.wrefresh(console_window) # Refresh console_window window with message
hud_on(hud_window,player)
center(viewport_window,game_window,player.xlines,player.ycols) # Center map on player
Ncurses.wrefresh(viewport_window)
#################################################################################
# Game Loop #
#################################################################################
# One iteration per keypress: redraw HUD/console, read input, act on it,
# then (unless the menu was opened) let monsters move and apply hunger.
while player.hp > 0 && player.hunger > 0 && player.inventory["Token"] < total_bunkers # While Player hit points and hunger are above 0, and tokens are less than total, keep playing
if menu_active == 1
main_menu(game_initialized, game_window)
menu_active = 0
Ncurses.mvwaddstr(stdscr, 2, 2, "Returning to game...")
Ncurses.refresh
Ncurses.napms(1000)
end
hud_on(hud_window,player)
borders(console_window)
Ncurses.wrefresh(hud_window)
Ncurses.wrefresh(console_window)
Ncurses.wrefresh(viewport_window) # Fixed Monster location
#temp_hash = {"seed" => "#{seed}"}
#File.open("game.json", "w") do |f|
# f.puts temp_hash.to_json
#end
#inhospitableLog = File.open("inhospitableLog.txt", "w")
#actors.each { |a| inhospitableLog.puts "#{a}.to_yaml" }
#inhospitableLog.close
# Blocking read; numeric cases are raw ASCII decimals (119='w', 115='s', ...).
input = Ncurses.getch
case input
when KEY_UP, 119 # Move Up
check_space(game_window,hud_window,-1,0,player,walkable,items,actors)
center(viewport_window,game_window,player.xlines,player.ycols)
when KEY_DOWN, 115 # Move Down
check_space(game_window,hud_window,1,0,player,walkable,items,actors)
center(viewport_window,game_window,player.xlines,player.ycols)
when KEY_RIGHT, 100 # Move Right
check_space(game_window,hud_window,0,1,player,walkable,items,actors)
center(viewport_window,game_window,player.xlines,player.ycols)
when KEY_LEFT, 97 # Move Left
check_space(game_window,hud_window,0,-1,player,walkable,items,actors)
center(viewport_window,game_window,player.xlines,player.ycols)
when 32 # Spacebar, dont move
center(viewport_window,game_window,player.xlines,player.ycols)
when 104 # h
# Toggle visibility: monsters only switch to hunt mode while
# player_visible == 1 (see the monster block below).
if player_visible == 1
player_visible = 0
elsif player_visible == 0
player_visible = 1
end
when 114 # r
# Radio: static-garbled transmission from the nearest beacon when in range.
the_beacon = get_distance_all_beacons(player,all_beacons)
if get_distance(player,the_beacon) < 101
message(console_window,"Radio: #{static(the_beacon, transmission(game_window,the_beacon,player))}")
else
message(console_window,"..zz..zZ..Zzz..")
end
when 102 # f
# Eat: trade one Food item for one point of hunger.
food = player.inventory["Food"]
if food > 0
update_inventory(hud_window, 102, player, -1)
player.hunger += 1
Ncurses.mvwaddstr(hud_window, 4, 1, "Hunger: #{player.hunger}")
Ncurses.wrefresh(hud_window)
else
message(console_window, "You have no food to eat.")
end
when 109 # m
# Heal: trade one Medkit for one hit point.
medkit = player.inventory["Medkit"]
if medkit > 0
player.hp += 1
update_inventory(hud_window, 109, player, -1)
Ncurses.mvwaddstr(hud_window, 3, 1, "HP: #{player.hp}")
Ncurses.wrefresh(hud_window)
else
message(console_window, "You have no medkits.")
end
when 27 # ESC - Main Menu
menu_active = 1
when 49 # 1 - Save Game
#temp_hash = {"seed" => "#{seed}"}
# NOTE(review): `everything` is not defined in this chunk — presumably a
# save-state hash built earlier; confirm it stays in sync with save_state.
File.open("game.json", "w") do |f|
f.puts everything.to_json
end
when KEY_F2, 113, 81 # Quit Game with F2, q or Q
break
else
Ncurses.flash # Flash screen if undefined input selected
message(console_window,"Move not valid") # Display ascii decimal number of selected input
Ncurses.wrefresh(console_window)
end
if menu_active == 0
# Monsters Move
# NOTE(review): Array#except is not core Ruby (only Hash#except is, 3.0+) —
# verify which library or helper defines it for arrays here.
actors.except(player).each do |rawr|
if rawr.hp <= 0
Ncurses.mvwaddstr(game_window, rawr.xlines, rawr.ycols, "X") # Turn into dead body
Ncurses.wrefresh(viewport_window)
else
distance_from_player = [(player.xlines - rawr.xlines).abs,(player.ycols - rawr.ycols).abs] # Get positive value of distance between monster and player
# Hunt when the player is visible and within a fifth of the viewport in
# both axes; otherwise wander randomly.
if player_visible == 1 and ((distance_from_player[0] < (viewport_window_lines / 5) and distance_from_player[1] < viewport_window_columns / 5)) # if the monster is visible, chase player
mode_hunt2(game_window,hud_window, rawr, player, walkable, items, actors)
else # If player is not visible, wander around
mode_wander2(game_window,hud_window, rawr, player, walkable, items, actors)
end
end
end
# Starvation
# Hunger drops one point every 101 turns.
if hunger_count <= 100
hunger_count += 1
else
player.hunger -= 1
hunger_count = 0
message(console_window,"Your stomach growls")
Ncurses.mvwaddstr(hud_window, 4, 1, "Hunger: #{player.hunger}")
Ncurses.wrefresh(hud_window)
end
end
end
# --- End Screen ---------------------------------------------------------
# The game loop exits when the player dies, starves, or holds every token,
# so exactly one branch below should fire; the quit keys (F2/q/Q) skip
# straight to the goodbye message.
# Fixes: hp/hunger compared with <= 0 instead of == 0 (a multi-point hit
# can push them below zero and the == check skipped the death screen), and
# the token check uses total_bunkers instead of the hard-coded 2 the old
# inline comment already flagged as wrong.
if player.hp <= 0 || player.hunger <= 0 || player.inventory["Token"] >= total_bunkers
  # Starved or died
  if player.hp <= 0 || player.hunger <= 0
    Ncurses.clear
    Ncurses.mvwaddstr(stdscr, standard_screen_columns[0] / 2, standard_screen_lines[0] / 2, "You have died in the cold wastes.")
    Ncurses.mvwaddstr(stdscr, (standard_screen_columns[0] / 2) + 1, standard_screen_lines[0] / 2, "Abiit nemine salutato.")
    Ncurses.mvwaddstr(stdscr, (standard_screen_columns[0] / 2) + 2, standard_screen_lines[0] / 2, "Press any key to quit")
    Ncurses.wrefresh(stdscr)
    Ncurses.napms(1000)
    input = Ncurses.getch
    Ncurses.endwin
    Ncurses.clear
    exit
  end
  # Collected all the tokens: rescued ending.
  if player.inventory["Token"] >= total_bunkers
    Ncurses.clear
    Ncurses.mvwaddstr(stdscr, standard_screen_columns[0] / 2, standard_screen_lines[0] / 2, "You collected all the tokens.")
    Ncurses.mvwaddstr(stdscr, (standard_screen_columns[0] / 2) + 1, standard_screen_lines[0] / 2, "You have been rescued!")
    Ncurses.mvwaddstr(stdscr, (standard_screen_columns[0] / 2) + 2, standard_screen_lines[0] / 2, "Press 'q' to quit")
    Ncurses.wrefresh(stdscr)
    Ncurses.napms(1000)
    input = Ncurses.getch
    Ncurses.endwin
    Ncurses.clear
    exit
  end
end
# Normal quit path.
Ncurses.clear
Ncurses.mvwaddstr(stdscr, standard_screen_columns[0] / 2, standard_screen_lines[0] / 2, "Good Bye!")
Ncurses.wrefresh(stdscr)
Ncurses.napms(1000)
Ncurses.endwin
597978b0-2d48-11e5-ba8d-7831c1c36510
598101a3-2d48-11e5-85e0-7831c1c36510
598101a3-2d48-11e5-85e0-7831c1c36510 |
4e180bc7-2d48-11e5-b36a-7831c1c36510
4e1e1661-2d48-11e5-add4-7831c1c36510
4e1e1661-2d48-11e5-add4-7831c1c36510 |
60f34c1c-2d48-11e5-8312-7831c1c36510
60f990fd-2d48-11e5-a789-7831c1c36510
60f990fd-2d48-11e5-a789-7831c1c36510 |
611b6e0f-2d48-11e5-8a9b-7831c1c36510
6121d1fd-2d48-11e5-bda2-7831c1c36510
6121d1fd-2d48-11e5-bda2-7831c1c36510 |
75049c54-2d48-11e5-a361-7831c1c36510
750a9140-2d48-11e5-918c-7831c1c36510
750a9140-2d48-11e5-918c-7831c1c36510 |
5e4ea6a8-2d48-11e5-908a-7831c1c36510
5e54aac7-2d48-11e5-99cf-7831c1c36510
5e54aac7-2d48-11e5-99cf-7831c1c36510 |
779ba342-2d48-11e5-af19-7831c1c36510
77a0e6e1-2d48-11e5-8d99-7831c1c36510
77a0e6e1-2d48-11e5-8d99-7831c1c36510 |
73119168-2d48-11e5-aef7-7831c1c36510
73181485-2d48-11e5-9177-7831c1c36510
73181485-2d48-11e5-9177-7831c1c36510 |
79ff6d3a-2d48-11e5-bf7e-7831c1c36510
7a04acbd-2d48-11e5-a1bf-7831c1c36510
7a04acbd-2d48-11e5-a1bf-7831c1c36510 |
5998898c-2d48-11e5-a2bf-7831c1c36510
599eed0c-2d48-11e5-9609-7831c1c36510
599eed0c-2d48-11e5-9609-7831c1c36510 |
6a942ed9-2d48-11e5-8828-7831c1c36510
6a99a463-2d48-11e5-b4a2-7831c1c36510
6a99a463-2d48-11e5-b4a2-7831c1c36510 |
# It was the night before Christmas and all through the house, not a creature was coding: UTF-8, not even with a mouse.
require 'bundler'
require 'tempfile'
require 'digest/md5'
Bundler.require(:default)
require 'sinatra/sprockets-helpers'
require 'sinatra/asset_pipeline'
require 'sass'
$config = MultiJson.load(File.open('./config.json').read)
# Monkey patched Redis for easy caching.
# Reopen Redis to add a tiny read-through cache helper.
class Redis
  # Fetch +key+; on a cache miss, compute the value with the given block
  # (which receives this connection), store it, and optionally set a TTL of
  # +expire+ seconds. Returns the cached or freshly computed value.
  def cache(key, expire=nil)
    cached = get(key)
    return cached unless cached.nil?
    fresh = yield(self)
    set(key, fresh)
    expire(key, expire) if expire
    fresh
  end
end
# Ease of use connection to the redis server.
$redis = Redis.new :driver=>:hiredis, :host=>$config['redis']['host'], :port=>$config['redis']["port"]
# Primary datastore: PostgreSQL via DataMapper; connection string comes
# from config.json.
DataMapper.setup(:default, 'postgres://'+$config['postgres'])
#$adapter = DataMapper.setup(:default, :adapter=>'riak', :namespace=>'WebSync')
#class DataMapper::Adapters::RiakAdapter
# attr_accessor :riak
#end
#$riak = $adapter.riak
# Redis has issues with datamapper associations especially Many-to-many.
#$adapter = DataMapper.setup(:default, {:adapter => "redis"});
#$redis = $adapter.redis
#data = "window = {};"+File.read("./assets/javascripts/diff_match_patch.js") + File.read("./assets/javascripts/jsondiffpatch.min.js")
#$jsondiffpatch = ExecJS.compile data
# Alias so the sinatra sprockets helpers can find the Sprockets constant.
Sinatra::Sprockets = Sprockets
=begin
module BJSONDiffPatch
def diff object1, object2
return $jsondiffpatch.eval "jsondiffpatch.diff(#{MultiJson.dump(object1)},#{MultiJson.dump(object2)})"
end
def patch object1, delta
return $jsondiffpatch.eval "jsondiffpatch.patch(#{MultiJson.dump(object1)},#{MultiJson.dump(delta)})"
end
end
class JsonDiffPatch
extend BJSONDiffPatch
end
=end
# Serialize a single JSON-encoded DOM node (a string-keyed Hash) back into
# an HTML fragment. Text nodes return their raw textContent; element nodes
# emit every non-structural key as a JSON-quoted attribute and recurse into
# 'childNodes' (self-closing when absent).
def json_to_html_node obj
  return obj['textContent'] if obj['name'] == "#text"
  out = "<" << obj['name']
  structural = %w[name textContent childNodes]
  obj.each do |key, val|
    out << " " << key << "=" << MultiJson.dump(val) unless structural.include?(key)
  end
  if obj.has_key? 'childNodes'
    out << ">"
    obj['childNodes'].each { |child| out << json_to_html_node(child) }
    out << "</" << obj['name'] << ">"
  else
    out << "/>"
  end
  out
end
# Serialize an array of JSON DOM nodes into one HTML string by joining the
# per-node fragments.
def json_to_html obj
  obj.map { |node| json_to_html_node(node) }.join
end
# Convert a parsed DOM node (Nokogiri in practice) into the JSON-ready Hash
# structure used for document bodies. Text nodes become
# {name: "#text", textContent: ...}; element nodes get an upcased :name,
# one entry per attribute, and a 'childNodes' array when children exist.
#
# NOTE(review): the result mixes a symbol :name key with string attribute /
# 'childNodes' keys. After a MultiJson round-trip everything is a string
# key (which is what json_to_html_node expects), but feeding this Hash to
# json_to_html_node directly would misbehave — confirm callers always
# serialize first.
def node_to_json html
  # Nokogiri text nodes report name == "text".
  if html.name == "text"
    return { name: "#text", textContent: html.to_s }
  end
  json = { name: html.name.upcase }
  # respond_to? replaces the old `defined? html.attributes` check — same
  # intent (comment/doctype nodes lack attributes), clearer idiom.
  if html.respond_to?(:attributes)
    html.attributes.each do |_name, attr|
      json[attr.name] = attr.value
    end
  end
  if html.children.length > 0
    json['childNodes'] = []
    html.children.each do |child|
      json['childNodes'].push(node_to_json(child))
    end
  end
  json
end
# Parse raw HTML with Nokogiri and convert every top-level document child
# into the JSON node structure via node_to_json. Returns an Array.
def html_to_json html
  Nokogiri::HTML(html).document.children.map { |node| node_to_json(node) }
end
# DataMapper model: a user-owned document whose body is stored as a JSON
# DOM tree (see html_to_json / json_to_html) rather than raw HTML.
class Document
include DataMapper::Resource
property :id, Serial
property :name, String
#property :body, Text
# :lazy so the potentially large body is only fetched when accessed.
property :body, Json, :default=>{}, :lazy=>true
property :created, DateTime
property :last_edit_time, DateTime
# Public documents are viewable/downloadable by non-owners.
property :public, Boolean, :default=>false
property :config, Json, :default=>{}
has n, :assets, :through => Resource
has n, :changes
belongs_to :user
# Set a single config key. DataMapper only tracks dirtiness on
# reassignment, so mutate a copy and assign it back.
def config_set key, value
n_config = config.dup
n_config[key]=value
self.config= n_config
end
end
# A single recorded edit (JSON patch) applied to a document.
class Change
include DataMapper::Resource
property :id, Serial
property :time, DateTime
property :patch, Json
belongs_to :user
belongs_to :document
end
# Assets could be javascript or css
# Named bundle of assets attached to new documents as a set.
class AssetGroup
include DataMapper::Resource
property :id, Serial
property :name, String
property :description, Text
has n, :assets, :through => Resource
end
# A client-side resource (script or stylesheet) a document can load.
class Asset
include DataMapper::Resource
property :id, Serial
property :name, String
property :description, Text
property :url, String
# Discriminator stores the concrete subclass name (Javascript/Stylesheet).
property :type, Discriminator
has n, :documents, :through => Resource
has n, :asset_groups, :through => Resource
end
# Single-table-inheritance subclasses distinguished by the :type column.
class Javascript < Asset; end
class Stylesheet < Asset; end
# Account record; the email address is the primary key.
class User
include DataMapper::Resource
property :email, String, :key=>true
# BCryptHash hashes on assignment; == compares a plaintext candidate.
property :password, BCryptHash
property :group, String, :default=>'user'
property :anonymous, Boolean, :default=> false
has n, :documents
has n, :changes
property :config, Json, :default=>{}
# Same copy-mutate-reassign pattern as Document#config_set.
def config_set key, value
n_config = config.dup
n_config[key]=value
self.config= n_config
end
end
class AnonymousUser < User; end
# Finalize model definitions and additively migrate the schema.
DataMapper.finalize
DataMapper.auto_upgrade!
# Main Sinatra application. Synchrony provides fiber-based concurrency on
# the thin server.
class WebSync < Sinatra::Base
register Sinatra::Synchrony
use Rack::Logger
helpers do
# Rack request logger.
def logger
request.logger
end
# The logged-in User record, or nil when unauthenticated.
def current_user
if logged_in?
return User.get(session['user'])
end
nil
end
# Route guard: bounce non-admins to the landing page.
def admin_required
if not admin?
redirect "/"
end
end
def admin?
c_user = current_user
not c_user.nil? and c_user.group=="admin"
end
# A session is valid only while its server-side entry in Redis still maps
# the cookie's userhash back to the cookie's email.
def logged_in?
(!session['userhash'].nil?)&&$redis.get('userhash:'+session['userhash'])==session['user']
end
# Route guard: send anonymous visitors to the login page, carrying the
# requested path in the query string for the post-login redirect.
def login_required
if !logged_in?
redirect "/login?#{env["REQUEST_PATH"]}"
end
end
# Create an account, or log in when the email already exists and the
# password matches. Returns the User on success, nil otherwise.
def register email, pass
email.downcase!
if User.get(email).nil?
user = User.create({:email=>email,:password=>pass})
authenticate email, pass
return user
elsif authenticate email, pass
return current_user
end
nil
end
# Check credentials and establish a session: a random UUID goes into the
# cookie session and is mapped to the email in Redis (optionally with a
# TTL of +expire+ seconds). BCryptHash#== performs the hash comparison.
def authenticate email, pass, expire=nil
email.downcase!
user = User.get(email)
if user.nil?
return false
end
if user.password==pass
session_key = SecureRandom.uuid
$redis.set("userhash:#{session_key}",email)
session['userhash']=session_key
session['user']=email
if !expire.nil?
$redis.expire("userhash:#{session_key}",expire)
end
return true
end
false
end
# Invalidate the server-side session entry and clear the cookie session.
def logout
$redis.del "userhash:#{session['userhash']}"
session['userhash']=nil
session['user']=nil
end
# Navbar sign in/out link, depending on session state.
def render_login_button
if logged_in?
return '<a href="/logout" title="Sign Out"><i class="icon-signout icon-large"></i><span class="hidden-phone"> Sign Out</span></a>'
else
return '<a href="/login" title="Sign In"><i class="icon-signin icon-large"></i><span class="hidden-phone"> Sign In</span></a>'
end
end
end
# Development: extra gems, unminified assets, Pry console on exceptions.
configure :development do
Bundler.require(:development)
set :assets_debug, true
use PryRescue::Rack
end
# Production: compress and precompile the asset pipeline output.
configure :production do
Bundler.require(:production)
set :assets_css_compressor, :sass
set :assets_js_compressor, :closure
set :assets_precompile, %w(*.css *.scss bundle-norm.js bundle-edit.js *.png *.favico *.jpg *.svg *.eot *.ttf *.woff)
set :assets_precompile_no_digest, %w(*.js)
end
# Common: week-long cookie sessions, thin server, sprockets asset paths.
configure do
use Rack::Session::Cookie, :expire_after => 60*60*24*7, :secret => $config['session_secret']
enable :sessions
set :session_secret, $config['session_secret']
set :server, 'thin'
set :sockets, []
set :template_engine, :erb
register Sinatra::AssetPipeline
sprockets.append_path File.join(root, 'assets', 'stylesheets')
sprockets.append_path File.join(root, 'assets', 'javascripts')
sprockets.append_path File.join(root, 'assets', 'images')
end
# Shared diff-match-patch instance.
# NOTE(review): not referenced elsewhere in this file — confirm use before removing.
$dmp = DiffMatchPatch.new
#Javascript.first_or_create(:name=>'Tables',:description=>'Table editing support',:url=>'/assets/tables.js')
#Javascript.first_or_create(:name=>'Chat',:description=>'Talk with other users!',:url=>'/assets/chat.js')
# First-run seeding: when the tables are empty, create the default assets
# and asset groups listed in config.json.
if Asset.count == 0
puts "[DATABASE] Creating default assets."
$config["default_assets"].each do |asset|
a = Javascript.create(name:asset["name"],description:asset["description"],url:asset["url"])
puts " :: Creating: #{asset["name"]}, Success: #{a.save}"
end
end
if AssetGroup.count == 0
puts "[DATABASE] Creating default asset groups."
$config["default_asset_groups"].each do |group|
g = AssetGroup.create(name:group["name"],description:group["description"])
group["assets"].each do |asset|
a = Asset.first(name:asset)
if not a.nil?
g.assets << a
end
end
puts " :: Creating: #{g.name}, Success: #{g.save}"
end
end
# Login form; already-authenticated users go straight home.
get '/login' do
if !logged_in?
erb :login
else
redirect '/'
end
end
# Authenticate; on success honor the optional :redirect param, on failure
# return to the login form carrying the target path along.
post '/login' do
redirect_loc = '/'
if params[:redirect]!=''
redirect_loc = params[:redirect]
end
if authenticate params[:email],params[:password]
redirect redirect_loc
else
redirect "/login?#{redirect_loc}"
end
end
# Registration shares the login form.
get '/register' do
redirect '/login'
end
post '/register' do
if register params[:email],params[:password]
redirect '/'
else
redirect '/login'
end
end
get '/logout' do
if logged_in?
logout
end
redirect '/login'
end
not_found do
erb :not_found
end
#get '/assets/*.css' do
# content_type 'text/css'
# assets_environment[params[:splat][0]+'.css'].to_s
#end
#get '/assets/*.js' do
# content_type 'text/javascript'
# assets_environment[params[:splat][0]+'.js'].to_s
#end
# Landing page: document list when signed in, marketing page otherwise.
get '/' do
@javascripts = []
if logged_in?
erb :file_list
else
erb :index
end
end
get '/documentation' do
erb :documentation
end
# Admin dashboard and asset management; each route calls admin_required
# before doing anything else.
get '/admin' do
admin_required
erb :admin
end
get '/admin/assets' do
admin_required
erb :admin_assets
end
get '/admin/assets/:asset/edit' do
admin_required
erb :admin_assets_edit
end
# NOTE(review): destructive action via GET is CSRF-prone — consider POST/DELETE.
get '/admin/assets/:asset/delete' do
admin_required
ass = Asset.get(params[:asset])
if not ass.nil?
ass.destroy
end
redirect '/admin/assets'
end
# Upsert: edit the asset when the id exists, otherwise create a new one.
post '/admin/assets/:asset/edit' do
admin_required
ass = Asset.get(params[:asset])
if not ass.nil?
ass.name = params[:name]
ass.description = params[:desc]
ass.url = params[:url]
ass.type = params[:type]
ass.save
else
n_ass = Asset.create(:name=>params[:name],:description=>params[:desc],:url=>params[:url], :type=>params[:type])
n_ass.save
end
redirect '/admin/assets'
end
get '/admin/asset_groups/:asset/edit' do
admin_required
erb :admin_asset_groups_edit
end
# Attach an asset to an asset group, then bounce back to the edit page.
# FIX: admin_required added — these two mutating routes were the only
# /admin/* endpoints reachable without an admin session.
get '/admin/asset_groups/:asset_group/:asset/add' do
  admin_required
  group = AssetGroup.get(params[:asset_group])
  group.assets << Asset.get(params[:asset])
  group.save
  redirect "/admin/asset_groups/#{params[:asset_group]}/edit"
end
# Detach an asset (by numeric id) from an asset group.
get '/admin/asset_groups/:asset_group/:asset/remove' do
  admin_required
  group = AssetGroup.get(params[:asset_group])
  target_id = params[:asset].to_i
  # Collect the matches first: the old code deleted from the association
  # while iterating over it, which can skip elements.
  group.assets.select { |a| a.id == target_id }.each { |a| group.assets.delete(a) }
  group.save
  redirect "/admin/asset_groups/#{params[:asset_group]}/edit"
end
# Delete an asset group after detaching its assets (clearing the
# association first avoids orphaned join rows).
get '/admin/asset_groups/:asset/delete' do
admin_required
ass = AssetGroup.get(params[:asset])
if not ass.nil?
ass.assets = []
ass.save
ass.destroy
end
redirect '/admin/assets'
end
# Upsert for asset groups, mirroring the asset edit route.
post '/admin/asset_groups/:asset/edit' do
admin_required
ass = AssetGroup.get(params[:asset])
if not ass.nil?
ass.name = params[:name]
ass.description = params[:desc]
ass.save
else
n_ass = AssetGroup.create(:name=>params[:name],:description=>params[:desc])
n_ass.save
end
redirect '/admin/assets'
end
# Create an empty document preloaded with an asset group's assets, then
# open it in the editor.
get '/new/:group' do
login_required
doc = Document.create(
:name => "Unnamed #{params[:group]}",
:body => {body:[]},
:created => Time.now,
:last_edit_time => Time.now,
:user => current_user
)
# NOTE(review): AssetGroup.get may return nil for a bad :group param,
# which would raise NoMethodError on .assets — confirm desired handling.
group = AssetGroup.get(params[:group])
doc.assets = group.assets
doc.save
redirect "/#{doc.id}/edit"
end
get '/upload' do
login_required
erb :upload
end
# Convert an uploaded file into a new JSON-bodied document. unoconv is
# tried first; on failure we fall back to per-type converters.
# FIX: login_required added — the GET form above requires a session, but
# this POST handler did not, so unauthenticated requests could create
# documents with a nil owner.
post '/upload' do
  login_required
  if params[:file]==nil
    redirect "/upload"
  end
  tempfile = params[:file][:tempfile]
  filename = params[:file][:filename]
  filetype = params[:file][:type]
  content = nil
  # TODO: Split upload/download into its own external server. Right now Unoconv is blocking. Also issues may arise if multiple copies of LibreOffice are running on the same server. Should probably use a single server instance of LibreOffice
  `unoconv -f html #{tempfile.path}`
  # Raw process wait status; 0 means unoconv succeeded.
  exit_status = $?.to_i
  if exit_status == 0
    content = File.read(tempfile.path+".html")
  else
    if filetype=="application/pdf"
      content = PDFToHTMLR::PdfFilePath.new(tempfile.path).convert.force_encoding("UTF-8")
    elsif filetype=='text/html'
      content = File.read(tempfile.path)
    elsif filename.split('.').pop=='docx'
      # This pretty much just reads plain text...
      content = Docx::Document.open(tempfile.path).to_html.force_encoding("UTF-8")
    else
      logger.info "Unoconv failed and Unrecognized filetype: #{params[:file][:type]}"
    end
  end
  if content!=nil
    # TODO: Upload into JSON format
    doc = Document.create(
      :name => filename,
      :body => {body:html_to_json(content)},
      :created => Time.now,
      :last_edit_time => Time.now,
      :user => current_user
    )
    # New uploads receive the default asset group (id 1).
    doc.assets = AssetGroup.get(1).assets
    doc.save
    redirect "/#{doc.id}/edit"
  else
    redirect "/"
  end
end
# Export a document: rebuild HTML from the JSON body and shell out to
# unoconv. The format whitelist below both restricts output types and
# makes the later shell interpolation of params[:format] safe.
get '/:doc/download/:format' do
  if !%w(bib doc docx doc6 doc95 docbook html odt ott ooxml pdb pdf psw rtf latex sdw sdw4 sdw3 stw sxw text txt vor vor4 vor3 xhtml bmp emf eps gif jpg met odd otg pbm pct pgm png ppm ras std svg svm swf sxd sxd3 sxd5 tiff wmf xpm odg odp pot ppt pwp sda sdd sdd3 sdd4 sti stp sxi vor5 csv dbf dif ods pts pxl sdc sdc4 sdc3 slk stc sxc xls xls5 xls95 xlt xlt5).include?(params[:format])
    redirect '/'
  end
  login_required
  doc_id = params[:doc]
  doc = Document.get doc_id
  if (!doc.public)&&doc.user!=current_user
    redirect '/'
  end
  file = Tempfile.new('websync-export')
  file.write( json_to_html( doc.body['body'] ) )
  file.close
  `unoconv -f #{params[:format]} #{file.path}`
  if $?.to_i==0
    export_file = file.path+"."+params[:format]
    # FIX: the backtick command previously probed the literal string
    # "export_file" (the variable was never interpolated), so `file`
    # always failed and the MIME type was wrong; also use the canonical
    # Content-Type header name instead of 'content_type'.
    response.headers['Content-Type'] = `file --mime -b #{export_file}`.split(';')[0]
    attachment(doc.name+'.'+params[:format])
    response.write(File.read(export_file))
    # NOTE(review): the unoconv output file (file.path + "." + format) is
    # never deleted — confirm whether cleanup is wanted here.
  else
    redirect '/'
  end
  file.unlink
end
# Raw JSON body of a document (owner or public documents only).
get '/:doc/json' do
login_required
doc_id = params[:doc]
doc = Document.get doc_id
# NOTE(review): doc is nil for an unknown id, so doc.public would raise
# NoMethodError — confirm whether a 404 is wanted here.
if (!doc.public)&&doc.user!=current_user
redirect '/'
end
content_type 'application/json'
MultiJson.dump(doc.body)
end
# Delete a document; only the owner may do so.
get '/:doc/delete' do
login_required
doc_id = params[:doc]
doc = Document.get doc_id
if doc.user==current_user
doc.destroy!
end
redirect '/'
end
# Document editor page. Issues the client a numeric id plus a secret key
# (both stored in Redis for one week); the client later presents them to
# authenticate its realtime/websocket connection.
get '/:doc/:op' do
doc_id = params[:doc]
doc = Document.get doc_id
if doc.nil?
redirect 'notfound'
end
#if !request.websocket?
login_required
if (!doc.public)&&doc.user!=current_user
redirect '/'
end
@javascripts = [
#'/assets/bundle-edit.js'
]
@doc = doc
if !@doc.nil?
# Per-client credentials for the realtime channel.
@client_id = $redis.incr("clientid")
@client_key = SecureRandom.uuid
$redis.set "websocket:id:#{@client_id}",current_user.email
$redis.set "websocket:key:#{@client_id}", @client_key
$redis.expire "websocket:id:#{@client_id}", 60*60*24*7
$redis.expire "websocket:key:#{@client_id}", 60*60*24*7
@no_menu = true
@edit = true
erb :edit
else
redirect '/'
end
=begin
# Websocket edit
else
#TODO: Authentication for websockets
redis_sock = EM::Hiredis.connect.pubsub
redis_sock.subscribe("doc:#{doc_id}")
authenticated = false
user = nil
client_id = nil
request.websocket do |ws|
websock = ws
ws.onopen do
warn "websocket open"
end
ws.onmessage do |msg|
data = MultiJson.load(msg.force_encoding("UTF-8"));
puts "JSON: #{msg}"
if data['type']=='auth'
if $redis.get("websocket:key:#{data['id']}") == data['key']
# Extend key expiry time
email = $redis.get "websocket:id:#{data['id']}"
user = User.get(email)
if (!doc.public)&&doc.user!=user
redis_sock.close_connection
ws.close_connection
return
end
authenticated = true
client_id = data['id']
$redis.expire "websocket:id:#{client_id}", 60*60*24*7
$redis.expire "websocket:key:#{client_id}", 60*60*24*7
user_prop = "doc:#{doc_id}:users"
user_raw = $redis.get(user_prop)
if !user_raw.nil?
users = MultiJson.load(user_raw)
else
users = {}
end
user_id = Digest::MD5.hexdigest(email.strip.downcase)
users[client_id]={id:user_id,email:email.strip}
$redis.set user_prop,MultiJson.dump(users)
$redis.publish "doc:#{doc_id}", MultiJson.dump({type:"client_bounce",client:client_id,data:MultiJson.dump({type:"new_user",id:client_id,user:{id:user_id,email:email.strip}})})
ws.send MultiJson.dump({type:'info',user_id:user_id,users:users})
puts "[Websocket Client Authed] ID: #{client_id}, Email: #{email}"
else
ws.close_connection
end
end
if authenticated
# Patch data
if data['type']=='data_patch'&&data.has_key?('patch')
doc = Document.get doc_id
doc.body = JsonDiffPatch.patch(doc.body,data['patch'])
doc.last_edit_time = Time.now
if !doc.save
puts("Save errors: #{doc.errors.inspect}")
end
$redis.publish "doc:#{doc_id}", MultiJson.dump({type:"client_bounce",client:client_id,data:msg})
# Sets the name
elsif data['type']=="name_update"
doc.name = data["name"]
doc.last_edit_time = Time.now
if !doc.save
puts("Save errors: #{doc.errors.inspect}")
end
$redis.publish "doc:#{doc_id}", MultiJson.dump({type:"client_bounce",client:client_id,data:msg})
# Loads scripts
elsif data['type']=="load_scripts"
msg = {type:'scripts', js:[],css:[]}
doc.assets.each do |asset|
arr = :js;
if asset.type=="javascript"
arr = :js
elsif asset.type=="stylesheet"
arr = :css
end
msg[arr].push asset.url
end
ws.send MultiJson.dump msg
elsif data['type']=='connection'
elsif data['type']=='config'
if data['action']=='set'
if data['property']=='public'
doc.public = data['value']
doc.save
else
if data['space']=='user'
user.config_set data['property'],data['value']
user.save
elsif data['space']=='document'
doc.config_set data['property'],data['value']
doc.save
end
end
elsif data['action']=='get'
if data['property']=='public'
ws.send MultiJson.dump({type: 'config',action: 'get', property:'public', value: doc.public})
else
if data['space']=='user'
ws.send MultiJson.dump({type: 'config', action: data['action'], space: data['space'], property: data['property'], value: user.config[data['property']],id:data['id']})
elsif data['space']=='document'
ws.send MultiJson.dump({type: 'config', action: data['action'], space: data['space'], property: data['property'], value: doc.config[data['property']],id:data['id']})
end
end
end
elsif data['type']=='client_event'
$redis.publish "doc:#{doc_id}", MultiJson.dump({type:"client_bounce",client:client_id,data:MultiJson.dump({type:"client_event",event:data['event'],from:client_id,data:data['data']})})
end
end
end
ws.onclose do
warn("websocket closed")
redis_sock.close_connection
if authenticated
user_prop = "doc:#{doc_id}:users"
users = MultiJson.load($redis.get(user_prop))
users.delete client_id
$redis.set user_prop,MultiJson.dump(users)
$redis.publish "doc:#{doc_id}", MultiJson.dump({type:"client_bounce",client:client_id,data:MultiJson.dump({type:"exit_user",id:client_id})})
end
ws.close_connection
end
redis_sock.on(:message) do |channel, message|
#puts "[#{client_id}]#{channel}: #{message}"
data = MultiJson.load(message)
if data['client']!=client_id
if data['type']=="client_bounce"
ws.send data['data']
end
end
end
end
end
=end
end
=begin
# This might be completely useless since it seems like you only have to structure for diff.
# Create a diff after replacing all HTML tags with unicode characters.
def diff_htmlToChars_ text1, text2
lineArray = [] # e.g. lineArray[4] == 'Hello\n'
lineHash = {} # e.g. lineHash['Hello\n'] == 4
# '\x00' is a valid character, but various debuggers don't like it.
# So we'll insert a junk entry to avoid generating a null character.
lineArray[0] = ''
#/**
#* Split a text into an array of strings. Reduce the texts to a string of
#* hashes where each Unicode character represents one line.
#* Modifies linearray and linehash through being a closure.
#* @param {string} text String to encode.
#* @return {string} Encoded string.
#* @private
#*/
def diff_linesToCharsMunge_ text, lineArray, lineHash
chars = ""+text
#// Walk the text, pulling out a substring for each line.
#// text.split('\n') would would temporarily double our memory footprint.
#// Modifying text would create many large strings to garbage collect.
lineStart = 0
lineEnd = 0
#// Keeping our own length variable is faster than looking it up.
lineArrayLength = lineArray.length;
while lineEnd <(text.length - 1)
prevLineEnd = lineEnd
if prevLineEnd==nil
prevLineEnd=0
end
lineStart = text.index('<',lineEnd)
if lineStart.nil?
lineEnd=nil
break
else
lineEnd = text.index('>', lineStart)
end
if lineEnd.nil?
lineEnd = text.length - 1
end
line = text[lineStart..lineEnd]
lineStart = lineEnd + 1
if lineHash.has_key? line
chars.gsub!(line,[lineHash[line]].pack("U"))
else
chars.gsub!(line,[lineArrayLength].pack("U"))
lineHash[line] = lineArrayLength
lineArray[lineArrayLength] = line
lineArrayLength +=1
end
end
return chars;
end
chars1 = diff_linesToCharsMunge_(text1, lineArray,lineHash)
chars2 = diff_linesToCharsMunge_(text2,lineArray,lineHash)
return {chars1: chars1, chars2: chars2, lineArray: lineArray}
end
def diff_charsToHTML_ diffs, lineArray
(0..(diffs.length-1)).each do |x|
chars = diffs[x][1];
text = ""+chars
(0..(lineArray-1)).each do |y|
text.gsub!([y].pack("U"),lineArray[y])
end
diffs[x][1] = text;
end
end
=end
end
# Fixed document naming
# It was the night before Christmas and all through the house, not a creature was coding: UTF-8, not even with a mouse.
require 'bundler'
require 'tempfile'
require 'digest/md5'
Bundler.require(:default)
require 'sinatra/sprockets-helpers'
require 'sinatra/asset_pipeline'
require 'sass'
$config = MultiJson.load(File.open('./config.json').read)
# Monkey patched Redis for easy caching.
# Read-through cache helper: return the stored value for +key+, or compute
# it with the block (which receives this connection), store it, and
# optionally set a TTL of +expire+ seconds.
class Redis
def cache(key, expire=nil)
if (value = get(key)).nil?
value = yield(self)
set(key, value)
expire(key, expire) if expire
value
else
value
end
end
end
# Ease of use connection to the redis server.
$redis = Redis.new :driver=>:hiredis, :host=>$config['redis']['host'], :port=>$config['redis']["port"]
# Primary datastore: PostgreSQL via DataMapper.
DataMapper.setup(:default, 'postgres://'+$config['postgres'])
#$adapter = DataMapper.setup(:default, :adapter=>'riak', :namespace=>'WebSync')
#class DataMapper::Adapters::RiakAdapter
# attr_accessor :riak
#end
#$riak = $adapter.riak
# Redis has issues with datamapper associations especially Many-to-many.
#$adapter = DataMapper.setup(:default, {:adapter => "redis"});
#$redis = $adapter.redis
#data = "window = {};"+File.read("./assets/javascripts/diff_match_patch.js") + File.read("./assets/javascripts/jsondiffpatch.min.js")
#$jsondiffpatch = ExecJS.compile data
# Alias so the sinatra sprockets helpers can find the Sprockets constant.
Sinatra::Sprockets = Sprockets
=begin
module BJSONDiffPatch
def diff object1, object2
return $jsondiffpatch.eval "jsondiffpatch.diff(#{MultiJson.dump(object1)},#{MultiJson.dump(object2)})"
end
def patch object1, delta
return $jsondiffpatch.eval "jsondiffpatch.patch(#{MultiJson.dump(object1)},#{MultiJson.dump(delta)})"
end
end
class JsonDiffPatch
extend BJSONDiffPatch
end
=end
# Serialize one JSON DOM node (string-keyed Hash) back to an HTML
# fragment: text nodes return their textContent; elements emit their
# attributes (JSON-quoted) and recurse into childNodes.
def json_to_html_node obj
html = "";
if obj['name']=="#text"
return obj['textContent']
end
html+="<"+obj['name']
# Every key that is not structural metadata is treated as an attribute.
obj.each do |k,v|
if k!="name"&&k!="textContent"&&k!="childNodes"
html+=" "+k+"="+MultiJson.dump(v)
end
end
if obj.has_key? 'childNodes'
html+=">";
obj['childNodes'].each do |elem|
html+= json_to_html_node(elem)
end
html+="</"+obj['name']+">"
else
# Element without a childNodes key: emit a self-closing tag.
html+="/>"
end
return html
end
# Serialize an array of JSON DOM nodes into a single HTML string.
def json_to_html obj
html = ""
obj.each do |elem|
html += json_to_html_node(elem)
end
return html
end
# Convert a Nokogiri node to the JSON-ready Hash form: text nodes become
# {name: "#text", textContent: ...}; elements get an upcased :name, one
# entry per attribute, and a 'childNodes' array when children exist.
# NOTE(review): mixes a symbol :name with string attribute/'childNodes'
# keys — consistent only after a JSON round-trip; confirm callers.
def node_to_json html
if html.name=="text"
return { name: "#text", textContent: html.to_s}
end
json = {
name: html.name.upcase
}
# `defined?` acts as a respond_to? check for attribute-less node types.
if defined? html.attributes
html.attributes.each do |name, attr|
json[attr.name]=attr.value
end
end
if html.children.length > 0
json['childNodes']=[]
html.children.each do |child|
json['childNodes'].push( node_to_json(child) )
end
end
return json
end
# Parse raw HTML with Nokogiri and convert each top-level document child
# into the JSON node structure.
def html_to_json html
dom = Nokogiri::HTML(html)
json = []
dom.document.children.each do |elem|
json.push node_to_json(elem)
end
return json
end
# DataMapper model: a user-owned document stored as a JSON DOM tree.
class Document
include DataMapper::Resource
property :id, Serial
property :name, String
#property :body, Text
# :lazy so the potentially large body is only fetched when accessed.
property :body, Json, :default=>{}, :lazy=>true
property :created, DateTime
property :last_edit_time, DateTime
# Public documents are viewable/downloadable by non-owners.
property :public, Boolean, :default=>false
property :config, Json, :default=>{}
has n, :assets, :through => Resource
has n, :changes
belongs_to :user
# Copy-mutate-reassign so DataMapper sees the attribute as dirty.
def config_set key, value
n_config = config.dup
n_config[key]=value
self.config= n_config
end
end
# A single recorded edit (JSON patch) applied to a document.
class Change
include DataMapper::Resource
property :id, Serial
property :time, DateTime
property :patch, Json
belongs_to :user
belongs_to :document
end
# Assets could be javascript or css
# Named bundle of assets attached to new documents as a set.
class AssetGroup
include DataMapper::Resource
property :id, Serial
property :name, String
property :description, Text
has n, :assets, :through => Resource
end
# A client-side resource (script or stylesheet) a document can load.
class Asset
include DataMapper::Resource
property :id, Serial
property :name, String
property :description, Text
property :url, String
# Discriminator stores the concrete subclass name (Javascript/Stylesheet).
property :type, Discriminator
has n, :documents, :through => Resource
has n, :asset_groups, :through => Resource
end
# Single-table-inheritance subclasses distinguished by the :type column.
class Javascript < Asset; end
class Stylesheet < Asset; end
# Account record; the email address is the primary key.
class User
include DataMapper::Resource
property :email, String, :key=>true
# BCryptHash hashes on assignment; == compares a plaintext candidate.
property :password, BCryptHash
property :group, String, :default=>'user'
property :anonymous, Boolean, :default=> false
has n, :documents
has n, :changes
property :config, Json, :default=>{}
# Same copy-mutate-reassign pattern as Document#config_set.
def config_set key, value
n_config = config.dup
n_config[key]=value
self.config= n_config
end
end
class AnonymousUser < User; end
# Finalize model definitions and additively migrate the schema.
DataMapper.finalize
DataMapper.auto_upgrade!
class WebSync < Sinatra::Base
register Sinatra::Synchrony
use Rack::Logger
helpers do
def logger
request.logger
end
def current_user
if logged_in?
return User.get(session['user'])
end
nil
end
def admin_required
if not admin?
redirect "/"
end
end
def admin?
c_user = current_user
not c_user.nil? and c_user.group=="admin"
end
def logged_in?
(!session['userhash'].nil?)&&$redis.get('userhash:'+session['userhash'])==session['user']
end
def login_required
if !logged_in?
redirect "/login?#{env["REQUEST_PATH"]}"
end
end
def register email, pass
email.downcase!
if User.get(email).nil?
user = User.create({:email=>email,:password=>pass})
authenticate email, pass
return user
elsif authenticate email, pass
return current_user
end
nil
end
def authenticate email, pass, expire=nil
email.downcase!
user = User.get(email)
if user.nil?
return false
end
if user.password==pass
session_key = SecureRandom.uuid
$redis.set("userhash:#{session_key}",email)
session['userhash']=session_key
session['user']=email
if !expire.nil?
$redis.expire("userhash:#{session_key}",expire)
end
return true
end
false
end
def logout
$redis.del "userhash:#{session['userhash']}"
session['userhash']=nil
session['user']=nil
end
def render_login_button
if logged_in?
return '<a href="/logout" title="Sign Out"><i class="icon-signout icon-large"></i><span class="hidden-phone"> Sign Out</span></a>'
else
return '<a href="/login" title="Sign In"><i class="icon-signin icon-large"></i><span class="hidden-phone"> Sign In</span></a>'
end
end
end
configure :development do
Bundler.require(:development)
set :assets_debug, true
use PryRescue::Rack
end
configure :production do
Bundler.require(:production)
set :assets_css_compressor, :sass
set :assets_js_compressor, :closure
set :assets_precompile, %w(*.css *.scss bundle-norm.js bundle-edit.js *.png *.favico *.jpg *.svg *.eot *.ttf *.woff)
set :assets_precompile_no_digest, %w(*.js)
end
configure do
use Rack::Session::Cookie, :expire_after => 60*60*24*7, :secret => $config['session_secret']
enable :sessions
set :session_secret, $config['session_secret']
set :server, 'thin'
set :sockets, []
set :template_engine, :erb
register Sinatra::AssetPipeline
sprockets.append_path File.join(root, 'assets', 'stylesheets')
sprockets.append_path File.join(root, 'assets', 'javascripts')
sprockets.append_path File.join(root, 'assets', 'images')
end
$dmp = DiffMatchPatch.new
#Javascript.first_or_create(:name=>'Tables',:description=>'Table editing support',:url=>'/assets/tables.js')
#Javascript.first_or_create(:name=>'Chat',:description=>'Talk with other users!',:url=>'/assets/chat.js')
if Asset.count == 0
puts "[DATABASE] Creating default assets."
$config["default_assets"].each do |asset|
a = Javascript.create(name:asset["name"],description:asset["description"],url:asset["url"])
puts " :: Creating: #{asset["name"]}, Success: #{a.save}"
end
end
if AssetGroup.count == 0
puts "[DATABASE] Creating default asset groups."
$config["default_asset_groups"].each do |group|
g = AssetGroup.create(name:group["name"],description:group["description"])
group["assets"].each do |asset|
a = Asset.first(name:asset)
if not a.nil?
g.assets << a
end
end
puts " :: Creating: #{g.name}, Success: #{g.save}"
end
end
get '/login' do
if !logged_in?
erb :login
else
redirect '/'
end
end
post '/login' do
redirect_loc = '/'
if params[:redirect]!=''
redirect_loc = params[:redirect]
end
if authenticate params[:email],params[:password]
redirect redirect_loc
else
redirect "/login?#{redirect_loc}"
end
end
get '/register' do
redirect '/login'
end
post '/register' do
if register params[:email],params[:password]
redirect '/'
else
redirect '/login'
end
end
get '/logout' do
if logged_in?
logout
end
redirect '/login'
end
not_found do
erb :not_found
end
#get '/assets/*.css' do
# content_type 'text/css'
# assets_environment[params[:splat][0]+'.css'].to_s
#end
#get '/assets/*.js' do
# content_type 'text/javascript'
# assets_environment[params[:splat][0]+'.js'].to_s
#end
get '/' do
@javascripts = []
if logged_in?
erb :file_list
else
erb :index
end
end
get '/documentation' do
erb :documentation
end
get '/admin' do
admin_required
erb :admin
end
get '/admin/assets' do
admin_required
erb :admin_assets
end
get '/admin/assets/:asset/edit' do
admin_required
erb :admin_assets_edit
end
get '/admin/assets/:asset/delete' do
admin_required
ass = Asset.get(params[:asset])
if not ass.nil?
ass.destroy
end
redirect '/admin/assets'
end
post '/admin/assets/:asset/edit' do
admin_required
ass = Asset.get(params[:asset])
if not ass.nil?
ass.name = params[:name]
ass.description = params[:desc]
ass.url = params[:url]
ass.type = params[:type]
ass.save
else
n_ass = Asset.create(:name=>params[:name],:description=>params[:desc],:url=>params[:url], :type=>params[:type])
n_ass.save
end
redirect '/admin/assets'
end
get '/admin/asset_groups/:asset/edit' do
admin_required
erb :admin_asset_groups_edit
end
get '/admin/asset_groups/:asset_group/:asset/add' do
ass = AssetGroup.get(params[:asset_group])
ass.assets << Asset.get(params[:asset])
ass.save
redirect "/admin/asset_groups/#{params[:asset_group]}/edit"
end
get '/admin/asset_groups/:asset_group/:asset/remove' do
ass = AssetGroup.get(params[:asset_group])
ass.assets.each do |a|
if a.id==params[:asset].to_i
ass.assets.delete a
end
end
ass.save
redirect "/admin/asset_groups/#{params[:asset_group]}/edit"
end
get '/admin/asset_groups/:asset/delete' do
admin_required
ass = AssetGroup.get(params[:asset])
if not ass.nil?
ass.assets = []
ass.save
ass.destroy
end
redirect '/admin/assets'
end
post '/admin/asset_groups/:asset/edit' do
admin_required
ass = AssetGroup.get(params[:asset])
if not ass.nil?
ass.name = params[:name]
ass.description = params[:desc]
ass.save
else
n_ass = AssetGroup.create(:name=>params[:name],:description=>params[:desc])
n_ass.save
end
redirect '/admin/assets'
end
get '/new/:group' do
login_required
group = AssetGroup.get(params[:group])
doc = Document.create(
:name => "Unnamed #{group.name}",
:body => {body:[]},
:created => Time.now,
:last_edit_time => Time.now,
:user => current_user
)
doc.assets = group.assets
doc.save
redirect "/#{doc.id}/edit"
end
get '/upload' do
login_required
erb :upload
end
post '/upload' do
if params[:file]==nil
redirect "/upload"
end
tempfile = params[:file][:tempfile]
filename = params[:file][:filename]
filetype = params[:file][:type]
content = nil
# TODO: Split upload/download into its own external server. Right now Unoconv is blocking. Also issues may arise if multiple copies of LibreOffice are running on the same server. Should probably use a single server instance of LibreOffice
`unoconv -f html #{tempfile.path}`
exit_status = $?.to_i
if exit_status == 0
content = File.read(tempfile.path+".html")
else
if filetype=="application/pdf"
content = PDFToHTMLR::PdfFilePath.new(tempfile.path).convert.force_encoding("UTF-8")
elsif filetype=='text/html'
content = File.read(tempfile.path)
elsif filename.split('.').pop=='docx'
# This pretty much just reads plain text...
content = Docx::Document.open(tempfile.path).to_html.force_encoding("UTF-8")
else
logger.info "Unoconv failed and Unrecognized filetype: #{params[:file][:type]}"
end
end
if content!=nil
# TODO: Upload into JSON format
doc = Document.create(
:name => filename,
:body => {body:html_to_json(content)},
:created => Time.now,
:last_edit_time => Time.now,
:user => current_user
)
doc.assets = AssetGroup.get(1).assets
doc.save
redirect "/#{doc.id}/edit"
else
redirect "/"
end
end
# Export a document: render its JSON body to HTML in a tempfile, convert
# with unoconv, and stream the result back as an attachment.
get '/:doc/download/:format' do
  # Whitelist of unoconv output formats; anything else goes home.
  if !%w(bib doc docx doc6 doc95 docbook html odt ott ooxml pdb pdf psw rtf latex sdw sdw4 sdw3 stw sxw text txt vor vor4 vor3 xhtml bmp emf eps gif jpg met odd otg pbm pct pgm png ppm ras std svg svm swf sxd sxd3 sxd5 tiff wmf xpm odg odp pot ppt pwp sda sdd sdd3 sdd4 sti stp sxi vor5 csv dbf dif ods pts pxl sdc sdc4 sdc3 slk stc sxc xls xls5 xls95 xlt xlt5).include?(params[:format])
    redirect '/'
  end
  login_required
  doc_id = params[:doc]
  doc = Document.get doc_id
  # Private documents may only be downloaded by their owner.
  if (!doc.public)&&doc.user!=current_user
    redirect '/'
  end
  file = Tempfile.new('websync-export')
  file.write( json_to_html( doc.body['body'] ) )
  file.close
  `unoconv -f #{params[:format]} #{file.path}`
  if $?.to_i==0
    export_file = file.path+"."+params[:format]
    # Fix: interpolate the exported path (the original ran `file` on the
    # literal string "export_file") and use the canonical Content-Type
    # header name so Rack/clients recognize it.
    response.headers['Content-Type'] = `file --mime -b #{export_file}`.split(';')[0]
    attachment(doc.name+'.'+params[:format])
    response.write(File.read(export_file))
  else
    redirect '/'
  end
  file.unlink
end
get '/:doc/json' do
login_required
doc_id = params[:doc]
doc = Document.get doc_id
if (!doc.public)&&doc.user!=current_user
redirect '/'
end
content_type 'application/json'
MultiJson.dump(doc.body)
end
get '/:doc/delete' do
login_required
doc_id = params[:doc]
doc = Document.get doc_id
if doc.user==current_user
doc.destroy!
end
redirect '/'
end
get '/:doc/:op' do
doc_id = params[:doc]
doc = Document.get doc_id
if doc.nil?
redirect 'notfound'
end
#if !request.websocket?
login_required
if (!doc.public)&&doc.user!=current_user
redirect '/'
end
@javascripts = [
#'/assets/bundle-edit.js'
]
@doc = doc
if !@doc.nil?
@client_id = $redis.incr("clientid")
@client_key = SecureRandom.uuid
$redis.set "websocket:id:#{@client_id}",current_user.email
$redis.set "websocket:key:#{@client_id}", @client_key
$redis.expire "websocket:id:#{@client_id}", 60*60*24*7
$redis.expire "websocket:key:#{@client_id}", 60*60*24*7
@no_menu = true
@edit = true
erb :edit
else
redirect '/'
end
=begin
# Websocket edit
else
#TODO: Authentication for websockets
redis_sock = EM::Hiredis.connect.pubsub
redis_sock.subscribe("doc:#{doc_id}")
authenticated = false
user = nil
client_id = nil
request.websocket do |ws|
websock = ws
ws.onopen do
warn "websocket open"
end
ws.onmessage do |msg|
data = MultiJson.load(msg.force_encoding("UTF-8"));
puts "JSON: #{msg}"
if data['type']=='auth'
if $redis.get("websocket:key:#{data['id']}") == data['key']
# Extend key expiry time
email = $redis.get "websocket:id:#{data['id']}"
user = User.get(email)
if (!doc.public)&&doc.user!=user
redis_sock.close_connection
ws.close_connection
return
end
authenticated = true
client_id = data['id']
$redis.expire "websocket:id:#{client_id}", 60*60*24*7
$redis.expire "websocket:key:#{client_id}", 60*60*24*7
user_prop = "doc:#{doc_id}:users"
user_raw = $redis.get(user_prop)
if !user_raw.nil?
users = MultiJson.load(user_raw)
else
users = {}
end
user_id = Digest::MD5.hexdigest(email.strip.downcase)
users[client_id]={id:user_id,email:email.strip}
$redis.set user_prop,MultiJson.dump(users)
$redis.publish "doc:#{doc_id}", MultiJson.dump({type:"client_bounce",client:client_id,data:MultiJson.dump({type:"new_user",id:client_id,user:{id:user_id,email:email.strip}})})
ws.send MultiJson.dump({type:'info',user_id:user_id,users:users})
puts "[Websocket Client Authed] ID: #{client_id}, Email: #{email}"
else
ws.close_connection
end
end
if authenticated
# Patch data
if data['type']=='data_patch'&&data.has_key?('patch')
doc = Document.get doc_id
doc.body = JsonDiffPatch.patch(doc.body,data['patch'])
doc.last_edit_time = Time.now
if !doc.save
puts("Save errors: #{doc.errors.inspect}")
end
$redis.publish "doc:#{doc_id}", MultiJson.dump({type:"client_bounce",client:client_id,data:msg})
# Sets the name
elsif data['type']=="name_update"
doc.name = data["name"]
doc.last_edit_time = Time.now
if !doc.save
puts("Save errors: #{doc.errors.inspect}")
end
$redis.publish "doc:#{doc_id}", MultiJson.dump({type:"client_bounce",client:client_id,data:msg})
# Loads scripts
elsif data['type']=="load_scripts"
msg = {type:'scripts', js:[],css:[]}
doc.assets.each do |asset|
arr = :js;
if asset.type=="javascript"
arr = :js
elsif asset.type=="stylesheet"
arr = :css
end
msg[arr].push asset.url
end
ws.send MultiJson.dump msg
elsif data['type']=='connection'
elsif data['type']=='config'
if data['action']=='set'
if data['property']=='public'
doc.public = data['value']
doc.save
else
if data['space']=='user'
user.config_set data['property'],data['value']
user.save
elsif data['space']=='document'
doc.config_set data['property'],data['value']
doc.save
end
end
elsif data['action']=='get'
if data['property']=='public'
ws.send MultiJson.dump({type: 'config',action: 'get', property:'public', value: doc.public})
else
if data['space']=='user'
ws.send MultiJson.dump({type: 'config', action: data['action'], space: data['space'], property: data['property'], value: user.config[data['property']],id:data['id']})
elsif data['space']=='document'
ws.send MultiJson.dump({type: 'config', action: data['action'], space: data['space'], property: data['property'], value: doc.config[data['property']],id:data['id']})
end
end
end
elsif data['type']=='client_event'
$redis.publish "doc:#{doc_id}", MultiJson.dump({type:"client_bounce",client:client_id,data:MultiJson.dump({type:"client_event",event:data['event'],from:client_id,data:data['data']})})
end
end
end
ws.onclose do
warn("websocket closed")
redis_sock.close_connection
if authenticated
user_prop = "doc:#{doc_id}:users"
users = MultiJson.load($redis.get(user_prop))
users.delete client_id
$redis.set user_prop,MultiJson.dump(users)
$redis.publish "doc:#{doc_id}", MultiJson.dump({type:"client_bounce",client:client_id,data:MultiJson.dump({type:"exit_user",id:client_id})})
end
ws.close_connection
end
redis_sock.on(:message) do |channel, message|
#puts "[#{client_id}]#{channel}: #{message}"
data = MultiJson.load(message)
if data['client']!=client_id
if data['type']=="client_bounce"
ws.send data['data']
end
end
end
end
end
=end
end
=begin
# This might be completely useless since it seems like you only have to structure for diff.
# Create a diff after replacing all HTML tags with unicode characters.
def diff_htmlToChars_ text1, text2
lineArray = [] # e.g. lineArray[4] == 'Hello\n'
lineHash = {} # e.g. lineHash['Hello\n'] == 4
# '\x00' is a valid character, but various debuggers don't like it.
# So we'll insert a junk entry to avoid generating a null character.
lineArray[0] = ''
#/**
#* Split a text into an array of strings. Reduce the texts to a string of
#* hashes where each Unicode character represents one line.
#* Modifies linearray and linehash through being a closure.
#* @param {string} text String to encode.
#* @return {string} Encoded string.
#* @private
#*/
def diff_linesToCharsMunge_ text, lineArray, lineHash
chars = ""+text
#// Walk the text, pulling out a substring for each line.
#// text.split('\n') would would temporarily double our memory footprint.
#// Modifying text would create many large strings to garbage collect.
lineStart = 0
lineEnd = 0
#// Keeping our own length variable is faster than looking it up.
lineArrayLength = lineArray.length;
while lineEnd <(text.length - 1)
prevLineEnd = lineEnd
if prevLineEnd==nil
prevLineEnd=0
end
lineStart = text.index('<',lineEnd)
if lineStart.nil?
lineEnd=nil
break
else
lineEnd = text.index('>', lineStart)
end
if lineEnd.nil?
lineEnd = text.length - 1
end
line = text[lineStart..lineEnd]
lineStart = lineEnd + 1
if lineHash.has_key? line
chars.gsub!(line,[lineHash[line]].pack("U"))
else
chars.gsub!(line,[lineArrayLength].pack("U"))
lineHash[line] = lineArrayLength
lineArray[lineArrayLength] = line
lineArrayLength +=1
end
end
return chars;
end
chars1 = diff_linesToCharsMunge_(text1, lineArray,lineHash)
chars2 = diff_linesToCharsMunge_(text2,lineArray,lineHash)
return {chars1: chars1, chars2: chars2, lineArray: lineArray}
end
def diff_charsToHTML_ diffs, lineArray
(0..(diffs.length-1)).each do |x|
chars = diffs[x][1];
text = ""+chars
(0..(lineArray-1)).each do |y|
text.gsub!([y].pack("U"),lineArray[y])
end
diffs[x][1] = text;
end
end
=end
end
|
5e907e05-2d48-11e5-9b65-7831c1c36510
5e967e75-2d48-11e5-965b-7831c1c36510
5e967e75-2d48-11e5-965b-7831c1c36510 |
# Entry point for the Simple Tasks project: reads the sample data file
# and converts its lines into week/task objects.
class Main
  # Print the startup banner.
  def start
    puts("Welcome to Simple Tasks Project")
  end
  # Clear the terminal on either Unix ("clear") or Windows ("cls").
  def clear_screen
    system "clear" or system "cls"
  end
  # Read the sample data file and print each line; stores the lines in
  # @fileLines for the conversion step.
  def read_from_file
    # Fix: resolve the path relative to this source file (__FILE__) so
    # the program works from any working directory; the original used a
    # cwd-relative path that broke when launched elsewhere.
    file_relative_path = "Samples/realData.txt"
    filePath = File.join(File.dirname(__FILE__), file_relative_path)
    fileReader = FileReader.new(filePath)
    fileReader.read_file
    puts("Printing file content".colorize(:green))
    #p(fileReader.lines) # lines are in the array
    fileReader.lines.each do |line|
      p(line)
    end
    @fileLines = fileReader.lines
  end
  # Convert the previously read lines into a Week via WeekConverter.
  def convert_fileLines_to_objects
    # TODO: assumption that input file has only one week - this will be changed later
    @weekConverter = WeekConverter.new(@fileLines)
    @weekConverter.convert_lines_to_week
    puts("Printing week for test purposes...")
    @weekConverter.print_week
  end
  # Print the shutdown message. (Named `end` — a keyword used as a method
  # name; callable only with an explicit receiver.)
  def end
    puts("Program execution finished".colorize(:green))
  end
end
Allows running the application from any path
# Entry point for the Simple Tasks project: reads the sample data file
# and converts its lines into week/task objects.
class Main
# Print the startup banner.
def start
puts("Welcome to Simple Tasks Project")
end
# Clear the terminal on either Unix ("clear") or Windows ("cls").
def clear_screen
system "clear" or system "cls"
end
# Read the sample data file (resolved relative to this source file so the
# program can run from any directory) and print each line; stores the
# lines in @fileLines for the conversion step.
def read_from_file
file_relative_path = "/Samples/realData.txt"
path = File.join(File.dirname(__FILE__), file_relative_path) # Directory of current file is stored in __FILE__
fileReader = FileReader.new(path)
fileReader.read_file
puts("Printing file content".colorize(:green))
#p(fileReader.lines) # lines are in the array
fileReader.lines.each do |line|
p(line)
end
@fileLines = fileReader.lines
end
# Convert the previously read lines into a Week via WeekConverter.
def convert_fileLines_to_objects
# TODO: assumption that input file has only one week - this will be changed later
@weekConverter = WeekConverter.new(@fileLines)
@weekConverter.convert_lines_to_week
puts("Printing week for test purposes...")
@weekConverter.print_week
end
# Print the shutdown message. (Named `end` — a keyword used as a method
# name; callable only with an explicit receiver.)
def end
puts("Program execution finished".colorize(:green))
end
end
|
73e3d617-2d48-11e5-b7fe-7831c1c36510
73e9de8a-2d48-11e5-b0bb-7831c1c36510
73e9de8a-2d48-11e5-b0bb-7831c1c36510 |
58392d3a-2d48-11e5-b209-7831c1c36510
583ee2de-2d48-11e5-9690-7831c1c36510
583ee2de-2d48-11e5-9690-7831c1c36510 |
#!/usr/bin/env ruby
require "sinatra"
require_relative "utils"
require_relative "printing_report"
require "yaml"
require "sequel"
require "pathname"
$config = YAML::load_file("./config.yaml")
DB = Sequel.sqlite $config["database"]
$cachedJobs = []
# Refresh the cached printer-job list: drop cached jobs no longer in the
# queue, then append newly seen jobs while preserving cache order.
def updateJobs
  newJobs = Utils.getJobs $config["printer_name"]
  $cachedJobs.keep_if {|job| newJobs.include? job }
  # Fix: the original referenced the undefined global $newJobs (nil),
  # which raises NoMethodError on `-` the first time any route runs.
  $cachedJobs = $cachedJobs + (newJobs - $cachedJobs)
end
use Rack::Auth::Basic do |username, password|
[username, password] == [$config["username"], $config["password"]]
end
get '/api/list' do
updateJobs
$cachedJobs.to_json
end
get '/api/resume/all' do
updateJobs
$cachedJobs.each { |job|
job.resume
PrintingReport.logPrintJob DB, job, (job.pageCount * $config["price_per_page"] + $config["price_per_print"]) if $config["log_printing"]
}
end
get '/api/resume/:jobid' do |jobid|
updateJobs
job = $cachedJobs.find { |x| x.id == jobid.to_i }
return "0" if job.nil?
job.resume
# Deviamos guardar no BD agora, no resume do job, em outro momento? Só Deus sabe.
PrintingReport.logPrintJob DB, job, (job.pageCount * $config["price_per_page"] + $config["price_per_print"]) if $config["log_printing"]
return "1"
end
get '/api/cancel/all' do
updateJobs
$cachedJobs.each { |job| job.cancel }
end
get '/api/cancel/:jobid' do |jobid|
updateJobs
job = $cachedJobs.find { |x| x.id == jobid.to_i }
return "0" if job.nil?
job.cancel
return "1"
end
get '/api/price/page' do
"#{$config["price_per_page"]}"
end
get '/api/price/print' do
"#{$config["price_per_print"]}"
end
get '/api/logs/all' do
PrintingReport.listPrintLogs(DB).to_json
end
get '/api/logs/today' do
PrintingReport.listPrintLogsOnDay(DB, Date.today).to_json
end
get '/api/logs/day/:year/:month/:day' do |year, month, day|
year = year.to_i
month = month.to_i
day = day.to_i
PrintingReport.listPrintLogsOnDay(DB, Date.new(year, month, day)).to_json
end
get '/api/logs/daysRange/:startYear/:startMonth/:startDay/:endYear/:endMonth/:endDay' do |startYear, startMonth, startDay, endYear, endMonth, endDay|
# Isso é horrivel.
startYear, startMonth, startDay, endYear, endMonth, endDay = [startYear, startMonth, startDay, endYear, endMonth, endDay].map { |i| i.to_i }
startDate = Date.new startYear, startMonth, startDay
endDate = Date.new endYear, endMonth, endDay
# É preciso adicionar um dia para listar os trabalhos que aconteceram naquele dia, até 23:59:59.
PrintingReport.listPrintLogsBetween(DB, startDate, endDate + 1).to_json
end
get '/api/images/getRandomBackground' do
picture = (Dir.glob(File.join settings.public_folder, 'images/*.png') + Dir.glob(File.join settings.public_folder, 'images/*.jpg')).shuffle.sample
Pathname.new(picture).relative_path_from(Pathname.new settings.public_folder).to_s
end
get '/' do
send_file File.join(settings.public_folder, 'index.html')
end
get '/logs' do
send_file File.join(settings.public_folder, 'logs.html')
end
top_retardv2.exe
#!/usr/bin/env ruby
require "sinatra"
require_relative "utils"
require_relative "printing_report"
require "yaml"
require "sequel"
require "pathname"
$config = YAML::load_file("./config.yaml")
DB = Sequel.sqlite $config["database"]
$cachedJobs = []
# Refresh the cached printer-job list: drop cached jobs no longer in the
# queue, then append newly seen jobs while preserving cache order.
def updateJobs
newJobs = Utils.getJobs $config["printer_name"]
$cachedJobs.keep_if {|job| newJobs.include? job }
$cachedJobs = $cachedJobs + (newJobs - $cachedJobs)
end
use Rack::Auth::Basic do |username, password|
[username, password] == [$config["username"], $config["password"]]
end
get '/api/list' do
updateJobs
$cachedJobs.to_json
end
get '/api/resume/all' do
updateJobs
$cachedJobs.each { |job|
job.resume
PrintingReport.logPrintJob DB, job, (job.pageCount * $config["price_per_page"] + $config["price_per_print"]) if $config["log_printing"]
}
end
get '/api/resume/:jobid' do |jobid|
updateJobs
job = $cachedJobs.find { |x| x.id == jobid.to_i }
return "0" if job.nil?
job.resume
# Deviamos guardar no BD agora, no resume do job, em outro momento? Só Deus sabe.
PrintingReport.logPrintJob DB, job, (job.pageCount * $config["price_per_page"] + $config["price_per_print"]) if $config["log_printing"]
return "1"
end
get '/api/cancel/all' do
updateJobs
$cachedJobs.each { |job| job.cancel }
end
get '/api/cancel/:jobid' do |jobid|
updateJobs
job = $cachedJobs.find { |x| x.id == jobid.to_i }
return "0" if job.nil?
job.cancel
return "1"
end
get '/api/price/page' do
"#{$config["price_per_page"]}"
end
get '/api/price/print' do
"#{$config["price_per_print"]}"
end
get '/api/logs/all' do
PrintingReport.listPrintLogs(DB).to_json
end
get '/api/logs/today' do
PrintingReport.listPrintLogsOnDay(DB, Date.today).to_json
end
get '/api/logs/day/:year/:month/:day' do |year, month, day|
year = year.to_i
month = month.to_i
day = day.to_i
PrintingReport.listPrintLogsOnDay(DB, Date.new(year, month, day)).to_json
end
get '/api/logs/daysRange/:startYear/:startMonth/:startDay/:endYear/:endMonth/:endDay' do |startYear, startMonth, startDay, endYear, endMonth, endDay|
# Isso é horrivel.
startYear, startMonth, startDay, endYear, endMonth, endDay = [startYear, startMonth, startDay, endYear, endMonth, endDay].map { |i| i.to_i }
startDate = Date.new startYear, startMonth, startDay
endDate = Date.new endYear, endMonth, endDay
# É preciso adicionar um dia para listar os trabalhos que aconteceram naquele dia, até 23:59:59.
PrintingReport.listPrintLogsBetween(DB, startDate, endDate + 1).to_json
end
get '/api/images/getRandomBackground' do
picture = (Dir.glob(File.join settings.public_folder, 'images/*.png') + Dir.glob(File.join settings.public_folder, 'images/*.jpg')).shuffle.sample
Pathname.new(picture).relative_path_from(Pathname.new settings.public_folder).to_s
end
get '/' do
send_file File.join(settings.public_folder, 'index.html')
end
get '/logs' do
send_file File.join(settings.public_folder, 'logs.html')
end
|
class Mesa < Formula
desc "Cross-driver middleware"
homepage "https://dri.freedesktop.org"
url "https://mesa.freedesktop.org/archive/17.0.0/mesa-17.0.0.tar.xz"
sha256 "39db3d59700159add7f977307d12a7dfe016363e760ad82280ac4168ea668481"
revision 1
bottle do
sha256 "c4261b5848761366779d18b7ad50c86798ed1513249da942a7072623823822e3" => :x86_64_linux
end
option "without-test", "Skip compile-time tests"
option "with-static", "Build static libraries (not recommended)"
depends_on "pkg-config" => :build
depends_on :python => :build
depends_on "flex" => :build
depends_on "bison" => :build
depends_on "libtool" => :build
depends_on :x11
depends_on "linuxbrew/xorg/libdrm"
depends_on "systemd" # provides libudev <= needed by "gbm"
depends_on "linuxbrew/xorg/libsha1"
depends_on "llvm"
depends_on "libelf" # radeonsi requires libelf when using llvm
depends_on "linuxbrew/xorg/libomxil-bellagio"
depends_on "linuxbrew/xorg/wayland" => :recommended
depends_on "valgrind" => :recommended
depends_on "linuxbrew/xorg/libglvnd" => :optional
depends_on "linuxbrew/xorg/libva" => :recommended
depends_on "linuxbrew/xorg/libvdpau"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "linuxbrew/xorg/libpthread-stubs" => :build
#
# There is a circular dependency between Mesa and libva:
# libva should be installed:
# 1. before Mesa with "disable-egl" and "disable-egl" options [libva formula]
# 2. after Mesa without the above two options [this formula]
#
resource "mako" do
url "https://files.pythonhosted.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz"
sha256 "48559ebd872a8e77f92005884b3d88ffae552812cdf17db6768e5c3be5ebbe0d"
end
resource "libva" do
url "https://www.freedesktop.org/software/vaapi/releases/libva/libva-1.7.3.tar.bz2"
sha256 "22bc139498065a7950d966dbdb000cad04905cbd3dc8f3541f80d36c4670b9d9"
end
patch :p1 do
url "https://gist.githubusercontent.com/rwhogg/088a3e771be0f0556d2286c034544d18/raw/efd587120964745a61a2571a431ffc38341dc37c/mesa-patch-from-linux-from-scratch.patch"
sha256 "53492ca476e3df2de210f749983e17de4bec026a904db826acbcbd1ef83e71cd"
end
def install
# Reduce memory usage below 4 GB for Circle CI.
ENV["MAKEFLAGS"] = "-j8" if ENV["CIRCLECI"]
# inreplace "configure.ac", "$SED -i -e 's/brw_blorp.cpp/brw_blorp.c/'", "# $SED -i -e 's/brw_blorp.cpp/brw_blorp.c/'"
ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python2.7/site-packages"
resource("mako").stage do
system "python", *Language::Python.setup_install_args(libexec/"vendor")
end
args = %W[
CFLAGS=#{ENV.cflags}
CXXFLAGS=#{ENV.cflags}
--disable-silent-rules
--disable-dependency-tracking
--prefix=#{prefix}
--sysconfdir=#{etc}
--localstatedir=#{var}
--enable-texture-float
--enable-gles1
--enable-gles2
--enable-osmesa
--enable-xa
--enable-gbm
--with-egl-platforms=drm,x11,surfaceless#{build.with?("wayland") ? ",wayland" : ""}
--with-gallium-drivers=i915,nouveau,r300,r600,radeonsi,svga,swrast,swr
--enable-glx-tls
--enable-dri
--enable-dri3
--enable-gallium-tests
--enable-glx
--enable-opengl
--enable-shared-glapi
--enable-va
--enable-vdpau
--enable-xvmc
--disable-llvm-shared-libs
--with-dri-drivers=i965,nouveau,radeon,r200,swrast
--with-sha1=libsha1
--enable-gallium-llvm
--enable-sysfs
]
# enable-opencl => needs libclc
# enable-gallium-osmesa => mutually exclusive with enable-osmesa
args << "--enable-static=#{build.with?("static") ? "yes" : "no"}"
args << "--enable-libglvnd" if build.with? "libglvnd"
inreplace "bin/ltmain.sh", /.*seems to be moved"/, '#\1seems to be moved"'
system "./autogen.sh", *args
system "make"
system "make", "-C", "xdemos", "DEMOS_PREFIX=#{prefix}"
system "make", "check" if build.with?("test")
system "make", "install"
system "make", "-C", "xdemos", "DEMOS_PREFIX=#{prefix}", "install"
if build.with?("libva")
resource("libva").stage do
args = %W[
--prefix=#{Formula["libva"].opt_prefix}
--sysconfdir=#{etc}
--localstatedir=#{var}
--disable-dependency-tracking
--disable-silent-rules
]
# Be explicit about the configure flags
args << "--enable-static=#{build.with?("static") ? "yes" : "no"}"
### Set environment flags:
# $ pkg-config --cflags egl | tr ' ' '\n'
# $ pkg-config --cflags gl | tr ' ' '\n'
ENV["EGL_CFLAGS"] = "-I#{include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libdrm"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libdrm"].opt_include}/libdrm"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxdamage"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["damageproto"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxfixes"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["fixesproto"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libx11"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxcb"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxxf86vm"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxext"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxau"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxdmcp"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["xproto"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["kbproto"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["xextproto"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["xf86vidmodeproto"].opt_include}"
ENV["GLX_CFLAGS"] = ENV["EGL_CFLAGS"]
ENV["EGL_LIBS"] = "-L#{lib} -lEGL"
ENV["GLX_LIBS"] = "-L#{lib} -lGL"
system "autoreconf", "-fi" if build.without?("wayland") # needed only if Wayland is not installed
system "./configure", *args
system "make"
system "make", "install"
end
end
end
test do
output = shell_output("ldd #{lib}/libGL.so").chomp
libs = %w[
libxcb-dri3.so.0
libxcb-present.so.0
libxcb-randr.so.0
libxcb-xfixes.so.0
libxcb-render.so.0
libxcb-shape.so.0
libxcb-sync.so.1
libxshmfence.so.1
libglapi.so.0
libXext.so.6
libXdamage.so.1
libXfixes.so.3
libX11-xcb.so.1
libX11.so.6
libxcb-glx.so.0
libxcb-dri2.so.0
libxcb.so.1
libXxf86vm.so.1
libdrm.so.2
]
libs << "libexpat.so.1" if build.with?("wayland")
libs.each do |lib|
assert_match lib, output
end
end
end
mesa: reduce memory for Circle CI
Signed-off-by: Bob W. Hogg <c772a964fd55352a3510e5d535dd9ccc9ac30168@linux.com>
class Mesa < Formula
desc "Cross-driver middleware"
homepage "https://dri.freedesktop.org"
url "https://mesa.freedesktop.org/archive/17.0.0/mesa-17.0.0.tar.xz"
sha256 "39db3d59700159add7f977307d12a7dfe016363e760ad82280ac4168ea668481"
revision 1
bottle do
sha256 "c4261b5848761366779d18b7ad50c86798ed1513249da942a7072623823822e3" => :x86_64_linux
end
option "without-test", "Skip compile-time tests"
option "with-static", "Build static libraries (not recommended)"
depends_on "pkg-config" => :build
depends_on :python => :build
depends_on "flex" => :build
depends_on "bison" => :build
depends_on "libtool" => :build
depends_on :x11
depends_on "linuxbrew/xorg/libdrm"
depends_on "systemd" # provides libudev <= needed by "gbm"
depends_on "linuxbrew/xorg/libsha1"
depends_on "llvm"
depends_on "libelf" # radeonsi requires libelf when using llvm
depends_on "linuxbrew/xorg/libomxil-bellagio"
depends_on "linuxbrew/xorg/wayland" => :recommended
depends_on "valgrind" => :recommended
depends_on "linuxbrew/xorg/libglvnd" => :optional
depends_on "linuxbrew/xorg/libva" => :recommended
depends_on "linuxbrew/xorg/libvdpau"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "linuxbrew/xorg/libpthread-stubs" => :build
#
# There is a circular dependency between Mesa and libva:
# libva should be installed:
# 1. before Mesa with "disable-egl" and "disable-egl" options [libva formula]
# 2. after Mesa without the above two options [this formula]
#
resource "mako" do
url "https://files.pythonhosted.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz"
sha256 "48559ebd872a8e77f92005884b3d88ffae552812cdf17db6768e5c3be5ebbe0d"
end
resource "libva" do
url "https://www.freedesktop.org/software/vaapi/releases/libva/libva-1.7.3.tar.bz2"
sha256 "22bc139498065a7950d966dbdb000cad04905cbd3dc8f3541f80d36c4670b9d9"
end
patch :p1 do
url "https://gist.githubusercontent.com/rwhogg/088a3e771be0f0556d2286c034544d18/raw/efd587120964745a61a2571a431ffc38341dc37c/mesa-patch-from-linux-from-scratch.patch"
sha256 "53492ca476e3df2de210f749983e17de4bec026a904db826acbcbd1ef83e71cd"
end
def install
# Reduce memory usage below 4 GB for Circle CI.
ENV["MAKEFLAGS"] = "-j2" if ENV["CIRCLECI"]
# inreplace "configure.ac", "$SED -i -e 's/brw_blorp.cpp/brw_blorp.c/'", "# $SED -i -e 's/brw_blorp.cpp/brw_blorp.c/'"
ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python2.7/site-packages"
resource("mako").stage do
system "python", *Language::Python.setup_install_args(libexec/"vendor")
end
args = %W[
CFLAGS=#{ENV.cflags}
CXXFLAGS=#{ENV.cflags}
--disable-silent-rules
--disable-dependency-tracking
--prefix=#{prefix}
--sysconfdir=#{etc}
--localstatedir=#{var}
--enable-texture-float
--enable-gles1
--enable-gles2
--enable-osmesa
--enable-xa
--enable-gbm
--with-egl-platforms=drm,x11,surfaceless#{build.with?("wayland") ? ",wayland" : ""}
--with-gallium-drivers=i915,nouveau,r300,r600,radeonsi,svga,swrast,swr
--enable-glx-tls
--enable-dri
--enable-dri3
--enable-gallium-tests
--enable-glx
--enable-opengl
--enable-shared-glapi
--enable-va
--enable-vdpau
--enable-xvmc
--disable-llvm-shared-libs
--with-dri-drivers=i965,nouveau,radeon,r200,swrast
--with-sha1=libsha1
--enable-gallium-llvm
--enable-sysfs
]
# enable-opencl => needs libclc
# enable-gallium-osmesa => mutually exclusive with enable-osmesa
args << "--enable-static=#{build.with?("static") ? "yes" : "no"}"
args << "--enable-libglvnd" if build.with? "libglvnd"
inreplace "bin/ltmain.sh", /.*seems to be moved"/, '#\1seems to be moved"'
system "./autogen.sh", *args
system "make"
system "make", "-C", "xdemos", "DEMOS_PREFIX=#{prefix}"
system "make", "check" if build.with?("test")
system "make", "install"
system "make", "-C", "xdemos", "DEMOS_PREFIX=#{prefix}", "install"
if build.with?("libva")
resource("libva").stage do
args = %W[
--prefix=#{Formula["libva"].opt_prefix}
--sysconfdir=#{etc}
--localstatedir=#{var}
--disable-dependency-tracking
--disable-silent-rules
]
# Be explicit about the configure flags
args << "--enable-static=#{build.with?("static") ? "yes" : "no"}"
### Set environment flags:
# $ pkg-config --cflags egl | tr ' ' '\n'
# $ pkg-config --cflags gl | tr ' ' '\n'
ENV["EGL_CFLAGS"] = "-I#{include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libdrm"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libdrm"].opt_include}/libdrm"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxdamage"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["damageproto"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxfixes"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["fixesproto"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libx11"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxcb"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxxf86vm"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxext"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxau"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["libxdmcp"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["xproto"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["kbproto"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["xextproto"].opt_include}"
ENV.append "EGL_CFLAGS", "-I#{Formula["xf86vidmodeproto"].opt_include}"
ENV["GLX_CFLAGS"] = ENV["EGL_CFLAGS"]
ENV["EGL_LIBS"] = "-L#{lib} -lEGL"
ENV["GLX_LIBS"] = "-L#{lib} -lGL"
system "autoreconf", "-fi" if build.without?("wayland") # needed only if Wayland is not installed
system "./configure", *args
system "make"
system "make", "install"
end
end
end
test do
output = shell_output("ldd #{lib}/libGL.so").chomp
libs = %w[
libxcb-dri3.so.0
libxcb-present.so.0
libxcb-randr.so.0
libxcb-xfixes.so.0
libxcb-render.so.0
libxcb-shape.so.0
libxcb-sync.so.1
libxshmfence.so.1
libglapi.so.0
libXext.so.6
libXdamage.so.1
libXfixes.so.3
libX11-xcb.so.1
libX11.so.6
libxcb-glx.so.0
libxcb-dri2.so.0
libxcb.so.1
libXxf86vm.so.1
libdrm.so.2
]
libs << "libexpat.so.1" if build.with?("wayland")
libs.each do |lib|
assert_match lib, output
end
end
end
|
require 'formula'
# 2.2.20 does not build on OS X. See:
# https://github.com/BIC-MNI/minc/pull/16
# https://github.com/mxcl/homebrew/issues/22152
# Homebrew formula for the MINC medical-imaging toolkit.
class Minc < Formula
  homepage 'http://en.wikibooks.org/wiki/MINC'
  url 'https://github.com/BIC-MNI/minc/archive/minc-2-1-13.tar.gz'
  version '2.1.13'
  sha1 '62eeeab62bb5c977e11166d4e43ba384fd029fd1'
  head 'https://github.com/BIC-MNI/minc.git'
  depends_on :autoconf
  depends_on :automake
  depends_on :libtool
  depends_on 'netcdf'
  depends_on 'hdf5'
  fails_with :clang do
    # TODO This is an easy fix, someone send it upstream!
    # Bumped from 425: newer clang builds (up to 503) still reject the
    # missing return; the old cap wrongly allowed them to attempt a build.
    build 503
    cause "Throws 'non-void function 'miget_real_value_hyperslab' should return a value'"
  end
  def install
    system "./autogen.sh"
    system "./configure", "--prefix=#{prefix}", "--disable-dependency-tracking"
    system "make install"
  end
end
minc: bump fails_with clang
Fixes #1095.
require 'formula'

# 2.2.20 does not build on OS X. See:
# https://github.com/BIC-MNI/minc/pull/16
# https://github.com/mxcl/homebrew/issues/22152
# Homebrew formula for the MINC medical-imaging toolkit, pinned to 2.1.13
# (the last release known to build on OS X; see links above).
class Minc < Formula
  homepage 'http://en.wikibooks.org/wiki/MINC'
  url 'https://github.com/BIC-MNI/minc/archive/minc-2-1-13.tar.gz'
  version '2.1.13'
  sha1 '62eeeab62bb5c977e11166d4e43ba384fd029fd1'
  head 'https://github.com/BIC-MNI/minc.git'

  # Autotools are needed because the GitHub tarball ships no configure script.
  depends_on :autoconf
  depends_on :automake
  depends_on :libtool
  # MINC stores volumes on top of NetCDF/HDF5.
  depends_on 'netcdf'
  depends_on 'hdf5'

  fails_with :clang do
    # TODO This is an easy fix, someone send it upstream!
    build 503
    cause "Throws 'non-void function 'miget_real_value_hyperslab' should return a value'"
  end

  def install
    # Generate the configure script first (raw tarball, see depends_on above).
    system "./autogen.sh"
    system "./configure", "--prefix=#{prefix}", "--disable-dependency-tracking"
    system "make install"
  end
end
|
# coding: utf-8
require_relative 'point2d'
require_relative 'direction'
# Abstract base class for the interpreter's two movement modes (the Cardinal
# and Ordinal subclasses below). Holds the shared per-tick dispatch loop;
# subclasses supply movement (`move`), opcode handling (`process`,
# `process_string`) and the stack primitives (`pop`, `peek`, `shift`).
class Mode
  # List of operators which should not be ignored while in string mode.
  STRING_CMDS = "\"'\\/_|"

  # state: the shared interpreter state object (grid, IP, stack, tape, ...).
  def initialize(state)
    @state = state
  end

  # Executes a single interpreter step: reads the current cell, dispatches it
  # either as string-mode data or as a command, then moves the IP and
  # increments the tick counter.
  def do_tick
    cmd = @state.cell
    if @state.string_mode
      # Only valid code points that appear in STRING_CMDS act as commands
      # inside string mode; everything else is collected verbatim.
      if cmd >= 0 && cmd <= 1114111 && STRING_CMDS[cmd.chr]
        case cmd.chr
        when '"'
          # Closing quote: leave string mode and hand off the collected chars.
          @state.string_mode = false
          process_string
          @state.current_string = []
        when "'"
          # Escape: the next cell is taken literally.
          move
          @state.current_string << @state.cell
        else
          process(self.class::OPERATORS[cmd.chr], cmd)
        end
      else
        @state.current_string << cmd
      end
    else
      opcode = :nop
      opcode = self.class::OPERATORS[cmd.chr] if cmd >= 0 && cmd <= 1114111 # maximum Unicode code point
      process(opcode, cmd)
    end
    move
    @state.tick += 1
  end

  def process
    raise NotImplementedError
  end

  def process_string
    raise NotImplementedError
  end

  def move
    raise NotImplementedError
  end

  def push val
    @state.push val
  end

  def pop
    raise NotImplementedError
  end

  def shift
    raise NotImplementedError
  end

  def unshift val
    @state.unshift val
  end

  def peek
    raise NotImplementedError
  end
end
# Cardinal mode: the IP moves orthogonally and the stack holds integers.
class Cardinal < Mode
  # Maps grid characters to opcode symbols; anything unmapped is a no-op.
  OPERATORS = {
    ' ' => :nop,
    '@' => :terminate,
    '/' => :mirror,
    '\\' => :mirror,
    '_' => :wall,
    '|' => :wall,
    '<' => :move_west,
    '>' => :move_east,
    '^' => :move_north,
    'v' => :move_south,
    '{' => :turn_left,
    '}' => :turn_right,
    '0' => :digit, '1' => :digit, '2' => :digit, '3' => :digit, '4' => :digit, '5' => :digit, '6' => :digit, '7' => :digit, '8' => :digit, '9' => :digit,
    '+' => :add,
    '-' => :sub,
    '*' => :mul,
    ':' => :div,
    '%' => :mod,
    '[' => :mp_left,
    ']' => :mp_right,
    '"' => :string_mode,
    "'" => :escape,
    'i' => :input,
    'o' => :output,
    # Reserved / not yet assigned:
    #'(' => ,
    #')' => ,
    #'!' => ,
    #'#' => ,
    #'$' => ,
    #'&' => ,
    #',' => ,
    #'.' => ,
    #';' => ,
    #'=' => ,
    #'?' => ,
    #'~' => ,
    #'`' => ,
    #'A' => ,
    # ...
    #'Z' => ,
    #'a' => ,
    # ...
    #'z' => ,
  }
  OPERATORS.default = :nop

  # Orthogonal movement wraps around the grid edges.
  def move
    @state.ip += @state.dir.vec
    @state.wrap
  end

  # Pops an integer. Strings on the stack are broken into the integers they
  # contain (pushed back in order) before popping; an empty stack yields 0.
  def pop
    val = nil
    loop do
      val = @state.pop
      if val.is_a?(String)
        found = false
        val.scan(/-?\d+/) { push $&.to_i; found = true }
        next if !found
        val = @state.pop
      end
      break
    end
    val || 0
  end

  # In Cardinal mode a finished string is pushed as individual code points.
  def process_string
    @state.stack += @state.current_string
  end

  # Executes a single opcode. `cmd` is the raw code point of the cell, used
  # by opcodes that depend on the literal character (digits, mirrors).
  def process opcode, cmd
    case opcode
    when :terminate
      @state.done = true
    when :mirror
      # Mirrors also switch the interpreter into Ordinal mode.
      @state.dir = @state.dir.reflect cmd.chr
      @state.set_ordinal
    when :wall
      @state.dir = @state.dir.reflect cmd.chr
    when :move_east
      @state.dir = East.new
    when :move_west
      @state.dir = West.new
    when :move_south
      @state.dir = South.new
    when :move_north
      @state.dir = North.new
    when :turn_left
      @state.dir = @state.dir.left
    when :turn_right
      @state.dir = @state.dir.right
    when :mp_left
      @state.mp -= 1
    when :mp_right
      @state.mp += 1
    when :string_mode
      @state.string_mode = true
    when :escape
      # Push the next cell's code point literally.
      move
      push @state.cell
    when :input
      char = @state.in_str.getc
      push(char ? char.ord : -1)
    when :output
      @state.out_str << pop.chr
    when :digit
      push cmd.chr.to_i
    when :add
      push(pop + pop)
    when :sub
      y = pop
      push(pop - y)
    when :mul
      push(pop * pop)
    when :div
      y = pop
      push(pop / y)
    when :mod
      y = pop
      push(pop % y)
    end
  end
end
# Ordinal mode: the IP moves diagonally and the stack holds strings.
class Ordinal < Mode
  # Maps grid characters to opcode symbols; anything unmapped is a no-op.
  OPERATORS = {
    ' ' => :nop,
    '/' => :mirror,
    '\\' => :mirror,
    '_' => :wall,
    '|' => :wall,
    '@' => :terminate,
    '0' => :digit, '1' => :digit, '2' => :digit, '3' => :digit, '4' => :digit, '5' => :digit, '6' => :digit, '7' => :digit, '8' => :digit, '9' => :digit,
    '+' => :concat,
    '-' => :sub,
    '*' => :riffle,
    ':' => :split,
    '%' => :mod,
    '<' => :ensure_west,
    '>' => :ensure_east,
    '^' => :ensure_north,
    'v' => :ensure_south,
    '{' => :strafe_left,
    '}' => :strafe_right,
    '"' => :string_mode,
    "'" => :escape,
    'i' => :input,
    'o' => :output,
    # Reserved / not yet assigned:
    #'(' => ,
    #')' => ,
    #'[' => ,
    #']' => ,
    #'!' => ,
    #'#' => ,
    #'$' => ,
    #'&' => ,
    #',' => ,
    #'.' => ,
    #';' => ,
    #'=' => ,
    #'?' => ,
    #'`' => ,
    #'~' => ,
    #'A' => ,
    # ...
    #'Z' => ,
    #'a' => ,
    # ...
    #'z' => ,
  }
  OPERATORS.default = :nop

  # Diagonal movement reflects off the grid boundaries instead of wrapping.
  # On degenerate grids (a single row or column) the IP cannot move at all.
  def move
    if @state.width == 1 || @state.height == 1
      return
    end
    new_pos = @state.ip + @state.dir.vec + @state.storage_offset
    @state.dir = @state.dir.reflect('|') if new_pos.x < 0 || new_pos.x >= @state.width
    @state.dir = @state.dir.reflect('_') if new_pos.y < 0 || new_pos.y >= @state.height
    @state.ip += @state.dir.vec
  end

  # Pops a value coerced to a string; an empty stack yields ''.
  def pop
    val = @state.pop
    val ? val.to_s : ''
  end

  def process_string
    # Will throw an error when cell isn't a valid code point
    push @state.current_string.map(&:chr)*''
  end

  # Executes a single opcode. `cmd` is the raw code point of the cell.
  def process opcode, cmd
    case opcode
    when :terminate
      @state.done = true
    when :mirror
      # Mirrors also switch the interpreter back into Cardinal mode.
      @state.dir = @state.dir.reflect cmd.chr
      @state.set_cardinal
    when :wall
      @state.dir = @state.dir.reflect cmd.chr
    when :ensure_west
      @state.dir = @state.dir.reflect cmd.chr if @state.dir.x > 0
    when :ensure_east
      @state.dir = @state.dir.reflect cmd.chr if @state.dir.x < 0
    when :ensure_north
      @state.dir = @state.dir.reflect cmd.chr if @state.dir.y > 0
    when :ensure_south
      @state.dir = @state.dir.reflect cmd.chr if @state.dir.y < 0
    when :strafe_left
      @state.ip += (@state.dir.reverse + @state.dir.left) / 2
    when :strafe_right
      @state.ip += (@state.dir.reverse + @state.dir.right) / 2
    when :string_mode
      @state.string_mode = true
    when :escape
      move
      push @state.cell.chr # Will throw an error when cell isn't a valid code point
    when :digit
      push(pop + cmd.chr)
    when :input
      line = @state.in_str.gets
      push(line ? line.chomp : '')
    when :output
      @state.out_str << pop
    when :concat
      push(pop + pop)
    when :riffle
      sep = pop
      push(pop.chars * sep)
    when :split
      sep = pop
      # FIX: was `$state.stack` — an undefined global that would be nil here;
      # the interpreter state lives in the @state instance variable.
      @state.stack += pop.split(sep, -1)
    end
  end
end
A couple more built-ins
# coding: utf-8
require_relative 'point2d'
require_relative 'direction'
# Abstract base class for the interpreter's two movement modes (the Cardinal
# and Ordinal subclasses below). Holds the shared per-tick dispatch loop;
# subclasses supply movement (`move`), opcode handling (`process`,
# `process_string`) and the stack primitives (`pop`, `shift`).
class Mode
  # List of operators which should not be ignored while in string mode.
  STRING_CMDS = "\"'\\/_|"

  # state: the shared interpreter state object (grid, IP, stack, tape, ...).
  def initialize(state)
    @state = state
  end

  # Executes a single interpreter step: reads the current cell, dispatches it
  # either as string-mode data or as a command, then moves the IP and
  # increments the tick counter.
  def do_tick
    cmd = @state.cell
    if @state.string_mode
      if cmd >= 0 && cmd <= 1114111 && STRING_CMDS[cmd.chr]
        case cmd.chr
        when '"'
          # Closing quote: leave string mode and hand off the collected chars.
          @state.string_mode = false
          process_string
          @state.current_string = []
        when "'"
          # Escape: the next cell is taken literally.
          move
          @state.current_string << @state.cell
        else
          process(self.class::OPERATORS[cmd.chr], cmd)
        end
      else
        @state.current_string << cmd
      end
    else
      opcode = :nop
      opcode = self.class::OPERATORS[cmd.chr] if cmd >= 0 && cmd <= 1114111 # maximum Unicode code point
      process(opcode, cmd)
    end
    move
    @state.tick += 1
  end

  def process
    raise NotImplementedError
  end

  def process_string
    raise NotImplementedError
  end

  def move
    raise NotImplementedError
  end

  def push val
    @state.push val
  end

  def pop
    raise NotImplementedError
  end

  # Non-destructively reads the top of the stack via the subclass's pop/push.
  # FIX: a second, abstract `def peek; raise NotImplementedError; end` later
  # in the class shadowed this definition (the later `def` wins in Ruby), so
  # peek always raised. The duplicate stub has been removed.
  def peek
    val = pop
    push val
    val
  end

  def shift
    raise NotImplementedError
  end

  def unshift val
    @state.unshift val
  end
end
# Cardinal mode: the IP moves orthogonally and the stack holds integers.
class Cardinal < Mode
  # Maps grid characters to opcode symbols; anything unmapped is a no-op.
  OPERATORS = {
    ' ' => :nop,
    '@' => :terminate,
    '/' => :mirror,
    '\\' => :mirror,
    '_' => :wall,
    '|' => :wall,
    '<' => :move_west,
    '>' => :move_east,
    '^' => :move_north,
    'v' => :move_south,
    '{' => :turn_left,
    '}' => :turn_right,
    '#' => :trampoline,
    '?' => :cond_trampoline,
    '0' => :digit, '1' => :digit, '2' => :digit, '3' => :digit, '4' => :digit, '5' => :digit, '6' => :digit, '7' => :digit, '8' => :digit, '9' => :digit,
    '+' => :add,
    '-' => :sub,
    '*' => :mul,
    ':' => :div,
    '%' => :mod,
    '[' => :mp_left,
    ']' => :mp_right,
    '"' => :string_mode,
    "'" => :escape,
    'i' => :input,
    'o' => :output,
    'A' => :bitand,
    'N' => :bitnot,
    'O' => :bitor,
    'X' => :bitxor,
    # Reserved / not yet assigned:
    #'(' => ,
    #')' => ,
    #'!' => ,
    #'$' => ,
    #'&' => ,
    #',' => ,
    #'.' => ,
    #';' => ,
    #'=' => ,
    #'~' => ,
    #'`' => ,
    #'A' => ,
    # ...
    #'Z' => ,
    #'a' => ,
    # ...
    #'z' => ,
  }
  OPERATORS.default = :nop

  # Orthogonal movement wraps around the grid edges.
  def move
    @state.ip += @state.dir.vec
    @state.wrap
  end

  # Pops an integer. Strings on the stack are broken into the integers they
  # contain (pushed back in order) before popping; an empty stack yields 0.
  def pop
    val = nil
    loop do
      val = @state.pop
      if val.is_a?(String)
        found = false
        val.scan(/-?\d+/) { push $&.to_i; found = true }
        next if !found
        val = @state.pop
      end
      break
    end
    val || 0
  end

  # In Cardinal mode a finished string is pushed as individual code points.
  def process_string
    @state.stack += @state.current_string
  end

  # Executes a single opcode. `cmd` is the raw code point of the cell, used
  # by opcodes that depend on the literal character (digits, mirrors).
  def process opcode, cmd
    case opcode
    when :terminate
      @state.done = true
    when :mirror
      # Mirrors also switch the interpreter into Ordinal mode.
      @state.dir = @state.dir.reflect cmd.chr
      @state.set_ordinal
    when :wall
      @state.dir = @state.dir.reflect cmd.chr
    when :move_east
      @state.dir = East.new
    when :move_west
      @state.dir = West.new
    when :move_south
      @state.dir = South.new
    when :move_north
      @state.dir = North.new
    when :turn_left
      @state.dir = @state.dir.left
    when :turn_right
      @state.dir = @state.dir.right
    when :trampoline
      # Skip the next cell unconditionally.
      move
    when :cond_trampoline
      # Skip the next cell if the popped value is zero.
      move if pop == 0
    when :mp_left
      @state.mp -= 1
    when :mp_right
      @state.mp += 1
    when :string_mode
      @state.string_mode = true
    when :escape
      # Push the next cell's code point literally.
      move
      push @state.cell
    when :input
      char = @state.in_str.getc
      push(char ? char.ord : -1)
    when :output
      @state.out_str << pop.chr
    when :digit
      push cmd.chr.to_i
    when :add
      push(pop + pop)
    when :sub
      y = pop
      push(pop - y)
    when :mul
      push(pop * pop)
    when :div
      y = pop
      push(pop / y)
    when :mod
      y = pop
      push(pop % y)
    when :bitand
      push(pop & pop)
    when :bitnot
      push(~pop)
    when :bitor
      push(pop | pop)
    when :bitxor
      push(pop ^ pop)
    end
  end
end
# Ordinal mode: the IP moves diagonally and the stack holds strings.
class Ordinal < Mode
  # Maps grid characters to opcode symbols; anything unmapped is a no-op.
  OPERATORS = {
    ' ' => :nop,
    '/' => :mirror,
    '\\' => :mirror,
    '_' => :wall,
    '|' => :wall,
    '@' => :terminate,
    '0' => :digit, '1' => :digit, '2' => :digit, '3' => :digit, '4' => :digit, '5' => :digit, '6' => :digit, '7' => :digit, '8' => :digit, '9' => :digit,
    '+' => :concat,
    '-' => :sub,
    '*' => :riffle,
    ':' => :split,
    '%' => :mod,
    '<' => :ensure_west,
    '>' => :ensure_east,
    '^' => :ensure_north,
    'v' => :ensure_south,
    '{' => :strafe_left,
    '}' => :strafe_right,
    '#' => :trampoline,
    '?' => :cond_trampoline,
    '"' => :string_mode,
    "'" => :escape,
    'i' => :input,
    'o' => :output,
    # Reserved / not yet assigned:
    #'(' => ,
    #')' => ,
    #'[' => ,
    #']' => ,
    #'!' => ,
    #'$' => ,
    #'&' => ,
    #',' => ,
    #'.' => ,
    #';' => ,
    #'=' => ,
    #'?' => ,
    #'`' => ,
    #'~' => ,
    #'A' => ,
    # ...
    #'Z' => ,
    #'a' => ,
    # ...
    #'z' => ,
  }
  OPERATORS.default = :nop

  # Diagonal movement reflects off the grid boundaries instead of wrapping.
  # On degenerate grids (a single row or column) the IP cannot move at all.
  def move
    if @state.width == 1 || @state.height == 1
      return
    end
    new_pos = @state.ip + @state.dir.vec + @state.storage_offset
    @state.dir = @state.dir.reflect('|') if new_pos.x < 0 || new_pos.x >= @state.width
    @state.dir = @state.dir.reflect('_') if new_pos.y < 0 || new_pos.y >= @state.height
    @state.ip += @state.dir.vec
  end

  # Pops a value coerced to a string; an empty stack yields ''.
  def pop
    val = @state.pop
    val ? val.to_s : ''
  end

  def process_string
    # Will throw an error when cell isn't a valid code point
    push @state.current_string.map(&:chr)*''
  end

  # Executes a single opcode. `cmd` is the raw code point of the cell.
  def process opcode, cmd
    case opcode
    when :terminate
      @state.done = true
    when :mirror
      # Mirrors also switch the interpreter back into Cardinal mode.
      @state.dir = @state.dir.reflect cmd.chr
      @state.set_cardinal
    when :wall
      @state.dir = @state.dir.reflect cmd.chr
    when :ensure_west
      @state.dir = @state.dir.reflect cmd.chr if @state.dir.x > 0
    when :ensure_east
      @state.dir = @state.dir.reflect cmd.chr if @state.dir.x < 0
    when :ensure_north
      @state.dir = @state.dir.reflect cmd.chr if @state.dir.y > 0
    when :ensure_south
      @state.dir = @state.dir.reflect cmd.chr if @state.dir.y < 0
    when :strafe_left
      @state.ip += (@state.dir.reverse + @state.dir.left) / 2
    when :strafe_right
      @state.ip += (@state.dir.reverse + @state.dir.right) / 2
    when :trampoline
      # Skip the next cell unconditionally.
      move
    when :cond_trampoline
      # Skip the next cell if the popped string is empty.
      move if pop == ''
    when :string_mode
      @state.string_mode = true
    when :escape
      move
      push @state.cell.chr # Will throw an error when cell isn't a valid code point
    when :digit
      push(pop + cmd.chr)
    when :input
      line = @state.in_str.gets
      push(line ? line.chomp : '')
    when :output
      @state.out_str << pop
    when :concat
      push(pop + pop)
    when :riffle
      sep = pop
      push(pop.chars * sep)
    when :split
      sep = pop
      # FIX: was `$state.stack` — an undefined global that would be nil here;
      # the interpreter state lives in the @state instance variable.
      @state.stack += pop.split(sep, -1)
    end
  end
end
# coding: utf-8
require_relative 'point2d'
require_relative 'direction'
require 'prime'
require 'date'
# Abstract base class for the interpreter's two movement modes (the Cardinal
# and Ordinal subclasses below). In this revision, mirrors/walls and no-op
# skipping are handled centrally in `move`; subclasses supply `raw_move`,
# `process` and the stack primitives.
class Mode
  # List of operators which should not be ignored while in string mode.
  STRING_CMDS = "\"'\\/_|"

  # state: the shared interpreter state object (grid, IP, stack, tape, ...).
  def initialize(state)
    @state = state
  end

  # True if val is a Unicode scalar value (i.e. a code point outside the
  # surrogate range) and therefore safe to pass to Integer#chr.
  def is_char? val
    val && (val >= 0 && val <= 0xD7FF || val >= 0xE000 && val <= 0x10FFFF)
  end

  def process
    raise NotImplementedError
  end

  def process_string
    raise NotImplementedError
  end

  # Returns true when the resulting cell is a command.
  def move
    # If we are sitting on an escape quote, the following cell is data, so
    # step over it before taking the regular step.
    raw_move if @state.cell == "'".ord
    raw_move
    cell = @state.cell
    case cell
    when '/'.ord, '\\'.ord
      # Mirrors reflect the IP and toggle between Cardinal/Ordinal mode.
      @state.dir = @state.dir.reflect cell.chr
      @state.toggle_mode
      return false
    when '_'.ord, '|'.ord
      @state.dir = @state.dir.reflect cell.chr
      return false
    end
    return true if @state.string_mode
    @state.print_debug_info if cell == '`'.ord
    is_char?(cell) && self.class::OPERATORS.has_key?(cell.chr)
  end

  # Moves the IP a single cell without regard for mirrors, walls or no-ops.
  # Does respect grid boundaries.
  def raw_move
    raise NotImplementedError
  end

  def push val
    @state.push val
  end

  def pop
    raise NotImplementedError
  end

  # Delegates to the state's return-address stack.
  def push_return
    @state.push_return
  end

  def pop_return
    @state.pop_return
  end

  def peek_return
    @state.peek_return
  end

  # Non-destructively reads the top of the stack via the subclass's pop/push.
  def peek
    val = pop
    push val
    val
  end
end
# Cardinal mode: the IP moves orthogonally and the stack holds integers.
class Cardinal < Mode
  # Maps grid characters to opcode symbols; anything unmapped is a no-op.
  OPERATORS = {
    '@' => :terminate,
    '<' => :move_west,
    '>' => :move_east,
    '^' => :move_north,
    'v' => :move_south,
    '{' => :turn_left,
    '}' => :turn_right,
    '#' => :trampoline,
    '$' => :cond_trampoline,
    '=' => :cond_sign,
    '&' => :repeat_iterator,
    '~' => :swap,
    '.' => :dup,
    ';' => :discard,
    ',' => :rotate_stack,
    '0' => :digit, '1' => :digit, '2' => :digit, '3' => :digit, '4' => :digit, '5' => :digit, '6' => :digit, '7' => :digit, '8' => :digit, '9' => :digit,
    '+' => :add,
    '-' => :sub,
    '*' => :mul,
    ':' => :div,
    '%' => :mod,
    '!' => :store_tape,
    '?' => :load_tape,
    '[' => :mp_left,
    ']' => :mp_right,
    '(' => :search_left,
    ')' => :search_right,
    '"' => :leave_string_mode,
    "'" => :escape,
    'I' => :input,
    'O' => :output,
    'i' => :raw_input,
    'o' => :raw_output,
    'A' => :bitand,
    'B' => :divisors,
    'C' => :binomial,
    'D' => :deduplicate,
    'E' => :power,
    'F' => :divides,
    'G' => :gcd,
    'H' => :abs,
    'J' => :jump_raw,
    'K' => :return_raw,
    'L' => :lcm,
    'M' => :divmod,
    'N' => :bitnot,
    'P' => :factorial,
    'Q' => :convert,
    'R' => :negate,
    'S' => :replace_divisors,
    'T' => :sleep,
    'U' => :random,
    'V' => :bitor,
    'W' => :discard_return,
    'X' => :bitxor,
    'Y' => :unpack,
    'Z' => :pack,
    'a' => :const_10,
    'b' => :random_swap,
    'c' => :prime_factors,
    'd' => :stack_depth,
    'e' => :const_m1,
    'f' => :prime_factor_pairs,
    'g' => :get_cell,
    'h' => :inc,
    'j' => :jump,
    'k' => :return,
    'l' => :clear_bits,
    'm' => :floor,
    'n' => :not,
    'p' => :put_cell,
    'q' => :get_mp,
    'r' => :range,
    's' => :sortswap,
    't' => :dec,
    'u' => :set_bits,
    'w' => :push_return,
    'x' => :extract_bit,
    'y' => :bitif,
    'z' => :drop_small_factors,
  }
  OPERATORS.default = :nop

  # Orthogonal movement wraps around the grid edges.
  def raw_move
    @state.ip += @state.dir.vec
    @state.wrap
  end

  # Pops an integer. Strings on the stack are broken into the integers they
  # contain (pushed back in order) before popping; an empty stack yields 0.
  def pop
    val = nil
    loop do
      val = @state.pop
      if val.is_a?(String)
        found = false
        val.scan(/(?:^|(?!\G))-?\d+/) { push $&.to_i; found = true }
        next if !found
        val = @state.pop
      end
      break
    end
    val || 0
  end

  # Executes the command character `cmd` (the literal grid character).
  def process cmd
    opcode = OPERATORS[cmd]
    case opcode
    when :nop
      raise "No-op reached process(). This shouldn't happen."
    when :terminate
      @state.done = true
    when :move_east
      @state.dir = East.new
    when :move_west
      @state.dir = West.new
    when :move_south
      @state.dir = South.new
    when :move_north
      @state.dir = North.new
    when :turn_left
      @state.dir = @state.dir.left
    when :turn_right
      @state.dir = @state.dir.right
    when :trampoline
      @state.skip_next
    when :cond_trampoline
      @state.skip_next if pop == 0
    when :cond_sign
      # Turn left on negative, right on positive, straight on zero.
      val = pop
      if val < 0
        @state.dir = @state.dir.left
      elsif val > 0
        @state.dir = @state.dir.right
      end
    when :repeat_iterator
      @state.add_iterator pop
    when :jump
      push_return
      y = pop
      x = pop
      @state.jump(x,y)
    when :return
      @state.jump(*pop_return)
    when :jump_raw
      y = pop
      x = pop
      @state.jump(x,y)
    when :return_raw
      @state.jump(*peek_return)
    when :push_return
      push_return
    when :discard_return
      pop_return
    when :get_cell
      y = pop
      x = pop
      push @state.cell(Point2D.new(x,y))
    when :put_cell
      v = pop
      y = pop
      x = pop
      @state.put_cell(Point2D.new(x,y), v)
    when :store_tape
      @state.tape[@state.mp] = pop
    when :load_tape
      push (@state.tape[@state.mp] || -1)
    when :mp_left
      @state.mp -= 1
    when :mp_right
      @state.mp += 1
    when :search_left
      # Scan the tape leftwards for the popped value and park the MP there.
      val = pop
      (@state.mp-1).downto(@state.tape.keys.min-1).each do |i|
        if @state.tape[i] == val
          @state.mp = i
          break
        end
      end
    when :search_right
      val = pop
      (@state.mp+1..@state.tape.keys.max+1).each do |i|
        if @state.tape[i] == val
          @state.mp = i
          break
        end
      end
    when :get_mp
      push @state.mp
    when :leave_string_mode
      @state.stack += @state.current_string
    when :escape
      raw_move
      push @state.cell
      @state.ip -= @state.dir.vec
    when :input
      # Skip bytes that are not valid in the input encoding.
      char = @state.in_str.getc
      while char && char.scrub('') == ''
        char = @state.in_str.getc
      end
      push(char ? char.ord : -1)
    when :output
      # Will throw an error when value isn't a valid code point
      val = pop
      if is_char?(val)
        @state.out_str << val.chr
      end
    when :raw_input
      push(@state.in_str.getbyte || -1)
    when :raw_output
      # TODO: do
    when :digit
      push cmd.chr.to_i
    when :add
      push(pop + pop)
    when :sub
      y = pop
      push(pop - y)
    when :mul
      push(pop * pop)
    when :div
      y = pop
      push(pop / y)
    when :mod
      y = pop
      push(pop % y)
    when :divmod
      y = pop
      x = pop
      push(x / y)
      push(x % y)
    when :inc
      push(pop+1)
    when :dec
      push(pop-1)
    when :abs
      push(pop.abs)
    when :power
      y = pop
      x = pop
      if y < 0
        push 1/x**y.abs
      else
        push x**y
      end
    when :bitand
      push(pop & pop)
    when :bitnot
      push(~pop)
    when :bitor
      push(pop | pop)
    when :bitxor
      push(pop ^ pop)
    when :bitif
      # Bitwise multiplex: for each bit, take y's bit where x has a 1, else z's.
      z = pop
      y = pop
      x = pop
      push(x&y | ~x&z)
    when :clear_bits
      x = pop
      if x > 0
        msb = Math.log2(x).floor
      elsif x < -1
        msb = Math.log2(~x).floor
      else
        msb = 0
      end
      push (x & -(2**msb))
    when :set_bits
      x = pop
      if x > 0
        msb = Math.log2(x).floor
      elsif x < -1
        msb = Math.log2(~x).floor
      else
        msb = 0
      end
      push (x | (2**msb-1))
    when :extract_bit
      y = pop
      x = pop
      if y >= 0
        push x[y]
      else
        # Negative indices count down from the most significant bit.
        if x > 0
          msb = Math.log2(x).floor
        elsif x < -1
          msb = Math.log2(~x).floor
        else
          msb = 0
        end
        push x[msb-y+1]
      end
    when :factorial
      val = pop
      if val >= 0
        push (1..val).reduce(1, :*)
      else
        push (val..-1).reduce(1, :*)
      end
    when :binomial
      k = pop
      n = pop
      # Use the symmetry C(n,k) == C(n,n-k) to shorten the product.
      k = n-k if n > 0 && k > n/2
      if k < 0
        push 0
      else
        prod = 1
        (1..k).each do |i|
          prod *= n
          prod /= i
          n -= 1
        end
        push prod
      end
    when :negate
      push -pop
    when :prime_factors
      n = pop
      if n == 0
        push 0
      else
        Prime.prime_division(n).each{ |p,n| n.times{ push p } }
      end
    when :prime_factor_pairs
      n = pop
      if n == 0
        push 0
        push 1
      else
        Prime.prime_division(n).flatten.each{ |x| push x }
      end
    when :deduplicate
      # Radical of n: product of its distinct prime factors.
      n = pop
      if n == 0
        push 0
      else
        push Prime.int_from_prime_division(Prime.prime_division(n).map{ |p,n| [p,1]})
      end
    when :divides
      y = pop
      x = pop
      if y != 0 && x % y == 0
        push y
      else
        push 0
      end
    when :gcd
      push (pop.gcd pop)
    when :lcm
      push (pop.lcm pop)
    when :floor
      # Round x down to the nearest multiple of y.
      y = pop
      x = pop
      push (x/y)*y
    when :replace_divisors
      # Replace every factor y in x with z.
      z = pop
      y = pop
      x = pop
      if x == 0
        push 0
      elsif y == 1 || y == -1
        if z == y
          push x
        elsif z == 0
          push 0
        else
          # Infinitely many unit factors: deliberately diverge.
          loop { next }
        end
      else
        order = 0
        while x%y == 0
          order += 1
          x /= y
        end
        x *= z**order
        # FIX: the result was computed but never pushed in this branch,
        # unlike every other arithmetic opcode in this dispatch.
        push x
      end
    when :divisors
      n = pop
      sgn = n <=> 0
      n = n.abs
      k = 1
      small_divs = []
      large_divs = []
      # Collect divisor pairs up to sqrt(n), then emit in ascending order.
      while k*k <= n
        if n%k == 0
          small_divs << k
          large_divs << n/k if k*k != n
        end
        k += 1
      end
      (small_divs + large_divs.reverse).each {|k| push k*sgn}
    when :drop_small_factors
      k = pop
      n = pop
      if n != 0
        if k > 0
          (2..k).each {|i| n /= i while n % i == 0}
        else
          -2.downto(k) {|i| n /= i while n % i == 0}
        end
      end
      push n
    when :pack
      # Bijectively pack two integers into one (Cantor pairing on naturals).
      y = pop
      x = pop
      # Map integers to naturals
      sgn = x <=> 0
      x = x*sgn*2 + [0, sgn].min
      sgn = y <=> 0
      y = y*sgn*2 + [0, sgn].min
      # Map two naturals to one
      z = (x+y)*(x+y+1)/2 + y
      # Map the natural back to an integer
      z = (-1)**z * ((z+1)/2)
      push z
    when :unpack
      # Inverse of :pack.
      z = pop
      # Map the integer to a positive natural
      sgn = z <=> 0
      z = z*sgn*2 + [0, sgn].min
      # Map the natural to two
      y = z
      x = 0
      while x < y
        x += 1
        y -= x
      end
      x -= y
      # Map the naturals back to integers
      x = (-1)**x * ((x+1)/2)
      y = (-1)**y * ((y+1)/2)
      push x
      push y
    when :not
      push (pop == 0 ? 1 : 0)
    when :range
      val = pop
      if val >= 0
        0.upto(val) {|i| push i}
      else
        (-val).downto(0) {|i| push i}
      end
    when :random
      val = pop
      if val > 0
        push rand val
      elsif val == 0
        push 0 # TODO: or something else?
      else
        push -(rand val)
      end
    when :random_swap
      top = pop
      second = pop
      top, second = [top, second].shuffle
      push second
      push top
    when :sortswap
      # Order the top two values so the larger ends up on top.
      top = pop
      second = pop
      top, second = second, top if top < second
      push second
      push top
    when :swap
      top = pop
      second = pop
      push top
      push second
    when :dup
      top = pop
      push top
      push top
    when :discard
      pop
    when :stack_depth
      push @state.stack.size
    when :rotate_stack
      n = pop
      if n > 0
        if n >= @state.stack.size
          push 0
        else
          push @state.stack[-n-1]
          @state.stack.delete_at(-n-2)
        end
      elsif n < 0
        # Bury the top element n-deep, padding with zeroes if needed.
        top = pop
        @state.stack = [0]*[-n-@state.stack.size, 0].max + @state.stack
        @state.stack.insert(n-1, top)
      end
    when :convert
      # Pop n values and push them back in reversed order.
      n = pop
      n.times.map{pop}.reverse.each{|v| push v}
    when :sleep
      sleep pop/1000.0
    when :const_10
      push 10
    when :const_m1
      push -1
    end
  end
end
# Ordinal mode: the IP moves diagonally and the stack holds strings.
class Ordinal < Mode
  # Maps grid characters to opcode symbols; anything unmapped is a no-op.
  OPERATORS = {
    '@' => :terminate,
    '0' => :digit, '1' => :digit, '2' => :digit, '3' => :digit, '4' => :digit, '5' => :digit, '6' => :digit, '7' => :digit, '8' => :digit, '9' => :digit,
    '+' => :concat,
    '-' => :drop,
    '*' => :riffle,
    ':' => :occurrences,
    '%' => :split,
    '<' => :ensure_west,
    '>' => :ensure_east,
    '^' => :ensure_north,
    'v' => :ensure_south,
    '{' => :turn_left,
    '}' => :turn_right,
    '#' => :trampoline,
    '$' => :cond_trampoline,
    '=' => :cond_cmp,
    '&' => :fold_iterator,
    '~' => :swap,
    '.' => :dup,
    ';' => :discard,
    ',' => :permute_stack,
    '!' => :store_register,
    '?' => :load_register,
    '[' => :register_left,
    ']' => :register_right,
    '(' => :search_left,
    ')' => :search_right,
    '"' => :leave_string_mode,
    "'" => :escape,
    'I' => :input,
    'O' => :output,
    'i' => :raw_input,
    'o' => :raw_output,
    'A' => :intersection,
    'B' => :substrings,
    'C' => :subsequences,
    'D' => :deduplicate,
    'F' => :find,
    'G' => :longest_common_substring,
    'H' => :trim,
    'J' => :jump_raw,
    'K' => :return_raw,
    'L' => :shortest_common_superstring,
    'M' => :inclusive_split,
    'N' => :complement,
    'P' => :permutations,
    'Q' => :reverse_stack,
    'R' => :reverse,
    'S' => :replace,
    'T' => :datetime,
    'U' => :random_choice,
    'V' => :union,
    'W' => :discard_return,
    'X' => :symdifference,
    'Y' => :unzip,
    'Z' => :zip,
    'a' => :const_lf,
    'b' => :shuffle,
    'c' => :characters,
    'd' => :push_joined_stack,
    'e' => :const_empty,
    'f' => :runs,
    'g' => :get_diagonal,
    'h' => :head,
    'j' => :jump,
    'k' => :return,
    'l' => :lower_case,
    'm' => :truncate_to_shorter,
    'u' => :upper_case,
    'n' => :not,
    'p' => :put_diagonal,
    'q' => :join_tape,
    'r' => :expand_ranges,
    's' => :sort,
    't' => :tail,
    'w' => :push_return,
    'x' => :permute,
    'y' => :transliterate,
    'z' => :discard_up_to,
    # Reserved / not yet assigned:
    #'(' => ,
    #')' => ,
    #'!' => ,
    #'$' => ,
    #'&' => ,
    #',' => ,
    #'.' => ,
    #';' => ,
    #'=' => ,
    #'?' => ,
    #'`' => ,
    #'A' => ,
    # ...
    #'Z' => ,
    #'a' => ,
    # ...
    #'z' => ,
  }
  OPERATORS.default = :nop

  # Diagonal movement reflects off the grid boundaries instead of wrapping.
  # On degenerate grids (a single row or column) the IP cannot move at all.
  def raw_move
    if @state.width == 1 || @state.height == 1
      return
    end
    new_pos = @state.ip + @state.dir.vec + @state.storage_offset
    @state.dir = @state.dir.reflect('|') if new_pos.x < 0 || new_pos.x >= @state.width
    @state.dir = @state.dir.reflect('_') if new_pos.y < 0 || new_pos.y >= @state.height
    @state.ip += @state.dir.vec
  end

  # Pops a value coerced to a string; an empty stack yields ''.
  def pop
    val = @state.pop
    val ? val.to_s : ''
  end

  # Finds all occurrences of `label` read along the current diagonal
  # direction, by rotating the grid so the search direction becomes
  # north-east, scanning its anti-diagonals, then rotating the hits back.
  # Returns [x, y] positions of the last character of each match.
  def scan_source label
    ip_dir = @state.dir
    grid = @state.grid
    while !ip_dir.is_a? NorthEast
      grid = grid.transpose.reverse
      ip_dir = ip_dir.left
    end
    height = grid.size
    width = height == 0 ? 0 : grid[0].size
    positions = []
    (0..width+height-2).map do |d|
      min_x = [0,d-height+1].max
      max_x = [width-1,d].min
      line = (min_x..max_x).map do |x|
        y = d - x
        grid[y][x].chr
      end.join
      # Lookahead so overlapping matches are found too.
      line.scan(/(?=#{Regexp.escape(label)})/) do
        x = min_x + $`.size + label.size - 1
        y = d-x
        positions << [x,y]
      end
    end
    # Undo the rotations applied above.
    ip_dir = @state.dir
    while !ip_dir.is_a? NorthEast
      ip_dir = ip_dir.left
      positions.map! {|x, y| [grid.size - y - 1, x]}
      grid = grid.reverse.transpose
    end
    positions
  end

  # Executes the command character `cmd` (the literal grid character).
  def process cmd
    opcode = OPERATORS[cmd]
    case opcode
    when :nop
      raise "No-op reached process(). This shouldn't happen."
    when :terminate
      @state.done = true
    when :ensure_west
      @state.dir = @state.dir.reflect '|' if @state.dir.vec.x > 0
    when :ensure_east
      @state.dir = @state.dir.reflect '|' if @state.dir.vec.x < 0
    when :ensure_north
      @state.dir = @state.dir.reflect '_' if @state.dir.vec.y > 0
    when :ensure_south
      @state.dir = @state.dir.reflect '_' if @state.dir.vec.y < 0
    when :turn_left
      @state.dir = @state.dir.left
    when :turn_right
      @state.dir = @state.dir.right
    when :trampoline
      @state.skip_next
    when :cond_trampoline
      @state.skip_next if pop == ''
    when :cond_cmp
      # Turn left/right depending on the lexicographic order of the top two.
      top = pop
      second = pop
      if top > second
        @state.dir = @state.dir.left
      elsif top < second
        @state.dir = @state.dir.right
      end
    when :fold_iterator
      @state.add_iterator pop
    when :jump
      label = pop
      positions = scan_source(label)
      if !positions.empty?
        push_return
        @state.jump(*positions[0])
      end
    when :return
      @state.jump(*pop_return)
    when :jump_raw
      label = pop
      positions = scan_source(label)
      @state.jump(*positions[0]) if !positions.empty?
    when :return_raw
      @state.jump(*peek_return)
    when :push_return
      push_return
    when :discard_return
      pop_return
    when :get_diagonal
      # Read the string laid out after `label` along the current direction.
      label = pop
      positions = scan_source(label)
      if !positions.empty?
        cursor = Point2D.new(*positions[0]) + @state.dir.vec
        string = ''
        while is_char? @state.cell(cursor)
          string << @state.cell(cursor)
          cursor += @state.dir.vec
        end
        push string
      end
    when :put_diagonal
      value = pop
      label = pop
      positions = scan_source(label)
      if !positions.empty?
        cursor = Point2D.new(*positions[0]) + @state.dir.vec
        value.each_char {|c|
          @state.put_cell(cursor, c.ord)
          cursor += @state.dir.vec
        }
      end
    when :store_register
      # Write the string onto the tape at the register pointer, -1 terminated.
      i = @state.rp
      pop.each_char do |c|
        @state.tape[i] = c.ord
        i += 1
      end
      @state.tape[i] = -1
    when :load_register
      push @state.read_register
    when :register_left
      # Skip to the start of the previous stored string.
      @state.rp -= 1 while is_char? @state.tape[@state.rp-1]
      @state.rp -= 1
      @state.rp -= 1 while is_char? @state.tape[@state.rp-1]
    when :register_right
      @state.rp += 1 while is_char? @state.tape[@state.rp]
      @state.rp += 1
    when :search_left
      # Scan stored strings to the left for one containing `needle`.
      needle = pop
      string = ""
      cursor = @state.rp-1
      cursor -= 1 while is_char? @state.tape[cursor-1]
      (cursor-2).downto(@state.tape.keys.min-1).each do |i|
        if is_char?(@state.tape[i])
          string << @state.tape[i]
        elsif string.reverse[needle]
          @state.rp = i+1
          break
        else
          string = ""
        end
      end
    when :search_right
      needle = pop
      string = ""
      cursor = @state.rp
      cursor += 1 while is_char? @state.tape[cursor]
      (cursor+1..@state.tape.keys.max+1).each do |i|
        if is_char?(@state.tape[i])
          string << @state.tape[i]
        elsif string[needle]
          @state.rp = i - string.size
          break
        else
          string = ""
        end
      end
    when :join_tape
      push @state.tape.keys.sort.map{|i| @state.tape[i]}.select{|v| is_char?(v)}.map(&:chr).join
    when :leave_string_mode
      # Will throw an error when cell isn't a valid code point
      push @state.current_string.map(&:chr).join
    when :escape
      raw_move
      push @state.cell.chr # Will throw an error when cell isn't a valid code point
      @state.ip -= @state.dir.vec
    when :digit
      push(pop + cmd.chr)
    when :input
      line = @state.in_str.gets
      push(line ? line.scrub('').chomp : '')
    when :output
      @state.out_str.puts pop
    when :raw_input
      str = @state.in_str.read
      push(str ? str.scrub('') : '')
    when :raw_output
      @state.out_str << pop
    when :concat
      top = pop
      second = pop
      push(second + top)
    when :drop
      # Remove every (possibly overlapping) occurrence of y from x.
      y = pop
      x = pop
      result = x.chars
      x.scan(/(?=#{Regexp.escape(y)})/) do
        y.size.times do |i|
          result[$`.size + i] = 0
        end
      end
      push (result-[0]).join
    when :riffle
      sep = pop
      push(pop.chars * sep)
    when :occurrences
      sep = pop
      pop.scan(/#{Regexp.escape(sep)}/){ push sep }
    when :split
      sep = pop
      @state.stack += pop.split(sep, -1)
    when :inclusive_split
      # Like :split but keeps the separators interleaved with the pieces.
      sep = pop
      str = pop
      splits = str.split(sep, -1)
      str.scan(/#{Regexp.escape(sep)}/){ push splits.shift; push sep }
      push splits.shift
    when :replace
      target = pop
      needle = pop
      haystack = pop
      push haystack.gsub(needle, target)
    when :trim
      push pop.gsub(/^[ \n\t]+|[ \n\t]+$/, '')
    when :transliterate
      target = pop
      source = pop
      string = pop
      if target.empty?
        source.each_char {|c| string.gsub!(c, '')}
      else
        # Repeat source/target so every occurrence in string gets a mapping;
        # each source character is consumed as it is used.
        max_char_count = string.chars.uniq.map{|c| string.count c}.max
        source *= max_char_count
        string *= source.size / string.size + 1
        string = string.chars.map{ |c|
          if (i = source.index c)
            d = target[i]
            source[i] = ''
            target[i] = ''
            d
          else
            c
          end
        }.join
      end
      push string
    when :discard_up_to
      y = pop
      x = pop
      i = x.index y
      x[0,i+y.size] = '' if i
      push x
    when :find
      needle = pop
      haystack = pop
      push(haystack[needle] || '')
    when :truncate_to_shorter
      top = pop
      second = pop
      length = [top.size, second.size].min
      push second[0,length]
      push top[0,length]
    when :zip
      top = pop.chars
      second = pop.chars
      result = []
      while !top.empty? || !second.empty?
        result << (second.shift || '')
        result << (top.shift || '')
      end
      push result * ''
    when :unzip
      str = pop
      left = ''
      right = ''
      str.scan(/(.)(.|$)/s) do
        left << $1
        right << $2
      end
      push left
      push right
    when :shortest_common_superstring
      # Overlap the end of `second` with the start of `top` as far as possible.
      top = pop
      second = pop
      len = [top.size, second.size].min
      len.downto(0) do |i|
        if second[-i,i] == top[0,i]
          push second+top[i..-1]
          break
        end
      end
    when :longest_common_substring
      top = pop
      second = pop
      second.size.downto(0) do |l|
        if l == 0
          push ""
        else
          shared = second.chars.each_cons(l).select {|s| top[s.join]}
          if !shared.empty?
            shared.uniq.each{|s| push s.join}
            break
          end
        end
      end
    when :intersection
      # Multiset intersection of the two strings' characters.
      second = pop
      first = pop
      result = first.chars.select {|c|
        test = second[c]
        second[c] = '' if test
        test
      }
      push result.join
    when :union
      second = pop
      first = pop
      first.each_char {|c| second[c] = '' if second[c]}
      push(first + second)
    when :symdifference
      second = pop
      first = pop
      temp_second = second.clone
      first.each_char {|c| second[c] = '' if second[c]}
      temp_second.each_char {|c| first[c] = '' if first[c]}
      push first+second
    when :complement
      second = pop
      first = pop
      second.each_char {|c| first[c] = '' if first[c]}
      push first
    when :deduplicate
      push pop.chars.uniq.join
    when :sort
      push pop.chars.sort.join
    when :shuffle
      push pop.chars.shuffle.join
    when :random_choice
      push pop.chars.sample || ''
    when :characters
      # FIX: was `@stack.state += pop.chars` — transposed receiver/attribute
      # (NoMethodError at runtime); pushes each character separately.
      @state.stack += pop.chars
    when :runs
      pop.scan(/(.)\1*/s){push $&}
    when :head
      str = pop
      if str == ''
        push ''
        push ''
      else
        push str[0]
        push str[1..-1]
      end
    when :tail
      str = pop
      if str == ''
        push ''
        push ''
      else
        push str[0..-2]
        push str[-1]
      end
    when :lower_case
      push pop.downcase
    when :upper_case
      push pop.upcase
    when :swap_case
      # NOTE(review): no OPERATORS entry maps to :swap_case yet, so this
      # branch is currently unreachable — presumably reserved for later.
      push pop.swapcase
    when :not
      push(pop == '' ? 'Jabberwocky' : '')
    when :reverse
      push pop.reverse
    when :permutations
      @state.stack += pop.chars.permutation.map{|p| p.join}.to_a
    when :subsequences
      str = pop.chars
      (0..str.size).each do |l|
        str.combination(l).each {|s| push s.join}
      end
    when :substrings
      str = pop.chars
      (1..str.size).each do |l|
        str.each_cons(l).each {|s| push s.join}
      end
    when :permute
      top = pop
      second = pop
      push (0...second.size).stable_sort_by{|i|
        c = top[i]
        c ? c.ord : 1114112 # Value greater than any code point, so that trailing
                            # characters remain in place.
      }.map{|i| second[i]}.join
    when :expand_ranges
      val = pop
      push val.chars.each_cons(2).map{ |a,b|
        if a > b
          (b..a).drop(1).to_a.reverse.join
        else
          (a...b).to_a.join
        end
      }.join + (val[-1] || '')
    when :swap
      top = pop
      second = pop
      push top
      push second
    when :dup
      top = pop
      push top
      push top
    when :discard
      pop
    when :push_joined_stack
      push @state.stack.join
    when :reverse_stack
      # FIX: was `map!(:to_s)` — Array#map! takes a block, not a positional
      # argument, so this raised ArgumentError; use Symbol#to_proc instead.
      @state.stack.reverse!.map!(&:to_s)
    when :permute_stack
      top = pop
      max_size = [@state.stack.size, top.size].max
      @state.stack = (-max_size..-1).stable_sort_by{|i|
        c = top[i]
        c ? c.ord : -1 # Value less than any code point, so that leading
                       # stack elements remain in place.
      }.map{|i| @state.stack[i] || ''}
    when :datetime
      push DateTime.now.strftime '%Y-%m-%dT%H:%M:%S.%L%:z'
    when :const_lf
      push "\n"
    when :const_empty
      push ""
    end
  end
end
Add superimpose
# coding: utf-8
require_relative 'point2d'
require_relative 'direction'
require 'prime'
require 'date'
# Base class for the two execution modes of the interpreter
# (Cardinal = integers, Ordinal = strings). Holds the shared IP
# movement logic; subclasses supply the OPERATORS table, the
# mode-specific raw_move and the mode-specific pop.
class Mode
  # List of operators which should not be ignored while in string mode.
  STRING_CMDS = "\"'\\/_|"

  # state: the shared program state (grid, IP, direction, stack, tape, ...).
  def initialize(state)
    @state = state
  end

  # True when val is a Unicode scalar value (excludes the surrogate
  # range D800-DFFF and anything outside 0..10FFFF).
  def is_char? val
    val && (val >= 0 && val <= 0xD7FF || val >= 0xE000 && val <= 0x10FFFF)
  end

  def process
    raise NotImplementedError
  end

  # Returns true when the resulting cell is a command.
  def move
    # Standing on a quote means the next cell is escaped: step over it first.
    raw_move if @state.cell == "'".ord
    raw_move
    cell = @state.cell
    case cell
    when '/'.ord, '\\'.ord
      # Diagonal mirrors reflect the IP and toggle Cardinal <-> Ordinal.
      @state.dir = @state.dir.reflect cell.chr
      @state.toggle_mode
      return false
    when '_'.ord, '|'.ord
      # Straight walls only reflect; the mode stays the same.
      @state.dir = @state.dir.reflect cell.chr
      return false
    end
    # In string mode every non-mirror cell is handled by the caller.
    return true if @state.string_mode
    @state.print_debug_info if cell == '`'.ord
    is_char?(cell) && self.class::OPERATORS.has_key?(cell.chr)
  end

  # Moves the IP a single cell without regard for mirrors, walls or no-ops.
  # Does respect grid boundaries.
  def raw_move
    raise NotImplementedError
  end

  def push val
    @state.push val
  end

  # Mode-specific: Cardinal pops integers, Ordinal pops strings.
  def pop
    raise NotImplementedError
  end

  def push_return
    @state.push_return
  end

  def pop_return
    @state.pop_return
  end

  def peek_return
    @state.peek_return
  end

  # Returns the top of the stack without removing it (pop, then re-push).
  def peek
    val = pop
    push val
    val
  end
end
class Cardinal < Mode
OPERATORS = {
'@' => :terminate,
'<' => :move_west,
'>' => :move_east,
'^' => :move_north,
'v' => :move_south,
'{' => :turn_left,
'}' => :turn_right,
'#' => :trampoline,
'$' => :cond_trampoline,
'=' => :cond_sign,
'&' => :repeat_iterator,
'~' => :swap,
'.' => :dup,
';' => :discard,
',' => :rotate_stack,
'0' => :digit, '1' => :digit, '2' => :digit, '3' => :digit, '4' => :digit, '5' => :digit, '6' => :digit, '7' => :digit, '8' => :digit, '9' => :digit,
'+' => :add,
'-' => :sub,
'*' => :mul,
':' => :div,
'%' => :mod,
'!' => :store_tape,
'?' => :load_tape,
'[' => :mp_left,
']' => :mp_right,
'(' => :search_left,
')' => :search_right,
'"' => :leave_string_mode,
"'" => :escape,
'I' => :input,
'O' => :output,
'i' => :raw_input,
'o' => :raw_output,
'A' => :bitand,
'B' => :divisors,
'C' => :binomial,
'D' => :deduplicate,
'E' => :power,
'F' => :divides,
'G' => :gcd,
'H' => :abs,
'J' => :jump_raw,
'K' => :return_raw,
'L' => :lcm,
'M' => :divmod,
'N' => :bitnot,
'P' => :factorial,
'Q' => :convert,
'R' => :negate,
'S' => :replace_divisors,
'T' => :sleep,
'U' => :random,
'V' => :bitor,
'W' => :discard_return,
'X' => :bitxor,
'Y' => :unpack,
'Z' => :pack,
'a' => :const_10,
'b' => :random_swap,
'c' => :prime_factors,
'd' => :stack_depth,
'e' => :const_m1,
'f' => :prime_factor_pairs,
'g' => :get_cell,
'h' => :inc,
'j' => :jump,
'k' => :return,
'l' => :clear_bits,
'm' => :floor,
'n' => :not,
'p' => :put_cell,
'q' => :get_mp,
'r' => :range,
's' => :sortswap,
't' => :dec,
'u' => :set_bits,
'w' => :push_return,
'x' => :extract_bit,
'y' => :bitif,
'z' => :drop_small_factors,
}
OPERATORS.default = :nop
# Cardinal movement: step one cell in the current direction and let the
# state wrap the IP at the grid boundary.
def raw_move
  @state.ip += @state.dir.vec
  @state.wrap
end
# Pops an integer. If the popped value is a string, every integer
# literal embedded in it is pushed back onto the stack and the last of
# those (the new top) is returned; strings without digits are discarded
# and the next stack value is tried. An exhausted stack pops 0
# (assumes State#pop returns nil when empty — consistent with `val || 0`).
def pop
  val = nil
  loop do
    val = @state.pop
    if val.is_a?(String)
      found = false
      # (?:^|(?!\G)) forbids a match from starting exactly where the
      # previous one ended, so in "12-34" the '-' is not glued onto 12's
      # successor: we get 12 and 34, while a separated "-34" keeps its sign.
      val.scan(/(?:^|(?!\G))-?\d+/) { push $&.to_i; found = true }
      next if !found
      val = @state.pop
    end
    break
  end
  val || 0
end
# Executes a single Cardinal (integer) mode command. +cmd+ is the code
# point of the current cell, looked up in OPERATORS. All operands come
# from the Cardinal pop, which converts string stack entries into the
# integers embedded in them and treats an empty stack as 0.
def process cmd
  opcode = OPERATORS[cmd]
  case opcode
  when :nop
    raise "No-op reached process(). This shouldn't happen."
  when :terminate
    @state.done = true

  # --- IP direction and control flow ---
  when :move_east
    @state.dir = East.new
  when :move_west
    @state.dir = West.new
  when :move_south
    @state.dir = South.new
  when :move_north
    @state.dir = North.new
  when :turn_left
    @state.dir = @state.dir.left
  when :turn_right
    @state.dir = @state.dir.right
  when :trampoline
    @state.skip_next
  when :cond_trampoline
    @state.skip_next if pop == 0
  when :cond_sign
    # Turn left for negative, right for positive, go straight for zero.
    val = pop
    if val < 0
      @state.dir = @state.dir.left
    elsif val > 0
      @state.dir = @state.dir.right
    end
  when :repeat_iterator
    @state.add_iterator pop

  # --- Jumps and the return address stack ---
  when :jump
    push_return
    y = pop
    x = pop
    @state.jump(x,y)
  when :return
    @state.jump(*pop_return)
  when :jump_raw
    y = pop
    x = pop
    @state.jump(x,y)
  when :return_raw
    @state.jump(*peek_return)
  when :push_return
    push_return
  when :discard_return
    pop_return

  # --- Grid and tape access ---
  when :get_cell
    y = pop
    x = pop
    push @state.cell(Point2D.new(x,y))
  when :put_cell
    v = pop
    y = pop
    x = pop
    @state.put_cell(Point2D.new(x,y), v)
  when :store_tape
    @state.tape[@state.mp] = pop
  when :load_tape
    push (@state.tape[@state.mp] || -1)
  when :mp_left
    @state.mp -= 1
  when :mp_right
    @state.mp += 1
  when :search_left
    # Move the memory pointer to the nearest matching cell on the left.
    val = pop
    (@state.mp-1).downto(@state.tape.keys.min-1).each do |i|
      if @state.tape[i] == val
        @state.mp = i
        break
      end
    end
  when :search_right
    val = pop
    (@state.mp+1..@state.tape.keys.max+1).each do |i|
      if @state.tape[i] == val
        @state.mp = i
        break
      end
    end
  when :get_mp
    push @state.mp

  # --- String mode and I/O ---
  when :leave_string_mode
    @state.stack += @state.current_string
  when :escape
    # Push the next cell's code point without executing it.
    raw_move
    push @state.cell
    @state.ip -= @state.dir.vec
  when :input
    char = @state.in_str.getc
    # Skip bytes that don't form valid characters.
    while char && char.scrub('') == ''
      char = @state.in_str.getc
    end
    push(char ? char.ord : -1)
  when :output
    # Will throw an error when value isn't a valid code point
    val = pop
    if is_char?(val)
      @state.out_str << val.chr
    end
  when :raw_input
    push(@state.in_str.getbyte || -1)
  when :raw_output
    # TODO: do

  # --- Arithmetic ---
  when :digit
    push cmd.chr.to_i
  when :add
    push(pop + pop)
  when :sub
    y = pop
    push(pop - y)
  when :mul
    push(pop * pop)
  when :div
    y = pop
    push(pop / y)
  when :mod
    y = pop
    push(pop % y)
  when :divmod
    y = pop
    x = pop
    push(x / y)
    push(x % y)
  when :inc
    push(pop+1)
  when :dec
    push(pop-1)
  when :abs
    push(pop.abs)
  when :power
    y = pop
    x = pop
    if y < 0
      # Integer division: this is 0 unless x is 1 or -1.
      push 1/x**y.abs
    else
      push x**y
    end

  # --- Bitwise operations ---
  when :bitand
    push(pop & pop)
  when :bitnot
    push(~pop)
  when :bitor
    push(pop | pop)
  when :bitxor
    push(pop ^ pop)
  when :bitif
    # Bitwise ternary: bits of y where x is 1, bits of z elsewhere.
    z = pop
    y = pop
    x = pop
    push(x&y | ~x&z)
  when :clear_bits
    x = pop
    if x > 0
      msb = Math.log2(x).floor
    elsif x < -1
      msb = Math.log2(~x).floor
    else
      msb = 0
    end
    push (x & -(2**msb))
  when :set_bits
    x = pop
    if x > 0
      msb = Math.log2(x).floor
    elsif x < -1
      msb = Math.log2(~x).floor
    else
      msb = 0
    end
    push (x | (2**msb-1))
  when :extract_bit
    y = pop
    x = pop
    if y >= 0
      push x[y]
    else
      # Negative index counts from the most significant bit downwards.
      if x > 0
        msb = Math.log2(x).floor
      elsif x < -1
        msb = Math.log2(~x).floor
      else
        msb = 0
      end
      push x[msb-y+1]
    end

  # --- Number theory ---
  when :factorial
    val = pop
    if val >= 0
      push (1..val).reduce(1, :*)
    else
      push (val..-1).reduce(1, :*)
    end
  when :binomial
    k = pop
    n = pop
    # Use the symmetry C(n,k) == C(n,n-k) to keep the loop short.
    k = n-k if n > 0 && k > n/2
    if k < 0
      push 0
    else
      prod = 1
      (1..k).each do |i|
        prod *= n
        prod /= i
        n -= 1
      end
      push prod
    end
  when :negate
    push -pop
  when :prime_factors
    n = pop
    if n == 0
      push 0
    else
      Prime.prime_division(n).each{ |p,n| n.times{ push p } }
    end
  when :prime_factor_pairs
    n = pop
    if n == 0
      push 0
      push 1
    else
      Prime.prime_division(n).flatten.each{ |x| push x }
    end
  when :deduplicate
    # Squarefree kernel: product of the distinct prime factors.
    n = pop
    if n == 0
      push 0
    else
      push Prime.int_from_prime_division(Prime.prime_division(n).map{ |p,n| [p,1]})
    end
  when :divides
    y = pop
    x = pop
    if y != 0 && x % y == 0
      push y
    else
      push 0
    end
  when :gcd
    push (pop.gcd pop)
  when :lcm
    push (pop.lcm pop)
  when :floor
    # Round x down to the nearest multiple of y.
    y = pop
    x = pop
    push (x/y)*y
  when :replace_divisors
    # Replace every factor y in x with z.
    z = pop
    y = pop
    x = pop
    if x == 0
      push 0
    elsif y == 1 || y == -1
      if z == y
        push x
      elsif z == 0
        push 0
      else
        # Replacing a unit "divisor" with anything else diverges on purpose.
        loop { next }
      end
    else
      order = 0
      while x%y == 0
        order += 1
        x /= y
      end
      x *= z**order
      # FIX: the result was computed but never pushed, so in the general
      # case this operator silently consumed its operands.
      push x
    end
  when :divisors
    n = pop
    sgn = n <=> 0
    n = n.abs
    k = 1
    small_divs = []
    large_divs = []
    # Trial division up to sqrt(n) yields both halves of each divisor pair.
    while k*k <= n
      if n%k == 0
        small_divs << k
        large_divs << n/k if k*k != n
      end
      k += 1
    end
    (small_divs + large_divs.reverse).each {|k| push k*sgn}
  when :drop_small_factors
    k = pop
    n = pop
    if n != 0
      if k > 0
        (2..k).each {|i| n /= i while n % i == 0}
      else
        -2.downto(k) {|i| n /= i while n % i == 0}
      end
    end
    push n

  # --- Cantor-style pairing of two integers into one ---
  when :pack
    y = pop
    x = pop
    # Map integers to naturals
    sgn = x <=> 0
    x = x*sgn*2 + [0, sgn].min
    sgn = y <=> 0
    y = y*sgn*2 + [0, sgn].min
    # Map two naturals to one
    z = (x+y)*(x+y+1)/2 + y
    # Map the natural back to an integer
    z = (-1)**z * ((z+1)/2)
    push z
  when :unpack
    z = pop
    # Map the integer to a positive natural
    sgn = z <=> 0
    z = z*sgn*2 + [0, sgn].min
    # Map the natural to two
    y = z
    x = 0
    while x < y
      x += 1
      y -= x
    end
    x -= y
    # Map the naturals back to integers
    x = (-1)**x * ((x+1)/2)
    y = (-1)**y * ((y+1)/2)
    push x
    push y

  # --- Miscellaneous ---
  when :not
    push (pop == 0 ? 1 : 0)
  when :range
    val = pop
    if val >= 0
      0.upto(val) {|i| push i}
    else
      (-val).downto(0) {|i| push i}
    end
  when :random
    val = pop
    if val > 0
      push rand val
    elsif val == 0
      push 0 # TODO: or something else?
    else
      push -(rand val)
    end
  when :random_swap
    top = pop
    second = pop
    top, second = [top, second].shuffle
    push second
    push top
  when :sortswap
    # Ensure the top two elements are in ascending order (top >= second).
    top = pop
    second = pop
    top, second = second, top if top < second
    push second
    push top
  when :swap
    top = pop
    second = pop
    push top
    push second
  when :dup
    top = pop
    push top
    push top
  when :discard
    pop
  when :stack_depth
    push @state.stack.size
  when :rotate_stack
    n = pop
    if n > 0
      # Dig out the element n below the top and move it to the top.
      if n >= @state.stack.size
        push 0
      else
        push @state.stack[-n-1]
        @state.stack.delete_at(-n-2)
      end
    elsif n < 0
      # Bury the top element |n| deep, padding with zeros if needed.
      top = pop
      @state.stack = [0]*[-n-@state.stack.size, 0].max + @state.stack
      @state.stack.insert(n-1, top)
    end
  when :convert
    # Normalise the top n elements to integers (pop does the conversion),
    # preserving their order.
    n = pop
    n.times.map{pop}.reverse.each{|v| push v}
  when :sleep
    sleep pop/1000.0
  when :const_10
    push 10
  when :const_m1
    push -1
  end
end
end
class Ordinal < Mode
OPERATORS = {
'@' => :terminate,
'0' => :digit, '1' => :digit, '2' => :digit, '3' => :digit, '4' => :digit, '5' => :digit, '6' => :digit, '7' => :digit, '8' => :digit, '9' => :digit,
'+' => :superimpose,
'-' => :drop,
'*' => :concat,
':' => :occurrences,
'%' => :split,
'<' => :ensure_west,
'>' => :ensure_east,
'^' => :ensure_north,
'v' => :ensure_south,
'{' => :turn_left,
'}' => :turn_right,
'#' => :trampoline,
'$' => :cond_trampoline,
'=' => :cond_cmp,
'&' => :fold_iterator,
'~' => :swap,
'.' => :dup,
';' => :discard,
',' => :permute_stack,
'!' => :store_register,
'?' => :load_register,
'[' => :register_left,
']' => :register_right,
'(' => :search_left,
')' => :search_right,
'"' => :leave_string_mode,
"'" => :escape,
'I' => :input,
'O' => :output,
'i' => :raw_input,
'o' => :raw_output,
'A' => :intersection,
'B' => :substrings,
'C' => :subsequences,
'D' => :deduplicate,
'E' => :riffle,
'F' => :find,
'G' => :longest_common_substring,
'H' => :trim,
'J' => :jump_raw,
'K' => :return_raw,
'L' => :shortest_common_superstring,
'M' => :inclusive_split,
'N' => :complement,
'P' => :permutations,
'Q' => :reverse_stack,
'R' => :reverse,
'S' => :replace,
'T' => :datetime,
'U' => :random_choice,
'V' => :union,
'W' => :discard_return,
'X' => :symdifference,
'Y' => :unzip,
'Z' => :zip,
'a' => :const_lf,
'b' => :shuffle,
'c' => :characters,
'd' => :push_joined_stack,
'e' => :const_empty,
'f' => :runs,
'g' => :get_diagonal,
'h' => :head,
'j' => :jump,
'k' => :return,
'l' => :lower_case,
'm' => :truncate_to_shorter,
'u' => :upper_case,
'n' => :not,
'p' => :put_diagonal,
'q' => :join_tape,
'r' => :expand_ranges,
's' => :sort,
't' => :tail,
'w' => :push_return,
'x' => :permute,
'y' => :transliterate,
'z' => :discard_up_to,
#'(' => ,
#')' => ,
#'!' => ,
#'$' => ,
#'&' => ,
#',' => ,
#'.' => ,
#';' => ,
#'=' => ,
#'?' => ,
#'`' => ,
#'A' => ,
# ...
#'Z' => ,
#'a' => ,
# ...
#'z' => ,
}
OPERATORS.default = :nop
# Ordinal movement: the IP moves diagonally and reflects off the grid
# boundaries instead of wrapping. On a one-row or one-column grid
# diagonal movement is impossible, so the IP stays put.
def raw_move
  if @state.width == 1 || @state.height == 1
    return
  end
  new_pos = @state.ip + @state.dir.vec + @state.storage_offset
  # Bounce off vertical/horizontal edges before actually stepping.
  @state.dir = @state.dir.reflect('|') if new_pos.x < 0 || new_pos.x >= @state.width
  @state.dir = @state.dir.reflect('_') if new_pos.y < 0 || new_pos.y >= @state.height
  @state.ip += @state.dir.vec
end
# Pops the top stack entry coerced to a string.
# An exhausted stack (nil from State#pop) yields the empty string.
def pop
  raw = @state.pop
  return '' unless raw
  raw.to_s
end
# Finds all occurrences of +label+ written along the current diagonal
# direction. The grid is rotated until the IP direction maps to
# NorthEast, each anti-diagonal of the rotated grid is scanned as a
# string, and the hit positions are rotated back into original grid
# coordinates. Returns an array of [x, y] pairs pointing at the cell
# of the label's last character.
def scan_source label
  ip_dir = @state.dir
  grid = @state.grid
  # Rotate the grid until the movement direction becomes NorthEast.
  while !ip_dir.is_a? NorthEast
    grid = grid.transpose.reverse
    ip_dir = ip_dir.left
  end
  height = grid.size
  width = height == 0 ? 0 : grid[0].size
  positions = []
  # d enumerates the anti-diagonals x+y == d of the rotated grid.
  (0..width+height-2).map do |d|
    min_x = [0,d-height+1].max
    max_x = [width-1,d].min
    line = (min_x..max_x).map do |x|
      y = d - x
      grid[y][x].chr
    end.join
    # Zero-width lookahead keeps overlapping matches; $` is the offset
    # of the match within the diagonal's string.
    line.scan(/(?=#{Regexp.escape(label)})/) do
      x = min_x + $`.size + label.size - 1
      y = d-x
      positions << [x,y]
    end
  end
  # Undo the rotation, mapping the found positions back step by step.
  ip_dir = @state.dir
  while !ip_dir.is_a? NorthEast
    ip_dir = ip_dir.left
    positions.map! {|x, y| [grid.size - y - 1, x]}
    grid = grid.reverse.transpose
  end
  positions
end
# Executes a single Ordinal (string) mode command. +cmd+ is the code
# point of the current cell, looked up in OPERATORS. All operands come
# from the Ordinal pop, which coerces stack entries to strings and
# treats an empty stack as ''.
def process cmd
  opcode = OPERATORS[cmd]
  case opcode
  when :nop
    raise "No-op reached process(). This shouldn't happen."
  when :terminate
    @state.done = true

  # --- IP direction and control flow ---
  when :ensure_west
    @state.dir = @state.dir.reflect '|' if @state.dir.vec.x > 0
  when :ensure_east
    @state.dir = @state.dir.reflect '|' if @state.dir.vec.x < 0
  when :ensure_north
    @state.dir = @state.dir.reflect '_' if @state.dir.vec.y > 0
  when :ensure_south
    @state.dir = @state.dir.reflect '_' if @state.dir.vec.y < 0
  when :turn_left
    @state.dir = @state.dir.left
  when :turn_right
    @state.dir = @state.dir.right
  when :trampoline
    @state.skip_next
  when :cond_trampoline
    @state.skip_next if pop == ''
  when :cond_cmp
    # Turn left/right depending on the lexicographic order of the top
    # two elements; go straight when they are equal.
    top = pop
    second = pop
    if top > second
      @state.dir = @state.dir.left
    elsif top < second
      @state.dir = @state.dir.right
    end
  when :fold_iterator
    @state.add_iterator pop

  # --- Jumps via labels written in the source ---
  when :jump
    label = pop
    positions = scan_source(label)
    if !positions.empty?
      push_return
      @state.jump(*positions[0])
    end
  when :return
    @state.jump(*pop_return)
  when :jump_raw
    label = pop
    positions = scan_source(label)
    @state.jump(*positions[0]) if !positions.empty?
  when :return_raw
    @state.jump(*peek_return)
  when :push_return
    push_return
  when :discard_return
    pop_return

  # --- Grid access along the current diagonal ---
  when :get_diagonal
    label = pop
    positions = scan_source(label)
    if !positions.empty?
      # Read characters behind the label until a non-character cell.
      cursor = Point2D.new(*positions[0]) + @state.dir.vec
      string = ''
      while is_char? @state.cell(cursor)
        string << @state.cell(cursor)
        cursor += @state.dir.vec
      end
      push string
    end
  when :put_diagonal
    value = pop
    label = pop
    positions = scan_source(label)
    if !positions.empty?
      cursor = Point2D.new(*positions[0]) + @state.dir.vec
      value.each_char {|c|
        @state.put_cell(cursor, c.ord)
        cursor += @state.dir.vec
      }
    end

  # --- Tape/register access ---
  when :store_register
    i = @state.rp
    pop.each_char do |c|
      @state.tape[i] = c.ord
      i += 1
    end
    # A -1 terminates the stored string on the tape.
    @state.tape[i] = -1
  when :load_register
    push @state.read_register
  when :register_left
    # Skip to the beginning of the previous string on the tape.
    @state.rp -= 1 while is_char? @state.tape[@state.rp-1]
    @state.rp -= 1
    @state.rp -= 1 while is_char? @state.tape[@state.rp-1]
  when :register_right
    @state.rp += 1 while is_char? @state.tape[@state.rp]
    @state.rp += 1
  when :search_left
    # Find the nearest tape string to the left containing the needle.
    needle = pop
    string = ""
    cursor = @state.rp-1
    cursor -= 1 while is_char? @state.tape[cursor-1]
    (cursor-2).downto(@state.tape.keys.min-1).each do |i|
      if is_char?(@state.tape[i])
        string << @state.tape[i]
      elsif string.reverse[needle]
        @state.rp = i+1
        break
      else
        string = ""
      end
    end
  when :search_right
    needle = pop
    string = ""
    cursor = @state.rp
    cursor += 1 while is_char? @state.tape[cursor]
    (cursor+1..@state.tape.keys.max+1).each do |i|
      if is_char?(@state.tape[i])
        string << @state.tape[i]
      elsif string[needle]
        @state.rp = i - string.size
        break
      else
        string = ""
      end
    end
  when :join_tape
    push @state.tape.keys.sort.map{|i| @state.tape[i]}.select{|v| is_char?(v)}.map(&:chr).join

  # --- String mode and I/O ---
  when :leave_string_mode
    push @state.current_string.select{|c| is_char? c }.map(&:chr).join
  when :escape
    # Push the next cell's character without executing it.
    raw_move
    if is_char?(@state.cell)
      push @state.cell.chr
    else
      push ''
    end
    @state.ip -= @state.dir.vec
  when :digit
    push(pop + cmd.chr)
  when :input
    line = @state.in_str.gets
    push(line ? line.scrub('').chomp : '')
  when :output
    @state.out_str.puts pop
  when :raw_input
    str = @state.in_str.read
    push(str ? str.scrub('') : '')
  when :raw_output
    @state.out_str << pop

  # --- String "arithmetic" ---
  when :superimpose
    # Character-wise maximum of the two strings.
    top = pop
    second = pop
    result = ""
    [top.size, second.size].max.times do |i|
      result << [top[i] || 0.chr, second[i] || 0.chr].max
    end
    push result
  when :concat
    top = pop
    second = pop
    push(second + top)
  when :drop
    # Remove all (possibly overlapping) occurrences of y from x.
    y = pop
    x = pop
    result = x.chars
    x.scan(/(?=#{Regexp.escape(y)})/) do
      y.size.times do |i|
        result[$`.size + i] = 0
      end
    end
    push (result-[0]).join
  when :riffle
    sep = pop
    push(pop.chars * sep)
  when :occurrences
    sep = pop
    pop.scan(/#{Regexp.escape(sep)}/){ push sep }
  when :split
    sep = pop
    @state.stack += pop.split(sep, -1)
  when :inclusive_split
    # Like split, but also pushes the separators between the chunks.
    sep = pop
    str = pop
    splits = str.split(sep, -1)
    str.scan(/#{Regexp.escape(sep)}/){ push splits.shift; push sep }
    push splits.shift
  when :replace
    target = pop
    needle = pop
    haystack = pop
    push haystack.gsub(needle, target)
  when :trim
    push pop.gsub(/^[ \n\t]+|[ \n\t]+$/, '')
  when :transliterate
    target = pop
    source = pop
    string = pop
    if target.empty?
      source.each_char {|c| string.gsub!(c, '')}
    else
      # Repeat source so every occurrence of a character gets its own
      # mapping, then repeat target to cover all of source.
      max_char_count = string.chars.uniq.map{|c| string.count c}.max
      source *= max_char_count
      # FIX: was `string *= source.size / string.size + 1`, which divides
      # by zero for an empty input string and leaves target shorter than
      # source (later `target[i] = ''` would raise IndexError). The
      # string that needs padding to source's length is target.
      target *= source.size / target.size + 1
      string = string.chars.map{ |c|
        if (i = source.index c)
          d = target[i]
          source[i] = ''
          target[i] = ''
          d
        else
          c
        end
      }.join
    end
    push string
  when :discard_up_to
    y = pop
    x = pop
    i = x.index y
    x[0,i+y.size] = '' if i
    push x
  when :find
    needle = pop
    haystack = pop
    push(haystack[needle] || '')
  when :truncate_to_shorter
    top = pop
    second = pop
    length = [top.size, second.size].min
    push second[0,length]
    push top[0,length]
  when :zip
    top = pop.chars
    second = pop.chars
    result = []
    while !top.empty? || !second.empty?
      result << (second.shift || '')
      result << (top.shift || '')
    end
    push result * ''
  when :unzip
    str = pop
    left = ''
    right = ''
    str.scan(/(.)(.|$)/s) do
      left << $1
      right << $2
    end
    push left
    push right
  when :shortest_common_superstring
    # Overlap the longest suffix of second that prefixes top.
    top = pop
    second = pop
    len = [top.size, second.size].min
    len.downto(0) do |i|
      if second[-i,i] == top[0,i]
        push second+top[i..-1]
        break
      end
    end
  when :longest_common_substring
    top = pop
    second = pop
    second.size.downto(0) do |l|
      if l == 0
        push ""
      else
        shared = second.chars.each_cons(l).select {|s| top[s.join]}
        if !shared.empty?
          shared.uniq.each{|s| push s.join}
          break
        end
      end
    end

  # --- Multiset operations on characters ---
  when :intersection
    second = pop
    first = pop
    result = first.chars.select {|c|
      test = second[c]
      second[c] = '' if test
      test
    }
    push result.join
  when :union
    second = pop
    first = pop
    first.each_char {|c| second[c] = '' if second[c]}
    push(first + second)
  when :symdifference
    second = pop
    first = pop
    temp_second = second.clone
    first.each_char {|c| second[c] = '' if second[c]}
    temp_second.each_char {|c| first[c] = '' if first[c]}
    push first+second
  when :complement
    second = pop
    first = pop
    second.each_char {|c| first[c] = '' if first[c]}
    push first
  when :deduplicate
    push pop.chars.uniq.join
  when :sort
    push pop.chars.sort.join
  when :shuffle
    push pop.chars.shuffle.join
  when :random_choice
    push pop.chars.sample || ''
  when :characters
    # FIX: was `@stack.state += ...` (NoMethodError); push each character
    # of the popped string as its own stack element, like :permutations
    # and :split do via @state.stack.
    @state.stack += pop.chars
  when :runs
    pop.scan(/(.)\1*/s){push $&}
  when :head
    str = pop
    if str == ''
      push ''
      push ''
    else
      push str[0]
      push str[1..-1]
    end
  when :tail
    str = pop
    if str == ''
      push ''
      push ''
    else
      push str[0..-2]
      push str[-1]
    end
  when :lower_case
    push pop.downcase
  when :upper_case
    push pop.upcase
  when :swap_case
    # NOTE: no key in OPERATORS currently maps to :swap_case, so this
    # branch is unreachable as the table stands.
    push pop.swapcase
  when :not
    push(pop == '' ? 'Jabberwocky' : '')
  when :reverse
    push pop.reverse
  when :permutations
    @state.stack += pop.chars.permutation.map{|p| p.join}.to_a
  when :subsequences
    str = pop.chars
    (0..str.size).each do |l|
      str.combination(l).each {|s| push s.join}
    end
  when :substrings
    str = pop.chars
    (1..str.size).each do |l|
      str.each_cons(l).each {|s| push s.join}
    end
  when :permute
    top = pop
    second = pop
    push (0...second.size).stable_sort_by{|i|
      c = top[i]
      c ? c.ord : 1114112 # Value greater than any code point, so that trailing
                          # characters remain in place.
    }.map{|i| second[i]}.join
  when :expand_ranges
    val = pop
    push val.chars.each_cons(2).map{ |a,b|
      if a > b
        (b..a).drop(1).to_a.reverse.join
      else
        (a...b).to_a.join
      end
    }.join + (val[-1] || '')

  # --- Stack manipulation ---
  when :swap
    top = pop
    second = pop
    push top
    push second
  when :dup
    top = pop
    push top
    push top
  when :discard
    pop
  when :push_joined_stack
    push @state.stack.join
  when :reverse_stack
    # FIX: map! takes a block, not a positional Symbol - `map!(:to_s)`
    # raised ArgumentError. Stringify the elements while reversing.
    @state.stack.reverse!.map!(&:to_s)
  when :permute_stack
    top = pop
    max_size = [@state.stack.size, top.size].max
    @state.stack = (-max_size..-1).stable_sort_by{|i|
      c = top[i]
      c ? c.ord : -1 # Value less than any code point, so that leading
                     # stack elements remain in place.
    }.map{|i| @state.stack[i] || ''}

  # --- Constants ---
  when :datetime
    push DateTime.now.strftime '%Y-%m-%dT%H:%M:%S.%L%:z'
  when :const_lf
    push "\n"
  when :const_empty
    push ""
  end
end
end
|
# Represent a pack of cards as a 1..52 array and deal cards from it.
module Cribbage
  # A standard 52-card pack. Each card slot holds a 0/1 flag so that a
  # card can be dealt at most once.
  class Pack
    def initialize
      @cards = Array.new( 52, 1 )
      @left = 52 # Number of cards not yet dealt
    end

    # Deals one random undealt card wrapped in +klass+ (value 1..13,
    # suit 1..4). Returns nil once the pack is exhausted.
    def deal( klass = Card )
      return nil if empty?
      index = rand 52
      index = rand( 52 ) while @cards[index].zero?
      @cards[index] = 0
      @left -= 1
      klass.new( (index / 4) + 1, (index % 4) + 1 )
    end

    # True when every card has been dealt.
    def empty?
      @left.zero?
    end

    # Cutting the pack is just dealing one random card.
    def cut( klass = Card )
      deal klass
    end

    protected

    attr_reader :left
  end
end
Added GosuPack that can draw itself.
Also, @left has become @cards_left because @left is implicit in Region.
# Represent a pack of cards as a 1..52 array and deal cards from it.
require './region'
module Cribbage
# A standard 52-card pack. Each card slot holds a 0/1 flag so that a
# card can be dealt at most once.
class Pack
  def initialize
    @cards = Array.new( 52, 1 )
    @cards_left = 52 # Number of cards not yet dealt
  end

  # Deals one random undealt card wrapped in +klass+ (value 1..13,
  # suit 1..4). Returns nil once the pack is exhausted.
  def deal( klass = Card )
    return nil if empty?
    index = rand 52
    index = rand( 52 ) while @cards[index].zero?
    @cards[index] = 0
    @cards_left -= 1
    klass.new( (index / 4) + 1, (index % 4) + 1 )
  end

  # True when every card has been dealt.
  def empty?
    @cards_left.zero?
  end

  # Cutting the pack is just dealing one random card.
  def cut( klass = Card )
    deal klass
  end

  protected

  attr_reader :cards_left
end
# A Pack that can draw itself with Gosu images. Region (defined in
# './region', not visible here) supplies the positional helpers
# (set_area, left, top, ...).
class GosuPack < Pack
  include Region

  # back/front: images for the card back and face.
  def set_images( back, front )
    @back, @front = back, front
  end

  def set_position( pos_left, pos_top )
    set_area( pos_left, pos_top, CribbageGame::CARD_WIDTH, CribbageGame::CARD_HEIGHT )
  end

  # Draws the face-down pack at its region position.
  def draw
    @back.draw( left, top, 1 )
  end

  # Deals cards into a fan starting at (pos_left, pos_top), advancing
  # +gap+ pixels per card.
  # NOTE(review): `left` below resolves to Region's left edge, not to
  # the number of remaining cards (@cards_left / cards_left). As written
  # the loop keeps dealing until deal() returns nil and then fails on
  # nil.set_position - presumably this was meant to be
  # `while cards_left > 0`. Confirm against Region and the callers.
  def draw_fan( pos_left, pos_top, gap, orient )
    while left > 0
      card = deal( GosuCard )
      card.set_position( pos_left, pos_top )
      card.draw( orient )
      pos_left += gap
    end
  end
end
end
|
# heroku-ping: utility to keep heroku web applications active
# Copyright (c) 2013 Austin Seipp. See license notice in LICENSE.txt.
require 'net/http'
require 'clockwork'
require 'logger'
## -----------------------------------------------------------------------------
## -- Init ---------------------------------------------------------------------
LOG = Logger.new(STDOUT)
if ENV['PING_URL'] == nil
LOG.fatal "No PING_URL set. Aborting."
Kernel.exit -1
end
if ENV['PING_METHOD'] == nil
ENV['PING_METHOD'] = 'HEAD'
end
if ENV['PING_INTERVAL'] == nil
ENV['PING_INTERVAL'] = '1200'
end
## -----------------------------------------------------------------------------
## -- Handlers -----------------------------------------------------------------
# Performs one ping against ENV['PING_URL'] with the configured HTTP
# method and logs the outcome: 1xx-3xx responses log the status as
# info, anything else logs status, headers and body as errors.
def ping
  LOG.info "Pinging #{ENV['PING_URL']}..."
  LOG.info "HTTP method: #{ENV['PING_METHOD'].to_s.upcase}"
  resp = request(ENV['PING_URL'], ENV['PING_METHOD'].downcase.to_sym)
  if resp.code =~ /^[1-3]..$/ # Valid codes [100-399]
    LOG.info "Status code: (#{resp.code})"
  else
    headers = ''
    resp.each_header { | k, v | headers << "\n#{k} = #{v}" }
    LOG.error "Status code: (#{resp.code})"
    LOG.error "Response headers:#{headers}"
    LOG.error "Response body:\n#{resp.body}" unless resp.body.nil?
  end
end
# Issues a single HTTP request (:head or :get) against +uri+ and
# returns the Net::HTTPResponse. Raises ArgumentError for any other
# method symbol.
def request(uri, type=:head)
  url = URI.parse(uri)
  # FIX: a PING_URL without a trailing slash parses to an empty #path,
  # which Net::HTTP rejects - fall back to the root path.
  url_path = url.path == '' ? '/' : url.path
  Net::HTTP.start(url.host, url.port) do | http |
    case type
    when :head
      http.head(url_path)
    when :get
      http.get(url_path)
    else
      raise ArgumentError, 'Unsupported HTTP method'
    end
  end
end
## -----------------------------------------------------------------------------
## -- Boilerplate --------------------------------------------------------------
module Clockwork
handler do |j|
case j
when 'ping.act' then ping
else raise ArgumentError, 'Invalid argument!'
end
end
every ENV['PING_INTERVAL'].to_i.seconds, 'ping.act'
LOG.info \
"Now pinging #{ENV['PING_URL']} every #{ENV['PING_INTERVAL']} seconds..."
end
Fixes pinging of root url path.
If PING_URL was set without a trailing path, the request would fail.
Solution: expand the empty path '' to '/'.
Signed-off-by: Aleksandrs Ļedovskis <5fcaa23113471e47256cc833d193d7c3d0e8ce4a@ledovskis.lv>
Signed-off-by: Austin Seipp <48c905ac3851f382b914247d97dd93273d5ff555@pobox.com>
Acked-by: Austin Seipp <48c905ac3851f382b914247d97dd93273d5ff555@pobox.com>
# heroku-ping: utility to keep heroku web applications active
# Copyright (c) 2013 Austin Seipp. See license notice in LICENSE.txt.
require 'net/http'
require 'clockwork'
require 'logger'
## -----------------------------------------------------------------------------
## -- Init ---------------------------------------------------------------------
LOG = Logger.new(STDOUT)
if ENV['PING_URL'] == nil
LOG.fatal "No PING_URL set. Aborting."
Kernel.exit -1
end
if ENV['PING_METHOD'] == nil
ENV['PING_METHOD'] = 'HEAD'
end
if ENV['PING_INTERVAL'] == nil
ENV['PING_INTERVAL'] = '1200'
end
## -----------------------------------------------------------------------------
## -- Handlers -----------------------------------------------------------------
# Performs one ping against ENV['PING_URL'] with the configured HTTP
# method and logs the outcome: 1xx-3xx responses log the status as
# info, anything else logs status, headers and body as errors.
def ping
  LOG.info "Pinging #{ENV['PING_URL']}..."
  LOG.info "HTTP method: #{ENV['PING_METHOD'].to_s.upcase}"
  resp = request(ENV['PING_URL'], ENV['PING_METHOD'].downcase.to_sym)
  if resp.code =~ /^[1-3]..$/ # Valid codes [100-399]
    LOG.info "Status code: (#{resp.code})"
  else
    headers = ''
    resp.each_header { | k, v | headers << "\n#{k} = #{v}" }
    LOG.error "Status code: (#{resp.code})"
    LOG.error "Response headers:#{headers}"
    LOG.error "Response body:\n#{resp.body}" unless resp.body.nil?
  end
end
# Issues a single HTTP request (:head or :get) against +uri+ and
# returns the Net::HTTPResponse. Raises ArgumentError for any other
# method symbol.
def request(uri, type=:head)
  url = URI.parse(uri)
  # An URL without a trailing slash parses to an empty #path, which
  # Net::HTTP rejects - fall back to the root path.
  url_path = url.path == '' ? '/' : url.path
  # FIX: preserve the query string - previously "?foo=bar" in PING_URL
  # was silently dropped from the request.
  url_path += "?#{url.query}" if url.query
  Net::HTTP.start(url.host, url.port) do | http |
    case type
    when :head
      http.head(url_path)
    when :get
      http.get(url_path)
    else
      raise ArgumentError, 'Unsupported HTTP method'
    end
  end
end
## -----------------------------------------------------------------------------
## -- Boilerplate --------------------------------------------------------------
module Clockwork
handler do |j|
case j
when 'ping.act' then ping
else raise ArgumentError, 'Invalid argument!'
end
end
every ENV['PING_INTERVAL'].to_i.seconds, 'ping.act'
LOG.info \
"Now pinging #{ENV['PING_URL']} every #{ENV['PING_INTERVAL']} seconds..."
end
|
# Plays one round of Rock, Paper, Scissors against the computer.
# Reads the player's move from stdin (case-insensitive) and raises
# ArgumentError for anything other than rock/paper/scissors.
def play
  puts "Let's Play Rock, Paper, Scissors! To play, pick one of those three options."
  player = gets.chomp.downcase.to_s
  if player == ("lizard") || player == ("spock")
    raise ArgumentError.new("The computer thinks you're a nerd and refuses to play.")
  end
  if player != ("rock") && player != ("paper") && player != ("scissors")
    raise ArgumentError.new("The only input allowed is Rock, Paper, or Scissors.")
  end
  computer = ["Rock","Paper","Scissors"].sample.chomp.downcase
  puts "The player has chosen #{player}."
  puts "The computer responds with #{computer}!"
  if player == computer
    puts "It's a tie!"
    return
  else
    case player
    when "rock"
      if computer == "paper"
        puts "Computer wins!"
      else
        # FIX: the message read "Player win!" - missing the 's'.
        puts "Player wins!"
      end
    when "paper"
      if computer == "scissors"
        puts "Computer wins!"
      else
        # FIX: the win message was a bare string expression and was
        # never printed; it needs puts.
        puts "Player wins!"
      end
    when "scissors"
      if computer == "rock"
        puts "Computer wins!"
      else
        # FIX: same as above - the string was evaluated but not printed.
        puts "Player wins!"
      end
    end
  end
end
Refactored the code.
Previously, I used 'case statements' to determine whether the Computer wins or loses, but looking up other Ruby solutions revealed that many people prefer storing all the possible victory conditions in a Hash. I debated whether it would actually prove useful, but ultimately decided to give in to the consensus, simply because it makes it easier to expand RPS to include variants.
Credit for this change mostly goes to https://github.com/aterreno/rock-paper-scissors/blob/master/lib/game_engine.rb
Aterreno's Ruby program is a bit more complicated than mine though, needing multiple files to run properly, and the need to specify the moves of both players before the program can determine the winner.
# Plays one round of Rock, Paper, Scissors against the computer.
# A move beats the move stored under its key in the defeats table, so
# adding variants only requires extending the hash.
def play
  # Key = Move, Value = Move That the Key Can Defeat.
  # FIX: was the oddly-capitalised instance variable @VictoryCheck;
  # a plain local follows Ruby naming conventions and avoids leaking
  # state onto the receiver.
  defeats = { "paper" => "rock", "rock" => "scissors", "scissors" => "paper" }
  puts "Let's Play Rock, Paper, Scissors! To play, pick one of those three options."
  player = gets.chomp.downcase.to_s
  if player == ("lizard") || player == ("spock")
    raise ArgumentError.new("The computer thinks you're a nerd and refuses to play.")
  end
  # The hash doubles as the list of legal moves (resolves the old
  # "this can probably be refactored away" comment).
  unless defeats.key?(player)
    raise ArgumentError.new("The only input allowed is Rock, Paper, or Scissors.")
  end
  computer = ["Rock","Paper","Scissors"].sample.chomp.downcase
  puts "The player has chosen #{player}."
  puts "The computer responds with #{computer}!"
  if player == computer
    puts "It's a tie!"
  elsif player == defeats[computer]
    # The computer's move defeats the player's move.
    puts "Computer wins!"
  else
    puts "Player wins!"
  end
end
|
#!/usr/bin/ruby
require "webrick"
require "socket"
require "net/http"
require "uri"
# Serves a directory over HTTP on the LAN so a media device can fetch
# the file from this machine.
class SingleFileServer
  # FIX: additional_mime_types now defaults to {} so callers with no
  # extra types don't have to pass an empty hash explicitly.
  def initialize(file_path, additional_mime_types={})
    @file_path = file_path
    @additional_mime_types = FORMATS.merge(additional_mime_types)
  end

  def start
    server.start
  end

  def shutdown
    server.shutdown
  end

  # Root URL of this server as reachable from the local network.
  def url
    "http://#{local_ip}:#{port}/"
  end

  private

  attr_reader :file_path, :additional_mime_types

  # Built-in extension-group => MIME type mappings.
  FORMATS = {
    ["mkv", "mp4", "wmv", "avi", "mov"] => "video/x-msvideo",
    ["mp3"] => "audio/mpeg"
  }

  # WEBrick's default MIME table extended with FORMATS and any
  # caller-supplied types.
  def mime_types
    mime_types = WEBrick::HTTPUtils::DefaultMimeTypes
    additional_mime_types.each do |file_types, mime_type|
      file_types.each do |file_type|
        mime_types.store file_type, mime_type
      end
    end
    mime_types
  end

  def server
    @server ||= WEBrick::HTTPServer.new(
      :Port => port,
      :MimeTypes => mime_types,
      :DocumentRoot => file_path
    )
  end

  def port
    8888
  end

  # Local LAN address, discovered by "connecting" a UDP socket to a
  # public IP (no packets are actually sent for a UDP connect).
  def local_ip
    @local_ip ||= UDPSocket.open do |s|
      s.connect '8.8.8.8', 1
      s.addr.last
    end
  end
end
# UPnP AVTransport client for the TV: wraps the SOAP control endpoint
# with Stop / Play / SetAVTransportURI commands.
class TV
  def initialize(control_url)
    @soap_client = Soapy.new(
      :endpoint => control_url,
      :namespace => "urn:schemas-upnp-org:service:AVTransport:1",
      # InstanceID 0 addresses the default AVTransport instance.
      :default_request_args => {"InstanceID" => "0"}
    )
  end

  def stop
    send_command("Stop")
  end

  def play
    send_command("Play", "Speed" => "1")
  end

  # Stops current playback, points the TV at +uri+, then starts playing.
  def play_uri(uri)
    stop
    set_media_uri(uri)
    play
  end

  private

  attr_reader :soap_client

  def set_media_uri(uri)
    send_command("SetAVTransportURI", "CurrentURI" => uri)
  end

  def send_command(command, args={})
    soap_client.send_command(command, args)
  end
end
# Minimal SOAP 1.1 client that POSTs hand-built XML envelopes to a
# single endpoint/namespace pair.
class Soapy
  # opts: :endpoint (required URL string), :namespace (required),
  #       :default_request_args (optional hash merged into every call).
  def initialize(opts={})
    @endpoint = URI.parse(opts.fetch(:endpoint))
    @namespace = opts.fetch(:namespace)
    @default_request_args = opts.fetch(:default_request_args, {})
  end

  # Sends +command+ with +args+ merged over the default request args.
  # Returns the raw Net::HTTPResponse.
  def send_command(command, args={})
    headers = {
      "SOAPACTION" => "\"#{namespace}##{command}\"",
      "Content-type" => "text/xml"
    }
    envelope = soap_body(command, default_request_args.merge(args))
    post(headers, envelope)
  end

  private

  attr_reader :endpoint, :namespace, :default_request_args

  def post(headers, data)
    http = Net::HTTP.new(endpoint.host, endpoint.port)
    http.post(endpoint.path, data, headers)
  end

  # Renders the SOAP envelope, serialising each argument as
  # <key>value</key> inside the command element.
  def soap_body(command, args)
    xml_args = args.map{ |key, value| "<#{key}>#{value}</#{key}>" }.join
    %Q{<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"
s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:#{command} xmlns:u="#{namespace}">
#{xml_args}
</u:#{command}>
</s:Body>
</s:Envelope>}
  end
end
# Ties everything together: serves the file over HTTP and tells the TV
# to play it, blocking until interrupted.
class Player
  # opts: :tv_control_url (required), :file_path (required),
  #       :additional_mime_types (optional, defaults to {}).
  def initialize(opts)
    @tv = TV.new(opts.fetch(:tv_control_url))
    @file_path = opts.fetch(:file_path)
    @server = SingleFileServer.new(file_path, opts.fetch(:additional_mime_types, {}))
  end

  def call
    trap_interrupt
    play_and_wait
  end

  private

  attr_reader :tv, :file_path, :server

  # Ctrl-C stops TV playback and shuts the HTTP server down cleanly.
  def trap_interrupt
    trap 'INT' do
      tv.stop
      server.shutdown
    end
  end

  # Starts the server in a background thread, starts playback, then
  # blocks until the server thread exits (i.e. after shutdown).
  def play_and_wait
    http_server_thread = Thread.new { server.start }
    tv.play_uri(server.url)
    http_server_thread.join
  end
end
Player.new(
:tv_control_url => "http://192.168.0.16:55000/dmr/control_2",
:file_path => ARGV.first
).call
Add default parameter for additional_mime_types.
#!/usr/bin/ruby
require "webrick"
require "socket"
require "net/http"
require "uri"
# Serves a directory over HTTP on the LAN so a media device can fetch
# the file from this machine.
class SingleFileServer
  # additional_mime_types: optional extension-group => MIME type hash
  # merged over the built-in FORMATS.
  def initialize(file_path, additional_mime_types={})
    @file_path = file_path
    @additional_mime_types = FORMATS.merge(additional_mime_types)
  end

  def start
    server.start
  end

  def shutdown
    server.shutdown
  end

  # Root URL of this server as reachable from the local network.
  def url
    "http://#{local_ip}:#{port}/"
  end

  private

  attr_reader :file_path, :additional_mime_types

  # Built-in extension-group => MIME type mappings.
  FORMATS = {
    ["mkv", "mp4", "wmv", "avi", "mov"] => "video/x-msvideo",
    ["mp3"] => "audio/mpeg"
  }

  # WEBrick's default MIME table extended with FORMATS and any
  # caller-supplied types.
  def mime_types
    mime_types = WEBrick::HTTPUtils::DefaultMimeTypes
    additional_mime_types.each do |file_types, mime_type|
      file_types.each do |file_type|
        mime_types.store file_type, mime_type
      end
    end
    mime_types
  end

  def server
    @server ||= WEBrick::HTTPServer.new(
      :Port => port,
      :MimeTypes => mime_types,
      :DocumentRoot => file_path
    )
  end

  def port
    8888
  end

  # Local LAN address, discovered by "connecting" a UDP socket to a
  # public IP (no packets are actually sent for a UDP connect).
  def local_ip
    @local_ip ||= UDPSocket.open do |s|
      s.connect '8.8.8.8', 1
      s.addr.last
    end
  end
end
# Facade over a UPnP/DLNA AVTransport control endpoint.
class TV
# control_url: the device's AVTransport control endpoint URL.
def initialize(control_url)
@soap_client = Soapy.new(
:endpoint => control_url,
:namespace => "urn:schemas-upnp-org:service:AVTransport:1",
:default_request_args => {"InstanceID" => "0"}
)
end
# Halts playback on the renderer.
def stop
send_command("Stop")
end
# Starts (or resumes) playback at normal ("1") speed.
def play
send_command("Play", "Speed" => "1")
end
# Stops, loads +uri+ as the current media, then plays it.
def play_uri(uri)
stop
set_media_uri(uri)
play
end
private
attr_reader :soap_client
def set_media_uri(uri)
send_command("SetAVTransportURI", "CurrentURI" => uri)
end
def send_command(command, args={})
soap_client.send_command(command, args)
end
end
# Minimal SOAP 1.1 client for a single UPnP control endpoint.
#
# Commands are POSTed as XML envelopes. Argument values are XML-escaped
# before interpolation — previously raw interpolation produced an invalid
# envelope whenever a value contained '&', '<' or '>' (e.g. a media URL
# with a query string).
class Soapy
  # Characters that must be escaped in XML text content.
  XML_ESCAPES = { "&" => "&amp;", "<" => "&lt;", ">" => "&gt;", '"' => "&quot;" }.freeze

  # opts: :endpoint (control URL string, required), :namespace (SOAP action
  # namespace, required), :default_request_args (hash merged into every
  # command's arguments, optional).
  def initialize(opts={})
    @endpoint = URI.parse(opts.fetch(:endpoint))
    @namespace = opts.fetch(:namespace)
    @default_request_args = opts.fetch(:default_request_args, {})
  end

  # POSTs +command+ with +args+ (merged over the defaults) to the endpoint.
  # Returns the Net::HTTP response object.
  def send_command(command, args={})
    post(
      {
        "SOAPACTION" => %Q{"#{namespace}##{command}"},
        "Content-type" => "text/xml"
      },
      soap_body(command, default_request_args.merge(args))
    )
  end

  private

  attr_reader :endpoint, :namespace, :default_request_args

  def post(headers, data)
    Net::HTTP.new(endpoint.host, endpoint.port).post(endpoint.path, data, headers)
  end

  # Escapes a value for safe interpolation into XML element content.
  def escape_xml(value)
    value.to_s.gsub(/[&<>"]/, XML_ESCAPES)
  end

  # Builds the SOAP envelope; each arg pair becomes a <key>value</key> element.
  # Keys are method-controlled, so only values are escaped.
  def soap_body(command, args)
    xml_args = args.map { |key, value| "<#{key}>#{escape_xml(value)}</#{key}>" }.join
    %Q{<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"
s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:#{command} xmlns:u="#{namespace}">
#{xml_args}
</u:#{command}>
</s:Body>
</s:Envelope>}
  end
end
# Orchestrates playback: serve the file over HTTP, tell the TV to play the
# served URL, and keep the process alive until the server stops.
class Player
def initialize(opts)
@tv = TV.new(opts.fetch(:tv_control_url))
@file_path = opts.fetch(:file_path)
@server = SingleFileServer.new(file_path, opts.fetch(:additional_mime_types, {}))
end
def call
trap_interrupt
play_and_wait
end
private
attr_reader :tv, :file_path, :server
# On SIGINT: stop playback on the TV and shut the HTTP server down, which
# lets play_and_wait's join return so the script exits cleanly.
def trap_interrupt
trap 'INT' do
tv.stop
server.shutdown
end
end
# Runs the HTTP server on a background thread, starts TV playback, then
# blocks until the server thread finishes (i.e. until shutdown).
def play_and_wait
http_server_thread = Thread.new { server.start }
tv.play_uri(server.url)
http_server_thread.join
end
end
# Entry point: play the file named on the command line on the TV at the
# hard-coded control URL (adjust for your device).
Player.new(
:tv_control_url => "http://192.168.0.16:55000/dmr/control_2",
:file_path => ARGV.first
).call
|
Create equi.rb
# Returns the lowest equilibrium index of array +a+ — the index P for which
# the sum of elements before P equals the sum of elements after P — or -1
# when no such index exists. Empty arrays yield -1; a one-element array
# yields 0 (both sides are empty, hence equal).
#
# Rewritten from manual while/index loops (with leftover debug comments)
# to a single Enumerable pass maintaining running prefix/suffix sums.
def solution(a)
  # Sum of everything to the right of index 0.
  right_sum = a.drop(1).reduce(0, :+)
  left_sum = 0
  a.each_with_index do |value, index|
    return index if left_sum == right_sum
    left_sum += value
    right_sum -= a[index + 1] if index < a.length - 1
  end
  -1
end
# Ad-hoc manual check. NOTE(review): the result of solution(a) is computed
# but never printed — only the input array is echoed.
a = [1,2,3,6]
puts "a: " + a.to_s
solution(a)
|
#API access - https://code.google.com/apis/console/b/0/?pli=1#project:380955492060:access
#Configure your app - https://developers.google.com/google-apps/calendar/instantiate
#Calendar sample - http://code.google.com/p/google-api-ruby-client/source/browse/calendar/calendar.rb?repo=samples
require 'rubygems'
require 'google/api_client'
require 'yaml'
require 'pathname'
require 'open-uri'
require 'ri_cal'
require 'logger'
require 'active_support/core_ext/hash/indifferent_access'
module FacebookGoogleCalendarSync
  # Mixin giving every including class a shared process-wide logger.
  # Replaces the previous @@logger class variable (class variables are shared
  # across the whole inheritance tree) with a lazily memoized module-level
  # logger; also drops the redundant nested require of 'logger', which is
  # already required at the top of the file.
  module Logging
    # The single shared Logger, created on first use.
    def self.logger
      @logger ||= Logger.new(STDOUT)
    end

    # Instance-level accessor used by including classes.
    def logger
      Logging.logger
    end
  end
end
# Per-user settings (source feed URL, calendar names) from
# ~/.facebook-google-calendar-sync/config.yml; indifferent access lets the
# rest of the script use symbol keys against string-keyed YAML.
config = YAML.load_file(Pathname.new(ENV['HOME']) + '.facebook-google-calendar-sync' + 'config.yml').with_indifferent_access
# Raised when a Google Calendar API call returns a non-200 response.
class SyncException < StandardError
end
# Wraps one Google calendar: its calendar-list entry (@details) plus its
# fetched event feed (@data). All API access goes through the class-wide
# client injected via .set_client.
class GoogleCalendar
  include FacebookGoogleCalendarSync::Logging

  def initialize details, data
    @details = details
    @data = data
  end

  # Injects the GoogleCalendarClient shared by all instances.
  def self.set_client client
    @@client = client
  end

  # Looks a calendar up by its display name and returns a wrapped instance.
  def self.find_calendar_by_name calendar_name
    target_calendar_details = @@client.find_calendar_details_by_name calendar_name
    calendar = @@client.get_calendar target_calendar_details.id
    GoogleCalendar.new(target_calendar_details, calendar)
  end

  def id
    @details.id
  end

  def events
    @data.items
  end

  # Finds the calendar event whose iCal UID matches, or nil.
  def find_event_by_uid uid
    events.find { |event| event.i_cal_uid == uid }
  end

  # True when this calendar already holds an event with the source event's UID.
  # BUG FIX: the old body, `find_event_by_uid source_event.uid != nil`, parsed
  # as find_event_by_uid(source_event.uid != nil) — it looked up a boolean
  # "UID" and therefore always returned nil (falsy).
  def has_matching_target_event source_event
    !find_event_by_uid(source_event.uid).nil?
  end

  # Returns true if the source_event was newly added,
  # false if a matching target event already existed (updated or skipped).
  def add_or_update_event source_event
    target_event = find_event_by_uid source_event.uid
    source_event_hash = ICalToGoogleCalendarConverter.convert(source_event)
    if target_event.nil?
      logger.info "Adding #{source_event.summary} to #{@details.summary}"
      @@client.add_event id, source_event_hash
      return true
    else
      # NOTE(review): the 'Ladies Brunch' clause force-updates one specific
      # event regardless of timestamps — looks like a leftover debugging hack;
      # confirm whether it can be removed.
      if source_event.last_modified.to_time > target_event.updated || source_event.summary == 'Ladies Brunch'
        logger.info "Updating #{source_event.summary} in #{@details.summary}"
        @@client.update_event id, target_event.id, target_event.to_hash.merge(source_event_hash)
      else
        logger.info "Not updating #{source_event.summary} in #{@details.summary} as #{source_event.last_modified} is not later than #{target_event.updated}"
      end
    end
    false
  end
end
# Thin wrapper over the google-api-client gem for the Calendar v3 API.
# Loads OAuth2 credentials from ~/.google-api.yaml and refreshes the access
# token at startup when it has expired.
class GoogleCalendarClient
def initialize
oauth_yaml = YAML.load_file(Pathname.new(ENV['HOME']) + '.google-api.yaml')
@client = Google::APIClient.new({:application_name => "Facebook to Google Calendar Sync", :application_version => "0.1.0"})
@client.authorization.client_id = oauth_yaml["client_id"]
@client.authorization.client_secret = oauth_yaml["client_secret"]
@client.authorization.scope = oauth_yaml["scope"]
@client.authorization.refresh_token = oauth_yaml["refresh_token"]
@client.authorization.access_token = oauth_yaml["access_token"]
# Refresh eagerly so later calls don't fail with an expired token.
if @client.authorization.refresh_token && @client.authorization.expired?
@client.authorization.fetch_access_token!
end
@calendar_service = @client.discovered_api('calendar', 'v3')
end
# Returns the calendar-list entry whose summary (display name) matches, or nil.
# NOTE(review): unlike the other calls, this one never checks the HTTP status.
def find_calendar_details_by_name calendar_name
result = @client.execute(:api_method => @calendar_service.calendar_list.list)
result.data.items.find { | calendar | calendar.summary == calendar_name}
end
# Fetches the event feed for the given calendar id.
def get_calendar calendar_id
result = @client.execute(:api_method => @calendar_service.events.list,
:parameters => {'calendarId' => calendar_id})
check_for_success result
result.data
end
# Inserts +event+ (a plain hash in the API's event schema) into the calendar.
def add_event calendar_id, event
result = @client.execute(:api_method => @calendar_service.events.insert,
:parameters => {'calendarId' => calendar_id},
:body_object => event,
:headers => {'Content-Type' => 'application/json'}
)
check_for_success result
result.data
end
# Replaces an existing event (full update, not a patch).
def update_event calendar_id, event_id, event
result = @client.execute(:api_method => @calendar_service.events.update,
:parameters => {'calendarId' => calendar_id, 'eventId' => event_id},
:body_object => event,
:headers => {'Content-Type' => 'application/json'}
)
check_for_success result
result.data
end
private
# Raises SyncException (status + body) unless the call returned HTTP 200.
def check_for_success result
raise SyncException.new(result.status.to_s + " " + result.body) unless result.status == 200
end
end
# Converts an iCal (RiCal) event into the hash shape the Google Calendar v3
# API expects for insert/update calls.
class ICalToGoogleCalendarConverter
  def self.convert ical_event
    {
      'summary' => ical_event.summary,
      'start' => date_hash(ical_event.dtstart),
      'end' => date_hash(ical_event.dtend),
      'iCalUID' => ical_event.uid,
      'description' => ical_event.description
    }
  end

  # All-day events (a bare Date — instance_of? deliberately excludes
  # DateTime subclasses) become {'date' => ...}; timed events become
  # {'dateTime' => ...} with offset.
  def self.date_hash date_time
    if date_time.instance_of? Date
      {'date' => date_time.strftime('%Y-%m-%d')}
    else
      {'dateTime' => date_time.strftime('%Y-%m-%dT%H:%M:%S.000%:z')}
    end
  end

  # BUG FIX: a bare `private` marker has no effect on `def self.` methods;
  # private_class_method actually hides the helper as originally intended.
  private_class_method :date_hash
end
# Script body: download the source iCal feed, then mirror its events into
# the two Google calendars named in the config.
# NOTE(review): the `components =` assignment inside the block is unused;
# Kernel#open with a URL relies on open-uri (required above).
source_calendar = open(config[:source_calendar_url]) { | response | components = RiCal.parse(response) }.first
google_calendar_client = GoogleCalendarClient.new
GoogleCalendar.set_client google_calendar_client
my_events_calendar = GoogleCalendar.find_calendar_by_name config[:my_events_calendar_name]
all_events_calendar = GoogleCalendar.find_calendar_by_name config[:all_events_calendar_name]
# Every event goes to the "all events" calendar; "my events" only receives
# events that are brand new or that it already contains.
source_calendar.events.each do | source_event |
is_new_event = all_events_calendar.add_or_update_event source_event
if is_new_event || my_events_calendar.has_matching_target_event(source_event)
my_events_calendar.add_or_update_event source_event
end
end
Moving code to module
#API access - https://code.google.com/apis/console/b/0/?pli=1#project:380955492060:access
#Configure your app - https://developers.google.com/google-apps/calendar/instantiate
#Calendar sample - http://code.google.com/p/google-api-ruby-client/source/browse/calendar/calendar.rb?repo=samples
require 'rubygems'
require 'google/api_client'
require 'yaml'
require 'pathname'
require 'open-uri'
require 'ri_cal'
require 'logger'
require 'active_support/core_ext/hash/indifferent_access'
module FacebookGoogleCalendarSync
# Mixin giving every including class a shared process-wide logger.
# Replaces the previous @@logger class variable (class variables are shared
# across the whole inheritance tree) with a lazily memoized module-level
# logger; the redundant nested require of 'logger' (already required at the
# top of the file) is dropped.
module Logging
  # The single shared Logger, created on first use.
  def self.logger
    @logger ||= Logger.new(STDOUT)
  end

  # Instance-level accessor used by including classes.
  def logger
    Logging.logger
  end
end
# Wraps one Google calendar: its calendar-list entry (@details) plus its
# fetched event feed (@data). All API access goes through the class-wide
# client injected via .set_client.
class GoogleCalendar
  include FacebookGoogleCalendarSync::Logging

  def initialize details, data
    @details = details
    @data = data
  end

  # Injects the GoogleCalendarClient shared by all instances.
  def self.set_client client
    @@client = client
  end

  # Looks a calendar up by its display name and returns a wrapped instance.
  def self.find_calendar_by_name calendar_name
    target_calendar_details = @@client.find_calendar_details_by_name calendar_name
    calendar = @@client.get_calendar target_calendar_details.id
    GoogleCalendar.new(target_calendar_details, calendar)
  end

  def id
    @details.id
  end

  def events
    @data.items
  end

  # Finds the calendar event whose iCal UID matches, or nil.
  def find_event_by_uid uid
    events.find { |event| event.i_cal_uid == uid }
  end

  # True when this calendar already holds an event with the source event's UID.
  # BUG FIX: the old body, `find_event_by_uid source_event.uid != nil`, parsed
  # as find_event_by_uid(source_event.uid != nil) — it looked up a boolean
  # "UID" and therefore always returned nil (falsy).
  def has_matching_target_event source_event
    !find_event_by_uid(source_event.uid).nil?
  end

  # Returns true if the source_event was newly added,
  # false if a matching target event already existed (updated or skipped).
  def add_or_update_event source_event
    target_event = find_event_by_uid source_event.uid
    source_event_hash = ICalToGoogleCalendarConverter.convert(source_event)
    if target_event.nil?
      logger.info "Adding #{source_event.summary} to #{@details.summary}"
      @@client.add_event id, source_event_hash
      return true
    else
      # NOTE(review): the 'Ladies Brunch' clause force-updates one specific
      # event regardless of timestamps — looks like a leftover debugging hack;
      # confirm whether it can be removed.
      if source_event.last_modified.to_time > target_event.updated || source_event.summary == 'Ladies Brunch'
        logger.info "Updating #{source_event.summary} in #{@details.summary}"
        @@client.update_event id, target_event.id, target_event.to_hash.merge(source_event_hash)
      else
        logger.info "Not updating #{source_event.summary} in #{@details.summary} as #{source_event.last_modified} is not later than #{target_event.updated}"
      end
    end
    false
  end
end
# Thin wrapper over the google-api-client gem for the Calendar v3 API.
# Loads OAuth2 credentials from ~/.google-api.yaml and refreshes the access
# token at startup when it has expired.
class GoogleCalendarClient
def initialize
oauth_yaml = YAML.load_file(Pathname.new(ENV['HOME']) + '.google-api.yaml')
@client = Google::APIClient.new({:application_name => "Facebook to Google Calendar Sync", :application_version => "0.1.0"})
@client.authorization.client_id = oauth_yaml["client_id"]
@client.authorization.client_secret = oauth_yaml["client_secret"]
@client.authorization.scope = oauth_yaml["scope"]
@client.authorization.refresh_token = oauth_yaml["refresh_token"]
@client.authorization.access_token = oauth_yaml["access_token"]
# Refresh eagerly so later calls don't fail with an expired token.
if @client.authorization.refresh_token && @client.authorization.expired?
@client.authorization.fetch_access_token!
end
@calendar_service = @client.discovered_api('calendar', 'v3')
end
# Returns the calendar-list entry whose summary (display name) matches, or nil.
# NOTE(review): unlike the other calls, this one never checks the HTTP status.
def find_calendar_details_by_name calendar_name
result = @client.execute(:api_method => @calendar_service.calendar_list.list)
result.data.items.find { | calendar | calendar.summary == calendar_name}
end
# Fetches the event feed for the given calendar id.
def get_calendar calendar_id
result = @client.execute(:api_method => @calendar_service.events.list,
:parameters => {'calendarId' => calendar_id})
check_for_success result
result.data
end
# Inserts +event+ (a plain hash in the API's event schema) into the calendar.
def add_event calendar_id, event
result = @client.execute(:api_method => @calendar_service.events.insert,
:parameters => {'calendarId' => calendar_id},
:body_object => event,
:headers => {'Content-Type' => 'application/json'}
)
check_for_success result
result.data
end
# Replaces an existing event (full update, not a patch).
def update_event calendar_id, event_id, event
result = @client.execute(:api_method => @calendar_service.events.update,
:parameters => {'calendarId' => calendar_id, 'eventId' => event_id},
:body_object => event,
:headers => {'Content-Type' => 'application/json'}
)
check_for_success result
result.data
end
private
# Raises SyncException (status + body) unless the call returned HTTP 200.
def check_for_success result
raise SyncException.new(result.status.to_s + " " + result.body) unless result.status == 200
end
end
# Converts an iCal (RiCal) event into the hash shape the Google Calendar v3
# API expects for insert/update calls.
class ICalToGoogleCalendarConverter
  def self.convert ical_event
    {
      'summary' => ical_event.summary,
      'start' => date_hash(ical_event.dtstart),
      'end' => date_hash(ical_event.dtend),
      'iCalUID' => ical_event.uid,
      'description' => ical_event.description
    }
  end

  # All-day events (a bare Date — instance_of? deliberately excludes
  # DateTime subclasses) become {'date' => ...}; timed events become
  # {'dateTime' => ...} with offset.
  def self.date_hash date_time
    if date_time.instance_of? Date
      {'date' => date_time.strftime('%Y-%m-%d')}
    else
      {'dateTime' => date_time.strftime('%Y-%m-%dT%H:%M:%S.000%:z')}
    end
  end

  # BUG FIX: a bare `private` marker has no effect on `def self.` methods;
  # private_class_method actually hides the helper as originally intended.
  private_class_method :date_hash
end
# Raised when a Google Calendar API call returns a non-200 response.
class SyncException < StandardError
end
# Entry point: downloads the source iCal feed, then mirrors each event into
# the "all events" calendar and — for events that are brand new or already
# present there — into the "my events" calendar.
# NOTE(review): the `components =` assignment inside the block is unused.
def self.sync config
source_calendar = open(config[:source_calendar_url]) { | response | components = RiCal.parse(response) }.first
google_calendar_client = GoogleCalendarClient.new
GoogleCalendar.set_client google_calendar_client
my_events_calendar = GoogleCalendar.find_calendar_by_name config[:my_events_calendar_name]
all_events_calendar = GoogleCalendar.find_calendar_by_name config[:all_events_calendar_name]
source_calendar.events.each do | source_event |
is_new_event = all_events_calendar.add_or_update_event source_event
if is_new_event || my_events_calendar.has_matching_target_event(source_event)
my_events_calendar.add_or_update_event source_event
end
end
end
end
# Load the per-user settings and run the sync.
config = YAML.load_file(Pathname.new(ENV['HOME']) + '.facebook-google-calendar-sync' + 'config.yml').with_indifferent_access
FacebookGoogleCalendarSync.sync config
|
# Cucumber "Then" steps asserting on the state of the scenario's git
# repositories. The `git`/`remote_git` command wrappers and the
# `assert`/`deny` block helpers are presumably defined in the suite's
# support code — TODO confirm.
Then /^git repository is clean$/ do
assert { git.status(:porcelain => true) == '' }
end
Then /^git history has (\d+) commit(?:s)?$/ do |ncommits|
assert { git.rev_list(:all => true).lines.count == ncommits.to_i }
end
Then /^I\'m on "(.*?)" branch$/ do |expected_branch|
assert { git.rev_parse({:abbrev_ref => true}, 'HEAD').strip == expected_branch }
end
Then /^no other branch exists$/ do
assert { git.branch.lines.count == 1 }
end
Then /^branch "(.*?)" exists$/ do |branch_name|
assert { git.heads.include?(branch_name) }
end
Then /^branch "(.*?)" does not exist$/ do |branch_name|
deny { git.heads.include?(branch_name) }
end
Then /^tag "(.*?)" exists$/ do |tag_name|
assert { git.tags.include?(tag_name) }
end
Then /^tag "(.*?)" does not exist$/ do |tag_name|
deny { git.tags.include?(tag_name) }
end
# PATTERN is a transform/regexp capture defined elsewhere in the suite.
Then /^tag matching (#{PATTERN}) exists$/ do |pat|
assert { git.tags.any? { |t| t =~ pat } }
end
Then /^tag matching (#{PATTERN}) does not exist$/ do |pat|
deny { git.tags.any? { |t| t =~ pat } }
end
Then /^there's a git log message including "(.*?)"$/ do |message|
assert { git.log.lines.any? { |ln| ln.include?(message) } }
end
# Loads each note object from the 'vendor' notes ref as YAML and checks that
# the given key's value includes the expected substring.
Then /^there's a git commit note including "(.*?)" in "(.*?)"$/ do |value, key|
# Not in the assert block, because it raises an exception on failure.
contains_note = git.notes({:ref => 'vendor'}, 'list').lines.any? do |line|
note = YAML.load git.show(line.split[0])
(note[key] || note[key.to_sym]).to_s.include? value
end
assert { contains_note == true }
end
Then /^branch "(.*?)" exists in the remote repo$/ do |branch_name|
assert { remote_git.heads.include?(branch_name) }
end
Then /^tag "(.*?)" exists in the remote repo$/ do |tag_name|
assert { remote_git.tags.include?(tag_name) }
end
Then /^notes ref "(.*?)" exists in the remote repo$/ do |ref_name|
assert { remote_git.note_refs.include?(ref_name) }
end
Fix Rubinius compatibility.
# Cucumber "Then" steps asserting on the state of the scenario's git
# repositories. The `git`/`remote_git` command wrappers and the
# `assert`/`deny` block helpers are presumably defined in the suite's
# support code — TODO confirm.
Then /^git repository is clean$/ do
assert { git.status(:porcelain => true) == '' }
end
Then /^git history has (\d+) commit(?:s)?$/ do |ncommits|
assert { git.rev_list(:all => true).lines.count == ncommits.to_i }
end
Then /^I\'m on "(.*?)" branch$/ do |expected_branch|
assert { git.rev_parse({:abbrev_ref => true}, 'HEAD').strip == expected_branch }
end
Then /^no other branch exists$/ do
assert { git.branch.lines.count == 1 }
end
Then /^branch "(.*?)" exists$/ do |branch_name|
assert { git.heads.include?(branch_name) }
end
Then /^branch "(.*?)" does not exist$/ do |branch_name|
deny { git.heads.include?(branch_name) }
end
Then /^tag "(.*?)" exists$/ do |tag_name|
assert { git.tags.include?(tag_name) }
end
Then /^tag "(.*?)" does not exist$/ do |tag_name|
deny { git.tags.include?(tag_name) }
end
# PATTERN is a transform/regexp capture defined elsewhere in the suite.
Then /^tag matching (#{PATTERN}) exists$/ do |pat|
assert { git.tags.any? { |t| t =~ pat } }
end
Then /^tag matching (#{PATTERN}) does not exist$/ do |pat|
deny { git.tags.any? { |t| t =~ pat } }
end
Then /^there's a git log message including "(.*?)"$/ do |message|
assert { git.log.lines.any? { |ln| ln.include?(message) } }
end
# Loads each note object from the 'vendor' notes ref as YAML and checks that
# the given key's value includes the expected substring. Uses .inspect (the
# Rubinius-compatibility change) rather than .to_s on the looked-up value.
Then /^there's a git commit note including "(.*?)" in "(.*?)"$/ do |value, key|
# Not in the assert block, because it raises an exception on failure.
contains_note = git.notes({:ref => 'vendor'}, 'list').lines.any? do |line|
note = YAML.load git.show(line.split[0])
(note[key] || note[key.to_sym]).inspect.include? value
end
assert { contains_note == true }
end
Then /^branch "(.*?)" exists in the remote repo$/ do |branch_name|
assert { remote_git.heads.include?(branch_name) }
end
Then /^tag "(.*?)" exists in the remote repo$/ do |tag_name|
assert { remote_git.tags.include?(tag_name) }
end
Then /^notes ref "(.*?)" exists in the remote repo$/ do |ref_name|
assert { remote_git.note_refs.include?(ref_name) }
end
|
require 'spec_helper'
# Request spec: POSTing to /task-manager/api/plans with nested assignable
# attributes should create the plan and respond 201 Created.
describe 'Creating a plan', type: :request do
context 'with valid params' do
let(:user) { User.create!(name: 'user for test') }
let(:params) do
{
plan: FactoryGirl.attributes_for(:plan).merge(assignables_attributes: [{
assignee_id: user.id, assignee_type: 'User'
}])
}
end
it 'creates a plan successfully' do
post '/task-manager/api/plans', params.merge(format: :json)
response.status.should == 201
end
end
end
Added more details of specs
require 'spec_helper'
# Request spec: POSTing to /task-manager/api/plans with nested assignable
# attributes should create the plan and respond 201 Created.
describe 'API for creating a plan', type: :request do
context 'with valid params' do
let(:user) { User.create!(name: 'user for test') }
let(:params) do
{
plan: FactoryGirl.attributes_for(:plan).merge(assignables_attributes: [{
assignee_id: user.id, assignee_type: 'User'
}])
}
end
it 'creates a plan successfully' do
post '/task-manager/api/plans', params.merge(format: :json)
response.status.should == 201
end
end
end
|
require File.expand_path('../../spec_helper', __FILE__)
describe Bcsec::Configuration do
# Fresh, empty configuration for each example.
before do
@config = blank_config
end
# Builds a configuration through the DSL block form.
def config_from(&block)
Bcsec::Configuration.new(&block)
end
# A configuration with nothing set.
def blank_config
Bcsec::Configuration.new
end
# Reading authorities with none configured raises; the predicate is safe.
describe "authorities" do
it "requires at least one" do
lambda { blank_config.authorities }.should raise_error("No authorities configured")
end
it "can be safely accessed if empty" do
blank_config.authorities?.should be_false
end
end
# The portal must be configured before reading; it is coerced to a symbol.
describe "portal" do
it "must be set" do
lambda { @config.portal }.should raise_error("No portal configured")
end
it "is always a symbol" do
@config.portal = "foo"
@config.portal.should == :foo
end
it "can be safely accessed if nil" do
@config.portal?.should be_false
end
end
# ui_mode defaults to :form and is coerced to a symbol.
describe "ui_mode" do
it "defaults to :form" do
@config.ui_mode.should == :form
end
it "is always a symbol" do
@config.ui_mode = "foo"
@config.ui_mode.should == :foo
end
end
# api_modes defaults to empty, coerces to symbols, and strips nils.
describe "api_modes" do
it "defaults to an empty list" do
@config.api_modes.should == []
end
it "is always a list of symbols" do
@config.api_modes = %w(a b c)
@config.api_modes.should == [:a, :b, :c]
end
it "rejects one nil mode" do
@config.api_modes = nil
@config.api_modes.should be_empty
end
it "removes the nil modes from a list" do
@config.api_modes = [:a, nil, :c, nil, nil]
@config.api_modes.should == [:a, :c]
end
end
# The default logger writes to stderr; capture $stderr to verify.
describe "#logger" do
before do
@captured_stderr = StringIO.new
@real_stderr, $stderr = $stderr, @captured_stderr
end
after do
$stderr = @real_stderr
end
it "defaults to something that prints to stderr" do
@config.logger.info("Hello, world")
@captured_stderr.string.should =~ /Hello, world/
end
it "can be set" do
lambda { @config.logger = Logger.new(STDOUT) }.should_not raise_error
end
end
describe "DSL" do
# The DSL's setter-style methods for the scalar configuration attributes.
describe "for basic attributes" do
it "can set the portal" do
config_from { portal :ENU }.portal.should == :ENU
end
it "can set the UI mode" do
config_from { ui_mode :cas }.ui_mode.should == :cas
end
it "can set one API mode" do
config_from { api_mode :basic }.api_modes.should == [:basic]
end
it "can set several API modes" do
config_from { api_modes :basic, :api_key }.api_modes.should == [:basic, :api_key]
end
end
# foo_parameters / foo_parameter style methods collect per-authority options.
describe "for additional authority parameters" do
it "can set parameters for arbitrary groups" do
config_from { foo_parameters :server => "test.local" }.parameters_for(:foo)[:server].should == "test.local"
end
it "can set (and name) one parameter at a time" do
config_from { foo_parameter :server => "test.bar" }.parameters_for(:foo)[:server].should == "test.bar"
end
it "combines parameters from multiple calls" do
start = config_from { netid_parameters :server => "ldap.foo.edu" }
start.enhance { netid_parameters :username => "arb" }
start.parameters_for(:netid)[:server].should == "ldap.foo.edu"
start.parameters_for(:netid)[:username].should == "arb"
end
end
# Authorities may be specified as symbol, string, class, or instance, and
# are instantiated lazily once configuration is complete.
describe "for authorities" do
it "can configure an authority from a symbol" do
config_from { authority :static }.authorities.first.class.should == Bcsec::Authorities::Static
end
it "can configure an authority from a string" do
config_from { authority "static" }.authorities.first.class.should == Bcsec::Authorities::Static
end
it "can configure an authority from a class" do
config_from { authority Bcsec::Authorities::Static }.authorities.first.class.should == Bcsec::Authorities::Static
end
it "can configure an authority from an instance" do
expected = Object.new
config_from { authority expected }.authorities.first.should == expected
end
it "it passes the configuration to an instantiated authority" do
actual = config_from { authority Struct.new(:config) }
actual.authorities.first.config.should == actual
end
it "defers instantiating the authorities until the configuration is complete" do
config_from {
portal :foo
authority Class.new {
attr_reader :initial_portal
def initialize(config)
@initial_portal = config.portal
end
}
portal :bar
}.authorities.first.initial_portal.should == :bar
end
end
# `central` loads shared parameters from a YAML file (bcsec-sample.yml).
describe "central parameters" do
before do
@actual = config_from { central File.expand_path("../bcsec-sample.yml", __FILE__) }
end
it "acquires the netid parameters" do
@actual.parameters_for(:netid)[:'ldap-servers'].should == ["registry.northwestern.edu"]
end
it "acquires the cc_pers parameters" do
@actual.parameters_for(:pers)[:user].should == "cc_pers_foo"
end
it "acquires the cas parameters" do
@actual.parameters_for(:cas)[:base_url].should == "https://cas.example.edu"
@actual.parameters_for(:cas)[:proxy_retrieval_url].should == "https://cas.example.edu/retrieve_pgt"
@actual.parameters_for(:cas)[:proxy_callback_url].should == "https://cas.example.edu/receive_pgt"
end
it "acquires all top-level parameters" do
@actual.parameters_for(:foo)[:bar].should == "baz"
end
it "adds the username and password to the activerecord configuration block" do
@actual.parameters_for(:pers)[:activerecord][:username].should == "cc_pers_foo"
@actual.parameters_for(:pers)[:activerecord][:password].should == "secret"
end
end
# Deprecated DSL methods must emit a deprecation warning (checked via the
# suite's deprecation_message helper) and, where applicable, still forward
# their effect to the replacement configuration attribute.
describe "deprecated attribute handling" do
it "warns when setting app_name" do
config_from { app_name "Sammy" }
deprecation_message.should =~
/app_name is unnecessary\. Remove it from your configuration\..*2.2/
end
it "warns when setting authenticator" do
config_from { authenticator :static }
deprecation_message.should =~
/authenticator is deprecated\. Use authority instead\..*2.2/
end
it "passes through the authenticator to authorities" do
config_from { authenticator :static }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "warns when setting authenticators" do
config_from { authenticators :static }
deprecation_message.should =~
/authenticators is deprecated\. Use authorities instead\..*2.2/
end
it "passes through the authenticators to authorities" do
config_from { authenticators :static }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "warns when using the :authenticate_only authenticator" do
config_from { authenticator :authenticate_only }
deprecation_message(1).should =~
/The :authenticate_only authenticator is no longer necessary. To prevent the portal access check, don't include a portal in the configuration..*2.2/
end
it "ignores the :authenticate_only authenticator" do
config_from { authenticator :static, :authenticate_only }.authorities.size.should == 1
end
it "warns when using the :mock authenticator" do
config_from { authenticator :mock }
deprecation_message(1).should =~
/The :mock authenticator is now the :static authority. Please update your configuration..*2.2/
end
it "converts the :mock authenticator to the :static authority" do
config_from { authenticator :mock }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "converts a left-over :mock authority to the :static authority" do
config_from { authority :mock }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "converts left-over renamed authorities to the new names" do
config_from { authorities :mock }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "warns when setting ldap_server" do
config_from { ldap_server "ldap.nu.edu" }
deprecation_message.should =~
/ldap_server is deprecated\. Use netid_parameters :server => "ldap.nu.edu" instead\..*2.2/
end
it "passes through ldap_server to netid_parameters" do
config_from { ldap_server "ldap.nu.edu" }.
parameters_for(:netid)[:server].should == "ldap.nu.edu"
end
it "warns when setting ldap_username" do
config_from { ldap_username "cn=joe" }
deprecation_message.should =~
/ldap_username is deprecated\. Use netid_parameters :user => "cn=joe" instead\..*2.2/
end
it "passes through ldap_username to netid_parameters" do
config_from { ldap_username "cn=joe" }.
parameters_for(:netid)[:user].should == "cn=joe"
end
it "warns when setting ldap_password" do
config_from { ldap_password "joesmom" }
deprecation_message.should =~
/ldap_password is deprecated\. Use netid_parameters :password => "joesmom" instead\..*2.2/
end
it "passes through ldap_server to netid_parameters" do
config_from { ldap_password "joesmom" }.
parameters_for(:netid)[:password].should == "joesmom"
end
it "warns when calling establish_cc_pers_connection" do
config_from { establish_cc_pers_connection }
deprecation_message.should =~
/establish_cc_pers_connection is deprecated\. Use pers_parameters :separate_connection => true instead\..*2.2/
end
it "converts establish_cc_pers_connection to a parameters_for(:pers)" do
config_from { establish_cc_pers_connection }.
parameters_for(:pers)[:separate_connection].should be_true
end
# use_cas expands into ui_mode :cas, api_modes :cas_proxy and the :cas authority.
describe "in use_cas" do
before do
@config = config_from do
cas_parameters :base_url => "https://cas.example.edu"
use_cas
end
end
it "issues a deprecation warning" do
deprecation_message.should =~ /use_cas is deprecated\. Use api_modes :cas_proxy; ui_mode :cas; authorities :cas instead\..*2.2/
end
it "sets up the CAS UI mode" do
@config.ui_mode.should == :cas
end
it "sets up the CAS proxy API mode" do
@config.api_modes.should == [:cas_proxy]
end
it "sets up the CAS authority" do
@config.authorities.length.should == 1
@config.authorities.first.class.should == Bcsec::Authorities::Cas
end
end
it "fails when given rlogin_target" do
config_from { rlogin_target :foo }
deprecation_message.should =~
/rlogin is no longer supported\..*2.0/
end
it "fails when given rlogin_handler" do
config_from { rlogin_handler :foo }
deprecation_message.should =~
/rlogin is no longer supported\..*2.0/
end
end
end
# enhance layers a second DSL block over an existing configuration.
describe "#enhance" do
it "preserves previous configuration properties" do
config_from { ui_mode :form }.enhance { portal :NOTIS }.ui_mode.should == :form
end
it "sets new configuration properties" do
config_from { ui_mode :form }.enhance { portal :NOTIS }.portal.should == :NOTIS
end
it "overrides repeated configuration properties" do
config_from { portal :NOTIS }.enhance { portal :eNOTIS }.portal.should == :eNOTIS
end
end
# composite_authority aggregates all configured authorities into one.
describe "#composite_authority" do
it "returns a composite authority for the configured authorities" do
config_from { authorities :static, :static }.composite_authority.authorities.size.should == 2
end
end
end
Cleanup line lengths. Done with #3812.
require File.expand_path('../../spec_helper', __FILE__)
describe Bcsec::Configuration do
# Fresh, empty configuration for each example.
before do
@config = blank_config
end
# Builds a configuration through the DSL block form.
def config_from(&block)
Bcsec::Configuration.new(&block)
end
# A configuration with nothing set.
def blank_config
Bcsec::Configuration.new
end
# Reading authorities with none configured raises; the predicate is safe.
describe "authorities" do
it "requires at least one" do
lambda { blank_config.authorities }.should raise_error("No authorities configured")
end
it "can be safely accessed if empty" do
blank_config.authorities?.should be_false
end
end
# The portal must be configured before reading; it is coerced to a symbol.
describe "portal" do
it "must be set" do
lambda { @config.portal }.should raise_error("No portal configured")
end
it "is always a symbol" do
@config.portal = "foo"
@config.portal.should == :foo
end
it "can be safely accessed if nil" do
@config.portal?.should be_false
end
end
# ui_mode defaults to :form and is coerced to a symbol.
describe "ui_mode" do
it "defaults to :form" do
@config.ui_mode.should == :form
end
it "is always a symbol" do
@config.ui_mode = "foo"
@config.ui_mode.should == :foo
end
end
# api_modes defaults to empty, coerces to symbols, and strips nils.
describe "api_modes" do
it "defaults to an empty list" do
@config.api_modes.should == []
end
it "is always a list of symbols" do
@config.api_modes = %w(a b c)
@config.api_modes.should == [:a, :b, :c]
end
it "rejects one nil mode" do
@config.api_modes = nil
@config.api_modes.should be_empty
end
it "removes the nil modes from a list" do
@config.api_modes = [:a, nil, :c, nil, nil]
@config.api_modes.should == [:a, :c]
end
end
# The default logger writes to stderr; capture $stderr to verify.
describe "#logger" do
before do
@captured_stderr = StringIO.new
@real_stderr, $stderr = $stderr, @captured_stderr
end
after do
$stderr = @real_stderr
end
it "defaults to something that prints to stderr" do
@config.logger.info("Hello, world")
@captured_stderr.string.should =~ /Hello, world/
end
it "can be set" do
lambda { @config.logger = Logger.new(STDOUT) }.should_not raise_error
end
end
describe "DSL" do
# The DSL's setter-style methods for the scalar configuration attributes.
describe "for basic attributes" do
it "can set the portal" do
config_from { portal :ENU }.portal.should == :ENU
end
it "can set the UI mode" do
config_from { ui_mode :cas }.ui_mode.should == :cas
end
it "can set one API mode" do
config_from { api_mode :basic }.api_modes.should == [:basic]
end
it "can set several API modes" do
config_from { api_modes :basic, :api_key }.api_modes.should == [:basic, :api_key]
end
end
# foo_parameters / foo_parameter style methods collect per-authority options.
describe "for additional authority parameters" do
it "can set parameters for arbitrary groups" do
config_from { foo_parameters :server => "test.local" }.
parameters_for(:foo)[:server].should == "test.local"
end
it "can set (and name) one parameter at a time" do
config_from { foo_parameter :server => "test.bar" }.
parameters_for(:foo)[:server].should == "test.bar"
end
it "combines parameters from multiple calls" do
start = config_from { netid_parameters :server => "ldap.foo.edu" }
start.enhance { netid_parameters :username => "arb" }
start.parameters_for(:netid)[:server].should == "ldap.foo.edu"
start.parameters_for(:netid)[:username].should == "arb"
end
end
# Authorities may be specified as symbol, string, class, or instance, and
# are instantiated lazily once configuration is complete.
describe "for authorities" do
it "can configure an authority from a symbol" do
config_from { authority :static }.authorities.first.class.should == Bcsec::Authorities::Static
end
it "can configure an authority from a string" do
config_from { authority "static" }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "can configure an authority from a class" do
config_from { authority Bcsec::Authorities::Static }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "can configure an authority from an instance" do
expected = Object.new
config_from { authority expected }.authorities.first.should == expected
end
it "it passes the configuration to an instantiated authority" do
actual = config_from { authority Struct.new(:config) }
actual.authorities.first.config.should == actual
end
it "defers instantiating the authorities until the configuration is complete" do
config_from {
portal :foo
authority Class.new {
attr_reader :initial_portal
def initialize(config)
@initial_portal = config.portal
end
}
portal :bar
}.authorities.first.initial_portal.should == :bar
end
end
# `central` loads parameter groups from a shared YAML file
# (bcsec-sample.yml, a fixture alongside this spec).
describe "central parameters" do
before do
@actual = config_from { central File.expand_path("../bcsec-sample.yml", __FILE__) }
end
it "acquires the netid parameters" do
@actual.parameters_for(:netid)[:'ldap-servers'].should == ["registry.northwestern.edu"]
end
it "acquires the cc_pers parameters" do
@actual.parameters_for(:pers)[:user].should == "cc_pers_foo"
end
it "acquires the cas parameters" do
@actual.parameters_for(:cas)[:base_url].should == "https://cas.example.edu"
@actual.parameters_for(:cas)[:proxy_retrieval_url].
should == "https://cas.example.edu/retrieve_pgt"
@actual.parameters_for(:cas)[:proxy_callback_url].
should == "https://cas.example.edu/receive_pgt"
end
it "acquires all top-level parameters" do
@actual.parameters_for(:foo)[:bar].should == "baz"
end
it "adds the username and password to the activerecord configuration block" do
@actual.parameters_for(:pers)[:activerecord][:username].should == "cc_pers_foo"
@actual.parameters_for(:pers)[:activerecord][:password].should == "secret"
end
end
# Legacy configuration attributes either emit a deprecation warning and map
# onto their modern equivalents, or (for removed features) fail outright.
# NOTE(review): `deprecation_message` is a helper defined earlier in this
# spec file, outside the visible chunk; `deprecation_message(1)` appears to
# select a message by index — confirm against the helper's definition.
describe "deprecated attribute handling" do
it "warns when setting app_name" do
config_from { app_name "Sammy" }
deprecation_message.should =~
/app_name is unnecessary\. Remove it from your configuration\..*2.2/
end
it "warns when setting authenticator" do
config_from { authenticator :static }
deprecation_message.should =~
/authenticator is deprecated\. Use authority instead\..*2.2/
end
it "passes through the authenticator to authorities" do
config_from { authenticator :static }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "warns when setting authenticators" do
config_from { authenticators :static }
deprecation_message.should =~
/authenticators is deprecated\. Use authorities instead\..*2.2/
end
it "passes through the authenticators to authorities" do
config_from { authenticators :static }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "warns when using the :authenticate_only authenticator" do
config_from { authenticator :authenticate_only }
deprecation_message(1).should =~
/The :authenticate_only authenticator is no longer necessary. To prevent the portal access check, don't include a portal in the configuration..*2.2/
end
it "ignores the :authenticate_only authenticator" do
config_from { authenticator :static, :authenticate_only }.authorities.size.should == 1
end
it "warns when using the :mock authenticator" do
config_from { authenticator :mock }
deprecation_message(1).should =~
/The :mock authenticator is now the :static authority. Please update your configuration..*2.2/
end
it "converts the :mock authenticator to the :static authority" do
config_from { authenticator :mock }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "converts a left-over :mock authority to the :static authority" do
config_from { authority :mock }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "converts left-over renamed authorities to the new names" do
config_from { authorities :mock }.authorities.first.class.
should == Bcsec::Authorities::Static
end
it "warns when setting ldap_server" do
config_from { ldap_server "ldap.nu.edu" }
deprecation_message.should =~
/ldap_server is deprecated\. Use netid_parameters :server => "ldap.nu.edu" instead\..*2.2/
end
it "passes through ldap_server to netid_parameters" do
config_from { ldap_server "ldap.nu.edu" }.
parameters_for(:netid)[:server].should == "ldap.nu.edu"
end
it "warns when setting ldap_username" do
config_from { ldap_username "cn=joe" }
deprecation_message.should =~
/ldap_username is deprecated\. Use netid_parameters :user => "cn=joe" instead\..*2.2/
end
it "passes through ldap_username to netid_parameters" do
config_from { ldap_username "cn=joe" }.
parameters_for(:netid)[:user].should == "cn=joe"
end
it "warns when setting ldap_password" do
config_from { ldap_password "joesmom" }
deprecation_message.should =~
/ldap_password is deprecated\. Use netid_parameters :password => "joesmom" instead\..*2.2/
end
it "passes through ldap_server to netid_parameters" do
config_from { ldap_password "joesmom" }.
parameters_for(:netid)[:password].should == "joesmom"
end
it "warns when calling establish_cc_pers_connection" do
config_from { establish_cc_pers_connection }
deprecation_message.should =~
/establish_cc_pers_connection is deprecated\. Use pers_parameters :separate_connection => true instead\..*2.2/
end
it "converts establish_cc_pers_connection to a parameters_for(:pers)" do
config_from { establish_cc_pers_connection }.
parameters_for(:pers)[:separate_connection].should be_true
end
# `use_cas` is a deprecated shorthand that expands into the CAS UI mode,
# the cas_proxy API mode, and the CAS authority.
describe "in use_cas" do
before do
@config = config_from do
cas_parameters :base_url => "https://cas.example.edu"
use_cas
end
end
it "issues a deprecation warning" do
deprecation_message.should =~
/use_cas is deprecated\. Use api_modes :cas_proxy; ui_mode :cas; authorities :cas instead\..*2.2/
end
it "sets up the CAS UI mode" do
@config.ui_mode.should == :cas
end
it "sets up the CAS proxy API mode" do
@config.api_modes.should == [:cas_proxy]
end
it "sets up the CAS authority" do
@config.authorities.length.should == 1
@config.authorities.first.class.should == Bcsec::Authorities::Cas
end
end
# rlogin support was removed entirely in 2.0, hence "fails" rather than
# a mapped deprecation.
it "fails when given rlogin_target" do
config_from { rlogin_target :foo }
deprecation_message.should =~
/rlogin is no longer supported\..*2.0/
end
it "fails when given rlogin_handler" do
config_from { rlogin_handler :foo }
deprecation_message.should =~
/rlogin is no longer supported\..*2.0/
end
end
end
# #enhance layers an additional configuration block over an existing
# configuration: new values are added, repeated values override.
describe "#enhance" do
it "preserves previous configuration properties" do
config_from { ui_mode :form }.enhance { portal :NOTIS }.ui_mode.should == :form
end
it "sets new configuration properties" do
config_from { ui_mode :form }.enhance { portal :NOTIS }.portal.should == :NOTIS
end
it "overrides repeated configuration properties" do
config_from { portal :NOTIS }.enhance { portal :eNOTIS }.portal.should == :eNOTIS
end
end
# #composite_authority wraps every configured authority in a single
# composite object.
describe "#composite_authority" do
it "returns a composite authority for the configured authorities" do
config_from { authorities :static, :static }.composite_authority.authorities.size.should == 2
end
end
end
|
require 'bipbip'
require 'bipbip/plugin/redis'
# Integration spec for the Redis metrics plugin; expects a redis server
# listening on localhost:6379.
describe Bipbip::Plugin::Redis do
# Fix: the first constructor argument is the plugin's name and must match
# the plugin under test ('redis'); it was mistakenly 'mysql'.
let(:plugin) { Bipbip::Plugin::Redis.new('redis', {'hostname' => 'localhost', 'port' => 6379}, 10) }
it 'should collect data' do
data = plugin.monitor
data['total_commands_processed'].should be_instance_of(Fixnum)
data['used_memory'].should be_instance_of(Fixnum)
end
end
Fix the plugin name passed to Bipbip::Plugin::Redis.new: it should be 'redis', not 'mysql'.
require 'bipbip'
require 'bipbip/plugin/redis'
# Integration spec for the Redis metrics plugin; expects a redis server
# listening on localhost:6379.
describe Bipbip::Plugin::Redis do
# Args: plugin name, connection config hash, collection frequency.
let(:plugin) { Bipbip::Plugin::Redis.new('redis', {'hostname' => 'localhost', 'port' => 6379}, 10) }
it 'should collect data' do
data = plugin.monitor
# NOTE(review): Fixnum is deprecated since Ruby 2.4 and removed in 3.2;
# these matchers should eventually assert be_instance_of(Integer).
data['total_commands_processed'].should be_instance_of(Fixnum)
data['used_memory'].should be_instance_of(Fixnum)
end
end
|
require File.expand_path('../../spec_helper', __FILE__)
# Specs for Builder#output_path(base, source): maps a source .rb path to the
# .js path it should be written to, honoring the :out option.
describe "Builder#output_path" do
describe "with an output dir" do
it "should return output dir joined with source when base is '.'" do
b = Opal::Builder.new '', :out => 'build'
b.output_path('.', 'foo.rb').should == 'build/foo.js'
b.output_path('.', 'bar.rb').should == 'build/bar.js'
end
it "returns the parts joined but with first part of base removed" do
# e.g. 'lib/foo' keeps 'foo' but drops the leading 'lib' segment.
b = Opal::Builder.new '', :out => 'build'
b.output_path('lib', 'foo.rb').should == 'build/foo.js'
b.output_path('lib/foo', 'bar.rb').should == 'build/foo/bar.js'
b.output_path('lib/foo/bar', 'baz.rb').should == 'build/foo/bar/baz.js'
end
end
# Without an output dir ('', '.', nil, false all behave the same), the
# source path is kept and only the extension changes.
describe "without an output dir" do
it "should return base and source joined with .js extname" do
b = Opal::Builder.new '', :out => ''
b.output_path('.', 'foo.rb').should == 'foo.js'
b.output_path('lib', 'foo.rb').should == 'lib/foo.js'
b.output_path('lib/foo', 'bar.rb').should == 'lib/foo/bar.js'
end
it "supports '.' as output dir" do
b = Opal::Builder.new '', :out => '.'
b.output_path('.', 'foo.rb').should == 'foo.js'
b.output_path('lib', 'foo.rb').should == 'lib/foo.js'
b.output_path('lib/foo', 'bar.rb').should == 'lib/foo/bar.js'
end
it "supports nil as output dir" do
b = Opal::Builder.new '', :out => nil
b.output_path('.', 'foo.rb').should == 'foo.js'
b.output_path('lib', 'foo.rb').should == 'lib/foo.js'
b.output_path('lib/foo', 'bar.rb').should == 'lib/foo/bar.js'
end
it "supports false as output dir" do
b = Opal::Builder.new '', :out => false
b.output_path('.', 'foo.rb').should == 'foo.js'
b.output_path('lib', 'foo.rb').should == 'lib/foo.js'
b.output_path('lib/foo', 'bar.rb').should == 'lib/foo/bar.js'
end
end
end
Remove output_path_spec, as it is now redundant.
|
require 'spec_helper'
# Maps each puppet class under test to the package source URL it should use.
classes = {
'fastscripts' => 'http://www.red-sweater.com/fastscripts/FastScripts2.6.5.zip',
}
# The hash key is a class name, not a version string, so name the block
# parameter accordingly (it was misleadingly called `version`).
classes.each do |class_name, source|
describe class_name do
it do
should contain_package("FastScripts").with({:source => source,
:provider => "compressed_app"})
end
end
end
Clean up fastscripts_spec.rb a little
require 'spec_helper'
# Expected download source for the FastScripts package.
source_url = 'http://www.red-sweater.com/fastscripts/FastScripts2.6.5.zip'
describe 'fastscripts' do
it do
should contain_class('fastscripts')
should contain_package('FastScripts').with({:source => source_url,
:provider => 'compressed_app'})
end
end
|
require 'spec_helper'
describe 'sensu' do
let(:facts) { { :fqdn => 'testhost.domain.com', :osfamily => 'RedHat' } }
# Pin the package provider to yum. Without this, running these specs on a
# non-RedHat dev host (e.g. a Mac) fails with "Provider must have features
# 'upgradeable' to set 'ensure' to 'latest'" on Package[sensu], because the
# local default provider is used. yum is RedHat's default provider anyway,
# so this has no effect on the catalog under test.
let(:pre_condition) { 'Package{ provider => "yum"}' }
context 'redis config' do
context 'default settings' do
it { should contain_sensu_redis_config('testhost.domain.com').with(
:host => '127.0.0.1',
:port => 6379,
:db => 0,
:auto_reconnect => true
)}
end # default settings
context 'be configurable without sentinels' do
let(:params) { {
:redis_host => 'redis.domain.com',
:redis_port => 1234,
:redis_password => 'password',
:redis_db => 1,
:redis_auto_reconnect => false
} }
it { should contain_sensu_redis_config('testhost.domain.com').with(
:host => 'redis.domain.com',
:port => 1234,
:password => 'password',
:db => 1,
:auto_reconnect => false,
:sentinels => nil,
:master => nil
)}
end # be configurable without sentinels
context 'be configurable with sentinels' do
let(:params) { {
:redis_password => 'password',
:redis_db => 1,
:redis_auto_reconnect => false,
:redis_sentinels => [{
'host' => 'redis1.domain.com',
'port' => 1234
}, {
'host' => 'redis2.domain.com',
'port' => '5678'
}],
:redis_master => 'master-name'
} }
# Note: the string port '5678' is expected to be normalized to an integer.
it { should contain_sensu_redis_config('testhost.domain.com').with(
:host => nil,
:port => nil,
:password => 'password',
:db => 1,
:auto_reconnect => false,
:sentinels => [{
'host' => 'redis1.domain.com',
'port' => 1234
}, {
'host' => 'redis2.domain.com',
'port' => 5678
}],
:master => "master-name"
)}
end # be configurable with sentinels
context 'with server' do
let(:params) { { :server => true } }
it do
should contain_file('/etc/sensu/conf.d/redis.json').with(
:ensure => 'present',
:owner => 'sensu',
:group => 'sensu',
:mode => '0440'
).that_comes_before("Sensu_redis_config[#{facts[:fqdn]}]")
end
end # with server
context 'with api' do
let(:params) { { :api => true } }
it do
should contain_file('/etc/sensu/conf.d/redis.json').with(
:ensure => 'present',
:owner => 'sensu',
:group => 'sensu',
:mode => '0440'
).that_comes_before("Sensu_redis_config[#{facts[:fqdn]}]")
end
end # with api
context 'purge configs' do
let(:params) { {
:purge => { 'config' => true },
:server => false,
:api => false,
} }
it { should contain_file('/etc/sensu/conf.d/redis.json').with_ensure('absent') }
end # purge configs
end #redis config
end
Pin the package provider for RedHat osfamily
Local tests fail when run on a Mac. This commit forces the package
provider to use yum. Since we're already assuming the osfamily is RedHat
and the default package provider for RedHat is yum, side effects from
this change should be minimal. And local tests will pass on a Mac.
Original error:
`Parameter ensure failed on Package[sensu]: Provider must have features
'upgradeable' to set 'ensure' to 'latest'`
require 'spec_helper'
# Specs for the sensu module's redis configuration resources.
describe 'sensu' do
let(:facts) { { :fqdn => 'testhost.domain.com', :osfamily => 'RedHat' } }
# Pin the package provider so catalog compilation also works on non-RedHat
# dev hosts; yum is RedHat's default provider anyway.
let(:pre_condition) { 'Package{ provider => "yum"}' }
context 'redis config' do
context 'default settings' do
it { should contain_sensu_redis_config('testhost.domain.com').with(
:host => '127.0.0.1',
:port => 6379,
:db => 0,
:auto_reconnect => true
)}
end # default settings
context 'be configurable without sentinels' do
let(:params) { {
:redis_host => 'redis.domain.com',
:redis_port => 1234,
:redis_password => 'password',
:redis_db => 1,
:redis_auto_reconnect => false
} }
it { should contain_sensu_redis_config('testhost.domain.com').with(
:host => 'redis.domain.com',
:port => 1234,
:password => 'password',
:db => 1,
:auto_reconnect => false,
:sentinels => nil,
:master => nil
)}
end # be configurable without sentinels
context 'be configurable with sentinels' do
let(:params) { {
:redis_password => 'password',
:redis_db => 1,
:redis_auto_reconnect => false,
:redis_sentinels => [{
'host' => 'redis1.domain.com',
'port' => 1234
}, {
'host' => 'redis2.domain.com',
'port' => '5678'
}],
:redis_master => 'master-name'
} }
# Note: the string port '5678' is expected to be normalized to an integer.
it { should contain_sensu_redis_config('testhost.domain.com').with(
:host => nil,
:port => nil,
:password => 'password',
:db => 1,
:auto_reconnect => false,
:sentinels => [{
'host' => 'redis1.domain.com',
'port' => 1234
}, {
'host' => 'redis2.domain.com',
'port' => 5678
}],
:master => "master-name"
)}
end # be configurable with sentinels
context 'with server' do
let(:params) { { :server => true } }
it do
should contain_file('/etc/sensu/conf.d/redis.json').with(
:ensure => 'present',
:owner => 'sensu',
:group => 'sensu',
:mode => '0440'
).that_comes_before("Sensu_redis_config[#{facts[:fqdn]}]")
end
end # with server
context 'with api' do
let(:params) { { :api => true } }
it do
should contain_file('/etc/sensu/conf.d/redis.json').with(
:ensure => 'present',
:owner => 'sensu',
:group => 'sensu',
:mode => '0440'
).that_comes_before("Sensu_redis_config[#{facts[:fqdn]}]")
end
end # with api
context 'purge configs' do
let(:params) { {
:purge => { 'config' => true },
:server => false,
:api => false,
} }
it { should contain_file('/etc/sensu/conf.d/redis.json').with_ensure('absent') }
end # purge configs
end #redis config
end
|
require 'fileutils'
# In-memory model of a Nintendo DS ROM's filesystem: parses the ROM header,
# file name table (FNT), file allocation table (FAT) and ARM9 overlay table,
# and supports reading/writing file data by RAM address or by path, plus
# writing a patched ROM back out.
# NOTE(review): relies on constants defined elsewhere in this project
# (CONSTANT_OVERLAYS, ROOM_OVERLAYS, MAX_ALLOWABLE_ROOM_OVERLAY_SIZE, GAME,
# LIST_OF_FILE_RAM_LOCATIONS_*, RICHTERS_LIST_OF_GFX_POINTERS) — verify
# against their declarations before changing related logic.
class NDSFileSystem
# Raised when the ROM header does not match a supported game/region.
class InvalidFileError < StandardError ; end
# Raised when a RAM address cannot be mapped to a currently loaded file.
class ConversionError < StandardError ; end
# Raised when a read/write would run past the end of a file.
class OffsetPastEndOfFileError < StandardError ; end
# Raised when an overlay cannot safely be expanded.
class FileExpandError < StandardError ; end
attr_reader :files,
:files_by_path,
:files_by_index,
:overlays,
:rom
# Opens an already-extracted filesystem directory. The original ROM is read
# from <dir>/ftc/rom.nds for the tables; file sizes come from the files on
# disk (they may have been modified since extraction).
def open_directory(filesystem_directory)
@filesystem_directory = filesystem_directory
input_rom_path = "#{@filesystem_directory}/ftc/rom.nds"
@rom = File.open(input_rom_path, "rb") {|file| file.read}
read_from_rom()
@files.each do |id, file|
next unless file[:type] == :file
file[:size] = File.size(File.join(@filesystem_directory, file[:file_path]))
file[:end_offset] = file[:start_offset] + file[:size]
end
get_file_ram_start_offsets_and_file_data_types()
end
# Opens a ROM and extracts every file to filesystem_directory on disk.
def open_and_extract_rom(input_rom_path, filesystem_directory)
@filesystem_directory = filesystem_directory
@rom = File.open(input_rom_path, "rb") {|file| file.read}
read_from_rom()
extract_to_hard_drive()
get_file_ram_start_offsets_and_file_data_types()
end
# Opens a ROM and keeps all file data in memory only (no directory on disk).
def open_rom(input_rom_path)
@filesystem_directory = nil
@rom = File.open(input_rom_path, "rb") {|file| file.read}
read_from_rom()
extract_to_memory()
get_file_ram_start_offsets_and_file_data_types()
end
# Writes a patched ROM to output_rom_path. Two passes: files that still fit
# in their original FAT slot are written in place; files that grew are
# collected and appended after the highest address written so far. The FAT
# (and overlay table lengths, for overlays) are updated as files move.
# Yields the running count of files written after each file, if a block is
# given (e.g. for a progress bar).
def write_to_rom(output_rom_path)
print "Writing files to #{output_rom_path}... "
new_rom = @rom.dup
expanded_files = []
max_written_address = 0
files_written = 0
files_without_dirs.sort_by{|id, file| id}.each do |id, file|
file_data = get_file_data_from_opened_files_cache(file[:file_path])
new_file_size = file_data.length
offset = file[:id]*8
# Each FAT entry is 8 bytes: little-endian start and end offsets.
old_start_offset, old_end_offset = @rom[@file_allocation_table_offset+offset, 8].unpack("VV")
old_size = old_end_offset - old_start_offset
if new_file_size > old_size
# Doesn't fit in place; defer to the second (append) pass.
expanded_files << file
next
end
new_start_offset = old_start_offset
new_end_offset = new_start_offset + new_file_size
new_rom[new_start_offset,new_file_size] = file_data
offset = file[:id]*8
new_rom[@file_allocation_table_offset+offset, 8] = [new_start_offset, new_end_offset].pack("VV")
max_written_address = new_end_offset if new_end_offset > max_written_address
# Update the lengths of changed overlay files.
if file[:overlay_id]
offset = file[:overlay_id] * 32
new_rom[@arm9_overlay_table_offset+offset+8, 4] = [new_file_size].pack("V")
end
files_written += 1
if block_given?
yield(files_written)
end
end
# Second pass: append files that outgrew their original slots.
expanded_files.each do |file|
file_data = get_file_data_from_opened_files_cache(file[:file_path])
new_file_size = file_data.length
new_start_offset = max_written_address
new_end_offset = new_start_offset + new_file_size
new_rom[new_start_offset,new_file_size] = file_data
offset = file[:id]*8
new_rom[@file_allocation_table_offset+offset, 8] = [new_start_offset, new_end_offset].pack("VV")
max_written_address = new_end_offset if new_end_offset > max_written_address
# Update the lengths of changed overlay files.
if file[:overlay_id]
offset = file[:overlay_id] * 32
new_rom[@arm9_overlay_table_offset+offset+8, 4] = [new_file_size].pack("V")
end
files_written += 1
if block_given?
yield(files_written)
end
end
# Update arm9
file = @extra_files.find{|file| file[:name] == "arm9.bin"}
file_data = get_file_data_from_opened_files_cache(file[:file_path])
new_file_size = file_data.length
# The arm9 binary must keep its original size; the header fields for it
# are not rewritten here.
if @arm9_size != new_file_size
raise "ARM9 changed size"
end
new_rom[file[:start_offset], file[:size]] = file_data
File.open(output_rom_path, "wb") do |f|
f.write(new_rom)
end
puts "Done"
end
# All FNT-listed files plus the synthetic header/arm/table entries.
def all_files
@files.values + @extra_files
end
# Debug helper: dumps each file entry, pausing for input between entries.
def print_files
@files.each do |id, file|
puts "%02X" % id
puts file.inspect
gets
end
end
# Marks the given overlay as loaded (mapped into RAM) for address lookups.
def load_overlay(overlay_id)
overlay = @overlays[overlay_id]
load_file(overlay)
end
# Registers a file as currently resident in RAM, keyed by its RAM offset.
def load_file(file)
@currently_loaded_files[file[:ram_start_offset]] = file
end
# Maps a RAM address to [file_path, offset_in_file] by scanning the
# currently loaded files. Raises ConversionError (with a dump of the
# loaded files) if no loaded file covers the address.
def convert_ram_address_to_path_and_offset(ram_address)
@currently_loaded_files.each do |ram_start_offset, file|
ram_range = (file[:ram_start_offset]..file[:ram_start_offset]+file[:size]-1)
if ram_range.include?(ram_address)
offset_in_file = ram_address - file[:ram_start_offset]
return [file[:file_path], offset_in_file]
end
end
str = ""
@currently_loaded_files.each do |ram_start_offset, file|
if file[:overlay_id]
str << "\n overlay loaded: %02d" % file[:overlay_id]
end
str << "\n ram_range: %08X..%08X" % [file[:ram_start_offset], file[:ram_start_offset]+file[:size]]
str << "\n rom_start: %08X" % file[:start_offset]
end
raise ConversionError.new("Failed to convert ram address to rom address: %08X. #{str}" % ram_address)
end
# Reads `length` bytes starting at a RAM address. See read_by_file for
# the supported options.
def read(ram_address, length=1, options={})
file_path, offset_in_file = convert_ram_address_to_path_and_offset(ram_address)
return read_by_file(file_path, offset_in_file, length, options)
end
# Reads from a file by path and in-file offset.
# Options:
#   :allow_length_to_exceed_end_of_file - only the start offset is bounds-
#     checked; the read is silently truncated at end of file.
#   :allow_reading_into_next_file_in_ram - on overflow, retries against the
#     file whose RAM start is 0xC past this one's (files are 0xC-aligned in
#     RAM, presumably — TODO confirm).
def read_by_file(file_path, offset_in_file, length, options={})
file = files_by_path[file_path]
if options[:allow_length_to_exceed_end_of_file]
max_offset = offset_in_file
else
max_offset = offset_in_file + length
end
if max_offset > file[:size]
if options[:allow_reading_into_next_file_in_ram] && file[:ram_start_offset]
next_file_in_ram = find_file_by_ram_start_offset(file[:ram_start_offset]+0xC)
if next_file_in_ram
return read_by_file(next_file_in_ram[:file_path], offset_in_file - file[:size], length, options=options)
end
end
raise OffsetPastEndOfFileError.new("Offset %08X (length %08X) is past end of file #{file_path} (%08X bytes long)" % [offset_in_file, length, file[:size]])
end
file_data = get_file_data_from_opened_files_cache(file_path)
return file_data[offset_in_file, length]
end
# Reads from a RAM address up to (excluding) the first occurrence of the
# given end-marker byte sequence.
# NOTE(review): if the marker never occurs, String#index returns nil and
# the final slice raises TypeError — confirm callers guarantee a marker.
def read_until_end_marker(ram_address, end_markers)
file_path, offset_in_file = convert_ram_address_to_path_and_offset(ram_address)
file_data = get_file_data_from_opened_files_cache(file_path)
substring = file_data[offset_in_file..-1]
end_index = substring.index(end_markers.pack("C*"))
return substring[0,end_index]
end
# Writes new_data at a RAM address (resolved to file + offset first).
def write(ram_address, new_data)
file_path, offset_in_file = convert_ram_address_to_path_and_offset(ram_address)
write_by_file(file_path, offset_in_file, new_data)
end
# Overwrites part of a cached file's data and marks the file uncommitted.
# Raises OffsetPastEndOfFileError if the write would extend the file.
def write_by_file(file_path, offset_in_file, new_data)
file = files_by_path[file_path]
if offset_in_file + new_data.length > file[:size]
raise OffsetPastEndOfFileError.new("Offset %08X is past end of file #{file_path} (%08X bytes long)" % [offset_in_file, file[:size]])
end
file_data = get_file_data_from_opened_files_cache(file_path)
file_data[offset_in_file, new_data.length] = new_data
@opened_files_cache[file_path] = file_data
@uncommitted_files << file_path
end
# Finds the FNT file entry with the given RAM start offset, or nil.
def find_file_by_ram_start_offset(ram_start_offset)
files.values.find do |file|
file[:type] == :file && file[:ram_start_offset] == ram_start_offset
end
end
# Flushes all uncommitted (modified) files from the cache to disk under
# base_directory, creating directories as needed.
def commit_file_changes(base_directory = @filesystem_directory)
print "Committing changes to filesystem... "
@uncommitted_files.each do |file_path|
file_data = get_file_data_from_opened_files_cache(file_path)
full_path = File.join(base_directory, file_path)
full_dir = File.dirname(full_path)
FileUtils.mkdir_p(full_dir)
File.open(full_path, "wb") do |f|
f.write(file_data)
end
end
@uncommitted_files = []
puts "Done."
end
# True if any modified file has not yet been written to disk.
def has_uncommitted_files?
!@uncommitted_files.empty?
end
# Grows the file containing ram_address by length_to_expand_by bytes of
# zero padding and returns the RAM address of the old end of the file
# (i.e. the start of the newly added region). Refuses to grow a room
# overlay past MAX_ALLOWABLE_ROOM_OVERLAY_SIZE.
def expand_file_and_get_end_of_file_ram_address(ram_address, length_to_expand_by)
file_path, offset_in_file = convert_ram_address_to_path_and_offset(ram_address)
file = @currently_loaded_files.values.find{|file| file[:file_path] == file_path}
if file[:overlay_id] && ROOM_OVERLAYS.include?(file[:overlay_id]) && file[:size] + length_to_expand_by > MAX_ALLOWABLE_ROOM_OVERLAY_SIZE
raise FileExpandError.new("Failed to expand room overlay #{file[:overlay_id]} to #{file[:size] + length_to_expand_by} bytes because that is larger than the maximum size a room overlay can be (#{MAX_ALLOWABLE_ROOM_OVERLAY_SIZE} bytes).")
end
old_size = file[:size]
file[:size] += length_to_expand_by
# Expand the actual file data string, and fill it with 0 bytes.
write_by_file(file_path, old_size, "\0"*length_to_expand_by)
return file[:ram_start_offset] + old_size
end
# FNT entries that are actual files (excludes directory entries).
def files_without_dirs
files.select{|id, file| file[:type] == :file}
end
private
# Parses the ROM header (fixed offsets per the NDS cartridge header layout)
# and all filesystem tables, then pre-loads the always-resident overlays.
def read_from_rom
@game_name = @rom[0x00,12]
raise InvalidFileError.new("Not a DSVania") unless %w(CASTLEVANIA1 CASTLEVANIA2 CASTLEVANIA3).include?(@game_name)
@game_code = @rom[0x0C,4]
raise InvalidFileError.new("Only the North American versions are supported") unless %w(ACVE ACBE YR9E).include?(@game_code)
@arm9_rom_offset, @arm9_entry_address, @arm9_ram_offset, @arm9_size = @rom[0x20,16].unpack("VVVV")
@arm7_rom_offset, @arm7_entry_address, @arm7_ram_offset, @arm7_size = @rom[0x30,16].unpack("VVVV")
@file_name_table_offset, @file_name_table_size, @file_allocation_table_offset, @file_allocation_table_size = @rom[0x40,16].unpack("VVVV")
@arm9_overlay_table_offset, @arm9_overlay_table_size = @rom[0x50,8].unpack("VV")
@arm7_overlay_table_offset, @arm7_overlay_table_size = @rom[0x58,8].unpack("VV")
@banner_start_offset = @rom[0x68,4].unpack("V").first
@banner_end_offset = @banner_start_offset + 0x840 # ??
@files = {}
@overlays = []
@currently_loaded_files = {}
@opened_files_cache = {}
@uncommitted_files = []
get_file_name_table()
get_overlay_table()
get_file_allocation_table()
get_extra_files()
generate_file_paths()
CONSTANT_OVERLAYS.each do |overlay_index|
load_overlay(overlay_index)
end
end
# Writes every file's data from the ROM out to @filesystem_directory.
def extract_to_hard_drive
print "Extracting files from ROM... "
all_files.each do |file|
next unless file[:type] == :file
#next unless (file[:overlay_id] || file[:name] == "arm9.bin" || file[:name] == "rom.nds")
start_offset, end_offset, file_path = file[:start_offset], file[:end_offset], file[:file_path]
file_data = @rom[start_offset..end_offset-1]
output_path = File.join(@filesystem_directory, file_path)
output_dir = File.dirname(output_path)
FileUtils.mkdir_p(output_dir)
File.open(output_path, "wb") do |f|
f.write(file_data)
end
end
puts "Done."
end
# Copies every file's data from the ROM into the in-memory cache instead
# of writing to disk.
def extract_to_memory
print "Extracting files from ROM to memory... "
all_files.each do |file|
next unless file[:type] == :file
start_offset, end_offset, file_path = file[:start_offset], file[:end_offset], file[:file_path]
file_data = @rom[start_offset..end_offset-1]
@opened_files_cache[file_path] = file_data
end
puts "Done."
end
# Returns a file's data, loading it from disk into the cache on first use.
def get_file_data_from_opened_files_cache(file_path)
if @opened_files_cache[file_path]
file_data = @opened_files_cache[file_path]
else
path = File.join(@filesystem_directory, file_path)
file_data = File.open(path, "rb") {|file| file.read}
@opened_files_cache[file_path] = file_data
end
return file_data
end
# Parses the file name table: one subtable per directory. Directory IDs
# start at 0xF000 (the root).
def get_file_name_table
file_name_table_data = @rom[@file_name_table_offset, @file_name_table_size]
subtable_offset, subtable_first_file_id, number_of_dirs = file_name_table_data[0x00,8].unpack("Vvv")
get_file_name_subtable(subtable_offset, subtable_first_file_id, 0xF000)
i = 1
while i < number_of_dirs
subtable_offset, subtable_first_file_id, parent_dir_id = file_name_table_data[0x00+i*8,8].unpack("Vvv")
get_file_name_subtable(subtable_offset, subtable_first_file_id, 0xF000 + i)
i += 1
end
end
# Parses one FNT subtable. Each entry starts with a length/type byte:
# 0x01-0x7F = file (name length), 0x81-0xFF = subdirectory (low 7 bits are
# the name length, followed by a 2-byte dir ID), 0x00 = end, 0x80 = reserved.
def get_file_name_subtable(subtable_offset, subtable_first_file_id, parent_dir_id)
i = 0
offset = @file_name_table_offset + subtable_offset
next_file_id = subtable_first_file_id
while true
length = @rom[offset,1].unpack("C*").first
offset += 1
case length
when 0x01..0x7F
type = :file
name = @rom[offset,length]
offset += length
id = next_file_id
next_file_id += 1
when 0x81..0xFF
type = :subdir
length = length & 0x7F
name = @rom[offset,length]
offset += length
id = @rom[offset,2].unpack("v").first
offset += 2
when 0x00
# end of subtable
break
when 0x80
# reserved
break
end
@files[id] = {:name => name, :type => type, :parent_id => parent_dir_id, :id => id}
i += 1
end
end
# Parses the ARM9 overlay table (32-byte entries) into @files/@overlays.
def get_overlay_table
overlay_table_data = @rom[@arm9_overlay_table_offset, @arm9_overlay_table_size]
offset = 0x00
while offset < @arm9_overlay_table_size
overlay_id, overlay_ram_address, overlay_size, _, _, _, file_id, _ = overlay_table_data[0x00+offset,32].unpack("V*")
@files[file_id] = {:name => "overlay9_#{overlay_id}", :type => :file, :id => file_id, :overlay_id => overlay_id, :ram_start_offset => overlay_ram_address, :size => overlay_size}
@overlays << @files[file_id]
offset += 32
end
end
# Reads the game's own in-ROM list of file RAM locations to fill in each
# file's :ram_start_offset and :file_data_type, building @files_by_index
# in list order. For PoR, Richter's gfx files get their RAM offsets from
# a separate pointer list.
def get_file_ram_start_offsets_and_file_data_types
@files_by_index = []
offset = LIST_OF_FILE_RAM_LOCATIONS_START_OFFSET
while offset < LIST_OF_FILE_RAM_LOCATIONS_END_OFFSET
file_data = read(offset, LIST_OF_FILE_RAM_LOCATIONS_ENTRY_LENGTH)
ram_start_offset = file_data[0..3].unpack("V").first
file_data_type = file_data[4..5].unpack("v").first
file_path = file_data[6..-1]
file_path = file_path.delete("\x00") # Remove null bytes padding the end of the string
file = files_by_path[file_path]
file[:ram_start_offset] = ram_start_offset
file[:file_data_type] = file_data_type
@files_by_index << file
offset += LIST_OF_FILE_RAM_LOCATIONS_ENTRY_LENGTH
end
if GAME == "por"
# Richter's gfx files don't have a ram offset stored in the normal place.
i = 0
files.values.each do |file|
if file[:ram_start_offset] == 0 && file[:file_path] =~ /\/sc2\/s0_ri_..\.dat/
file[:ram_start_offset] = read(RICHTERS_LIST_OF_GFX_POINTERS + i*4, 4).unpack("V").first
i += 1
end
end
end
end
# Parses the FAT: 8 bytes per file ID (start offset, end offset).
def get_file_allocation_table
file_allocation_table_data = @rom[@file_allocation_table_offset, @file_allocation_table_size]
id = 0x00
offset = 0x00
while offset < @file_allocation_table_size
@files[id][:start_offset], @files[id][:end_offset] = file_allocation_table_data[offset,8].unpack("VV")
@files[id][:size] = @files[id][:end_offset] - @files[id][:start_offset]
id += 1
offset += 0x08
end
end
# Creates synthetic entries for data outside the FNT/FAT: the header, arm
# binaries, the tables themselves, the banner, and the whole ROM. arm9 is
# also registered as loaded so RAM lookups can resolve into it.
def get_extra_files
@extra_files = []
@extra_files << {:name => "ndsheader.bin", :type => :file, :start_offset => 0x0, :end_offset => 0x4000}
arm9_file = {:name => "arm9.bin", :type => :file, :start_offset => @arm9_rom_offset, :end_offset => @arm9_rom_offset + @arm9_size, :ram_start_offset => @arm9_ram_offset, :size => @arm9_size}
@extra_files << arm9_file
load_file(arm9_file)
@extra_files << {:name => "arm7.bin", :type => :file, :start_offset => @arm7_rom_offset, :end_offset => @arm7_rom_offset + @arm7_size}
@extra_files << {:name => "arm9_overlay_table.bin", :type => :file, :start_offset => @arm9_overlay_table_offset, :end_offset => @arm9_overlay_table_offset + @arm9_overlay_table_size}
@extra_files << {:name => "arm7_overlay_table.bin", :type => :file, :start_offset => @arm7_overlay_table_offset, :end_offset => @arm7_overlay_table_offset + @arm7_overlay_table_size}
@extra_files << {:name => "fnt.bin", :type => :file, :start_offset => @file_name_table_offset, :end_offset => @file_name_table_offset + @file_name_table_size}
@extra_files << {:name => "fat.bin", :type => :file, :start_offset => @file_allocation_table_offset, :end_offset => @file_allocation_table_offset + @file_allocation_table_size}
@extra_files << {:name => "banner.bin", :type => :file, :start_offset => @banner_start_offset, :end_offset => @banner_end_offset}
@extra_files << {:name => "rom.nds", :type => :file, :start_offset => 0, :end_offset => @rom.length}
end
# Builds each file's full path (root children at top level, extra files
# under /ftc, everything else under its parent directory's name) and fills
# the @files_by_path index.
def generate_file_paths
@files_by_path = {}
all_files.each do |file|
if file[:parent_id] == 0xF000
file[:file_path] = file[:name]
elsif file[:parent_id].nil?
file[:file_path] = File.join("/ftc", file[:name])
else
file[:file_path] = "/" + File.join(@files[file[:parent_id]][:name], file[:name])
end
@files_by_path[file[:file_path]] = file
end
end
end
Clarify error
require 'fileutils'
class NDSFileSystem
class InvalidFileError < StandardError ; end
class ConversionError < StandardError ; end
class OffsetPastEndOfFileError < StandardError ; end
class FileExpandError < StandardError ; end
attr_reader :files,
:files_by_path,
:files_by_index,
:overlays,
:rom
def open_directory(filesystem_directory)
@filesystem_directory = filesystem_directory
input_rom_path = "#{@filesystem_directory}/ftc/rom.nds"
@rom = File.open(input_rom_path, "rb") {|file| file.read}
read_from_rom()
@files.each do |id, file|
next unless file[:type] == :file
file[:size] = File.size(File.join(@filesystem_directory, file[:file_path]))
file[:end_offset] = file[:start_offset] + file[:size]
end
get_file_ram_start_offsets_and_file_data_types()
end
def open_and_extract_rom(input_rom_path, filesystem_directory)
@filesystem_directory = filesystem_directory
@rom = File.open(input_rom_path, "rb") {|file| file.read}
read_from_rom()
extract_to_hard_drive()
get_file_ram_start_offsets_and_file_data_types()
end
def open_rom(input_rom_path)
@filesystem_directory = nil
@rom = File.open(input_rom_path, "rb") {|file| file.read}
read_from_rom()
extract_to_memory()
get_file_ram_start_offsets_and_file_data_types()
end
def write_to_rom(output_rom_path)
print "Writing files to #{output_rom_path}... "
new_rom = @rom.dup
expanded_files = []
max_written_address = 0
files_written = 0
files_without_dirs.sort_by{|id, file| id}.each do |id, file|
file_data = get_file_data_from_opened_files_cache(file[:file_path])
new_file_size = file_data.length
offset = file[:id]*8
old_start_offset, old_end_offset = @rom[@file_allocation_table_offset+offset, 8].unpack("VV")
old_size = old_end_offset - old_start_offset
if new_file_size > old_size
expanded_files << file
next
end
new_start_offset = old_start_offset
new_end_offset = new_start_offset + new_file_size
new_rom[new_start_offset,new_file_size] = file_data
offset = file[:id]*8
new_rom[@file_allocation_table_offset+offset, 8] = [new_start_offset, new_end_offset].pack("VV")
max_written_address = new_end_offset if new_end_offset > max_written_address
# Update the lengths of changed overlay files.
if file[:overlay_id]
offset = file[:overlay_id] * 32
new_rom[@arm9_overlay_table_offset+offset+8, 4] = [new_file_size].pack("V")
end
files_written += 1
if block_given?
yield(files_written)
end
end
expanded_files.each do |file|
file_data = get_file_data_from_opened_files_cache(file[:file_path])
new_file_size = file_data.length
new_start_offset = max_written_address
new_end_offset = new_start_offset + new_file_size
new_rom[new_start_offset,new_file_size] = file_data
offset = file[:id]*8
new_rom[@file_allocation_table_offset+offset, 8] = [new_start_offset, new_end_offset].pack("VV")
max_written_address = new_end_offset if new_end_offset > max_written_address
# Update the lengths of changed overlay files.
if file[:overlay_id]
offset = file[:overlay_id] * 32
new_rom[@arm9_overlay_table_offset+offset+8, 4] = [new_file_size].pack("V")
end
files_written += 1
if block_given?
yield(files_written)
end
end
# Update arm9
file = @extra_files.find{|file| file[:name] == "arm9.bin"}
file_data = get_file_data_from_opened_files_cache(file[:file_path])
new_file_size = file_data.length
if @arm9_size != new_file_size
raise "ARM9 changed size"
end
new_rom[file[:start_offset], file[:size]] = file_data
File.open(output_rom_path, "wb") do |f|
f.write(new_rom)
end
puts "Done"
end
def all_files
@files.values + @extra_files
end
def print_files
@files.each do |id, file|
puts "%02X" % id
puts file.inspect
gets
end
end
def load_overlay(overlay_id)
overlay = @overlays[overlay_id]
load_file(overlay)
end
def load_file(file)
@currently_loaded_files[file[:ram_start_offset]] = file
end
def convert_ram_address_to_path_and_offset(ram_address)
@currently_loaded_files.each do |ram_start_offset, file|
ram_range = (file[:ram_start_offset]..file[:ram_start_offset]+file[:size]-1)
if ram_range.include?(ram_address)
offset_in_file = ram_address - file[:ram_start_offset]
return [file[:file_path], offset_in_file]
end
end
str = ""
@currently_loaded_files.each do |ram_start_offset, file|
if file[:overlay_id]
str << "\n overlay loaded: %02d" % file[:overlay_id]
end
str << "\n ram_range: %08X..%08X" % [file[:ram_start_offset], file[:ram_start_offset]+file[:size]]
str << "\n rom_start: %08X" % file[:start_offset]
end
raise ConversionError.new("Failed to convert ram address to rom address: %08X. #{str}" % ram_address)
end
def read(ram_address, length=1, options={})
file_path, offset_in_file = convert_ram_address_to_path_and_offset(ram_address)
return read_by_file(file_path, offset_in_file, length, options)
end
def read_by_file(file_path, offset_in_file, length, options={})
file = files_by_path[file_path]
if options[:allow_length_to_exceed_end_of_file]
max_offset = offset_in_file
else
max_offset = offset_in_file + length
end
if max_offset > file[:size]
if options[:allow_reading_into_next_file_in_ram] && file[:ram_start_offset]
next_file_in_ram = find_file_by_ram_start_offset(file[:ram_start_offset]+0xC)
if next_file_in_ram
return read_by_file(next_file_in_ram[:file_path], offset_in_file - file[:size], length, options=options)
end
end
raise OffsetPastEndOfFileError.new("Offset %08X (length %08X) is past end of file #{file_path} (%08X bytes long)" % [offset_in_file, length, file[:size]])
end
file_data = get_file_data_from_opened_files_cache(file_path)
return file_data[offset_in_file, length]
end
def read_until_end_marker(ram_address, end_markers)
file_path, offset_in_file = convert_ram_address_to_path_and_offset(ram_address)
file_data = get_file_data_from_opened_files_cache(file_path)
substring = file_data[offset_in_file..-1]
end_index = substring.index(end_markers.pack("C*"))
return substring[0,end_index]
end
def write(ram_address, new_data)
file_path, offset_in_file = convert_ram_address_to_path_and_offset(ram_address)
write_by_file(file_path, offset_in_file, new_data)
end
def write_by_file(file_path, offset_in_file, new_data)
file = files_by_path[file_path]
if offset_in_file + new_data.length > file[:size]
raise OffsetPastEndOfFileError.new("Offset %08X is past end of file #{file_path} (%08X bytes long)" % [offset_in_file, file[:size]])
end
file_data = get_file_data_from_opened_files_cache(file_path)
file_data[offset_in_file, new_data.length] = new_data
@opened_files_cache[file_path] = file_data
@uncommitted_files << file_path
end
def find_file_by_ram_start_offset(ram_start_offset)
files.values.find do |file|
file[:type] == :file && file[:ram_start_offset] == ram_start_offset
end
end
def commit_file_changes(base_directory = @filesystem_directory)
print "Committing changes to filesystem... "
@uncommitted_files.each do |file_path|
file_data = get_file_data_from_opened_files_cache(file_path)
full_path = File.join(base_directory, file_path)
full_dir = File.dirname(full_path)
FileUtils.mkdir_p(full_dir)
File.open(full_path, "wb") do |f|
f.write(file_data)
end
end
@uncommitted_files = []
puts "Done."
end
def has_uncommitted_files?
!@uncommitted_files.empty?
end
def expand_file_and_get_end_of_file_ram_address(ram_address, length_to_expand_by)
file_path, offset_in_file = convert_ram_address_to_path_and_offset(ram_address)
file = @currently_loaded_files.values.find{|file| file[:file_path] == file_path}
if file[:overlay_id] && ROOM_OVERLAYS.include?(file[:overlay_id]) && file[:size] + length_to_expand_by > MAX_ALLOWABLE_ROOM_OVERLAY_SIZE
raise FileExpandError.new("Failed to expand room overlay #{file[:overlay_id]} to #{file[:size] + length_to_expand_by} bytes because that is larger than the maximum size a room overlay can be in this game (#{MAX_ALLOWABLE_ROOM_OVERLAY_SIZE} bytes).")
end
old_size = file[:size]
file[:size] += length_to_expand_by
# Expand the actual file data string, and fill it with 0 bytes.
write_by_file(file_path, old_size, "\0"*length_to_expand_by)
return file[:ram_start_offset] + old_size
end
def files_without_dirs
files.select{|id, file| file[:type] == :file}
end
private
def read_from_rom
@game_name = @rom[0x00,12]
raise InvalidFileError.new("Not a DSVania") unless %w(CASTLEVANIA1 CASTLEVANIA2 CASTLEVANIA3).include?(@game_name)
@game_code = @rom[0x0C,4]
raise InvalidFileError.new("Only the North American versions are supported") unless %w(ACVE ACBE YR9E).include?(@game_code)
@arm9_rom_offset, @arm9_entry_address, @arm9_ram_offset, @arm9_size = @rom[0x20,16].unpack("VVVV")
@arm7_rom_offset, @arm7_entry_address, @arm7_ram_offset, @arm7_size = @rom[0x30,16].unpack("VVVV")
@file_name_table_offset, @file_name_table_size, @file_allocation_table_offset, @file_allocation_table_size = @rom[0x40,16].unpack("VVVV")
@arm9_overlay_table_offset, @arm9_overlay_table_size = @rom[0x50,8].unpack("VV")
@arm7_overlay_table_offset, @arm7_overlay_table_size = @rom[0x58,8].unpack("VV")
@banner_start_offset = @rom[0x68,4].unpack("V").first
@banner_end_offset = @banner_start_offset + 0x840 # ??
@files = {}
@overlays = []
@currently_loaded_files = {}
@opened_files_cache = {}
@uncommitted_files = []
get_file_name_table()
get_overlay_table()
get_file_allocation_table()
get_extra_files()
generate_file_paths()
CONSTANT_OVERLAYS.each do |overlay_index|
load_overlay(overlay_index)
end
end
def extract_to_hard_drive
print "Extracting files from ROM... "
all_files.each do |file|
next unless file[:type] == :file
#next unless (file[:overlay_id] || file[:name] == "arm9.bin" || file[:name] == "rom.nds")
start_offset, end_offset, file_path = file[:start_offset], file[:end_offset], file[:file_path]
file_data = @rom[start_offset..end_offset-1]
output_path = File.join(@filesystem_directory, file_path)
output_dir = File.dirname(output_path)
FileUtils.mkdir_p(output_dir)
File.open(output_path, "wb") do |f|
f.write(file_data)
end
end
puts "Done."
end
def extract_to_memory
print "Extracting files from ROM to memory... "
all_files.each do |file|
next unless file[:type] == :file
start_offset, end_offset, file_path = file[:start_offset], file[:end_offset], file[:file_path]
file_data = @rom[start_offset..end_offset-1]
@opened_files_cache[file_path] = file_data
end
puts "Done."
end
def get_file_data_from_opened_files_cache(file_path)
if @opened_files_cache[file_path]
file_data = @opened_files_cache[file_path]
else
path = File.join(@filesystem_directory, file_path)
file_data = File.open(path, "rb") {|file| file.read}
@opened_files_cache[file_path] = file_data
end
return file_data
end
def get_file_name_table
file_name_table_data = @rom[@file_name_table_offset, @file_name_table_size]
subtable_offset, subtable_first_file_id, number_of_dirs = file_name_table_data[0x00,8].unpack("Vvv")
get_file_name_subtable(subtable_offset, subtable_first_file_id, 0xF000)
i = 1
while i < number_of_dirs
subtable_offset, subtable_first_file_id, parent_dir_id = file_name_table_data[0x00+i*8,8].unpack("Vvv")
get_file_name_subtable(subtable_offset, subtable_first_file_id, 0xF000 + i)
i += 1
end
end
def get_file_name_subtable(subtable_offset, subtable_first_file_id, parent_dir_id)
i = 0
offset = @file_name_table_offset + subtable_offset
next_file_id = subtable_first_file_id
while true
length = @rom[offset,1].unpack("C*").first
offset += 1
case length
when 0x01..0x7F
type = :file
name = @rom[offset,length]
offset += length
id = next_file_id
next_file_id += 1
when 0x81..0xFF
type = :subdir
length = length & 0x7F
name = @rom[offset,length]
offset += length
id = @rom[offset,2].unpack("v").first
offset += 2
when 0x00
# end of subtable
break
when 0x80
# reserved
break
end
@files[id] = {:name => name, :type => type, :parent_id => parent_dir_id, :id => id}
i += 1
end
end
def get_overlay_table
overlay_table_data = @rom[@arm9_overlay_table_offset, @arm9_overlay_table_size]
offset = 0x00
while offset < @arm9_overlay_table_size
overlay_id, overlay_ram_address, overlay_size, _, _, _, file_id, _ = overlay_table_data[0x00+offset,32].unpack("V*")
@files[file_id] = {:name => "overlay9_#{overlay_id}", :type => :file, :id => file_id, :overlay_id => overlay_id, :ram_start_offset => overlay_ram_address, :size => overlay_size}
@overlays << @files[file_id]
offset += 32
end
end
def get_file_ram_start_offsets_and_file_data_types
@files_by_index = []
offset = LIST_OF_FILE_RAM_LOCATIONS_START_OFFSET
while offset < LIST_OF_FILE_RAM_LOCATIONS_END_OFFSET
file_data = read(offset, LIST_OF_FILE_RAM_LOCATIONS_ENTRY_LENGTH)
ram_start_offset = file_data[0..3].unpack("V").first
file_data_type = file_data[4..5].unpack("v").first
file_path = file_data[6..-1]
file_path = file_path.delete("\x00") # Remove null bytes padding the end of the string
file = files_by_path[file_path]
file[:ram_start_offset] = ram_start_offset
file[:file_data_type] = file_data_type
@files_by_index << file
offset += LIST_OF_FILE_RAM_LOCATIONS_ENTRY_LENGTH
end
if GAME == "por"
# Richter's gfx files don't have a ram offset stored in the normal place.
i = 0
files.values.each do |file|
if file[:ram_start_offset] == 0 && file[:file_path] =~ /\/sc2\/s0_ri_..\.dat/
file[:ram_start_offset] = read(RICHTERS_LIST_OF_GFX_POINTERS + i*4, 4).unpack("V").first
i += 1
end
end
end
end
def get_file_allocation_table
file_allocation_table_data = @rom[@file_allocation_table_offset, @file_allocation_table_size]
id = 0x00
offset = 0x00
while offset < @file_allocation_table_size
@files[id][:start_offset], @files[id][:end_offset] = file_allocation_table_data[offset,8].unpack("VV")
@files[id][:size] = @files[id][:end_offset] - @files[id][:start_offset]
id += 1
offset += 0x08
end
end
def get_extra_files
@extra_files = []
@extra_files << {:name => "ndsheader.bin", :type => :file, :start_offset => 0x0, :end_offset => 0x4000}
arm9_file = {:name => "arm9.bin", :type => :file, :start_offset => @arm9_rom_offset, :end_offset => @arm9_rom_offset + @arm9_size, :ram_start_offset => @arm9_ram_offset, :size => @arm9_size}
@extra_files << arm9_file
load_file(arm9_file)
@extra_files << {:name => "arm7.bin", :type => :file, :start_offset => @arm7_rom_offset, :end_offset => @arm7_rom_offset + @arm7_size}
@extra_files << {:name => "arm9_overlay_table.bin", :type => :file, :start_offset => @arm9_overlay_table_offset, :end_offset => @arm9_overlay_table_offset + @arm9_overlay_table_size}
@extra_files << {:name => "arm7_overlay_table.bin", :type => :file, :start_offset => @arm7_overlay_table_offset, :end_offset => @arm7_overlay_table_offset + @arm7_overlay_table_size}
@extra_files << {:name => "fnt.bin", :type => :file, :start_offset => @file_name_table_offset, :end_offset => @file_name_table_offset + @file_name_table_size}
@extra_files << {:name => "fat.bin", :type => :file, :start_offset => @file_allocation_table_offset, :end_offset => @file_allocation_table_offset + @file_allocation_table_size}
@extra_files << {:name => "banner.bin", :type => :file, :start_offset => @banner_start_offset, :end_offset => @banner_end_offset}
@extra_files << {:name => "rom.nds", :type => :file, :start_offset => 0, :end_offset => @rom.length}
end
def generate_file_paths
@files_by_path = {}
all_files.each do |file|
if file[:parent_id] == 0xF000
file[:file_path] = file[:name]
elsif file[:parent_id].nil?
file[:file_path] = File.join("/ftc", file[:name])
else
file[:file_path] = "/" + File.join(@files[file[:parent_id]][:name], file[:name])
end
@files_by_path[file[:file_path]] = file
end
end
end
|
Add specs for network::bond::debian
require 'spec_helper'
# rspec-puppet spec for the network::bond::debian define.
describe 'network::bond::debian', :type => :define do
  let(:title) { 'bond0' }
  describe "with default bonding params" do
    let(:params) do
      {
        'ensure'           => 'present',
        'method'           => 'static',
        'ipaddress'        => '172.18.1.2',
        'netmask'          => '255.255.128.0',
        'slaves'           => ['eth0', 'eth1'],
        'mode'             => 'active-backup',
        'miimon'           => '100',
        'downdelay'        => '200',
        'updelay'          => '200',
        'lacp_rate'        => 'slow',
        'primary'          => 'eth0',
        'primary_reselect' => 'always',
        'xmit_hash_policy' => 'layer2',
      }
    end
    # Each enslaved interface should have its standalone config removed
    # (ensure => absent); the bond interface carries the options instead.
    ['eth0', 'eth1'].each do |slave|
      it "should add a network_config resource for #{slave}" do
        should contain_network_config(slave).with_ensure('absent')
      end
    end
    # The bond interface itself gets the Debian bond-* options verbatim.
    it "should add a network_config resource for bond0" do
      should contain_network_config('bond0').with({
        'ensure'    => 'present',
        'method'    => 'static',
        'ipaddress' => '172.18.1.2',
        'netmask'   => '255.255.128.0',
        'options'   => {
          'bond-slaves'           => 'eth0 eth1',
          'bond-mode'             => 'active-backup',
          'bond-miimon'           => '100',
          'bond-downdelay'        => '200',
          'bond-updelay'          => '200',
          'bond-lacp-rate'        => 'slow',
          'bond-primary'          => 'eth0',
          'bond-primary-reselect' => 'always',
          'bond-xmit-hash-policy' => 'layer2',
        },
      })
    end
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
Bundler.require(*Rails.groups)
require "fine_print"
module Dummy
  # Minimal host application used to exercise the engine in tests.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Raise when a locale outside config.i18n.available_locales is requested.
    config.i18n.enforce_available_locales = true
  end
end
Fix dummy app config: require action_interceptor instead of fine_print
require File.expand_path('../boot', __FILE__)
require 'rails/all'
Bundler.require(*Rails.groups)
require "action_interceptor"
module Dummy
  # Minimal host application used to exercise the engine in tests.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Raise when a locale outside config.i18n.available_locales is requested.
    config.i18n.enforce_available_locales = true
  end
end
|
# frozen_string_literal: true
FactoryBot.define do
  # A user's review of a single episode.
  factory :episode_record do
    association :user, :with_profile
    comment { "おもしろかった" }
    # NOTE(review): this is a plain dynamic attribute, not a sequence, so the
    # block parameter `n` is not a counter here and the value is effectively
    # constant. Probably meant `sequence(:twitter_url_hash)` -- confirm.
    twitter_url_hash { |n| "xxxxx#{n}" }
    episode
    rating { 3.0 }
    # Ensure the "record" Tip exists, then wire the work and parent record
    # up from the chosen episode before creation.
    before(:create) do |episode_record|
      Tip.where(slug: "record").first_or_create(attributes_for(:record_tip))
      episode_record.work = episode_record.episode.work
      episode_record.record = create(:record, user: episode_record.user, work: episode_record.work)
    end
  end
end
Rename the `comment` attribute to `body` in the episode_record factory
# frozen_string_literal: true
FactoryBot.define do
  # A user's review of a single episode.
  factory :episode_record do
    association :user, :with_profile
    body { "おもしろかった" }
    # Use a real FactoryBot sequence so each record gets a unique hash; a
    # plain dynamic-attribute block receives no counter, so the previous
    # `twitter_url_hash { |n| ... }` form never interpolated a number.
    sequence(:twitter_url_hash) { |n| "xxxxx#{n}" }
    episode
    rating { 3.0 }
    # Ensure the "record" Tip exists, then wire the work and parent record
    # up from the chosen episode before creation.
    before(:create) do |episode_record|
      Tip.where(slug: "record").first_or_create(attributes_for(:record_tip))
      episode_record.work = episode_record.episode.work
      episode_record.record = create(:record, user: episode_record.user, work: episode_record.work)
    end
  end
end
|
require 'liquid'
module NanocConrefFS
  # Applies Liquid "conref" substitutions to content, scoped by path/rep.
  module Conrefifier
    # Matches a single {{ ... }} substitution.
    SINGLE_SUB = /(\{\{[^\}]+\}\})/m
    # Matches an {% if %}...{% endif %} / {% unless %}...{% endunless %} block.
    BLOCK_SUB = /\{% (?:if|unless).+? %\}.*?\{% end(?:if|unless) %\}/m

    # Collects the :values of every variable scope whose :path regex and
    # :reps list match the given path/rep. Later scopes win on key clashes.
    def self.file_variables(variables, path, rep)
      return {} if variables.nil?
      data_vars = {}
      scopes = variables.select do |v|
        scope_block = v[:scope]
        scoped_path = scope_block[:path].empty? || Regexp.new(scope_block[:path]) =~ path
        scoped_rep = scope_block[:reps].nil? || scope_block[:reps].include?(rep)
        scoped_path && scoped_rep
      end
      # I benchmarked that assignment is much faster than
      # merging an empty hash
      return scopes.first[:values] if scopes.length == 1
      scopes.each do |scope|
        data_vars = data_vars.merge(scope[:values])
      end
      data_vars
    end

    # Renders Liquid substitutions in +content+ for the given path/rep,
    # shielding ExtendedMarkdownFilter's {{#...}}, {{/...}} and {{octicon-*}}
    # tokens from Liquid by temporarily rewriting them to [[...]].
    def self.liquify(config, path:, content:, rep:)
      page_vars = NanocConrefFS::Conrefifier.file_variables(config[:page_variables], path, rep)
      page_vars = { :page => page_vars }.merge(NanocConrefFS::Variables.variables[rep])
      # we must obfuscate essential ExtendedMarkdownFilter content
      content = content.gsub(/\{\{\s*#(\S+)\s*\}\}/, '[[#\1]]')
      content = content.gsub(/\{\{\s*\/(\S+)\s*\}\}/, '[[/\1]]')
      content = content.gsub(/\{\{\s*(octicon-\S+\s*[^\}]+)\s*\}\}/, '[[\1]]')
      begin
        result = content
        # This pass replaces any matched conditionals
        if result =~ NanocConrefFS::Conrefifier::BLOCK_SUB || result =~ NanocConrefFS::Conrefifier::SINGLE_SUB
          result = NanocConrefFS::Conrefifier.apply_liquid(result, page_vars)
        end
      rescue Liquid::SyntaxError => e
        # unrecognized Liquid, so just return the content
        # NOTE(review): "#(unknown)" below looks like a mangled interpolation
        # (perhaps #{path}) -- confirm against upstream before changing.
        STDERR.puts "Could not convert #(unknown): #{e.message}"
      rescue => e
        raise "#{e.message}: #{e.inspect}"
      end
      result = result.gsub(/\[\[\s*#(\S+)\s*\]\]/, '{{#\1}}')
      result = result.gsub(/\[\[\s*\/(\S+)\s*\]\]/, '{{/\1}}')
      result = result.gsub(/\[\[\s*(octicon-\S+\s*[^\]]+)\s*\]\]/, '{{\1}}')
      result
    end

    # Parses and renders +content+ with Liquid, then re-renders any {{...}}
    # substitutions the first pass produced.
    def self.apply_liquid(content, data_vars)
      data_vars['page'] = data_vars[:page].stringify_keys
      result = Liquid::Template.parse(content, :error_mode => :warn).render(data_vars)
      # This second pass renders any previously inserted
      # data conditionals within the body. If a Liquid parse
      # returns a blank string, we'll return the original
      if result =~ NanocConrefFS::Conrefifier::SINGLE_SUB
        result = result.gsub(NanocConrefFS::Conrefifier::SINGLE_SUB) do |match|
          liquified = NanocConrefFS::Conrefifier.apply_liquid(match, data_vars)
          liquified.empty? ? match : liquified
        end
      end
      result
    end
  end
end
Store `PATH_TO_VARS` for reuse
require 'liquid'
module NanocConrefFS
  # Applies Liquid "conref" substitutions to content, scoped by path/rep.
  module Conrefifier
    # Matches a single {{ ... }} substitution.
    SINGLE_SUB = /(\{\{[^\}]+\}\})/m
    # Matches an {% if %}...{% endif %} / {% unless %}...{% endunless %} block.
    BLOCK_SUB = /\{% (?:if|unless).+? %\}.*?\{% end(?:if|unless) %\}/m
    # Cache of resolved variables: PATH_TO_VARS[rep][path] => Hash.
    PATH_TO_VARS = {}

    # Collects the :values of every variable scope whose :path regex and
    # :reps list match the given path/rep, caching the result per (rep, path).
    def self.file_variables(variables, path, rep)
      return {} if variables.nil?
      data_vars = {}
      # this saves a bunch of time because we don't need to
      # recalculate the paths (looping over scopes, etc)
      if PATH_TO_VARS[rep] && PATH_TO_VARS[rep][path]
        data_vars = PATH_TO_VARS[rep][path]
      else
        scopes = variables.select do |v|
          scope_block = v[:scope]
          scoped_path = scope_block[:path].empty? || Regexp.new(scope_block[:path]) =~ path
          scoped_rep = scope_block[:reps].nil? || scope_block[:reps].include?(rep)
          scoped_path && scoped_rep
        end
        # I benchmarked that assignment is much faster than
        # merging an empty hash
        if scopes.length == 1
          data_vars = scopes.first[:values]
        else
          scopes.each do |scope|
            data_vars = data_vars.merge(scope[:values])
          end
        end
        # Stash for later use. Merge into any existing per-rep cache rather
        # than replacing it -- the previous `PATH_TO_VARS[rep] = {}` wiped
        # every cached path for this rep on each cache miss, so the cache
        # never held more than the last path looked up.
        PATH_TO_VARS[rep] ||= {}
        PATH_TO_VARS[rep][path] = data_vars
      end
      data_vars
    end

    # Renders Liquid substitutions in +content+ for the given path/rep,
    # shielding ExtendedMarkdownFilter's {{#...}}, {{/...}} and {{octicon-*}}
    # tokens from Liquid by temporarily rewriting them to [[...]].
    def self.liquify(config, path:, content:, rep:)
      page_vars = NanocConrefFS::Conrefifier.file_variables(config[:page_variables], path, rep)
      page_vars = { :page => page_vars }.merge(NanocConrefFS::Variables.variables[rep])
      # we must obfuscate essential ExtendedMarkdownFilter content
      content = content.gsub(/\{\{\s*#(\S+)\s*\}\}/, '[[#\1]]')
      content = content.gsub(/\{\{\s*\/(\S+)\s*\}\}/, '[[/\1]]')
      content = content.gsub(/\{\{\s*(octicon-\S+\s*[^\}]+)\s*\}\}/, '[[\1]]')
      begin
        result = content
        # This pass replaces any matched conditionals
        if result =~ NanocConrefFS::Conrefifier::BLOCK_SUB || result =~ NanocConrefFS::Conrefifier::SINGLE_SUB
          result = NanocConrefFS::Conrefifier.apply_liquid(result, page_vars)
        end
      rescue Liquid::SyntaxError => e
        # unrecognized Liquid, so just return the content
        # NOTE(review): "#(unknown)" below looks like a mangled interpolation
        # (perhaps #{path}) -- confirm against upstream before changing.
        STDERR.puts "Could not convert #(unknown): #{e.message}"
      rescue => e
        raise "#{e.message}: #{e.inspect}"
      end
      result = result.gsub(/\[\[\s*#(\S+)\s*\]\]/, '{{#\1}}')
      result = result.gsub(/\[\[\s*\/(\S+)\s*\]\]/, '{{/\1}}')
      result = result.gsub(/\[\[\s*(octicon-\S+\s*[^\]]+)\s*\]\]/, '{{\1}}')
      result
    end

    # Parses and renders +content+ with Liquid, then re-renders any {{...}}
    # substitutions the first pass produced.
    def self.apply_liquid(content, data_vars)
      data_vars['page'] = data_vars[:page].stringify_keys
      result = Liquid::Template.parse(content, :error_mode => :warn).render(data_vars)
      # This second pass renders any previously inserted
      # data conditionals within the body. If a Liquid parse
      # returns a blank string, we'll return the original
      if result =~ NanocConrefFS::Conrefifier::SINGLE_SUB
        result = result.gsub(NanocConrefFS::Conrefifier::SINGLE_SUB) do |match|
          liquified = NanocConrefFS::Conrefifier.apply_liquid(match, data_vars)
          liquified.empty? ? match : liquified
        end
      end
      result
    end
  end
end
|
require File.expand_path('extensions/callbacks', File.dirname(__FILE__))
require 'i18n'
module NiftyServices
  # Base class for service objects: tracks a response status/code, a success
  # flag and an error list, and generates `<reason>_error(!)` helper methods
  # from the configured error reasons.
  class BaseService
    attr_reader :response_status, :response_status_code
    attr_reader :options, :errors, :logger
    class << self
      # Registers a new error reason/status pair and defines its helpers.
      def register_error_response_method(reason_string, status_code)
        NiftyServices::Configuration.add_response_error_method(reason_string, status_code)
        define_error_response_method(reason_string, status_code)
      end
      # Defines `<reason>_error` and `<reason>_error!` instance methods.
      def define_error_response_method(reason_string, status_code)
        method_name = Util.normalized_callback_name(reason_string, '_error')
        define_method method_name do |message_key, options = {}|
          error(status_code, message_key, options)
        end
        define_method "#{method_name}!" do |message_key, options = {}|
          error!(status_code, message_key, options)
        end
      end
    end
    def initialize(options = {}, initial_response_status = 400)
      @options = with_default_options(options)
      @errors = []
      @logger = @options[:logger] || default_logger
      @executed = false
      with_before_and_after_callbacks(:initialize) do
        set_response_status(initial_response_status)
      end
    end
    # Subclasses must implement the actual work here.
    def execute
      not_implemented_exception(__method__)
    end
    def valid?
      return @errors.empty?
    end
    def success?
      @success == true && valid?
    end
    def fail?
      !success?
    end
    # Overrides the attr_reader above to default to :bad_request.
    def response_status
      @response_status ||= :bad_request
    end
    def valid_user?
      user_class = NiftyServices.config.user_class
      raise Errors::InvalidUser if user_class.nil?
      valid_object?(@user, user_class)
    end
    def option_exists?(key)
      @options && @options.key?(key.to_sym)
    end
    # Treats both true and the string 'true' as enabled.
    def option_enabled?(key)
      option_exists?(key) && [true, 'true'].member?(@options[key.to_sym])
    end
    def option_disabled?(key)
      !option_enabled?(key)
    end
    # Appends a single error, or concatenates a list of errors.
    def add_error(error)
      add_method = error.is_a?(Array) ? :concat : :push
      @errors.send(add_method, error)
    end
    def default_logger
      NiftyServices.config.logger
    end
    alias :log :logger
    def executed?
      @executed == true
    end
    alias :runned? :executed?
    private
    def with_default_options(options)
      default_options.merge(options).symbolize_keys
    end
    def default_options
      {}
    end
    # Subclasses must implement the precondition check.
    def can_execute?
      not_implemented_exception(__method__)
    end
    # Runs the given block once (guarded by executed?) inside the execute
    # callbacks, only when can_execute? allows it.
    def execute_action(&block)
      return nil if executed?
      with_before_and_after_callbacks(:execute) do
        if can_execute?
          yield(block) if block_given?
        end
      end
      @executed = true
    end
    def success_response(status = :ok)
      unless Configuration::SUCCESS_RESPONSE_STATUS.key?(status.to_sym)
        raise "#{status} is not a valid success response status"
      end
      with_before_and_after_callbacks(:success) do
        @success = true
        set_response_status(status)
      end
    end
    def success_created_response
      success_response(:created)
    end
    def set_response_status(status)
      @response_status = response_status_reason_for(status)
      @response_status_code = response_status_code_for(status)
    end
    # Looks +status+ up in the combined error/success tables, matching by
    # reason key when given a Symbol/String and by numeric code otherwise.
    def response_status_for(status)
      error_list = Configuration::ERROR_RESPONSE_STATUS
      success_list = Configuration::SUCCESS_RESPONSE_STATUS
      select_method = [Symbol, String].member?(status.class) ? :key : :value
      response_list = error_list.merge(success_list)
      response_list.select do |status_key, status_code|
        status == (select_method == :key ? status_key : status_code)
      end
    end
    def response_status_code_for(status)
      response_status_for(status).values.first
    end
    def response_status_reason_for(status)
      response_status_for(status).keys.first
    end
    # Marks the service failed, records the status and pushes the resolved
    # error message; returns the message.
    def error(status, message_key, options = {})
      @success = false
      with_before_and_after_callbacks(:error) do
        set_response_status(status)
        error_message = process_error_message_for_key(message_key, options)
        add_error(error_message)
        error_message
      end
    end
    def error!(status, message_key, options = {})
      error(status, message_key, options)
      # TODO:
      # maybe throw a Exception making bang(!) semantic
      # raise "NiftyServices::V1::Exceptions::#{status.titleize}".constantize
      return false
    end
    def valid_object?(record, expected_class)
      record.class.to_s == expected_class.to_s
    end
    def filter_hash(hash = {}, whitelist_keys = [])
      hash.symbolize_keys.slice(*whitelist_keys.map(&:to_sym))
    end
    # Returns the (symbolized) attribute names whose values differ between
    # +old+ and +current+, restricted to +attributes+.
    def changes(old, current, attributes = {})
      changes = []
      return changes if old.nil? || current.nil?
      old_attributes = old.attributes.slice(*attributes.map(&:to_s))
      new_attributes = current.attributes.slice(*attributes.map(&:to_s))
      new_attributes.each do |attribute, value|
        changes << attribute if (old_attributes[attribute] != value)
      end
      changes.map(&:to_sym)
    end
    def i18n_namespace
      NiftyServices.configuration.i18n_namespace
    end
    def i18n_errors_namespace
      "#{i18n_namespace}.errors"
    end
    # Resolves a message key to its final error message: ActiveModel errors
    # and arrays of hashes pass through; anything else is translated.
    def process_error_message_for_key(message_key, options)
      if message_key.class.to_s == 'ActiveModel::Errors'
        message = message_key.messages
      elsif message_key.is_a?(Array) && message_key.first.is_a?(Hash)
        message = message_key
      else
        message = translate("#{i18n_errors_namespace}.#{message_key}", options)
      end
      message
    end
    # Eagerly define the helpers for every configured error reason.
    NiftyServices::Configuration.response_errors_list.each do |reason_string, status_code|
      define_error_response_method(reason_string, status_code)
    end
    protected
    def not_implemented_exception(method_name)
      raise NotImplementedError, "#{method_name} must be implemented in subclass"
    end
    # Translates +key+ via I18n, falling back to a diagnostic string on any
    # failure. (Fixes the misspelled "fecth" in the fallback message.)
    def translate(key, options = {})
      begin
        I18n.t(key, options)
      rescue => error
        "Can't fetch key #{key} - #{error.message}"
      end
    end
  end
end
Allow error messages to bypass I18n translation via a `translate: false` option
require File.expand_path('extensions/callbacks', File.dirname(__FILE__))
require 'i18n'
module NiftyServices
  # Base class for service objects: tracks a response status/code, a success
  # flag and an error list, and generates `<reason>_error(!)` helper methods
  # from the configured error reasons.
  class BaseService
    attr_reader :response_status, :response_status_code
    attr_reader :options, :errors, :logger
    class << self
      # Registers a new error reason/status pair and defines its helpers.
      def register_error_response_method(reason_string, status_code)
        NiftyServices::Configuration.add_response_error_method(reason_string, status_code)
        define_error_response_method(reason_string, status_code)
      end
      # Defines `<reason>_error` and `<reason>_error!` instance methods.
      def define_error_response_method(reason_string, status_code)
        method_name = Util.normalized_callback_name(reason_string, '_error')
        define_method method_name do |message_key, options = {}|
          error(status_code, message_key, options)
        end
        define_method "#{method_name}!" do |message_key, options = {}|
          error!(status_code, message_key, options)
        end
      end
    end
    def initialize(options = {}, initial_response_status = 400)
      @options = with_default_options(options)
      @errors = []
      @logger = @options[:logger] || default_logger
      @executed = false
      with_before_and_after_callbacks(:initialize) do
        set_response_status(initial_response_status)
      end
    end
    # Subclasses must implement the actual work here.
    def execute
      not_implemented_exception(__method__)
    end
    def valid?
      return @errors.empty?
    end
    def success?
      @success == true && valid?
    end
    def fail?
      !success?
    end
    # Overrides the attr_reader above to default to :bad_request.
    def response_status
      @response_status ||= :bad_request
    end
    def valid_user?
      user_class = NiftyServices.config.user_class
      raise Errors::InvalidUser if user_class.nil?
      valid_object?(@user, user_class)
    end
    def option_exists?(key)
      @options && @options.key?(key.to_sym)
    end
    # Treats both true and the string 'true' as enabled.
    def option_enabled?(key)
      option_exists?(key) && [true, 'true'].member?(@options[key.to_sym])
    end
    def option_disabled?(key)
      !option_enabled?(key)
    end
    # Appends a single error, or concatenates a list of errors.
    def add_error(error)
      add_method = error.is_a?(Array) ? :concat : :push
      @errors.send(add_method, error)
    end
    def default_logger
      NiftyServices.config.logger
    end
    alias :log :logger
    def executed?
      @executed == true
    end
    alias :runned? :executed?
    private
    def with_default_options(options)
      default_options.merge(options).symbolize_keys
    end
    def default_options
      {}
    end
    # Subclasses must implement the precondition check.
    def can_execute?
      not_implemented_exception(__method__)
    end
    # Runs the given block once (guarded by executed?) inside the execute
    # callbacks, only when can_execute? allows it.
    def execute_action(&block)
      return nil if executed?
      with_before_and_after_callbacks(:execute) do
        if can_execute?
          yield(block) if block_given?
        end
      end
      @executed = true
    end
    def success_response(status = :ok)
      unless Configuration::SUCCESS_RESPONSE_STATUS.key?(status.to_sym)
        raise "#{status} is not a valid success response status"
      end
      with_before_and_after_callbacks(:success) do
        @success = true
        set_response_status(status)
      end
    end
    def success_created_response
      success_response(:created)
    end
    def set_response_status(status)
      @response_status = response_status_reason_for(status)
      @response_status_code = response_status_code_for(status)
    end
    # Looks +status+ up in the combined error/success tables, matching by
    # reason key when given a Symbol/String and by numeric code otherwise.
    def response_status_for(status)
      error_list = Configuration::ERROR_RESPONSE_STATUS
      success_list = Configuration::SUCCESS_RESPONSE_STATUS
      select_method = [Symbol, String].member?(status.class) ? :key : :value
      response_list = error_list.merge(success_list)
      response_list.select do |status_key, status_code|
        status == (select_method == :key ? status_key : status_code)
      end
    end
    def response_status_code_for(status)
      response_status_for(status).values.first
    end
    def response_status_reason_for(status)
      response_status_for(status).keys.first
    end
    # Marks the service failed, records the status and pushes the resolved
    # error message; returns the message.
    def error(status, message_key, options = {})
      @success = false
      with_before_and_after_callbacks(:error) do
        set_response_status(status)
        error_message = process_error_message_for_key(message_key, options)
        add_error(error_message)
        error_message
      end
    end
    def error!(status, message_key, options = {})
      error(status, message_key, options)
      # TODO:
      # maybe throw a Exception making bang(!) semantic
      # raise "NiftyServices::V1::Exceptions::#{status.titleize}".constantize
      return false
    end
    def valid_object?(record, expected_class)
      record.class.to_s == expected_class.to_s
    end
    def filter_hash(hash = {}, whitelist_keys = [])
      hash.symbolize_keys.slice(*whitelist_keys.map(&:to_sym))
    end
    # Returns the (symbolized) attribute names whose values differ between
    # +old+ and +current+, restricted to +attributes+.
    def changes(old, current, attributes = {})
      changes = []
      return changes if old.nil? || current.nil?
      old_attributes = old.attributes.slice(*attributes.map(&:to_s))
      new_attributes = current.attributes.slice(*attributes.map(&:to_s))
      new_attributes.each do |attribute, value|
        changes << attribute if (old_attributes[attribute] != value)
      end
      changes.map(&:to_sym)
    end
    def i18n_namespace
      NiftyServices.configuration.i18n_namespace
    end
    def i18n_errors_namespace
      "#{i18n_namespace}.errors"
    end
    # Resolves a message key to its final error message: ActiveModel errors
    # and arrays of hashes pass through; otherwise the key is translated
    # unless the caller opts out with `translate: false`.
    def process_error_message_for_key(message_key, options)
      if message_key.class.to_s == 'ActiveModel::Errors'
        message = message_key.messages
      elsif message_key.is_a?(Array) && message_key.first.is_a?(Hash)
        message = message_key
      else
        message = options[:translate].nil? || options[:translate] == true ?
          translate("#{i18n_errors_namespace}.#{message_key}", options) :
          message_key
      end
      message
    end
    # Eagerly define the helpers for every configured error reason.
    NiftyServices::Configuration.response_errors_list.each do |reason_string, status_code|
      define_error_response_method(reason_string, status_code)
    end
    protected
    def not_implemented_exception(method_name)
      raise NotImplementedError, "#{method_name} must be implemented in subclass"
    end
    # Translates +key+ via I18n, falling back to a diagnostic string on any
    # failure. (Fixes the misspelled "fecth" in the fallback message.)
    def translate(key, options = {})
      begin
        I18n.t(key, options)
      rescue => error
        "Can't fetch key #{key} - #{error.message}"
      end
    end
  end
end
|
module Nyulibraries
  module Assets
    # Gem version string (semantic versioning).
    VERSION = "3.0.0"
  end
end
Bump version to 3.0.1
module Nyulibraries
  module Assets
    # Gem version string (semantic versioning).
    VERSION = "3.0.1"
  end
end
|
#
# Author:: Adam Jacob (<adam@chef.io>)
# Copyright:: Copyright (c) 2015-2017, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
Ohai.plugin(:Platform) do
provides "platform", "platform_version", "platform_family"
depends "lsb"
# Derives the platform name from a Red Hat-style release file's contents:
# "redhat" for Red Hat proper, otherwise the first word, lowercased.
def get_redhatish_platform(contents)
  return "redhat" if contents =~ /^Red Hat/i
  contents[/(\w+)/i, 1].downcase
end

# Extracts the version from a Red Hat-style release string. Rawhide builds
# yield e.g. "28 (rawhide)"; otherwise the dotted number after "release".
def get_redhatish_version(contents)
  if contents =~ /Rawhide/i
    contents[/((\d+) \(Rawhide\))/i, 1].downcase
  else
    contents[/release ([\d\.]+)/, 1]
  end
end
#
# Reads an os-release-style file and parses it into a Hash.
#
# Parses line by line (KEY=VALUE per line, per os-release(5)), stripping one
# pair of surrounding double quotes from each value.
#
# @param file [String] the filename to read (e.g. '/etc/os-release')
#
# @returns [Hash] the file parsed into a Hash, or nil when the file is absent
#
def read_os_release_info(file)
  return nil unless File.exist?(file)
  # Bug fix: use split("\n") instead of bare split — whitespace splitting
  # broke quoted values containing spaces (e.g. NAME="Clear Linux OS").
  File.read(file).split("\n").inject({}) do |map, line|
    key, value = line.split("=")
    map[key] = value.gsub(/\A"|"\Z/, "") if value
    map
  end
end
#
# Memoized contents of /etc/os-release as a Hash. When the file names a
# chained CISCO_RELEASE_INFO file (Cisco Nexus switches), that file's entries
# are merged in on top; other distros can reuse this safely.
#
# @returns [Hash] cached os-release info, or nil when /etc/os-release is absent
#
def os_release_info
  @os_release_info ||=
    begin
      info = read_os_release_info("/etc/os-release")
      cisco_file = info && info["CISCO_RELEASE_INFO"]
      if cisco_file && File.exist?(cisco_file)
        info.merge!(read_os_release_info(cisco_file))
      end
      info
    end
end

#
# Whether /etc/os-release marks this system as Cisco-based.
#
# @returns [Boolean] truthy when /etc/os-release exists and carries a
#   CISCO_RELEASE_INFO entry
#
def os_release_file_is_cisco?
  return false unless File.exist?("/etc/os-release")
  os_release_info["CISCO_RELEASE_INFO"]
end
#
# Determines the platform version for Cumulus Linux systems.
#
# @returns [String, nil] VERSION_ID parsed from
#   /etc/cumulus/etc.replace/os-release, or nil when missing/unreadable/unparseable
#
def cumulus_version
  release_contents = File.read("/etc/cumulus/etc.replace/os-release")
  release_contents.match(/VERSION_ID=(.*)/)[1]
rescue NoMethodError, Errno::ENOENT, Errno::EACCES # rescue regex failure, file missing, or permission denied
  # Bug fix: the warning previously named "/etc/cumulus/etc/replace/os-release",
  # which is not the path actually read above.
  Ohai::Log.warn("Detected Cumulus Linux, but /etc/cumulus/etc.replace/os-release could not be parsed to determine platform_version")
  nil
end
#
# Determines the platform version for F5 Big-IP systems
#
# @returns [String, nil] version parsed from /etc/f5-release, or nil when the
#   file is missing, unreadable, or does not match the expected format
#
def bigip_version
  release_contents = File.read("/etc/f5-release")
  release_contents.match(/BIG-IP release (\S*)/)[1] # http://rubular.com/r/O8nlrBVqSb
rescue NoMethodError, Errno::ENOENT, Errno::EACCES # rescue regex failure, file missing, or permission denied
  Ohai::Log.warn("Detected F5 Big-IP, but /etc/f5-release could not be parsed to determine platform_version")
  nil
end

#
# Determines the platform version for Debian based systems
#
# @returns [String] the Cumulus version when platform is "cumulus",
#   otherwise the contents of /etc/debian_version
#
def debian_platform_version
  if platform == "cumulus"
    cumulus_version
  else # not cumulus
    File.read("/etc/debian_version").chomp
  end
end
#
# Determines the platform_family based on the platform
#
# NOTE: when-clauses are evaluated in order, so more specific patterns must
# stay ahead of broader ones — e.g. /nexus_centos/ (rhel) is listed before
# /nexus/ (wrlinux), which would otherwise also match "nexus_centos".
#
# @returns [String] platform_family value
#
def determine_platform_family
  case platform
  when /debian/, /ubuntu/, /linuxmint/, /raspbian/, /cumulus/
    # apt-get+dpkg almost certainly goes here
    "debian"
  when /oracle/, /centos/, /redhat/, /scientific/, /enterpriseenterprise/, /xenserver/, /cloudlinux/, /ibm_powerkvm/, /parallels/, /nexus_centos/, /clearos/, /bigip/ # Note that 'enterpriseenterprise' is oracle's LSB "distributor ID"
    # NOTE: "rhel" should be reserved exclusively for recompiled rhel versions that are nearly perfectly compatible down to the platform_version.
    # The operating systems that are "rhel" should all be as compatible as rhel7 = centos7 = oracle7 = scientific7 (98%-ish core RPM version compatibility
    # and the version numbers MUST track the upstream). The appropriate EPEL version repo should work nearly perfectly. Some variation like the
    # oracle kernel version differences and tuning and extra packages are clearly acceptable. Almost certainly some distros above (xenserver?)
    # should not be in this list. Please use fedora, below, instead. Also note that this is the only platform_family with this strict of a rule,
    # see the example of the debian platform family for how the rest of the platform_family designations should be used.
    "rhel"
  when /amazon/
    "amazon"
  when /suse/
    "suse"
  when /fedora/, /pidora/, /arista_eos/
    # In the broadest sense: RPM-based, fedora-derived distributions which are not strictly re-compiled RHEL (if it uses RPMs, and smells more like redhat and less like
    # SuSE it probably goes here).
    "fedora"
  when /nexus/, /ios_xr/
    "wrlinux"
  when /gentoo/
    "gentoo"
  when /slackware/
    "slackware"
  when /arch/
    "arch"
  when /exherbo/
    "exherbo"
  when /alpine/
    "alpine"
  when /clearlinux/
    "clearlinux"
  end
end
# Linux platform detection: release files are probed in priority order (most
# specific first); LSB data is only consulted last because its output varies
# between releases. The elsif ordering is load-bearing.
collect_data(:linux) do
  # platform [ and platform_version ? ] should be lower case to avoid dealing with RedHat/Redhat/redhat matching
  if File.exist?("/etc/oracle-release")
    contents = File.read("/etc/oracle-release").chomp
    platform "oracle"
    platform_version get_redhatish_version(contents)
  elsif File.exist?("/etc/enterprise-release")
    # older Oracle "Enterprise Linux" marker file
    contents = File.read("/etc/enterprise-release").chomp
    platform "oracle"
    platform_version get_redhatish_version(contents)
  elsif File.exist?("/etc/f5-release")
    platform "bigip"
    platform_version bigip_version
  elsif File.exist?("/etc/debian_version")
    # Ubuntu and Debian both have /etc/debian_version
    # Ubuntu should always have a working lsb, debian does not by default
    if lsb[:id] =~ /Ubuntu/i
      platform "ubuntu"
      platform_version lsb[:release]
    elsif lsb[:id] =~ /LinuxMint/i
      platform "linuxmint"
      platform_version lsb[:release]
    else
      # distinguish raspbian/cumulus from plain debian by marker files
      if File.exist?("/usr/bin/raspi-config")
        platform "raspbian"
      elsif Dir.exist?("/etc/cumulus")
        platform "cumulus"
      else
        platform "debian"
      end
      platform_version debian_platform_version
    end
  elsif File.exist?("/etc/parallels-release")
    contents = File.read("/etc/parallels-release").chomp
    platform get_redhatish_platform(contents)
    platform_version contents.match(/(\d\.\d\.\d)/)[0]
  elsif File.exist?("/etc/redhat-release")
    if os_release_file_is_cisco? # Cisco guestshell
      platform "nexus_centos"
      platform_version os_release_info["VERSION"]
    else
      contents = File.read("/etc/redhat-release").chomp
      platform get_redhatish_platform(contents)
      platform_version get_redhatish_version(contents)
    end
  elsif File.exist?("/etc/system-release")
    # e.g. Amazon Linux
    contents = File.read("/etc/system-release").chomp
    platform get_redhatish_platform(contents)
    platform_version get_redhatish_version(contents)
  elsif File.exist?("/etc/SuSE-release")
    suse_release = File.read("/etc/SuSE-release")
    # prefer VERSION + PATCHLEVEL ("12.1"); fall back to a dotted VERSION
    suse_version = suse_release.scan(/VERSION = (\d+)\nPATCHLEVEL = (\d+)/).flatten.join(".")
    suse_version = suse_release[/VERSION = ([\d\.]{2,})/, 1] if suse_version == ""
    platform_version suse_version
    if suse_release =~ /^openSUSE/
      # opensuse releases >= 42 are openSUSE Leap
      if platform_version.to_i < 42
        platform "opensuse"
      else
        platform "opensuseleap"
      end
    else
      platform "suse"
    end
  elsif File.exist?("/etc/Eos-release")
    platform "arista_eos"
    platform_version File.read("/etc/Eos-release").strip.split[-1]
    platform_family "fedora"
  elsif os_release_file_is_cisco?
    # Cisco devices must identify as a wrlinux derivative via ID_LIKE
    raise "unknown Cisco /etc/os-release or /etc/cisco-release ID_LIKE field" if
      os_release_info["ID_LIKE"].nil? || ! os_release_info["ID_LIKE"].include?("wrlinux")
    case os_release_info["ID"]
    when "nexus"
      platform "nexus"
    when "ios_xr"
      platform "ios_xr"
    else
      raise "unknown Cisco /etc/os-release or /etc/cisco-release ID field"
    end
    platform_family "wrlinux"
    platform_version os_release_info["VERSION"]
  elsif File.exist?("/etc/gentoo-release")
    platform "gentoo"
    # the gentoo release version is the base version used to bootstrap
    # a node and doesn't have a lot of meaning in a rolling release distro
    # kernel release will be used - ex. 3.18.7-gentoo
    platform_version `uname -r`.strip
  elsif File.exist?("/etc/slackware-version")
    platform "slackware"
    platform_version File.read("/etc/slackware-version").scan(/(\d+|\.+)/).join
  elsif File.exist?("/etc/arch-release")
    platform "arch"
    # no way to determine platform_version in a rolling release distribution
    # kernel release will be used - ex. 2.6.32-ARCH
    platform_version `uname -r`.strip
  elsif File.exist?("/etc/exherbo-release")
    platform "exherbo"
    # no way to determine platform_version in a rolling release distribution
    # kernel release will be used - ex. 3.13
    platform_version `uname -r`.strip
  elsif File.exist?("/etc/alpine-release")
    platform "alpine"
    platform_version File.read("/etc/alpine-release").strip
  elsif File.exist?("/usr/lib/os-release")
    contents = File.read("/usr/lib/os-release")
    if /Clear Linux/ =~ contents
      platform "clearlinux"
      platform_version contents[/VERSION_ID=(\d+)/, 1]
    end
  elsif lsb[:id] =~ /RedHat/i
    platform "redhat"
    platform_version lsb[:release]
  elsif lsb[:id] =~ /Amazon/i
    platform "amazon"
    platform_version lsb[:release]
  elsif lsb[:id] =~ /ScientificSL/i
    platform "scientific"
    platform_version lsb[:release]
  elsif lsb[:id] =~ /XenServer/i
    platform "xenserver"
    platform_version lsb[:release]
  elsif lsb[:id] # LSB can provide odd data that changes between releases, so we currently fall back on it rather than dealing with its subtleties
    platform lsb[:id].downcase
    platform_version lsb[:release]
  end
  # derive the family from whatever platform was just set
  platform_family determine_platform_family
end
end
Use /etc/os-release to detect platform data when it exists on the system, falling back to the legacy per-distro release files otherwise.
#
# Author:: Adam Jacob (<adam@chef.io>)
# Copyright:: Copyright (c) 2015-2017, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
Ohai.plugin(:Platform) do
provides "platform", "platform_version", "platform_family"
depends "lsb"
# Platform name from a Red Hat-style release file: "redhat" for Red Hat
# proper, otherwise the first word of the contents, lowercased.
def get_redhatish_platform(contents)
  return "redhat" if contents =~ /^Red Hat/i
  contents[/(\w+)/i, 1].downcase
end

# Version from a Red Hat-style release string; Rawhide builds yield
# e.g. "28 (rawhide)", otherwise the dotted number following "release".
def get_redhatish_version(contents)
  if contents =~ /Rawhide/i
    contents[/((\d+) \(Rawhide\))/i, 1].downcase
  else
    contents[/release ([\d\.]+)/, 1]
  end
end
#
# Reads an os-release-style file and parses it into a Hash.
#
# Parses line by line (KEY=VALUE per line, per os-release(5)), stripping one
# pair of surrounding double quotes from each value.
#
# @param file [String] the filename to read (e.g. '/etc/os-release')
#
# @returns [Hash] the file parsed into a Hash, or nil when the file is absent
#
def read_os_release_info(file)
  return nil unless File.exist?(file)
  # Bug fix: use split("\n") instead of bare split — whitespace splitting
  # broke quoted values containing spaces (e.g. NAME="Clear Linux OS") and
  # truncated multi-token values such as ID_LIKE="rhel fedora".
  File.read(file).split("\n").inject({}) do |map, line|
    key, value = line.split("=")
    map[key] = value.gsub(/\A"|"\Z/, "") if value
    map
  end
end
#
# Memoized contents of /etc/os-release as a Hash. When the file names a
# chained CISCO_RELEASE_INFO file (Cisco Nexus switches), that file's entries
# are merged in on top; other distros can reuse this safely.
#
# @returns [Hash] cached os-release info, or nil when /etc/os-release is absent
#
def os_release_info
  @os_release_info ||=
    begin
      info = read_os_release_info("/etc/os-release")
      cisco_file = info && info["CISCO_RELEASE_INFO"]
      if cisco_file && File.exist?(cisco_file)
        info.merge!(read_os_release_info(cisco_file))
      end
      info
    end
end

#
# Whether /etc/os-release marks this system as Cisco-based.
#
# @returns [Boolean] truthy when /etc/os-release exists and carries a
#   CISCO_RELEASE_INFO entry
#
def os_release_file_is_cisco?
  return false unless File.exist?("/etc/os-release")
  os_release_info["CISCO_RELEASE_INFO"]
end
#
# Determines the platform version for Cumulus Linux systems.
#
# @returns [String, nil] VERSION_ID parsed from
#   /etc/cumulus/etc.replace/os-release, or nil when missing/unreadable/unparseable
#
def cumulus_version
  release_contents = File.read("/etc/cumulus/etc.replace/os-release")
  release_contents.match(/VERSION_ID=(.*)/)[1]
rescue NoMethodError, Errno::ENOENT, Errno::EACCES # rescue regex failure, file missing, or permission denied
  # Bug fix: the warning previously named "/etc/cumulus/etc/replace/os-release",
  # which is not the path actually read above.
  Ohai::Log.warn("Detected Cumulus Linux, but /etc/cumulus/etc.replace/os-release could not be parsed to determine platform_version")
  nil
end
#
# Determines the platform version for F5 Big-IP systems
#
# @returns [String, nil] version parsed from /etc/f5-release, or nil when the
#   file is missing, unreadable, or does not match the expected format
#
def bigip_version
  release_contents = File.read("/etc/f5-release")
  release_contents.match(/BIG-IP release (\S*)/)[1] # http://rubular.com/r/O8nlrBVqSb
rescue NoMethodError, Errno::ENOENT, Errno::EACCES # rescue regex failure, file missing, or permission denied
  Ohai::Log.warn("Detected F5 Big-IP, but /etc/f5-release could not be parsed to determine platform_version")
  nil
end

#
# Determines the platform version for Debian based systems
#
# @returns [String] the Cumulus version when platform is "cumulus",
#   otherwise the contents of /etc/debian_version
#
def debian_platform_version
  if platform == "cumulus"
    cumulus_version
  else # not cumulus
    File.read("/etc/debian_version").chomp
  end
end
#
# Determines the platform_family based on the platform
#
# NOTE: when-clauses are evaluated in order, so more specific patterns must
# stay ahead of broader ones — e.g. /nexus_centos/ (rhel) is listed before
# /nexus/ (wrlinux), which would otherwise also match "nexus_centos".
#
# @returns [String] platform_family value
#
def determine_platform_family
  case platform
  when /debian/, /ubuntu/, /linuxmint/, /raspbian/, /cumulus/
    # apt-get+dpkg almost certainly goes here
    "debian"
  when /oracle/, /centos/, /redhat/, /scientific/, /enterpriseenterprise/, /xenserver/, /cloudlinux/, /ibm_powerkvm/, /parallels/, /nexus_centos/, /clearos/, /bigip/ # Note that 'enterpriseenterprise' is oracle's LSB "distributor ID"
    # NOTE: "rhel" should be reserved exclusively for recompiled rhel versions that are nearly perfectly compatible down to the platform_version.
    # The operating systems that are "rhel" should all be as compatible as rhel7 = centos7 = oracle7 = scientific7 (98%-ish core RPM version compatibility
    # and the version numbers MUST track the upstream). The appropriate EPEL version repo should work nearly perfectly. Some variation like the
    # oracle kernel version differences and tuning and extra packages are clearly acceptable. Almost certainly some distros above (xenserver?)
    # should not be in this list. Please use fedora, below, instead. Also note that this is the only platform_family with this strict of a rule,
    # see the example of the debian platform family for how the rest of the platform_family designations should be used.
    "rhel"
  when /amazon/
    "amazon"
  when /suse/
    "suse"
  when /fedora/, /pidora/, /arista_eos/
    # In the broadest sense: RPM-based, fedora-derived distributions which are not strictly re-compiled RHEL (if it uses RPMs, and smells more like redhat and less like
    # SuSE it probably goes here).
    "fedora"
  when /nexus/, /ios_xr/
    "wrlinux"
  when /gentoo/
    "gentoo"
  when /slackware/
    "slackware"
  when /arch/
    "arch"
  when /exherbo/
    "exherbo"
  when /alpine/
    "alpine"
  when /clearlinux/
    "clearlinux"
  end
end
# Linux platform detection. Prefers /etc/os-release (present on modern
# distros); otherwise falls back to the legacy per-distro release-file chain,
# whose elsif ordering is load-bearing.
collect_data(:linux) do
  # use os-release (present on all modern linux distros) or use old *-release files as fallback
  # platform_family also does not need to be hardcoded anymore
  if File.exist?("/etc/os-release")
    # NOTE(review): ID_LIKE may contain several space-separated tokens per
    # os-release(5), and read_os_release_info's whitespace splitting can
    # truncate such values — verify before relying on platform_family here.
    platform_family os_release_info["ID_LIKE"]
    platform os_release_info["ID"]
    platform_version os_release_info["VERSION_ID"]
  else
    # platform [ and platform_version ? ] should be lower case to avoid dealing with RedHat/Redhat/redhat matching
    if File.exist?("/etc/oracle-release")
      contents = File.read("/etc/oracle-release").chomp
      platform "oracle"
      platform_version get_redhatish_version(contents)
    elsif File.exist?("/etc/enterprise-release")
      # older Oracle "Enterprise Linux" marker file
      contents = File.read("/etc/enterprise-release").chomp
      platform "oracle"
      platform_version get_redhatish_version(contents)
    elsif File.exist?("/etc/f5-release")
      platform "bigip"
      platform_version bigip_version
    elsif File.exist?("/etc/debian_version")
      # Ubuntu and Debian both have /etc/debian_version
      # Ubuntu should always have a working lsb, debian does not by default
      if lsb[:id] =~ /Ubuntu/i
        platform "ubuntu"
        platform_version lsb[:release]
      elsif lsb[:id] =~ /LinuxMint/i
        platform "linuxmint"
        platform_version lsb[:release]
      else
        # distinguish raspbian/cumulus from plain debian by marker files
        if File.exist?("/usr/bin/raspi-config")
          platform "raspbian"
        elsif Dir.exist?("/etc/cumulus")
          platform "cumulus"
        else
          platform "debian"
        end
        platform_version debian_platform_version
      end
    elsif File.exist?("/etc/parallels-release")
      contents = File.read("/etc/parallels-release").chomp
      platform get_redhatish_platform(contents)
      platform_version contents.match(/(\d\.\d\.\d)/)[0]
    elsif File.exist?("/etc/redhat-release")
      if os_release_file_is_cisco? # Cisco guestshell
        platform "nexus_centos"
        platform_version os_release_info["VERSION"]
      else
        contents = File.read("/etc/redhat-release").chomp
        platform get_redhatish_platform(contents)
        platform_version get_redhatish_version(contents)
      end
    elsif File.exist?("/etc/system-release")
      # e.g. Amazon Linux
      contents = File.read("/etc/system-release").chomp
      platform get_redhatish_platform(contents)
      platform_version get_redhatish_version(contents)
    elsif File.exist?("/etc/SuSE-release")
      suse_release = File.read("/etc/SuSE-release")
      # prefer VERSION + PATCHLEVEL ("12.1"); fall back to a dotted VERSION
      suse_version = suse_release.scan(/VERSION = (\d+)\nPATCHLEVEL = (\d+)/).flatten.join(".")
      suse_version = suse_release[/VERSION = ([\d\.]{2,})/, 1] if suse_version == ""
      platform_version suse_version
      if suse_release =~ /^openSUSE/
        # opensuse releases >= 42 are openSUSE Leap
        if platform_version.to_i < 42
          platform "opensuse"
        else
          platform "opensuseleap"
        end
      else
        platform "suse"
      end
    elsif File.exist?("/etc/Eos-release")
      platform "arista_eos"
      platform_version File.read("/etc/Eos-release").strip.split[-1]
      platform_family "fedora"
    elsif os_release_file_is_cisco?
      # Cisco devices must identify as a wrlinux derivative via ID_LIKE
      raise "unknown Cisco /etc/os-release or /etc/cisco-release ID_LIKE field" if
        os_release_info["ID_LIKE"].nil? || ! os_release_info["ID_LIKE"].include?("wrlinux")
      case os_release_info["ID"]
      when "nexus"
        platform "nexus"
      when "ios_xr"
        platform "ios_xr"
      else
        raise "unknown Cisco /etc/os-release or /etc/cisco-release ID field"
      end
      platform_family "wrlinux"
      platform_version os_release_info["VERSION"]
    elsif File.exist?("/etc/gentoo-release")
      platform "gentoo"
      # the gentoo release version is the base version used to bootstrap
      # a node and doesn't have a lot of meaning in a rolling release distro
      # kernel release will be used - ex. 3.18.7-gentoo
      platform_version `uname -r`.strip
    elsif File.exist?("/etc/slackware-version")
      platform "slackware"
      platform_version File.read("/etc/slackware-version").scan(/(\d+|\.+)/).join
    elsif File.exist?("/etc/arch-release")
      platform "arch"
      # no way to determine platform_version in a rolling release distribution
      # kernel release will be used - ex. 2.6.32-ARCH
      platform_version `uname -r`.strip
    elsif File.exist?("/etc/exherbo-release")
      platform "exherbo"
      # no way to determine platform_version in a rolling release distribution
      # kernel release will be used - ex. 3.13
      platform_version `uname -r`.strip
    elsif File.exist?("/etc/alpine-release")
      platform "alpine"
      platform_version File.read("/etc/alpine-release").strip
    elsif File.exist?("/usr/lib/os-release")
      contents = File.read("/usr/lib/os-release")
      if /Clear Linux/ =~ contents
        platform "clearlinux"
        platform_version contents[/VERSION_ID=(\d+)/, 1]
      end
    elsif lsb[:id] =~ /RedHat/i
      platform "redhat"
      platform_version lsb[:release]
    elsif lsb[:id] =~ /Amazon/i
      platform "amazon"
      platform_version lsb[:release]
    elsif lsb[:id] =~ /ScientificSL/i
      platform "scientific"
      platform_version lsb[:release]
    elsif lsb[:id] =~ /XenServer/i
      platform "xenserver"
      platform_version lsb[:release]
    elsif lsb[:id] # LSB can provide odd data that changes between releases, so we currently fall back on it rather than dealing with its subtleties
      platform lsb[:id].downcase
      platform_version lsb[:release]
    end
    # legacy path derives the family from whatever platform was just set
    platform_family determine_platform_family
  end
end
end
|
require "xml_security"
require "onelogin/ruby-saml/attributes"
require "time"
require "nokogiri"
# Only supports SAML 2.0
module OneLogin
module RubySaml
# SAML2 Authentication Response. SAML Response
#
class Response < SamlMessage
# XML namespace URIs used by the XPath queries throughout this class.
ASSERTION = "urn:oasis:names:tc:SAML:2.0:assertion"
PROTOCOL = "urn:oasis:names:tc:SAML:2.0:protocol"
DSIG = "http://www.w3.org/2000/09/xmldsig#"
XENC = "http://www.w3.org/2001/04/xmlenc#"
# TODO: Settings should probably be initialized too... WDYT?
# OneLogin::RubySaml::Settings Toolkit settings
attr_accessor :settings
# Array with the causes [Array of strings]
attr_accessor :errors
# Signed-document wrapper built from the decoded response (set in #initialize)
attr_reader :document
# Decrypted variant of the document; only set when the assertion is encrypted
attr_reader :decrypted_document
# The decoded raw SAML response string
attr_reader :response
# Options hash passed to the constructor
attr_reader :options
# When true, failing validations return false; when false they raise
attr_accessor :soft
# Constructs the SAML Response. A Response Object that is an extension of the SamlMessage class.
# @param response [String] A UUEncoded SAML response from the IdP.
# @param options [Hash] :settings to provide the OneLogin::RubySaml::Settings object
# Or some options for the response validation process like skip the conditions validation
# with the :skip_conditions, or allow a clock_drift when checking dates with :allowed_clock_drift
# or :matches_request_id that will validate that the response matches the ID of the request,
# or skip the subject confirmation validation with the :skip_subject_confirmation option
# @raise [ArgumentError] when +response+ is nil
def initialize(response, options = {})
  @errors = []
  raise ArgumentError.new("Response cannot be nil") if response.nil?
  @options = options
  # soft defaults to true (validations return false rather than raise);
  # an explicit settings.soft value overrides it below.
  @soft = true
  if !options.empty? && !options[:settings].nil?
    @settings = options[:settings]
    if !options[:settings].soft.nil?
      @soft = options[:settings].soft
    end
  end
  @response = decode_raw_saml(response)
  @document = XMLSecurity::SignedDocument.new(@response, @errors)
  # Pre-decrypt the document once so later XPath lookups can use it.
  if assertion_encrypted?
    @decrypted_document = generate_decrypted_document
  end
end
# Records +error_msg+ in the errors array; in soft mode signals failure by
# returning false, otherwise raises via validation_error.
def append_error(error_msg)
  @errors.push(error_msg)
  soft ? false : validation_error(error_msg)
end

# Clears any errors accumulated by a previous validation run.
def reset_errors!
  @errors = []
end

# Runs the full validation chain (honoring the configured soft behavior).
# @return [Boolean] TRUE if the SAML Response is valid
def is_valid?
  validate
end
# NameID from the subject of the signed assertion, decrypting an
# EncryptedID when present.
# @return [String|nil] the NameID text provided by the IdP
def name_id
  @name_id ||= begin
    encrypted_node = xpath_first_from_signed_assertion('/a:Subject/a:EncryptedID')
    node =
      if encrypted_node
        decrypt_nameid(encrypted_node)
      else
        xpath_first_from_signed_assertion('/a:Subject/a:NameID')
      end
    node && node.text
  end
end
alias_method :nameid, :name_id
# SessionIndex attribute of the AuthnStatement, if any. Useful for building
# a later LogoutRequest that targets this specific IdP session.
# @return [String|nil]
def sessionindex
  @sessionindex ||= begin
    node = xpath_first_from_signed_assertion('/a:AuthnStatement')
    node && node.attributes['SessionIndex']
  end
end
# Gets the Attributes from the AttributeStatement element.
#
# All attributes can be iterated over +attributes.each+ or returned as array by +attributes.all+
# For backwards compatibility ruby-saml returns by default only the first value for a given attribute with
# attributes['name']
# To get all of the attributes, use:
# attributes.multi('name')
# Or turn off the compatibility:
# OneLogin::RubySaml::Attributes.single_value_compatibility = false
# Now this will return an array:
# attributes['name']
#
# @return [Attributes] OneLogin::RubySaml::Attributes enumerable collection.
#
def attributes
  @attr_statements ||= begin
    attributes = Attributes.new
    stmt_element = xpath_first_from_signed_assertion('/a:AttributeStatement')
    return attributes if stmt_element.nil?
    stmt_element.elements.each do |attr_element|
      name = attr_element.attributes["Name"]
      # Each Attribute may carry several AttributeValue children.
      values = attr_element.elements.collect{|e|
        if (e.elements.nil? || e.elements.size == 0)
          # SAMLCore requires that nil AttributeValues MUST contain xsi:nil XML attribute set to "true" or "1"
          # otherwise the value is to be regarded as empty.
          ["true", "1"].include?(e.attributes['xsi:nil']) ? nil : e.text.to_s
          # explicitly support saml2:NameID with saml2:NameQualifier if supplied in attributes
          # this is useful for allowing eduPersonTargetedId to be passed as an opaque identifier to use to
          # identify the subject in an SP rather than email or other less opaque attributes
          # NameQualifier, if present is prefixed with a "/" to the value
        else
          REXML::XPath.match(e,'a:NameID', { "a" => ASSERTION }).collect{|n|
            (n.attributes['NameQualifier'] ? n.attributes['NameQualifier'] +"/" : '') + n.text.to_s
          }
        end
      }
      attributes.add(name, values.flatten)
    end
    attributes
  end
end
# SessionNotOnOrAfter from the AuthnStatement — the latest moment the local
# session should be considered alive.
# @return [String|nil] parsed SessionNotOnOrAfter value, or nil when absent
def session_expires_at
  @expires_at ||= begin
    node = xpath_first_from_signed_assertion('/a:AuthnStatement')
    node && parse_time(node, "SessionNotOnOrAfter")
  end
end

# True when the Status element carries the SAML Success status code.
# @return [Boolean]
def success?
  status_code.eql? "urn:oasis:names:tc:SAML:2.0:status:Success"
end
# Value attribute of /Response/Status/StatusCode, memoized.
# @return [String|nil]
def status_code
  @status_code ||= begin
    node = REXML::XPath.first(
      document,
      "/p:Response/p:Status/p:StatusCode",
      { "p" => PROTOCOL, "a" => ASSERTION }
    )
    node && node.attributes && node.attributes["Value"]
  end
end

# Text of /Response/Status/StatusMessage, memoized.
# @return [String|nil]
def status_message
  @status_message ||= begin
    node = REXML::XPath.first(
      document,
      "/p:Response/p:Status/p:StatusMessage",
      { "p" => PROTOCOL, "a" => ASSERTION }
    )
    node && node.text
  end
end
# Gets the Condition Element of the SAML Response if exists.
# (returns the first node that matches the supplied xpath)
# @return [REXML::Element] Conditions Element if exists
#
def conditions
  @conditions ||= xpath_first_from_signed_assertion('/a:Conditions')
end
# Gets the NotBefore Condition Element value.
# @return [Time] The NotBefore value in Time format
#
def not_before
  @not_before ||= parse_time(conditions, "NotBefore")
end
# Gets the NotOnOrAfter Condition Element value.
# @return [Time] The NotOnOrAfter value in Time format
#
def not_on_or_after
  @not_on_or_after ||= parse_time(conditions, "NotOnOrAfter")
end
# Issuer values collected from both the Response element and the Assertion.
# @return [Array<String>] unique issuer strings
def issuers
  @issuers ||= begin
    nodes = REXML::XPath.match(
      document,
      "/p:Response/a:Issuer",
      { "p" => PROTOCOL, "a" => ASSERTION }
    )
    nodes += xpath_from_signed_assertion("/a:Issuer")
    nodes.map { |node| node.text }.compact.uniq
  end
end
# InResponseTo attribute of the root Response element, if any.
# @return [String|nil]
def in_response_to
  @in_response_to ||= begin
    root = REXML::XPath.first(
      document,
      "/p:Response",
      { "p" => PROTOCOL }
    )
    root && root.attributes['InResponseTo']
  end
end

# Destination attribute of the root Response element, if any.
# @return [String|nil]
def destination
  @destination ||= begin
    root = REXML::XPath.first(
      document,
      "/p:Response",
      { "p" => PROTOCOL }
    )
    root && root.attributes['Destination']
  end
end
# Audience values from the assertion's Conditions/AudienceRestriction.
# @return [Array<String>]
def audiences
  @audiences ||= begin
    nodes = xpath_from_signed_assertion('/a:Conditions/a:AudienceRestriction/a:Audience')
    nodes.select { |node| node && node.text }.map { |node| node.text }
  end
end

# Allowed clock drift (seconds) applied during timing validations.
# @return [Integer]
def allowed_clock_drift
  options[:allowed_clock_drift] || 0
end
private
# Validates the SAML Response (calls several validation methods)
#
# The && chain short-circuits: the first failing validator stops the rest
# (returning false in soft mode, raising otherwise), so ordering matters.
# @return [Boolean] True if the SAML Response is valid, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate
  reset_errors!
  validate_response_state &&
  validate_version &&
  validate_id &&
  validate_success_status &&
  validate_num_assertion &&
  validate_no_encrypted_attributes &&
  validate_signed_elements &&
  validate_structure &&
  validate_in_response_to &&
  validate_conditions &&
  validate_audience &&
  validate_destination &&
  validate_issuer &&
  validate_session_expiration &&
  validate_subject_confirmation &&
  validate_signature
end
# Ensures the Status element carries the Success code; otherwise records a
# detailed status error (including StatusCode/StatusMessage).
# @return [Boolean] true on success, otherwise false if soft == true
# @raise [ValidationError] if soft == false and validation fails
def validate_success_status
  if success?
    true
  else
    error_msg = 'The status code of the Response was not Success'
    append_error(OneLogin::RubySaml::Utils.status_error_msg(error_msg, status_code, status_message))
  end
end

# Schema-validates the Response document against the SAML 2.0 protocol schema.
# @return [Boolean] true when valid, otherwise false if soft == true
# @raise [ValidationError] if soft == false and validation fails
def validate_structure
  if valid_saml?(document, soft)
    true
  else
    append_error("Invalid SAML Response. Not match the saml-schema-protocol-2.0.xsd")
  end
end
# Pre-flight checks: a non-blank response, a settings object, and either an
# IdP cert fingerprint or an IdP cert must all be present.
# @return [Boolean] true when the required info is found, false otherwise
def validate_response_state
  if response.nil? || response.empty?
    return append_error("Blank response")
  end
  if settings.nil?
    return append_error("No settings on response")
  end
  if settings.idp_cert_fingerprint.nil? && settings.idp_cert.nil?
    return append_error("No fingerprint or certificate on settings")
  end
  true
end

# The Response element must carry an ID attribute.
# @return [Boolean]
def validate_id
  return true if id(document)
  append_error("Missing ID attribute on SAML Response")
end

# Only SAML version 2.0 responses are supported.
# @return [Boolean]
def validate_version
  return true if version(document) == "2.0"
  append_error("Unsupported SAML version")
end
# Exactly one Assertion — plain or encrypted — must be present.
# @return [Boolean] true when exactly one assertion exists, otherwise false if soft == true
def validate_num_assertion
  plain = REXML::XPath.match(
    document,
    "//a:Assertion",
    { "a" => ASSERTION }
  )
  encrypted = REXML::XPath.match(
    document,
    "//a:EncryptedAssertion",
    { "a" => ASSERTION }
  )
  if plain.size + encrypted.size == 1
    true
  else
    append_error("SAML Response must contain 1 assertion")
  end
end

# EncryptedAttribute elements are not supported by this SP.
# @return [Boolean] true when none are present, otherwise false if soft == true
# @raise [ValidationError] if soft == false and validation fails
def validate_no_encrypted_attributes
  nodes = xpath_from_signed_assertion("/a:AttributeStatement/a:EncryptedAttribute")
  if nodes && nodes.length > 0
    append_error("There is an EncryptedAttribute in the Response and this SP not support them")
  else
    true
  end
end
# Validates the Signed elements
# If fails, the error is added to the errors array
# @return [Boolean] True if there is 1 or 2 Elements signed in the SAML Response
# an are a Response or an Assertion Element, otherwise False if soft=True
#
def validate_signed_elements
  # Search the decrypted document when one exists, since signatures may live
  # inside the (formerly encrypted) assertion.
  signature_nodes = REXML::XPath.match(
    decrypted_document.nil? ? document : decrypted_document,
    "//ds:Signature",
    {"ds"=>DSIG}
  )
  signed_elements = []
  signature_nodes.each do |signature_node|
    # Only Response or Assertion elements may carry a signature.
    signed_element = signature_node.parent.name
    if signed_element != 'Response' && signed_element != 'Assertion'
      return append_error("Found an unexpected Signature Element. SAML Response rejected")
    end
    signed_elements << signed_element
  end
  # Accept exactly 1 or 2 signatures (fewer than 3, and at least one).
  unless signature_nodes.length < 3 && !signed_elements.empty?
    return append_error("Found an unexpected number of Signature Element. SAML Response rejected")
  end
  true
end
# When options[:matches_request_id] is supplied (and non-blank), the
# Response's InResponseTo value must equal it.
# @return [Boolean] true when absent/blank/matching, otherwise false if soft == true
# @raise [ValidationError] if soft == false and validation fails
def validate_in_response_to
  return true unless options.has_key? :matches_request_id
  return true if options[:matches_request_id].nil? || options[:matches_request_id].empty?
  return true if options[:matches_request_id] == in_response_to
  append_error("The InResponseTo of the Response: #{in_response_to}, does not match the ID of the AuthNRequest sent by the SP: #{options[:matches_request_id]}")
end
# Validates the Audience (whether any Audience element matches the SP EntityID).
# If it fails, the error is added to the errors array.
# @return [Boolean] True if an Audience Element matches the Service Provider EntityID, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_audience
  return true if audiences.empty? || settings.issuer.nil? || settings.issuer.empty?

  if audiences.include?(settings.issuer)
    true
  else
    append_error("#{settings.issuer} is not a valid audience for this Response - Valid audiences: #{audiences.join(',')}")
  end
end
# Validates the Destination (whether the SAML Response was received where expected).
# If it fails, the error is added to the errors array.
# @return [Boolean] True if the Destination matches the Assertion Consumer Service URL, otherwise False
#
def validate_destination
  acs_url = settings.assertion_consumer_service_url
  return true if destination.nil? || destination.empty? || acs_url.nil? || acs_url.empty?
  return true if destination == acs_url

  append_error("The response was received at #{destination} instead of #{acs_url}")
end
# Validates the Conditions. (If the response was initialized with the :skip_conditions option, this validation is skipped,
# If the response was initialized with the :allowed_clock_drift option, the timing validations are relaxed by the allowed_clock_drift value)
# @return [Boolean] True if satisfies the conditions, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_conditions
  return true if conditions.nil?
  return true if options[:skip_conditions]

  now = Time.now.utc

  if not_before && (now + allowed_clock_drift) < not_before
    # FIX: the message previously lacked its opening parenthesis ("condition #{...} < ...)").
    error_msg = "Current time is earlier than NotBefore condition (#{now + allowed_clock_drift} < #{not_before})"
    return append_error(error_msg)
  end

  if not_on_or_after && now >= (not_on_or_after + allowed_clock_drift)
    error_msg = "Current time is on or after NotOnOrAfter condition (#{now} >= #{not_on_or_after + allowed_clock_drift})"
    return append_error(error_msg)
  end

  true
end
# Validates the Issuer (of the SAML Response and of the SAML Assertion)
# @return [Boolean] True if every Issuer matches the IdP entityId, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_issuer
# Nothing to compare against when no IdP entity id was configured.
return true if settings.idp_entity_id.nil?
issuers.each do |issuer|
# Compared as parsed URIs so equivalent URI forms match.
# NOTE(review): URI.parse raises URI::InvalidURIError on malformed values — confirm callers expect that.
unless URI.parse(issuer) == URI.parse(settings.idp_entity_id)
error_msg = "Doesn't match the issuer, expected: <#{settings.idp_entity_id}>, but was: <#{issuer}>"
return append_error(error_msg)
end
end
true
end
# Validates that the session has not expired, relaxed by allowed_clock_drift
# when the :allowed_clock_drift option was given.
# If it fails, the error is added to the errors array.
# @param soft [Boolean] unused here; kept for interface compatibility
# @return [Boolean] True if the SessionNotOnOrAfter of the AuthnStatement is still valid, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_session_expiration(soft = true)
  return true if session_expires_at.nil?

  if (session_expires_at + allowed_clock_drift) > Time.now.utc
    true
  else
    append_error("The attributes have expired, based on the SessionNotOnOrAfter of the AttributeStatement of this Response")
  end
end
# Validates if a valid SubjectConfirmation exists (If the response was initialized with the :allowed_clock_drift option,
# timing validations are relaxed by the allowed_clock_drift value. If the response was initialized with the
# :skip_subject_confirmation option, this validation is skipped)
# If fails, the error is added to the errors array
# @return [Boolean] True if a valid SubjectConfirmation exists, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_subject_confirmation
return true if options[:skip_subject_confirmation]
valid_subject_confirmation = false
subject_confirmation_nodes = xpath_from_signed_assertion('/a:Subject/a:SubjectConfirmation')
now = Time.now.utc
# Accept the first SubjectConfirmation whose Method (when present) is "bearer"
# and whose SubjectConfirmationData passes the InResponseTo / NotOnOrAfter / NotBefore checks.
subject_confirmation_nodes.each do |subject_confirmation|
if subject_confirmation.attributes.include? "Method" and subject_confirmation.attributes['Method'] != 'urn:oasis:names:tc:SAML:2.0:cm:bearer'
next
end
confirmation_data_node = REXML::XPath.first(
subject_confirmation,
'a:SubjectConfirmationData',
{ "a" => ASSERTION }
)
# A SubjectConfirmation without SubjectConfirmationData cannot be validated.
next unless confirmation_data_node
attrs = confirmation_data_node.attributes
# Each attribute is checked only when present; time comparisons are relaxed by allowed_clock_drift.
next if (attrs.include? "InResponseTo" and attrs['InResponseTo'] != in_response_to) ||
(attrs.include? "NotOnOrAfter" and (parse_time(confirmation_data_node, "NotOnOrAfter") + allowed_clock_drift) <= now) ||
(attrs.include? "NotBefore" and parse_time(confirmation_data_node, "NotBefore") > (now + allowed_clock_drift))
valid_subject_confirmation = true
break
end
if !valid_subject_confirmation
error_msg = "A valid SubjectConfirmation was not found on this Response"
return append_error(error_msg)
end
true
end
# Validates the Signature of the SAML Response (or, when the assertion was
# encrypted and the Response itself is not signed, of the decrypted assertion).
# @return [Boolean] True if no Signature problems are found, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_signature
  error_msg = "Invalid Signature on SAML Response"

  # If the response contains the signature, and the assertion was encrypted, validate the original SAML Response
  # otherwise, review if the decrypted assertion contains a signature
  sig_elements = REXML::XPath.match(
    document,
    "/p:Response/ds:Signature",        # FIX: removed stray "]" that corrupted the XPath
    { "p" => PROTOCOL, "ds" => DSIG }  # FIX: removed trailing comma (syntax error on Ruby 1.8.7)
  )

  doc = (sig_elements.size == 1 || decrypted_document.nil?) ? document : decrypted_document

  # No signature directly under the Response: look for one on the Assertion.
  if sig_elements.nil? || sig_elements.size == 0
    sig_elements = REXML::XPath.match(
      doc,
      "/p:Response/a:Assertion/ds:Signature",
      { "p" => PROTOCOL, "a" => ASSERTION, "ds" => DSIG }
    )
  end

  # Exactly one signed element must remain to be validated.
  return append_error(error_msg) if sig_elements.size != 1

  opts = {}
  opts[:fingerprint_alg] = settings.idp_cert_fingerprint_algorithm
  opts[:cert] = settings.get_idp_cert
  fingerprint = settings.get_fingerprint

  unless fingerprint && doc.validate_document(fingerprint, @soft, opts)
    return append_error(error_msg)
  end

  true
end
# Extracts the first appearance that matches the subelt (pattern).
# Searches an Assertion that is itself signed, then an Assertion whose Response parent is signed.
# @param subelt [String] The XPath pattern, relative to the Assertion
# @return [REXML::Element | nil] The first matching Element, if any
#
def xpath_first_from_signed_assertion(subelt=nil)
doc = decrypted_document.nil? ? document : decrypted_document
# Case 1: the Assertion itself carries the signed element id.
node = REXML::XPath.first(
doc,
"/p:Response/a:Assertion[@ID=$id]#{subelt}",
{ "p" => PROTOCOL, "a" => ASSERTION },
{ 'id' => doc.signed_element_id }
)
# Case 2: the Response is the signed element; take its Assertion.
node ||= REXML::XPath.first(
doc,
"/p:Response[@ID=$id]/a:Assertion#{subelt}",
{ "p" => PROTOCOL, "a" => ASSERTION },
{ 'id' => doc.signed_element_id }
)
node
end
# Extracts all the appearances that match the subelt (pattern).
# Searches an Assertion that is itself signed, and an Assertion whose Response parent is signed.
# @param subelt [String] The XPath pattern, relative to the Assertion
# @return [Array of REXML::Element] All matches (both queries combined)
#
def xpath_from_signed_assertion(subelt=nil)
doc = decrypted_document.nil? ? document : decrypted_document
# Matches inside an Assertion that carries the signed element id.
node = REXML::XPath.match(
doc,
"/p:Response/a:Assertion[@ID=$id]#{subelt}",
{ "p" => PROTOCOL, "a" => ASSERTION },
{ 'id' => doc.signed_element_id }
)
# Plus matches inside the Assertion of a signed Response.
node.concat( REXML::XPath.match(
doc,
"/p:Response[@ID=$id]/a:Assertion#{subelt}",
{ "p" => PROTOCOL, "a" => ASSERTION },
{ 'id' => doc.signed_element_id }
))
end
# Generates the decrypted_document
# @return [XMLSecurity::SignedDocument] The SAML Response with the assertion decrypted
# @raise [ValidationError] when no settings or no SP private key is available
#
def generate_decrypted_document
if settings.nil? || !settings.get_sp_key
validation_error('An EncryptedAssertion found and no SP private key found on the settings to decrypt it. Be sure you provided the :settings parameter at the initialize method')
end
# Marshal at Ruby 1.8.7 throws an Exception, so re-parse the raw response instead
# of deep-copying the already-parsed document.
if RUBY_VERSION < "1.9"
document_copy = XMLSecurity::SignedDocument.new(response, errors)
else
document_copy = Marshal.load(Marshal.dump(document))
end
decrypt_assertion_from_document(document_copy)
end
# Obtains a SAML Response with the EncryptedAssertion element decrypted
# @param document_copy [XMLSecurity::SignedDocument] A copy of the original SAML Response with the encrypted assertion
# @return [XMLSecurity::SignedDocument] The SAML Response with the assertion decrypted
#
def decrypt_assertion_from_document(document_copy)
# NOTE(review): these XPaths have trailing slashes ("/p:Response/"), which REXML
# appears to tolerate; confirm before changing them.
response_node = REXML::XPath.first(
document_copy,
"/p:Response/",
{ "p" => PROTOCOL }
)
# The EncryptedAssertion may appear with or without the assertion namespace prefix.
encrypted_assertion_node = REXML::XPath.first(
document_copy,
"(/p:Response/EncryptedAssertion/)|(/p:Response/a:EncryptedAssertion/)",
{ "p" => PROTOCOL, "a" => ASSERTION }
)
# Replace the encrypted node with its decrypted content, then re-parse.
response_node.add(decrypt_assertion(encrypted_assertion_node))
encrypted_assertion_node.remove
XMLSecurity::SignedDocument.new(response_node.to_s)
end
# Checks whether the SAML Response contains an EncryptedAssertion element
# (with or without the assertion namespace prefix).
# @return [Boolean] True if the SAML Response contains an EncryptedAssertion element
#
def assertion_encrypted?
  encrypted_node = REXML::XPath.first(
    document,
    "(/p:Response/EncryptedAssertion/)|(/p:Response/a:EncryptedAssertion/)",
    { "p" => PROTOCOL, "a" => ASSERTION }
  )
  !encrypted_node.nil?
end
# Decrypts an EncryptedAssertion element
# @param encrypted_assertion_node [REXML::Element] The EncryptedAssertion element
# @return [REXML::Document] The decrypted Assertion element
#
def decrypt_assertion(encrypted_assertion_node)
# The regex trims decryption padding noise after the closing Assertion tag.
decrypt_element(encrypted_assertion_node, /(.*<\/(\w+:)?Assertion>)/m)
end
# Decrypts an EncryptedID element
# @param encryptedid_node [REXML::Element] The EncryptedID element
# @return [REXML::Document] The decrypted NameID element
#
def decrypt_nameid(encryptedid_node)
# The regex trims decryption padding noise after the closing NameID tag.
decrypt_element(encryptedid_node, /(.*<\/(\w+:)?NameID>)/m)
end
# Decrypts an encrypted element (EncryptedAssertion, EncryptedID, ...)
# @param encrypt_node [REXML::Element] The encrypted element
# @param rgrex [Regexp] Pattern that captures the plaintext element up to its closing tag
# @return [REXML::Element] The decrypted element (first child of the wrapper node)
# @raise [ValidationError] when no settings or no SP private key is available
#
def decrypt_element(encrypt_node, rgrex)
if settings.nil? || !settings.get_sp_key
return validation_error('An ' + encrypt_node.name + ' found and no SP private key found on the settings to decrypt it')
end
elem_plaintext = OneLogin::RubySaml::Utils.decrypt_data(encrypt_node, settings.get_sp_key)
# If we get some problematic noise in the plaintext after decrypting.
# This quick regexp parse will grab only the Element and discard the noise.
elem_plaintext = elem_plaintext.match(rgrex)[0]
# To avoid namespace errors if saml namespace is not defined at assertion_plaintext
# create a parent node first with the saml namespace defined
elem_plaintext = '<node xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion">' + elem_plaintext + '</node>'
doc = REXML::Document.new(elem_plaintext)
doc.root[0]
end
# Reads the named attribute from a node and parses it as a Time.
# @param node [REXML::Element] The node (may be nil)
# @param attribute [String] The attribute name
# @return [Time|nil] The parsed value, or nil when the node or attribute is missing
#
def parse_time(node, attribute)
  return nil unless node

  value = node.attributes[attribute]
  value ? Time.parse(value) : nil
end
end
end
end
# Fix typo that causes an error in Ruby 1.8.7 (trailing comma in method-call arguments)
require "xml_security"
require "onelogin/ruby-saml/attributes"
require "time"
require "nokogiri"
# Only supports SAML 2.0
module OneLogin
module RubySaml
# SAML2 Authentication Response. SAML Response
#
class Response < SamlMessage
# SAML 2.0 XML namespace URIs used by the XPath queries throughout this class.
ASSERTION = "urn:oasis:names:tc:SAML:2.0:assertion"
PROTOCOL = "urn:oasis:names:tc:SAML:2.0:protocol"
DSIG = "http://www.w3.org/2000/09/xmldsig#"
XENC = "http://www.w3.org/2001/04/xmlenc#"
# TODO: Settings should probably be initialized too... WDYT?
# OneLogin::RubySaml::Settings Toolkit settings
attr_accessor :settings
# Array with the causes [Array of strings]
attr_accessor :errors
# Parsed SAML Response (XMLSecurity::SignedDocument)
attr_reader :document
# Response with the EncryptedAssertion decrypted, when one was present (nil otherwise)
attr_reader :decrypted_document
# Decoded SAML Response (as returned by decode_raw_saml)
attr_reader :response
# Options hash given at construction time
attr_reader :options
# When true, validation failures return false instead of raising
attr_accessor :soft
# Constructs the SAML Response. A Response Object that is an extension of the SamlMessage class.
# @param response [String] A UUEncoded SAML response from the IdP.
# @param options [Hash] :settings to provide the OneLogin::RubySaml::Settings object
# Or some options for the response validation process like skip the conditions validation
# with the :skip_conditions, or allow a clock_drift when checking dates with :allowed_clock_drift
# or :matches_request_id that will validate that the response matches the ID of the request,
# or skip the subject confirmation validation with the :skip_subject_confirmation option
# @raise [ArgumentError] if response is nil
def initialize(response, options = {})
@errors = []
raise ArgumentError.new("Response cannot be nil") if response.nil?
@options = options
# Soft mode defaults to true; the settings object may override it below.
@soft = true
if !options.empty? && !options[:settings].nil?
@settings = options[:settings]
if !options[:settings].soft.nil?
@soft = options[:settings].soft
end
end
@response = decode_raw_saml(response)
@document = XMLSecurity::SignedDocument.new(@response, @errors)
# Eagerly decrypt the assertion so later queries can work on the plaintext document.
if assertion_encrypted?
@decrypted_document = generate_decrypted_document
end
end
# Appends the cause to the errors array and, based on the value of soft,
# returns false (soft mode) or raises a ValidationError.
# @param error_msg [String] The error cause
# @return [Boolean] always false in soft mode
# @raise [ValidationError] when soft == false
def append_error(error_msg)
@errors << error_msg
return soft ? false : validation_error(error_msg)
end
# Resets the errors array, discarding causes collected by previous validations.
def reset_errors!
@errors = []
end
# Validates the SAML Response with the default values (soft = true)
# @return [Boolean] TRUE if the SAML Response is valid
#
def is_valid?
validate
end
# @return [String] the NameID provided by the SAML response from the IdP.
#   Memoized; nil when no NameID/EncryptedID is present in the signed assertion.
#
def name_id
@name_id ||= begin
# An EncryptedID takes precedence and is decrypted with the SP private key.
encrypted_node = xpath_first_from_signed_assertion('/a:Subject/a:EncryptedID')
if encrypted_node
node = decrypt_nameid(encrypted_node)
else
node = xpath_first_from_signed_assertion('/a:Subject/a:NameID')
end
node.nil? ? nil : node.text
end
end
alias_method :nameid, :name_id
# Gets the SessionIndex from the AuthnStatement.
# Could be used to be stored in the local session in order
# to be used in a future Logout Request that the SP could
# send to the IdP, to set what specific session must be deleted
# @return [String] SessionIndex Value (nil when no AuthnStatement is present)
#
def sessionindex
@sessionindex ||= begin
node = xpath_first_from_signed_assertion('/a:AuthnStatement')
node.nil? ? nil : node.attributes['SessionIndex']
end
end
# Gets the Attributes from the AttributeStatement element.
#
# All attributes can be iterated over +attributes.each+ or returned as array by +attributes.all+
# For backwards compatibility ruby-saml returns by default only the first value for a given attribute with
# attributes['name']
# To get all of the attributes, use:
# attributes.multi('name')
# Or turn off the compatibility:
# OneLogin::RubySaml::Attributes.single_value_compatibility = false
# Now this will return an array:
# attributes['name']
#
# @return [Attributes] OneLogin::RubySaml::Attributes enumerable collection.
#
def attributes
@attr_statements ||= begin
attributes = Attributes.new
stmt_element = xpath_first_from_signed_assertion('/a:AttributeStatement')
return attributes if stmt_element.nil?
stmt_element.elements.each do |attr_element|
name = attr_element.attributes["Name"]
# Each AttributeValue child becomes one entry in the value list.
values = attr_element.elements.collect{|e|
if (e.elements.nil? || e.elements.size == 0)
# SAMLCore requires that nil AttributeValues MUST contain xsi:nil XML attribute set to "true" or "1"
# otherwise the value is to be regarded as empty.
["true", "1"].include?(e.attributes['xsi:nil']) ? nil : e.text.to_s
# explicitly support saml2:NameID with saml2:NameQualifier if supplied in attributes
# this is useful for allowing eduPersonTargetedId to be passed as an opaque identifier to use to
# identify the subject in an SP rather than email or other less opaque attributes
# NameQualifier, if present is prefixed with a "/" to the value
else
REXML::XPath.match(e,'a:NameID', { "a" => ASSERTION }).collect{|n|
(n.attributes['NameQualifier'] ? n.attributes['NameQualifier'] +"/" : '') + n.text.to_s
}
end
}
attributes.add(name, values.flatten)
end
attributes
end
end
# Gets the SessionNotOnOrAfter from the AuthnStatement.
# Could be used to set the local session expiration (expire at latest)
# @return [Time|nil] The SessionNotOnOrAfter value parsed as a Time (nil when absent)
#
def session_expires_at
@expires_at ||= begin
node = xpath_first_from_signed_assertion('/a:AuthnStatement')
node.nil? ? nil : parse_time(node, "SessionNotOnOrAfter")
end
end
# Checks whether the Status carries the "Success" status code.
# @return [Boolean] True if the StatusCode is Success
#
def success?
  success_code = "urn:oasis:names:tc:SAML:2.0:status:Success"
  status_code == success_code
end
# @return [String] StatusCode value from a SAML Response (nil when absent). Memoized.
#
def status_code
@status_code ||= begin
# NOTE(review): the "a" namespace mapping is unused by this XPath.
node = REXML::XPath.first(
document,
"/p:Response/p:Status/p:StatusCode",
{ "p" => PROTOCOL, "a" => ASSERTION }
)
node.attributes["Value"] if node && node.attributes
end
end
# @return [String] the StatusMessage value from a SAML Response (nil when absent). Memoized.
#
def status_message
@status_message ||= begin
# NOTE(review): the "a" namespace mapping is unused by this XPath.
node = REXML::XPath.first(
document,
"/p:Response/p:Status/p:StatusMessage",
{ "p" => PROTOCOL, "a" => ASSERTION }
)
node.text if node
end
end
# Gets the Conditions Element of the SAML Response if it exists.
# (returns the first node that matches the supplied xpath). Memoized.
# @return [REXML::Element|nil] Conditions Element if it exists
#
def conditions
@conditions ||= xpath_first_from_signed_assertion('/a:Conditions')
end
# Gets the NotBefore Condition Element value.
# @return [Time|nil] The NotBefore value in Time format (nil when absent). Memoized.
#
def not_before
@not_before ||= parse_time(conditions, "NotBefore")
end
# Gets the NotOnOrAfter Condition Element value.
# @return [Time|nil] The NotOnOrAfter value in Time format (nil when absent). Memoized.
#
def not_on_or_after
@not_on_or_after ||= parse_time(conditions, "NotOnOrAfter")
end
# Gets the Issuers (from Response and Assertion).
# (returns the first node that matches the supplied xpath from the Response and from the Assertion)
# @return [Array] Array with the unique Issuer values (Strings)
#
def issuers
@issuers ||= begin
issuers = []
# Issuer of the Response itself.
nodes = REXML::XPath.match(
document,
"/p:Response/a:Issuer",
{ "p" => PROTOCOL, "a" => ASSERTION }
)
# Plus the Issuer(s) of the signed Assertion.
nodes += xpath_from_signed_assertion("/a:Issuer")
nodes.each do |node|
issuers << node.text if node.text
end
issuers.uniq
end
end
# @return [String|nil] The InResponseTo attribute of the SAML Response root
#   element (nil when the Response element is missing). Memoized.
#
def in_response_to
  @in_response_to ||= begin
    root = REXML::XPath.first(
      document,
      "/p:Response",
      { "p" => PROTOCOL }
    )
    root && root.attributes['InResponseTo']
  end
end
# @return [String|nil] The Destination attribute of the SAML Response root
#   element (nil when the Response element is missing). Memoized.
#
def destination
  @destination ||= begin
    root = REXML::XPath.first(
      document,
      "/p:Response",
      { "p" => PROTOCOL }
    )
    root && root.attributes['Destination']
  end
end
# @return [Array] The Audience values (Strings) found under the Conditions of
#   the signed assertion; nodes with no text are skipped. Memoized.
#
def audiences
  @audiences ||= begin
    nodes = xpath_from_signed_assertion('/a:Conditions/a:AudienceRestriction/a:Audience')
    nodes.map { |node| node && node.text }.compact
  end
end
# Returns the allowed clock drift (seconds) used to relax timing validations.
# @return [Integer] the :allowed_clock_drift option, or 0 when not set
def allowed_clock_drift
  drift = options[:allowed_clock_drift]
  drift ? drift : 0
end
private
# Validates the SAML Response (calls several validation methods)
# The && chain short-circuits on the first failing validation, so in soft mode
# errors contains at most the first failure.
# @return [Boolean] True if the SAML Response is valid, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate
reset_errors!
validate_response_state &&
validate_version &&
validate_id &&
validate_success_status &&
validate_num_assertion &&
validate_no_encrypted_attributes &&
validate_signed_elements &&
validate_structure &&
validate_in_response_to &&
validate_conditions &&
validate_audience &&
validate_destination &&
validate_issuer &&
validate_session_expiration &&
validate_subject_confirmation &&
validate_signature
end
# Validates the Status of the SAML Response.
# @return [Boolean] True if the SAML Response contains a Success code, otherwise False if soft == true
# @raise [ValidationError] if soft == false and validation fails
#
def validate_success_status
  return true if success?

  error_msg = 'The status code of the Response was not Success'
  append_error(OneLogin::RubySaml::Utils.status_error_msg(error_msg, status_code, status_message))
end
# Validates the SAML Response against the saml-schema-protocol-2.0.xsd schema.
# @return [Boolean] True if the XML is valid, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_structure
  if valid_saml?(document, soft)
    true
  else
    append_error("Invalid SAML Response. Not match the saml-schema-protocol-2.0.xsd")
  end
end
# Validates that the SAML Response provided in the initialization is not empty,
# and that the settings and an IdP fingerprint or certificate are present.
# @return [Boolean] True if the required info is found, false otherwise
#
def validate_response_state
  return append_error("Blank response") if response.nil? || response.empty?
  return append_error("No settings on response") if settings.nil?

  if settings.idp_cert_fingerprint.nil? && settings.idp_cert.nil?
    append_error("No fingerprint or certificate on settings")
  else
    true
  end
end
# Validates that the SAML Response carries an ID attribute.
# If it fails, the error is added to the errors array.
# @return [Boolean] True if the SAML Response contains an ID, otherwise False
#
def validate_id
  id(document) ? true : append_error("Missing ID attribute on SAML Response")
end
# Validates the SAML version (only 2.0 is supported).
# If it fails, the error is added to the errors array.
# @return [Boolean] True if the SAML Response is version 2.0, otherwise False
#
def validate_version
  return true if version(document) == "2.0"

  append_error("Unsupported SAML version")
end
# Validates that the SAML Response contains exactly one Assertion
# (plain or encrypted). If it fails, the error is added to the errors array.
# @return [Boolean] True if the SAML Response contains one unique Assertion, otherwise False
#
def validate_num_assertion
  ns = { "a" => ASSERTION }
  plain = REXML::XPath.match(document, "//a:Assertion", ns)
  encrypted = REXML::XPath.match(document, "//a:EncryptedAssertion", ns)

  if plain.size + encrypted.size == 1
    true
  else
    append_error("SAML Response must contain 1 assertion")
  end
end
# Ensures the Response carries no EncryptedAttribute elements (not supported by this SP).
# If one is found, the error is added to the errors array.
# @return [Boolean] True if there are no EncryptedAttribute elements, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_no_encrypted_attributes
  encrypted_attrs = xpath_from_signed_assertion("/a:AttributeStatement/a:EncryptedAttribute")
  return true if encrypted_attrs.nil? || encrypted_attrs.empty?

  append_error("There is an EncryptedAttribute in the Response and this SP not support them")
end
# Validates the Signed elements
# If fails, the error is added to the errors array
# @return [Boolean] True if there are 1 or 2 signed Elements in the SAML Response
# and they are a Response or an Assertion Element, otherwise False if soft=True
#
def validate_signed_elements
# Collect every ds:Signature in the (decrypted, when available) document.
signature_nodes = REXML::XPath.match(
decrypted_document.nil? ? document : decrypted_document,
"//ds:Signature",
{"ds"=>DSIG}
)
signed_elements = []
signature_nodes.each do |signature_node|
# Only a Response or an Assertion element may carry a signature.
signed_element = signature_node.parent.name
if signed_element != 'Response' && signed_element != 'Assertion'
return append_error("Found an unexpected Signature Element. SAML Response rejected")
end
signed_elements << signed_element
end
# Accept one or two signatures (Response and/or Assertion), never zero or 3+.
unless signature_nodes.length < 3 && !signed_elements.empty?
return append_error("Found an unexpected number of Signature Element. SAML Response rejected")
end
true
end
# Checks that the InResponseTo attribute of the Response matches the AuthnRequest
# ID supplied via the :matches_request_id option.
# If it fails, the error is added to the errors array.
# @return [Boolean] True if there is no request_id or it matches, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_in_response_to
  expected_id = options.has_key?(:matches_request_id) ? options[:matches_request_id] : nil
  return true if expected_id.nil? || expected_id.empty?
  return true if expected_id == in_response_to

  append_error("The InResponseTo of the Response: #{in_response_to}, does not match the ID of the AuthNRequest sent by the SP: #{expected_id}")
end
# Validates the Audience (whether any Audience element matches the SP EntityID).
# If it fails, the error is added to the errors array.
# @return [Boolean] True if an Audience Element matches the Service Provider EntityID, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_audience
  return true if audiences.empty? || settings.issuer.nil? || settings.issuer.empty?

  if audiences.include?(settings.issuer)
    true
  else
    append_error("#{settings.issuer} is not a valid audience for this Response - Valid audiences: #{audiences.join(',')}")
  end
end
# Validates the Destination (whether the SAML Response was received where expected).
# If it fails, the error is added to the errors array.
# @return [Boolean] True if the Destination matches the Assertion Consumer Service URL, otherwise False
#
def validate_destination
  acs_url = settings.assertion_consumer_service_url
  return true if destination.nil? || destination.empty? || acs_url.nil? || acs_url.empty?
  return true if destination == acs_url

  append_error("The response was received at #{destination} instead of #{acs_url}")
end
# Validates the Conditions. (If the response was initialized with the :skip_conditions option, this validation is skipped,
# If the response was initialized with the :allowed_clock_drift option, the timing validations are relaxed by the allowed_clock_drift value)
# @return [Boolean] True if satisfies the conditions, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_conditions
  return true if conditions.nil?
  return true if options[:skip_conditions]

  now = Time.now.utc

  if not_before && (now + allowed_clock_drift) < not_before
    # FIX: the message previously lacked its opening parenthesis ("condition #{...} < ...)").
    error_msg = "Current time is earlier than NotBefore condition (#{now + allowed_clock_drift} < #{not_before})"
    return append_error(error_msg)
  end

  if not_on_or_after && now >= (not_on_or_after + allowed_clock_drift)
    error_msg = "Current time is on or after NotOnOrAfter condition (#{now} >= #{not_on_or_after + allowed_clock_drift})"
    return append_error(error_msg)
  end

  true
end
# Validates the Issuer (of the SAML Response and of the SAML Assertion)
# @return [Boolean] True if every Issuer matches the IdP entityId, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_issuer
# Nothing to compare against when no IdP entity id was configured.
return true if settings.idp_entity_id.nil?
issuers.each do |issuer|
# Compared as parsed URIs so equivalent URI forms match.
# NOTE(review): URI.parse raises URI::InvalidURIError on malformed values — confirm callers expect that.
unless URI.parse(issuer) == URI.parse(settings.idp_entity_id)
error_msg = "Doesn't match the issuer, expected: <#{settings.idp_entity_id}>, but was: <#{issuer}>"
return append_error(error_msg)
end
end
true
end
# Validates that the session has not expired, relaxed by allowed_clock_drift
# when the :allowed_clock_drift option was given.
# If it fails, the error is added to the errors array.
# @param soft [Boolean] unused here; kept for interface compatibility
# @return [Boolean] True if the SessionNotOnOrAfter of the AuthnStatement is still valid, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_session_expiration(soft = true)
  return true if session_expires_at.nil?

  if (session_expires_at + allowed_clock_drift) > Time.now.utc
    true
  else
    append_error("The attributes have expired, based on the SessionNotOnOrAfter of the AttributeStatement of this Response")
  end
end
# Validates if a valid SubjectConfirmation exists (If the response was initialized with the :allowed_clock_drift option,
# timing validations are relaxed by the allowed_clock_drift value. If the response was initialized with the
# :skip_subject_confirmation option, this validation is skipped)
# If fails, the error is added to the errors array
# @return [Boolean] True if a valid SubjectConfirmation exists, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_subject_confirmation
return true if options[:skip_subject_confirmation]
valid_subject_confirmation = false
subject_confirmation_nodes = xpath_from_signed_assertion('/a:Subject/a:SubjectConfirmation')
now = Time.now.utc
# Accept the first SubjectConfirmation whose Method (when present) is "bearer"
# and whose SubjectConfirmationData passes the InResponseTo / NotOnOrAfter / NotBefore checks.
subject_confirmation_nodes.each do |subject_confirmation|
if subject_confirmation.attributes.include? "Method" and subject_confirmation.attributes['Method'] != 'urn:oasis:names:tc:SAML:2.0:cm:bearer'
next
end
confirmation_data_node = REXML::XPath.first(
subject_confirmation,
'a:SubjectConfirmationData',
{ "a" => ASSERTION }
)
# A SubjectConfirmation without SubjectConfirmationData cannot be validated.
next unless confirmation_data_node
attrs = confirmation_data_node.attributes
# Each attribute is checked only when present; time comparisons are relaxed by allowed_clock_drift.
next if (attrs.include? "InResponseTo" and attrs['InResponseTo'] != in_response_to) ||
(attrs.include? "NotOnOrAfter" and (parse_time(confirmation_data_node, "NotOnOrAfter") + allowed_clock_drift) <= now) ||
(attrs.include? "NotBefore" and parse_time(confirmation_data_node, "NotBefore") > (now + allowed_clock_drift))
valid_subject_confirmation = true
break
end
if !valid_subject_confirmation
error_msg = "A valid SubjectConfirmation was not found on this Response"
return append_error(error_msg)
end
true
end
# Validates the Signature of the SAML Response (or, when the assertion was
# encrypted and the Response itself is not signed, of the decrypted assertion).
# @return [Boolean] True if no Signature problems are found, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_signature
  error_msg = "Invalid Signature on SAML Response"

  # If the response contains the signature, and the assertion was encrypted, validate the original SAML Response
  # otherwise, review if the decrypted assertion contains a signature
  sig_elements = REXML::XPath.match(
    document,
    "/p:Response/ds:Signature",   # FIX: removed stray "]" that corrupted the XPath
    { "p" => PROTOCOL, "ds" => DSIG }
  )

  use_original = sig_elements.size == 1 || decrypted_document.nil?
  doc = use_original ? document : decrypted_document

  # No signature directly under the Response: look for one on the Assertion.
  if sig_elements.nil? || sig_elements.size == 0
    sig_elements = REXML::XPath.match(
      doc,
      "/p:Response/a:Assertion/ds:Signature",
      { "p" => PROTOCOL, "a" => ASSERTION, "ds" => DSIG }
    )
  end

  # Exactly one signed element must remain to be validated.
  return append_error(error_msg) if sig_elements.size != 1

  opts = {}
  opts[:fingerprint_alg] = settings.idp_cert_fingerprint_algorithm
  opts[:cert] = settings.get_idp_cert
  fingerprint = settings.get_fingerprint

  unless fingerprint && doc.validate_document(fingerprint, @soft, opts)
    return append_error(error_msg)
  end

  true
end
# Extracts the first appearance that matches the subelt (pattern).
# Searches an Assertion that is itself signed, then an Assertion whose Response parent is signed.
# @param subelt [String] The XPath pattern, relative to the Assertion
# @return [REXML::Element | nil] The first matching Element, if any
#
def xpath_first_from_signed_assertion(subelt=nil)
doc = decrypted_document.nil? ? document : decrypted_document
# Case 1: the Assertion itself carries the signed element id.
node = REXML::XPath.first(
doc,
"/p:Response/a:Assertion[@ID=$id]#{subelt}",
{ "p" => PROTOCOL, "a" => ASSERTION },
{ 'id' => doc.signed_element_id }
)
# Case 2: the Response is the signed element; take its Assertion.
node ||= REXML::XPath.first(
doc,
"/p:Response[@ID=$id]/a:Assertion#{subelt}",
{ "p" => PROTOCOL, "a" => ASSERTION },
{ 'id' => doc.signed_element_id }
)
node
end
# Returns every element matching the subelt pattern, searching any
# Assertion that is itself signed or whose parent Response is signed.
# @param subelt [String] The XPath pattern
# @return [Array of REXML::Element] All matches
#
def xpath_from_signed_assertion(subelt=nil)
  doc = decrypted_document.nil? ? document : decrypted_document
  signed_id = doc.signed_element_id
  namespaces = { "p" => PROTOCOL, "a" => ASSERTION }

  signed_assertion_matches = REXML::XPath.match(
    doc,
    "/p:Response/a:Assertion[@ID=$id]#{subelt}",
    namespaces,
    { 'id' => signed_id }
  )
  signed_response_matches = REXML::XPath.match(
    doc,
    "/p:Response[@ID=$id]/a:Assertion#{subelt}",
    namespaces,
    { 'id' => signed_id }
  )
  signed_assertion_matches + signed_response_matches
end
# Generates the decrypted_document
# @return [XMLSecurity::SignedDocument] The SAML Response with the assertion decrypted
#
def generate_decrypted_document
  # Without an SP private key the EncryptedAssertion cannot be decrypted.
  # NOTE(review): validation_error presumably raises or records the error — confirm upstream.
  if settings.nil? || !settings.get_sp_key
    validation_error('An EncryptedAssertion found and no SP private key found on the settings to decrypt it. Be sure you provided the :settings parameter at the initialize method')
  end

  # Work on a copy so the originally parsed document stays untouched.
  # Marshal at Ruby 1.8.7 throw an Exception
  if RUBY_VERSION < "1.9"
    # Old Rubies: re-parse the raw response instead of deep-copying via Marshal.
    document_copy = XMLSecurity::SignedDocument.new(response, errors)
  else
    document_copy = Marshal.load(Marshal.dump(document))
  end

  decrypt_assertion_from_document(document_copy)
end
# Obtains a SAML Response with the EncryptedAssertion element decrypted
# @param document_copy [XMLSecurity::SignedDocument] A copy of the original SAML Response with the encrypted assertion
# @return [XMLSecurity::SignedDocument] The SAML Response with the assertion decrypted
#
def decrypt_assertion_from_document(document_copy)
  response_node = REXML::XPath.first(
    document_copy,
    "/p:Response/",
    { "p" => PROTOCOL }
  )
  # The EncryptedAssertion may appear with or without the assertion namespace prefix.
  encrypted_assertion_node = REXML::XPath.first(
    document_copy,
    "(/p:Response/EncryptedAssertion/)|(/p:Response/a:EncryptedAssertion/)",
    { "p" => PROTOCOL, "a" => ASSERTION }
  )
  # Swap the encrypted node for its decrypted counterpart, then re-parse the result.
  response_node.add(decrypt_assertion(encrypted_assertion_node))
  encrypted_assertion_node.remove
  XMLSecurity::SignedDocument.new(response_node.to_s)
end
# Checks whether the SAML Response carries an EncryptedAssertion element
# (with or without the assertion namespace prefix).
# @return [Boolean] True when an EncryptedAssertion element is present
#
def assertion_encrypted?
  encrypted_node = REXML::XPath.first(
    document,
    "(/p:Response/EncryptedAssertion/)|(/p:Response/a:EncryptedAssertion/)",
    { "p" => PROTOCOL, "a" => ASSERTION }
  )
  !encrypted_node.nil?
end
# Decrypts an EncryptedAssertion element
# @param encrypted_assertion_node [REXML::Element] The EncryptedAssertion element
# @return [REXML::Document] The decrypted EncryptedAssertion element
#
def decrypt_assertion(encrypted_assertion_node)
  # The regexp keeps everything up to the closing (optionally prefixed) Assertion tag,
  # discarding any trailing decryption noise.
  decrypt_element(encrypted_assertion_node, /(.*<\/(\w+:)?Assertion>)/m)
end
# Decrypts an EncryptedID element
# @param encryptedid_node [REXML::Element] The EncryptedID element
# @return [REXML::Document] The decrypted EncryptedID element
#
def decrypt_nameid(encryptedid_node)
  # The regexp keeps everything up to the closing (optionally prefixed) NameID tag.
  decrypt_element(encryptedid_node, /(.*<\/(\w+:)?NameID>)/m)
end
# Decrypt an element
# @param encrypt_node [REXML::Element] The encrypted element
# @param rgrex [Regexp] Pattern used to trim decryption noise around the element
# @return [REXML::Document] The decrypted element
#
def decrypt_element(encrypt_node, rgrex)
  if settings.nil? || !settings.get_sp_key
    return validation_error('An ' + encrypt_node.name + ' found and no SP private key found on the settings to decrypt it')
  end

  elem_plaintext = OneLogin::RubySaml::Utils.decrypt_data(encrypt_node, settings.get_sp_key)
  # If we get some problematic noise in the plaintext after decrypting.
  # This quick regexp parse will grab only the Element and discard the noise.
  elem_plaintext = elem_plaintext.match(rgrex)[0]
  # To avoid namespace errors if saml namespace is not defined at assertion_plaintext
  # create a parent node first with the saml namespace defined
  elem_plaintext = '<node xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion">' + elem_plaintext + '</node>'
  doc = REXML::Document.new(elem_plaintext)
  # Return the first child of the wrapper node, i.e. the decrypted element itself.
  doc.root[0]
end
# Parses the named attribute of the given node as a Time.
# @param node [REXML::Element, nil] The node to read from
# @param attribute [String] The attribute name
# @return [Time|nil] The parsed timestamp, or nil when the node or attribute is missing
#
def parse_time(node, attribute)
  raw_value = node && node.attributes[attribute]
  Time.parse(raw_value) if raw_value
end
end
end
end
|
require "xml_security"
require "onelogin/ruby-saml/attributes"
require "time"
require "nokogiri"
# Only supports SAML 2.0
module OneLogin
module RubySaml
# SAML2 Authentication Response. SAML Response
#
class Response < SamlMessage
include ErrorHandling

# SAML 2.0 namespace and XML-Signature/Encryption URIs used by the XPath queries below.
ASSERTION = "urn:oasis:names:tc:SAML:2.0:assertion"
PROTOCOL = "urn:oasis:names:tc:SAML:2.0:protocol"
DSIG = "http://www.w3.org/2000/09/xmldsig#"
XENC = "http://www.w3.org/2001/04/xmlenc#"

# TODO: Settings should probably be initialized too... WDYT?
# OneLogin::RubySaml::Settings Toolkit settings
attr_accessor :settings

attr_reader :document            # parsed (signed) SAML Response
attr_reader :decrypted_document  # populated only when the assertion came encrypted
attr_reader :response            # raw, decoded SAML Response XML
attr_reader :options             # options supplied at construction time
attr_accessor :soft              # when true, validations collect errors instead of raising

# Response available options
# This is not a whitelist to allow people extending OneLogin::RubySaml:Response
# and pass custom options
AVAILABLE_OPTIONS = [
  :allowed_clock_drift, :check_duplicated_attributes, :matches_request_id, :settings, :skip_audience, :skip_authnstatement, :skip_conditions,
  :skip_destination, :skip_recipient_check, :skip_subject_confirmation
]
# Constructs the SAML Response. A Response Object that is an extension of the SamlMessage class.
#
# @param response [String] A UUEncoded SAML response from the IdP.
# @param options [Hash] :settings to provide the OneLogin::RubySaml::Settings object.
#   Validation behaviour can be tuned with, among others, :skip_conditions,
#   :allowed_clock_drift, :matches_request_id, :skip_subject_confirmation,
#   :skip_recipient_check and :skip_audience (see AVAILABLE_OPTIONS).
# @raise [ArgumentError] when response is nil
#
def initialize(response, options = {})
  raise ArgumentError.new("Response cannot be nil") if response.nil?

  @errors = []
  @options = options

  # Soft mode (collect errors instead of raising) defaults to true and may be
  # overridden by the provided settings.
  @soft = true
  provided_settings = options[:settings]
  unless provided_settings.nil?
    @settings = provided_settings
    @soft = provided_settings.soft unless provided_settings.soft.nil?
  end

  @response = decode_raw_saml(response)
  @document = XMLSecurity::SignedDocument.new(@response, @errors)
  @decrypted_document = generate_decrypted_document if assertion_encrypted?
end
# Validates the SAML Response with the default values (soft = true)
# @param collect_errors [Boolean] Stop validation when first error appears or keep validating. (if soft=true)
# @return [Boolean] TRUE if the SAML Response is valid
#
def is_valid?(collect_errors = false)
  validate(collect_errors)
end
# @return [String] the NameID provided by the SAML response from the IdP.
#
def name_id
  # Memoized; name_id_node resolves both plain and encrypted NameID elements.
  @name_id ||= Utils.element_text(name_id_node)
end
alias_method :nameid, :name_id
# @return [String] the NameID Format provided by the SAML response from the IdP.
#
def name_id_format
  @name_id_format ||=
    begin
      format_attribute = name_id_node && name_id_node.attribute("Format")
      format_attribute.value if format_attribute
    end
end
alias_method :nameid_format, :name_id_format
# @return [String] the NameID SPNameQualifier provided by the SAML response from the IdP.
#
def name_id_spnamequalifier
  @name_id_spnamequalifier ||=
    begin
      qualifier_attribute = name_id_node && name_id_node.attribute("SPNameQualifier")
      qualifier_attribute.value if qualifier_attribute
    end
end
# @return [String] the NameID NameQualifier provided by the SAML response from the IdP.
#
def name_id_namequalifier
  @name_id_namequalifier ||=
    begin
      qualifier_attribute = name_id_node && name_id_node.attribute("NameQualifier")
      qualifier_attribute.value if qualifier_attribute
    end
end
# Gets the SessionIndex from the AuthnStatement.
# Could be stored in the local session and later sent in a Logout Request to
# tell the IdP which specific session must be terminated.
# @return [String] SessionIndex Value
#
def sessionindex
  @sessionindex ||= begin
    authn_node = xpath_first_from_signed_assertion('/a:AuthnStatement')
    authn_node.attributes['SessionIndex'] if authn_node
  end
end
# Gets the Attributes from the AttributeStatement element.
#
# All attributes can be iterated over +attributes.each+ or returned as array by +attributes.all+
# For backwards compatibility ruby-saml returns by default only the first value for a given attribute with
#    attributes['name']
# To get all of the attributes, use:
#    attributes.multi('name')
# Or turn off the compatibility:
#    OneLogin::RubySaml::Attributes.single_value_compatibility = false
# Now this will return an array:
#    attributes['name']
#
# @return [Attributes] OneLogin::RubySaml::Attributes enumerable collection.
# @raise [ValidationError] if there are 2+ Attribute with the same Name
#
def attributes
  @attr_statements ||= begin
    attributes = Attributes.new

    stmt_elements = xpath_from_signed_assertion('/a:AttributeStatement')
    stmt_elements.each do |stmt_element|
      stmt_element.elements.each do |attr_element|
        # EncryptedAttribute elements are decrypted before reading; a dup is
        # passed so the document itself is not mutated by decryption.
        if attr_element.name == "EncryptedAttribute"
          node = decrypt_attribute(attr_element.dup)
        else
          node = attr_element
        end

        name = node.attributes["Name"]

        if options[:check_duplicated_attributes] && attributes.include?(name)
          raise ValidationError.new("Found an Attribute element with duplicated Name")
        end

        values = node.elements.collect{|e|
          if (e.elements.nil? || e.elements.size == 0)
            # SAMLCore requires that nil AttributeValues MUST contain xsi:nil XML attribute set to "true" or "1"
            # otherwise the value is to be regarded as empty.
            ["true", "1"].include?(e.attributes['xsi:nil']) ? nil : Utils.element_text(e)
          # explicitly support saml2:NameID with saml2:NameQualifier if supplied in attributes
          # this is useful for allowing eduPersonTargetedId to be passed as an opaque identifier to use to
          # identify the subject in an SP rather than email or other less opaque attributes
          # NameQualifier, if present is prefixed with a "/" to the value
          else
            REXML::XPath.match(e,'a:NameID', { "a" => ASSERTION }).collect do |n|
              base_path = n.attributes['NameQualifier'] ? "#{n.attributes['NameQualifier']}/" : ''
              "#{base_path}#{Utils.element_text(n)}"
            end
          end
        }

        attributes.add(name, values.flatten)
      end
    end
    attributes
  end
end
# Gets the SessionNotOnOrAfter from the AuthnStatement.
# Could be used to set the local session expiration (expire at latest).
# @return [String] The SessionNotOnOrAfter value
#
def session_expires_at
  @expires_at ||= begin
    authn_node = xpath_first_from_signed_assertion('/a:AuthnStatement')
    parse_time(authn_node, "SessionNotOnOrAfter") unless authn_node.nil?
  end
end
# Checks if the Status has the "Success" code
# @return [Boolean] True if the StatusCode is Success
#
def success?
  status_code == "urn:oasis:names:tc:SAML:2.0:status:Success"
end
# @return [String] StatusCode value from a SAML Response.
#
def status_code
  @status_code ||= begin
    nodes = REXML::XPath.match(
      document,
      "/p:Response/p:Status/p:StatusCode",
      { "p" => PROTOCOL }
    )
    if nodes.size == 1
      node = nodes[0]
      code = node.attributes["Value"] if node && node.attributes

      unless code == "urn:oasis:names:tc:SAML:2.0:status:Success"
        # On failure, collect any nested (second-level) StatusCode values.
        nodes = REXML::XPath.match(
          document,
          "/p:Response/p:Status/p:StatusCode/p:StatusCode",
          { "p" => PROTOCOL }
        )
        statuses = nodes.collect do |inner_node|
          inner_node.attributes["Value"]
        end

        extra_code = statuses.join(" | ")
        # FIX: `join` always returns a String, so `if extra_code` was always
        # truthy and appended a dangling " | " when no nested StatusCode existed.
        unless extra_code.empty?
          code = "#{code} | #{extra_code}"
        end
      end
      code
    end
  end
end
# @return [String] the StatusMessage value from a SAML Response.
#
def status_message
  @status_message ||= begin
    message_nodes = REXML::XPath.match(
      document,
      "/p:Response/p:Status/p:StatusMessage",
      { "p" => PROTOCOL }
    )
    Utils.element_text(message_nodes.first) if message_nodes.size == 1
  end
end
# Gets the Condition Element of the SAML Response if exists.
# (returns the first node that matches the supplied xpath)
# @return [REXML::Element] Conditions Element if exists
#
def conditions
  @conditions ||= xpath_first_from_signed_assertion('/a:Conditions')
end
# Gets the NotBefore Condition Element value.
# @return [Time|nil] The NotBefore value in Time format, nil when absent
#
def not_before
  @not_before ||= parse_time(conditions, "NotBefore")
end
# Gets the NotOnOrAfter Condition Element value.
# @return [Time|nil] The NotOnOrAfter value in Time format, nil when absent
#
def not_on_or_after
  @not_on_or_after ||= parse_time(conditions, "NotOnOrAfter")
end
# Gets the Issuers (from Response and Assertion).
# (returns the first node that matches the supplied xpath from the Response and from the Assertion)
# @return [Array] Array with the Issuers (REXML::Element)
# @raise [ValidationError] when the Response or the Assertion has zero or multiple Issuer elements
#
def issuers
  @issuers ||= begin
    issuer_response_nodes = REXML::XPath.match(
      document,
      "/p:Response/a:Issuer",
      { "p" => PROTOCOL, "a" => ASSERTION }
    )

    unless issuer_response_nodes.size == 1
      error_msg = "Issuer of the Response not found or multiple."
      raise ValidationError.new(error_msg)
    end

    issuer_assertion_nodes = xpath_from_signed_assertion("/a:Issuer")
    unless issuer_assertion_nodes.size == 1
      error_msg = "Issuer of the Assertion not found or multiple."
      raise ValidationError.new(error_msg)
    end

    # De-duplicate: Response and Assertion usually carry the same Issuer value.
    nodes = issuer_response_nodes + issuer_assertion_nodes
    nodes.map { |node| Utils.element_text(node) }.compact.uniq
  end
end
# @return [String|nil] The InResponseTo attribute from the SAML Response.
#
def in_response_to
  @in_response_to ||= begin
    root_node = REXML::XPath.first(
      document,
      "/p:Response",
      { "p" => PROTOCOL }
    )
    root_node.attributes['InResponseTo'] if root_node
  end
end
# @return [String|nil] Destination attribute from the SAML Response.
#
def destination
  @destination ||= begin
    root_node = REXML::XPath.first(
      document,
      "/p:Response",
      { "p" => PROTOCOL }
    )
    root_node.attributes['Destination'] if root_node
  end
end
# @return [Array] The Audience elements from the Conditions of the SAML Response.
#
def audiences
  @audiences ||= begin
    nodes = xpath_from_signed_assertion('/a:Conditions/a:AudienceRestriction/a:Audience')
    # NOTE(review): assumes Utils.element_text returns "" (not nil) for empty
    # Audience elements — otherwise reject(&:empty?) would raise. Confirm in Utils.
    nodes.map { |node| Utils.element_text(node) }.reject(&:empty?)
  end
end
# returns the allowed clock drift on timing validation
# @return [Float] drift in seconds; 0.0 when the option is absent (nil.to_f == 0.0)
def allowed_clock_drift
  return options[:allowed_clock_drift].to_f
end
# Checks if the SAML Response contains or not an EncryptedAssertion element
# @return [Boolean] True if the SAML Response contains an EncryptedAssertion element
#
def assertion_encrypted?
  found = REXML::XPath.first(
    document,
    "(/p:Response/EncryptedAssertion/)|(/p:Response/a:EncryptedAssertion/)",
    { "p" => PROTOCOL, "a" => ASSERTION }
  )
  found ? true : false
end
# @return [String|nil] The ID attribute of the Response element.
def response_id
  id(document)
end
# @return [String|nil] The ID attribute of the signed Assertion, when present.
def assertion_id
  @assertion_id ||= begin
    assertion_node = xpath_first_from_signed_assertion("")
    assertion_node.attributes['ID'] if assertion_node
  end
end
private

# Validates the SAML Response (calls several validation methods)
# @param collect_errors [Boolean] Stop validation when first error appears or keep validating. (if soft=true)
# @return [Boolean] True if the SAML Response is valid, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate(collect_errors = false)
  reset_errors!
  return false unless validate_response_state

  # FIX: :validate_response_state was also listed below although the guard
  # clause above already ran it, causing a redundant second execution.
  validations = [
    :validate_version,
    :validate_id,
    :validate_success_status,
    :validate_num_assertion,
    :validate_no_duplicated_attributes,
    :validate_signed_elements,
    :validate_structure,
    :validate_in_response_to,
    :validate_one_conditions,
    :validate_conditions,
    :validate_one_authnstatement,
    :validate_audience,
    :validate_destination,
    :validate_issuer,
    :validate_session_expiration,
    :validate_subject_confirmation,
    :validate_name_id,
    :validate_signature
  ]

  if collect_errors
    # Run every validation and succeed only when none appended an error.
    validations.each { |validation| send(validation) }
    @errors.empty?
  else
    # Short-circuit on the first failing validation.
    validations.all? { |validation| send(validation) }
  end
end
# Validates the Status of the SAML Response
# @return [Boolean] True if the SAML Response contains a Success code, otherwise False if soft == false
# @raise [ValidationError] if soft == false and validation fails
#
def validate_success_status
  return true if success?

  error_msg = 'The status code of the Response was not Success'
  # Enrich the message with the actual StatusCode/StatusMessage from the Response.
  status_error_msg = OneLogin::RubySaml::Utils.status_error_msg(error_msg, status_code, status_message)
  append_error(status_error_msg)
end
# Validates the SAML Response (and the decrypted copy, when present) against
# the saml-schema-protocol-2.0.xsd schema.
# @return [Boolean] True if the XML is valid, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_structure
  structure_error_msg = "Invalid SAML Response. Not match the saml-schema-protocol-2.0.xsd"

  documents_to_check = [document]
  documents_to_check << decrypted_document unless decrypted_document.nil?

  documents_to_check.each do |doc|
    return append_error(structure_error_msg) unless valid_saml?(doc, soft)
  end

  true
end
# Validates that the SAML Response provided in the initialization is not empty,
# also check that the setting and the IdP cert were also provided
# @return [Boolean] True if the required info is found, false otherwise
#
def validate_response_state
  return append_error("Blank response") if response.nil? || response.empty?

  return append_error("No settings on response") if settings.nil?

  # At least one way of verifying the IdP signature must be configured.
  if settings.idp_cert_fingerprint.nil? && settings.idp_cert.nil? && settings.idp_cert_multi.nil?
    return append_error("No fingerprint or certificate on settings")
  end

  true
end
# Validates that the SAML Response contains an ID
# If fails, the error is added to the errors array.
# @return [Boolean] True if the SAML Response contains an ID, otherwise returns False
#
def validate_id
  return true if response_id

  append_error("Missing ID attribute on SAML Response")
end
# Validates the SAML version (2.0)
# If fails, the error is added to the errors array.
# @return [Boolean] True if the SAML Response is 2.0, otherwise returns False
#
def validate_version
  return true if version(document) == "2.0"

  append_error("Unsupported SAML version")
end
# Validates that the SAML Response only contains a single Assertion (encrypted or not).
# If fails, the error is added to the errors array.
# @return [Boolean] True if the SAML Response contains one unique Assertion, otherwise False
#
def validate_num_assertion
  error_msg = "SAML Response must contain 1 assertion"
  assertions = REXML::XPath.match(
    document,
    "//a:Assertion",
    { "a" => ASSERTION }
  )
  encrypted_assertions = REXML::XPath.match(
    document,
    "//a:EncryptedAssertion",
    { "a" => ASSERTION }
  )

  # Exactly one assertion overall: either plain or encrypted.
  unless assertions.size + encrypted_assertions.size == 1
    return append_error(error_msg)
  end

  # After decryption there must still be exactly one Assertion.
  unless decrypted_document.nil?
    assertions = REXML::XPath.match(
      decrypted_document,
      "//a:Assertion",
      { "a" => ASSERTION }
    )
    unless assertions.size == 1
      return append_error(error_msg)
    end
  end

  true
end
# Validates that there are no duplicated Attribute elements.
# If fails, the error is added to the errors array.
# @return [Boolean] True if there are no duplicated attribute elements, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_no_duplicated_attributes
  return true unless options[:check_duplicated_attributes]

  begin
    # `attributes` raises ValidationError on a duplicated Name when the
    # :check_duplicated_attributes option is enabled.
    attributes
    true
  rescue ValidationError => e
    append_error(e.message)
  end
end
# Validates the Signed elements
# If fails, the error is added to the errors array
# @return [Boolean] True if there is 1 or 2 Elements signed in the SAML Response
#                   an are a Response or an Assertion Element, otherwise False if soft=True
#
def validate_signed_elements
  signature_nodes = REXML::XPath.match(
    decrypted_document.nil? ? document : decrypted_document,
    "//ds:Signature",
    {"ds"=>DSIG}
  )
  signed_elements = []
  verified_seis = []
  verified_ids = []
  signature_nodes.each do |signature_node|
    # Only Response and Assertion elements may carry a signature.
    signed_element = signature_node.parent.name
    if signed_element != 'Response' && signed_element != 'Assertion'
      return append_error("Invalid Signature Element '#{signed_element}'. SAML Response rejected")
    end

    if signature_node.parent.attributes['ID'].nil?
      return append_error("Signed Element must contain an ID. SAML Response rejected")
    end

    # Reject ID reuse across signed elements (signature-wrapping defense).
    id = signature_node.parent.attributes.get_attribute("ID").value
    if verified_ids.include?(id)
      return append_error("Duplicated ID. SAML Response rejected")
    end
    verified_ids.push(id)

    # Check that reference URI matches the parent ID and no duplicate References or IDs
    ref = REXML::XPath.first(signature_node, ".//ds:Reference", {"ds"=>DSIG})
    if ref
      uri = ref.attributes.get_attribute("URI")
      if uri && !uri.value.empty?
        # Strip the leading '#' of the fragment reference.
        sei = uri.value[1..-1]

        unless sei == id
          return append_error("Found an invalid Signed Element. SAML Response rejected")
        end

        if verified_seis.include?(sei)
          return append_error("Duplicated Reference URI. SAML Response rejected")
        end

        verified_seis.push(sei)
      end
    end

    signed_elements << signed_element
  end

  # At most 2 signatures (Response + Assertion) and at least one signed element.
  unless signature_nodes.length < 3 && !signed_elements.empty?
    return append_error("Found an unexpected number of Signature Element. SAML Response rejected")
  end

  if settings.security[:want_assertions_signed] && !(signed_elements.include? "Assertion")
    return append_error("The Assertion of the Response is not signed and the SP requires it")
  end

  true
end
# Validates that the provided request_id matches the InResponseTo value.
# If fails, the error is added to the errors array.
# @return [Boolean] True if there is no request_id or it matches, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_in_response_to
  expected = options[:matches_request_id] if options.has_key?(:matches_request_id)
  return true if expected.nil?
  return true if expected == in_response_to

  append_error("The InResponseTo of the Response: #{in_response_to}, does not match the ID of the AuthNRequest sent by the SP: #{expected}")
end
# Validates the Audience (whether any Audience matches the Service Provider EntityID).
# Skipped when the response was initialized with the :skip_audience option.
# If fails, the error is added to the errors array.
# @return [Boolean] True if an Audience Element matches the Service Provider EntityID, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_audience
  return true if options[:skip_audience]
  return true if audiences.empty? || settings.sp_entity_id.nil? || settings.sp_entity_id.empty?
  return true if audiences.include?(settings.sp_entity_id)

  s = audiences.count > 1 ? 's' : ''
  append_error("Invalid Audience#{s}. The audience#{s} #{audiences.join(',')}, did not match the expected audience #{settings.sp_entity_id}")
end
# Validates the Destination, (If the SAML Response is received where expected).
# If the response was initialized with the :skip_destination option, this validation is skipped,
# If fails, the error is added to the errors array
# @return [Boolean] True if there is a Destination element that matches the Consumer Service URL, otherwise False
#
def validate_destination
  # An absent Destination attribute is acceptable.
  return true if destination.nil?
  return true if options[:skip_destination]

  if destination.empty?
    error_msg = "The response has an empty Destination value"
    return append_error(error_msg)
  end

  # Without a configured ACS URL there is nothing to compare against.
  return true if settings.assertion_consumer_service_url.nil? || settings.assertion_consumer_service_url.empty?

  unless OneLogin::RubySaml::Utils.uri_match?(destination, settings.assertion_consumer_service_url)
    error_msg = "The response was received at #{destination} instead of #{settings.assertion_consumer_service_url}"
    return append_error(error_msg)
  end

  true
end
# Checks that the samlp:Response/saml:Assertion/saml:Conditions element exists and is unique.
# (If the response was initialized with the :skip_conditions option, this validation is skipped)
# If fails, the error is added to the errors array.
# @return [Boolean] True if there is one unique Conditions element
#
def validate_one_conditions
  return true if options[:skip_conditions]
  return true if xpath_from_signed_assertion('/a:Conditions').size == 1

  append_error("The Assertion must include one Conditions element")
end
# Checks that the samlp:Response/saml:Assertion/saml:AuthnStatement element exists and is unique.
# If fails, the error is added to the errors array.
# @return [Boolean] True if there is one unique AuthnStatement element
#
def validate_one_authnstatement
  return true if options[:skip_authnstatement]
  return true if xpath_from_signed_assertion('/a:AuthnStatement').size == 1

  append_error("The Assertion must include one AuthnStatement element")
end
# Validates the Conditions. (If the response was initialized with the :skip_conditions option, this validation is skipped,
# If the response was initialized with the :allowed_clock_drift option, the timing validations are relaxed by the allowed_clock_drift value)
# @return [Boolean] True if satisfies the conditions, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_conditions
  return true if conditions.nil?
  return true if options[:skip_conditions]

  now = Time.now.utc

  # NotBefore: the response must not be used before this instant (drift-relaxed).
  if not_before && (now_with_drift = now + allowed_clock_drift) < not_before
    error_msg = "Current time is earlier than NotBefore condition (#{now_with_drift} < #{not_before})"
    return append_error(error_msg)
  end

  # NotOnOrAfter: the response expires at this instant (drift-relaxed).
  if not_on_or_after && now >= (not_on_or_after_with_drift = not_on_or_after + allowed_clock_drift)
    error_msg = "Current time is on or after NotOnOrAfter condition (#{now} >= #{not_on_or_after_with_drift})"
    return append_error(error_msg)
  end

  true
end
# Validates the Issuer (Of the SAML Response and the SAML Assertion)
# Skipped when the settings do not declare an idp_entity_id.
# @return [Boolean] True if the Issuer matchs the IdP entityId, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_issuer
  return true if settings.idp_entity_id.nil?

  begin
    # `issuers` raises ValidationError when Response/Assertion Issuer is missing or multiple.
    obtained_issuers = issuers
  rescue ValidationError => e
    return append_error(e.message)
  end

  obtained_issuers.each do |issuer|
    unless OneLogin::RubySaml::Utils.uri_match?(issuer, settings.idp_entity_id)
      error_msg = "Doesn't match the issuer, expected: <#{settings.idp_entity_id}>, but was: <#{issuer}>"
      return append_error(error_msg)
    end
  end

  true
end
# Validates that the Session hasn't expired (If the response was initialized with the :allowed_clock_drift option,
# this time validation is relaxed by the allowed_clock_drift value)
# If fails, the error is added to the errors array
# @param soft [Boolean] unused; kept only for backward compatibility of the signature
# @return [Boolean] True if the SessionNotOnOrAfter of the AuthnStatement is valid, otherwise (when expired) False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_session_expiration(soft = true)
  return true if session_expires_at.nil?

  now = Time.now.utc
  unless (session_expires_at + allowed_clock_drift) > now
    error_msg = "The attributes have expired, based on the SessionNotOnOrAfter of the AuthnStatement of this Response"
    return append_error(error_msg)
  end

  true
end
# Validates if exists valid SubjectConfirmation (If the response was initialized with the :allowed_clock_drift option,
# timing validations are relaxed by the allowed_clock_drift value. If the response was initialized with the
# :skip_subject_confirmation option, this validation is skipped)
# There is also an optional Recipient check
# If fails, the error is added to the errors array
# @return [Boolean] True if exists a valid SubjectConfirmation, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_subject_confirmation
  return true if options[:skip_subject_confirmation]
  valid_subject_confirmation = false

  subject_confirmation_nodes = xpath_from_signed_assertion('/a:Subject/a:SubjectConfirmation')

  now = Time.now.utc
  subject_confirmation_nodes.each do |subject_confirmation|
    # Only the bearer confirmation method is accepted.
    if subject_confirmation.attributes.include? "Method" and subject_confirmation.attributes['Method'] != 'urn:oasis:names:tc:SAML:2.0:cm:bearer'
      next
    end

    confirmation_data_node = REXML::XPath.first(
      subject_confirmation,
      'a:SubjectConfirmationData',
      { "a" => ASSERTION }
    )

    next unless confirmation_data_node

    attrs = confirmation_data_node.attributes
    # Reject this candidate when any present constraint fails:
    # InResponseTo mismatch, expired NotOnOrAfter, future NotBefore
    # (both drift-relaxed), or Recipient != configured ACS URL.
    next if (attrs.include? "InResponseTo" and attrs['InResponseTo'] != in_response_to) ||
            (attrs.include? "NotOnOrAfter" and (parse_time(confirmation_data_node, "NotOnOrAfter") + allowed_clock_drift) <= now) ||
            (attrs.include? "NotBefore" and parse_time(confirmation_data_node, "NotBefore") > (now + allowed_clock_drift)) ||
            (attrs.include? "Recipient" and !options[:skip_recipient_check] and settings and attrs['Recipient'] != settings.assertion_consumer_service_url)

    valid_subject_confirmation = true
    break
  end

  if !valid_subject_confirmation
    error_msg = "A valid SubjectConfirmation was not found on this Response"
    return append_error(error_msg)
  end

  true
end
# Validates the NameID element
# @return [Boolean] True when the NameID satisfies the SP requirements, otherwise False if soft=True
#
def validate_name_id
  if name_id_node.nil?
    # A missing NameID is only an error when the SP demands one.
    if settings.security[:want_name_id]
      return append_error("No NameID element found in the assertion of the Response")
    end
  else
    if name_id.nil? || name_id.empty?
      return append_error("An empty NameID value found")
    end

    # When both sides declare an SP entity id, the SPNameQualifier must agree with it.
    unless settings.sp_entity_id.nil? || settings.sp_entity_id.empty? || name_id_spnamequalifier.nil? || name_id_spnamequalifier.empty?
      if name_id_spnamequalifier != settings.sp_entity_id
        return append_error("The SPNameQualifier value mistmatch the SP entityID value.")
      end
    end
  end

  true
end
# Validates the Signature
# @return [Boolean] True if not contains a Signature or if the Signature is valid, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_signature
  error_msg = "Invalid Signature on SAML Response"

  # If the response contains the signature, and the assertion was encrypted, validate the original SAML Response
  # otherwise, review if the decrypted assertion contains a signature
  sig_elements = REXML::XPath.match(
    document,
    "/p:Response[@ID=$id]/ds:Signature",
    { "p" => PROTOCOL, "ds" => DSIG },
    { 'id' => document.signed_element_id }
  )
  # FIX: the XPath previously ended with a stray ']' ("/p:Response[@ID=$id]/ds:Signature]"),
  # which is not a valid expression, so the Response-level signature was never matched.

  use_original = sig_elements.size == 1 || decrypted_document.nil?
  doc = use_original ? document : decrypted_document

  # Check signature nodes
  if sig_elements.nil? || sig_elements.size == 0
    sig_elements = REXML::XPath.match(
      doc,
      "/p:Response/a:Assertion[@ID=$id]/ds:Signature",
      {"p" => PROTOCOL, "a" => ASSERTION, "ds"=>DSIG},
      { 'id' => doc.signed_element_id }
    )
  end

  if sig_elements.size != 1
    if sig_elements.size == 0
      append_error("Signed element id ##{doc.signed_element_id} is not found")
    else
      append_error("Signed element id ##{doc.signed_element_id} is found more than once")
    end
    return append_error(error_msg)
  end

  old_errors = @errors.clone

  idp_certs = settings.get_idp_cert_multi
  if idp_certs.nil? || idp_certs[:signing].empty?
    # Single-certificate path: validate against the configured cert/fingerprint.
    opts = {}
    opts[:fingerprint_alg] = settings.idp_cert_fingerprint_algorithm
    idp_cert = settings.get_idp_cert
    fingerprint = settings.get_fingerprint
    opts[:cert] = idp_cert

    if fingerprint && doc.validate_document(fingerprint, @soft, opts)
      if settings.security[:check_idp_cert_expiration]
        if OneLogin::RubySaml::Utils.is_cert_expired(idp_cert)
          error_msg = "IdP x509 certificate expired"
          return append_error(error_msg)
        end
      end
    else
      return append_error(error_msg)
    end
  else
    # Multi-certificate path: accept the document if any signing cert validates it.
    valid = false
    expired = false
    idp_certs[:signing].each do |idp_cert|
      valid = doc.validate_document_with_cert(idp_cert, true)
      if valid
        if settings.security[:check_idp_cert_expiration]
          if OneLogin::RubySaml::Utils.is_cert_expired(idp_cert)
            expired = true
          end
        end

        # At least one certificate is valid, restore the old accumulated errors
        @errors = old_errors
        break
      end
    end
    if expired
      error_msg = "IdP x509 certificate expired"
      return append_error(error_msg)
    end
    unless valid
      # Remove duplicated errors
      @errors = @errors.uniq
      return append_error(error_msg)
    end
  end

  true
end
# Locates (and memoizes) the NameID element of the signed assertion,
# decrypting it first when it arrives as an EncryptedID.
# @return [REXML::Element|nil] The NameID element, or nil when absent
def name_id_node
  @name_id_node ||=
    begin
      encrypted_id = xpath_first_from_signed_assertion('/a:Subject/a:EncryptedID')
      if encrypted_id
        decrypt_nameid(encrypted_id)
      else
        xpath_first_from_signed_assertion('/a:Subject/a:NameID')
      end
    end
end
# Extracts the first element matching the subelt XPath fragment, searched
# inside the signed Assertion (either a directly-signed Assertion, or an
# Assertion whose parent Response is signed).
# @param subelt [String] The XPath pattern appended to the Assertion path
# @return [REXML::Element | nil] If any matches, return the Element
#
def xpath_first_from_signed_assertion(subelt=nil)
  doc = decrypted_document.nil? ? document : decrypted_document
  namespaces = { "p" => PROTOCOL, "a" => ASSERTION }
  bindings = { 'id' => doc.signed_element_id }
  # Signed Assertion first; fall back to an Assertion inside a signed Response
  found = REXML::XPath.first(doc, "/p:Response/a:Assertion[@ID=$id]#{subelt}", namespaces, bindings)
  found || REXML::XPath.first(doc, "/p:Response[@ID=$id]/a:Assertion#{subelt}", namespaces, bindings)
end
# Extracts every element matching the subelt XPath fragment, searched
# inside the signed Assertion (either a directly-signed Assertion, or an
# Assertion whose parent Response is signed).
# @param subelt [String] The XPath pattern appended to the Assertion path
# @return [Array of REXML::Element] Return all matches
#
def xpath_from_signed_assertion(subelt=nil)
  doc = decrypted_document.nil? ? document : decrypted_document
  namespaces = { "p" => PROTOCOL, "a" => ASSERTION }
  bindings = { 'id' => doc.signed_element_id }
  matches = REXML::XPath.match(doc, "/p:Response/a:Assertion[@ID=$id]#{subelt}", namespaces, bindings)
  # Array#concat returns the receiver, so both result sets are combined
  matches.concat(REXML::XPath.match(doc, "/p:Response[@ID=$id]/a:Assertion#{subelt}", namespaces, bindings))
end
# Generates the decrypted_document
# @return [XMLSecurity::SignedDocument] The SAML Response with the assertion decrypted
# @raise [ValidationError] if no SP private key is configured to decrypt with
#
def generate_decrypted_document
  if settings.nil? || !settings.get_sp_key
    raise ValidationError.new('An EncryptedAssertion found and no SP private key found on the settings to decrypt it. Be sure you provided the :settings parameter at the initialize method')
  end
  # Marshal at Ruby 1.8.7 throw an Exception
  if RUBY_VERSION < "1.9"
    # Re-parse the raw response instead of deep-copying the REXML tree
    document_copy = XMLSecurity::SignedDocument.new(response, errors)
  else
    # Deep copy via Marshal so decryption does not mutate the original document
    document_copy = Marshal.load(Marshal.dump(document))
  end
  decrypt_assertion_from_document(document_copy)
end
# Obtains a SAML Response with the EncryptedAssertion element decrypted
# @param document_copy [XMLSecurity::SignedDocument] A copy of the original SAML Response with the encrypted assertion
# @return [XMLSecurity::SignedDocument] The SAML Response with the assertion decrypted
#
def decrypt_assertion_from_document(document_copy)
  response_node = REXML::XPath.first(
    document_copy,
    "/p:Response/",
    { "p" => PROTOCOL }
  )
  # Match EncryptedAssertion whether or not it carries the assertion ns prefix.
  # NOTE(review): the trailing "/" in these XPaths is tolerated by older REXML
  # versions -- confirm against the REXML release in use before changing.
  encrypted_assertion_node = REXML::XPath.first(
    document_copy,
    "(/p:Response/EncryptedAssertion/)|(/p:Response/a:EncryptedAssertion/)",
    { "p" => PROTOCOL, "a" => ASSERTION }
  )
  # Append the decrypted Assertion, drop the encrypted node, then re-parse
  # the serialized result into a fresh SignedDocument
  response_node.add(decrypt_assertion(encrypted_assertion_node))
  encrypted_assertion_node.remove
  XMLSecurity::SignedDocument.new(response_node.to_s)
end
# Decrypts an EncryptedAssertion element
# @param encrypted_assertion_node [REXML::Element] The EncryptedAssertion element
# @return [REXML::Element] The decrypted Assertion element
#
def decrypt_assertion(encrypted_assertion_node)
  decrypt_element(encrypted_assertion_node, /(.*<\/(\w+:)?Assertion>)/m)
end
# Decrypts an EncryptedID element
# @param encryptedid_node [REXML::Element] The EncryptedID element
# @return [REXML::Element] The decrypted NameID element
#
def decrypt_nameid(encryptedid_node)
  decrypt_element(encryptedid_node, /(.*<\/(\w+:)?NameID>)/m)
end
# Decrypts an EncryptedAttribute element
# @param encryptedattribute_node [REXML::Element] The EncryptedAttribute element
# @return [REXML::Element] The decrypted Attribute element
#
def decrypt_attribute(encryptedattribute_node)
  decrypt_element(encryptedattribute_node, /(.*<\/(\w+:)?Attribute>)/m)
end
# Decrypt an element
# @param encrypt_node [REXML::Element] The encrypted element
# @param rgrex [Regexp] Regex that captures the decrypted element and discards trailing noise
# @return [REXML::Element] The decrypted element (first child of the wrapper node)
# @raise [ValidationError] if no SP private key is configured to decrypt with
#
def decrypt_element(encrypt_node, rgrex)
  if settings.nil? || !settings.get_sp_key
    raise ValidationError.new('An ' + encrypt_node.name + ' found and no SP private key found on the settings to decrypt it')
  end
  # EncryptedAttribute payloads may reference xsi: attributes, so the xsi
  # namespace must also be declared on the wrapper node
  if encrypt_node.name == 'EncryptedAttribute'
    node_header = '<node xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">'
  else
    node_header = '<node xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion">'
  end
  elem_plaintext = OneLogin::RubySaml::Utils.decrypt_data(encrypt_node, settings.get_sp_key)
  # If we get some problematic noise in the plaintext after decrypting.
  # This quick regexp parse will grab only the Element and discard the noise.
  elem_plaintext = elem_plaintext.match(rgrex)[0]
  # To avoid namespace errors if saml namespace is not defined
  # create a parent node first with the namespace defined
  elem_plaintext = node_header + elem_plaintext + '</node>'
  doc = REXML::Document.new(elem_plaintext)
  doc.root[0]
end
# Parse the attribute of a given node in Time format
# @param node [REXML::Element] The node (may be nil)
# @param attribute [String] The attribute name
# @return [Time|nil] The parsed value, or nil when node/attribute is missing
#
def parse_time(node, attribute)
  raw = node && node.attributes[attribute]
  Time.parse(raw) if raw
end
end
end
end
See issue #577: fix an XPath typo (stray "]") that is incompatible with REXML 3.2.5
require "xml_security"
require "onelogin/ruby-saml/attributes"
require "time"
require "nokogiri"
# Only supports SAML 2.0
module OneLogin
module RubySaml
# SAML2 Authentication Response. SAML Response
#
class Response < SamlMessage
include ErrorHandling
# XML namespace URIs used when matching nodes in the SAML Response
ASSERTION = "urn:oasis:names:tc:SAML:2.0:assertion"
PROTOCOL = "urn:oasis:names:tc:SAML:2.0:protocol"
DSIG = "http://www.w3.org/2000/09/xmldsig#"
XENC = "http://www.w3.org/2001/04/xmlenc#"
# TODO: Settings should probably be initialized too... WDYT?
# OneLogin::RubySaml::Settings Toolkit settings
attr_accessor :settings
# Parsed SAML Response document (XMLSecurity::SignedDocument)
attr_reader :document
# Copy of the document with the assertion decrypted (nil when not encrypted)
attr_reader :decrypted_document
# Raw decoded SAML Response string
attr_reader :response
# Options hash supplied to the constructor
attr_reader :options
# When true, validation failures are recorded instead of raised
attr_accessor :soft
# Response available options
# This is not a whitelist to allow people extending OneLogin::RubySaml:Response
# and pass custom options
AVAILABLE_OPTIONS = [
  :allowed_clock_drift, :check_duplicated_attributes, :matches_request_id, :settings, :skip_audience, :skip_authnstatement, :skip_conditions,
  :skip_destination, :skip_recipient_check, :skip_subject_confirmation
]
# TODO: Update the comment on initialize to describe every option
# Constructs the SAML Response. A Response Object that is an extension of the SamlMessage class.
# @param response [String] A UUEncoded SAML response from the IdP.
# @param options [Hash] :settings to provide the OneLogin::RubySaml::Settings object
#   Or some options for the response validation process like skip the conditions validation
#   with the :skip_conditions, or allow a clock_drift when checking dates with :allowed_clock_drift
#   or :matches_request_id that will validate that the response matches the ID of the request,
#   or skip the subject confirmation validation with the :skip_subject_confirmation option
#   or skip the recipient validation of the subject confirmation element with :skip_recipient_check option
#   or skip the audience validation with :skip_audience option
# @raise [ArgumentError] if response is nil
#
def initialize(response, options = {})
  raise ArgumentError.new("Response cannot be nil") if response.nil?
  @errors = []
  @options = options
  # Default to soft mode; the settings object may override it below
  @soft = true
  unless options[:settings].nil?
    @settings = options[:settings]
    unless @settings.soft.nil?
      @soft = @settings.soft
    end
  end
  # decode_raw_saml is inherited from SamlMessage (decodes the raw payload)
  @response = decode_raw_saml(response)
  @document = XMLSecurity::SignedDocument.new(@response, @errors)
  # Eagerly decrypt when the Response carries an EncryptedAssertion
  if assertion_encrypted?
    @decrypted_document = generate_decrypted_document
  end
end
# Validates the SAML Response with the default values (soft = true)
# @param collect_errors [Boolean] Stop validation when first error appears or keep validating. (if soft=true)
# @return [Boolean] TRUE if the SAML Response is valid
#
def is_valid?(collect_errors = false)
  validate(collect_errors)
end
# @return [String] the NameID provided by the SAML response from the IdP.
#   Memoized; nil-safe when the assertion carries no NameID.
#
def name_id
  @name_id ||= Utils.element_text(name_id_node)
end
alias_method :nameid, :name_id
# @return [String] the NameID Format provided by the SAML response from the IdP.
#
def name_id_format
  @name_id_format ||=
    if name_id_node && name_id_node.attribute("Format")
      name_id_node.attribute("Format").value
    end
end
alias_method :nameid_format, :name_id_format
# @return [String] the NameID SPNameQualifier provided by the SAML response from the IdP.
#
def name_id_spnamequalifier
  @name_id_spnamequalifier ||=
    if name_id_node && name_id_node.attribute("SPNameQualifier")
      name_id_node.attribute("SPNameQualifier").value
    end
end
# @return [String] the NameID NameQualifier provided by the SAML response from the IdP.
#
def name_id_namequalifier
  @name_id_namequalifier ||=
    if name_id_node && name_id_node.attribute("NameQualifier")
      name_id_node.attribute("NameQualifier").value
    end
end
# Gets the SessionIndex from the AuthnStatement.
# Could be used to be stored in the local session in order
# to be used in a future Logout Request that the SP could
# send to the IdP, to set what specific session must be deleted
# @return [String] SessionIndex Value
#
def sessionindex
  @sessionindex ||= begin
    node = xpath_first_from_signed_assertion('/a:AuthnStatement')
    node.nil? ? nil : node.attributes['SessionIndex']
  end
end
# Gets the Attributes from the AttributeStatement element.
#
# All attributes can be iterated over +attributes.each+ or returned as array by +attributes.all+
# For backwards compatibility ruby-saml returns by default only the first value for a given attribute with
#    attributes['name']
# To get all of the attributes, use:
#    attributes.multi('name')
# Or turn off the compatibility:
#    OneLogin::RubySaml::Attributes.single_value_compatibility = false
# Now this will return an array:
#    attributes['name']
#
# @return [Attributes] OneLogin::RubySaml::Attributes enumerable collection.
# @raise [ValidationError] if there are 2+ Attribute with the same Name
#
def attributes
  @attr_statements ||= begin
    attributes = Attributes.new
    stmt_elements = xpath_from_signed_assertion('/a:AttributeStatement')
    stmt_elements.each do |stmt_element|
      stmt_element.elements.each do |attr_element|
        if attr_element.name == "EncryptedAttribute"
          # Decrypt a duplicate so the original document tree stays intact
          node = decrypt_attribute(attr_element.dup)
        else
          node = attr_element
        end
        name = node.attributes["Name"]
        if options[:check_duplicated_attributes] && attributes.include?(name)
          raise ValidationError.new("Found an Attribute element with duplicated Name")
        end
        values = node.elements.collect{|e|
          if (e.elements.nil? || e.elements.size == 0)
            # SAMLCore requires that nil AttributeValues MUST contain xsi:nil XML attribute set to "true" or "1"
            # otherwise the value is to be regarded as empty.
            ["true", "1"].include?(e.attributes['xsi:nil']) ? nil : Utils.element_text(e)
          # explicitly support saml2:NameID with saml2:NameQualifier if supplied in attributes
          # this is useful for allowing eduPersonTargetedId to be passed as an opaque identifier to use to
          # identify the subject in an SP rather than email or other less opaque attributes
          # NameQualifier, if present is prefixed with a "/" to the value
          else
            REXML::XPath.match(e,'a:NameID', { "a" => ASSERTION }).collect do |n|
              base_path = n.attributes['NameQualifier'] ? "#{n.attributes['NameQualifier']}/" : ''
              "#{base_path}#{Utils.element_text(n)}"
            end
          end
        }
        attributes.add(name, values.flatten)
      end
    end
    attributes
  end
end
# Gets the SessionNotOnOrAfter from the AuthnStatement.
# Could be used to set the local session expiration (expire at latest)
# @return [Time|nil] The SessionNotOnOrAfter value parsed as Time (see parse_time)
#
def session_expires_at
  @expires_at ||= begin
    node = xpath_first_from_signed_assertion('/a:AuthnStatement')
    node.nil? ? nil : parse_time(node, "SessionNotOnOrAfter")
  end
end
# Checks if the Status has the "Success" code
# @return [Boolean] True if the StatusCode is Success
#
def success?
  status_code == "urn:oasis:names:tc:SAML:2.0:status:Success"
end
# @return [String] StatusCode value from a SAML Response.
#   On a non-Success status, any nested sub-StatusCode values are appended
#   separated by " | ".
#
def status_code
  @status_code ||= begin
    nodes = REXML::XPath.match(
      document,
      "/p:Response/p:Status/p:StatusCode",
      { "p" => PROTOCOL }
    )
    if nodes.size == 1
      node = nodes[0]
      code = node.attributes["Value"] if node && node.attributes
      unless code == "urn:oasis:names:tc:SAML:2.0:status:Success"
        nodes = REXML::XPath.match(
          document,
          "/p:Response/p:Status/p:StatusCode/p:StatusCode",
          { "p" => PROTOCOL }
        )
        statuses = nodes.collect do |inner_node|
          inner_node.attributes["Value"]
        end
        extra_code = statuses.join(" | ")
        # Array#join always returns a String, and "" is truthy in Ruby, so the
        # previous `if extra_code` guard always fired and appended a dangling
        # " | " when no sub-StatusCode was present. Guard on emptiness instead.
        unless extra_code.empty?
          code = "#{code} | #{extra_code}"
        end
      end
      code
    end
  end
end
# @return [String] the StatusMessage value from a SAML Response.
#
def status_message
  @status_message ||= begin
    nodes = REXML::XPath.match(
      document,
      "/p:Response/p:Status/p:StatusMessage",
      { "p" => PROTOCOL }
    )
    # Only report the message when it is a single, unambiguous node
    if nodes.size == 1
      Utils.element_text(nodes.first)
    end
  end
end
# Gets the Condition Element of the SAML Response if exists.
# (returns the first node that matches the supplied xpath)
# @return [REXML::Element] Conditions Element if exists
#
def conditions
  @conditions ||= xpath_first_from_signed_assertion('/a:Conditions')
end
# Gets the NotBefore Condition Element value.
# @return [Time|nil] The NotBefore value in Time format (nil when absent)
#
def not_before
  @not_before ||= parse_time(conditions, "NotBefore")
end
# Gets the NotOnOrAfter Condition Element value.
# @return [Time|nil] The NotOnOrAfter value in Time format (nil when absent)
#
def not_on_or_after
  @not_on_or_after ||= parse_time(conditions, "NotOnOrAfter")
end
# Gets the Issuers (from Response and Assertion).
# Both the Response and the Assertion must carry exactly one Issuer each.
# @return [Array] Deduplicated Issuer text values (Strings)
# @raise [ValidationError] when either Issuer is missing or not unique
#
def issuers
  @issuers ||= begin
    issuer_response_nodes = REXML::XPath.match(
      document,
      "/p:Response/a:Issuer",
      { "p" => PROTOCOL, "a" => ASSERTION }
    )
    unless issuer_response_nodes.size == 1
      error_msg = "Issuer of the Response not found or multiple."
      raise ValidationError.new(error_msg)
    end
    issuer_assertion_nodes = xpath_from_signed_assertion("/a:Issuer")
    unless issuer_assertion_nodes.size == 1
      error_msg = "Issuer of the Assertion not found or multiple."
      raise ValidationError.new(error_msg)
    end
    nodes = issuer_response_nodes + issuer_assertion_nodes
    nodes.map { |node| Utils.element_text(node) }.compact.uniq
  end
end
# @return [String|nil] The InResponseTo attribute from the SAML Response.
#
def in_response_to
  @in_response_to ||= begin
    node = REXML::XPath.first(
      document,
      "/p:Response",
      { "p" => PROTOCOL }
    )
    node.nil? ? nil : node.attributes['InResponseTo']
  end
end
# @return [String|nil] Destination attribute from the SAML Response.
#
def destination
  @destination ||= begin
    node = REXML::XPath.first(
      document,
      "/p:Response",
      { "p" => PROTOCOL }
    )
    node.nil? ? nil : node.attributes['Destination']
  end
end
# @return [Array] The Audience elements from the Conditions of the SAML Response.
#   Empty-string audiences are discarded.
#
def audiences
  @audiences ||= begin
    nodes = xpath_from_signed_assertion('/a:Conditions/a:AudienceRestriction/a:Audience')
    nodes.map { |node| Utils.element_text(node) }.reject(&:empty?)
  end
end
# returns the allowed clock drift on timing validation
# @return [Float] drift in seconds (0.0 when the option is not set, via to_f)
def allowed_clock_drift
  return options[:allowed_clock_drift].to_f
end
# Checks if the SAML Response contains or not an EncryptedAssertion element
# @return [Boolean] True if the SAML Response contains an EncryptedAssertion element
#
def assertion_encrypted?
  ! REXML::XPath.first(
    document,
    "(/p:Response/EncryptedAssertion/)|(/p:Response/a:EncryptedAssertion/)",
    { "p" => PROTOCOL, "a" => ASSERTION }
  ).nil?
end
# @return [String|nil] The ID attribute of the SAML Response
def response_id
  id(document)
end
# @return [String|nil] The ID attribute of the signed Assertion
def assertion_id
  @assertion_id ||= begin
    node = xpath_first_from_signed_assertion("")
    node.nil? ? nil : node.attributes['ID']
  end
end
private
# Validates the SAML Response (calls several validation methods)
# @param collect_errors [Boolean] Stop validation when first error appears or keep validating. (if soft=true)
# @return [Boolean] True if the SAML Response is valid, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate(collect_errors = false)
  reset_errors!
  # Bail out early when the response/settings are unusable; every other
  # validation relies on this precondition, so it is NOT repeated in the
  # list below (it previously ran twice for no benefit).
  return false unless validate_response_state
  validations = [
    :validate_version,
    :validate_id,
    :validate_success_status,
    :validate_num_assertion,
    :validate_no_duplicated_attributes,
    :validate_signed_elements,
    :validate_structure,
    :validate_in_response_to,
    :validate_one_conditions,
    :validate_conditions,
    :validate_one_authnstatement,
    :validate_audience,
    :validate_destination,
    :validate_issuer,
    :validate_session_expiration,
    :validate_subject_confirmation,
    :validate_name_id,
    :validate_signature
  ]
  if collect_errors
    # Run everything so @errors accumulates every failure
    validations.each { |validation| send(validation) }
    @errors.empty?
  else
    # Short-circuit on the first failing validation
    validations.all? { |validation| send(validation) }
  end
end
# Validates the Status of the SAML Response
# @return [Boolean] True if the SAML Response contains a Success code, otherwise False if soft == false
# @raise [ValidationError] if soft == false and validation fails
#
def validate_success_status
  return true if success?
  error_msg = 'The status code of the Response was not Success'
  # Enrich the message with the StatusCode/StatusMessage reported by the IdP
  status_error_msg = OneLogin::RubySaml::Utils.status_error_msg(error_msg, status_code, status_message)
  append_error(status_error_msg)
end
# Validates the SAML Response against the specified schema.
# @return [Boolean] True if the XML is valid, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_structure
  structure_error_msg = "Invalid SAML Response. Not match the saml-schema-protocol-2.0.xsd"
  unless valid_saml?(document, soft)
    return append_error(structure_error_msg)
  end
  # When present, the decrypted variant must be schema-valid as well
  unless decrypted_document.nil?
    unless valid_saml?(decrypted_document, soft)
      return append_error(structure_error_msg)
    end
  end
  true
end
# Validates that the SAML Response provided in the initialization is not empty,
# also check that the setting and the IdP cert were also provided
# @return [Boolean] True if the required info is found, false otherwise
#
def validate_response_state
  return append_error("Blank response") if response.nil? || response.empty?
  return append_error("No settings on response") if settings.nil?
  # At least one way of verifying the IdP signature must be configured
  if settings.idp_cert_fingerprint.nil? && settings.idp_cert.nil? && settings.idp_cert_multi.nil?
    return append_error("No fingerprint or certificate on settings")
  end
  true
end
# Validates that the SAML Response contains an ID
# If fails, the error is added to the errors array.
# @return [Boolean] True if the SAML Response contains an ID, otherwise returns False
#
def validate_id
  unless response_id
    return append_error("Missing ID attribute on SAML Response")
  end
  true
end
# Validates the SAML version (2.0)
# If fails, the error is added to the errors array.
# @return [Boolean] True if the SAML Response is 2.0, otherwise returns False
#
def validate_version
  unless version(document) == "2.0"
    return append_error("Unsupported SAML version")
  end
  true
end
# Validates that the SAML Response only contains a single Assertion (encrypted or not).
# If fails, the error is added to the errors array.
# @return [Boolean] True if the SAML Response contains one unique Assertion, otherwise False
#
def validate_num_assertion
  error_msg = "SAML Response must contain 1 assertion"
  assertions = REXML::XPath.match(
    document,
    "//a:Assertion",
    { "a" => ASSERTION }
  )
  encrypted_assertions = REXML::XPath.match(
    document,
    "//a:EncryptedAssertion",
    { "a" => ASSERTION }
  )
  # Exactly one Assertion OR one EncryptedAssertion must be present
  unless assertions.size + encrypted_assertions.size == 1
    return append_error(error_msg)
  end
  # After decryption there must still be exactly one Assertion
  unless decrypted_document.nil?
    assertions = REXML::XPath.match(
      decrypted_document,
      "//a:Assertion",
      { "a" => ASSERTION }
    )
    unless assertions.size == 1
      return append_error(error_msg)
    end
  end
  true
end
# Validates that there are not duplicated attributes
# If fails, the error is added to the errors array
# @return [Boolean] True if there are no duplicated attribute elements, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_no_duplicated_attributes
  if options[:check_duplicated_attributes]
    begin
      # #attributes raises ValidationError when it meets a duplicated Name
      attributes
    rescue ValidationError => e
      return append_error(e.message)
    end
  end
  true
end
# Validates the Signed elements
# If fails, the error is added to the errors array
# @return [Boolean] True if there is 1 or 2 Elements signed in the SAML Response
#   and they are a Response or an Assertion Element, otherwise False if soft=True
#
def validate_signed_elements
  signature_nodes = REXML::XPath.match(
    decrypted_document.nil? ? document : decrypted_document,
    "//ds:Signature",
    {"ds"=>DSIG}
  )
  signed_elements = []
  verified_seis = []
  verified_ids = []
  signature_nodes.each do |signature_node|
    signed_element = signature_node.parent.name
    # Only the Response and the Assertion may carry a Signature
    if signed_element != 'Response' && signed_element != 'Assertion'
      return append_error("Invalid Signature Element '#{signed_element}'. SAML Response rejected")
    end
    if signature_node.parent.attributes['ID'].nil?
      return append_error("Signed Element must contain an ID. SAML Response rejected")
    end
    id = signature_node.parent.attributes.get_attribute("ID").value
    if verified_ids.include?(id)
      return append_error("Duplicated ID. SAML Response rejected")
    end
    verified_ids.push(id)
    # Check that reference URI matches the parent ID and no duplicate References or IDs
    ref = REXML::XPath.first(signature_node, ".//ds:Reference", {"ds"=>DSIG})
    if ref
      uri = ref.attributes.get_attribute("URI")
      if uri && !uri.value.empty?
        # Drop the leading "#" of the URI fragment before comparing
        sei = uri.value[1..-1]
        unless sei == id
          return append_error("Found an invalid Signed Element. SAML Response rejected")
        end
        if verified_seis.include?(sei)
          return append_error("Duplicated Reference URI. SAML Response rejected")
        end
        verified_seis.push(sei)
      end
    end
    signed_elements << signed_element
  end
  # Expect one or two signatures total (Response and/or Assertion)
  unless signature_nodes.length < 3 && !signed_elements.empty?
    return append_error("Found an unexpected number of Signature Element. SAML Response rejected")
  end
  if settings.security[:want_assertions_signed] && !(signed_elements.include? "Assertion")
    return append_error("The Assertion of the Response is not signed and the SP requires it")
  end
  true
end
# Validates if the provided request_id match the inResponseTo value.
# If fails, the error is added to the errors array
# @return [Boolean] True if there is no request_id or it match, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_in_response_to
  return true unless options.has_key? :matches_request_id
  return true if options[:matches_request_id].nil?
  # i.e. only fail when the expected request ID differs from InResponseTo
  return true unless options[:matches_request_id] != in_response_to
  error_msg = "The InResponseTo of the Response: #{in_response_to}, does not match the ID of the AuthNRequest sent by the SP: #{options[:matches_request_id]}"
  append_error(error_msg)
end
# Validates the Audience (that it matches the Service Provider EntityID).
# Skipped when the response was built with the :skip_audience option, or
# when either the Audience list or the configured sp_entity_id is empty.
# If fails, the error is added to the errors array
# @return [Boolean] True when the audience is acceptable, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_audience
  return true if options[:skip_audience]
  return true if audiences.empty? || settings.sp_entity_id.nil? || settings.sp_entity_id.empty?
  return true if audiences.include?(settings.sp_entity_id)

  plural = audiences.count > 1 ? 's' : ''
  append_error("Invalid Audience#{plural}. The audience#{plural} #{audiences.join(',')}, did not match the expected audience #{settings.sp_entity_id}")
end
# Validates the Destination, (If the SAML Response is received where expected).
# If the response was initialized with the :skip_destination option, this validation is skipped,
# If fails, the error is added to the errors array
# @return [Boolean] True if there is a Destination element that matches the Consumer Service URL, otherwise False
#
def validate_destination
  return true if destination.nil?
  return true if options[:skip_destination]
  if destination.empty?
    error_msg = "The response has an empty Destination value"
    return append_error(error_msg)
  end
  # Nothing to compare against when no ACS URL is configured
  return true if settings.assertion_consumer_service_url.nil? || settings.assertion_consumer_service_url.empty?
  unless OneLogin::RubySaml::Utils.uri_match?(destination, settings.assertion_consumer_service_url)
    error_msg = "The response was received at #{destination} instead of #{settings.assertion_consumer_service_url}"
    return append_error(error_msg)
  end
  true
end
# Checks that the samlp:Response/saml:Assertion/saml:Conditions element exists and is unique.
# (If the response was initialized with the :skip_conditions option, this validation is skipped)
# If fails, the error is added to the errors array
# @return [Boolean] True if there is a conditions element and is unique
#
def validate_one_conditions
  return true if options[:skip_conditions]
  conditions_nodes = xpath_from_signed_assertion('/a:Conditions')
  unless conditions_nodes.size == 1
    error_msg = "The Assertion must include one Conditions element"
    return append_error(error_msg)
  end
  true
end
# Checks that the samlp:Response/saml:Assertion/saml:AuthnStatement element exists and is unique.
# (If the response was initialized with the :skip_authnstatement option, this validation is skipped)
# If fails, the error is added to the errors array
# @return [Boolean] True if there is one unique AuthnStatement element
#
def validate_one_authnstatement
  return true if options[:skip_authnstatement]
  authnstatement_nodes = xpath_from_signed_assertion('/a:AuthnStatement')
  unless authnstatement_nodes.size == 1
    error_msg = "The Assertion must include one AuthnStatement element"
    return append_error(error_msg)
  end
  true
end
# Validates the Conditions. (If the response was initialized with the :skip_conditions option, this validation is skipped,
# If the response was initialized with the :allowed_clock_drift option, the timing validations are relaxed by the allowed_clock_drift value)
# @return [Boolean] True if satisfies the conditions, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_conditions
  return true if conditions.nil?
  return true if options[:skip_conditions]
  now = Time.now.utc
  # NotBefore: shift "now" forward by the allowed drift to tolerate clock skew
  if not_before && (now_with_drift = now + allowed_clock_drift) < not_before
    error_msg = "Current time is earlier than NotBefore condition (#{now_with_drift} < #{not_before})"
    return append_error(error_msg)
  end
  # NotOnOrAfter: extend the deadline by the allowed drift
  if not_on_or_after && now >= (not_on_or_after_with_drift = not_on_or_after + allowed_clock_drift)
    error_msg = "Current time is on or after NotOnOrAfter condition (#{now} >= #{not_on_or_after_with_drift})"
    return append_error(error_msg)
  end
  true
end
# Validates the Issuer (Of the SAML Response and the SAML Assertion)
# @return [Boolean] True if the Issuer matchs the IdP entityId, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_issuer
  # Nothing to check when no IdP entity id was configured
  return true if settings.idp_entity_id.nil?
  begin
    obtained_issuers = issuers
  rescue ValidationError => e
    # #issuers raises when an Issuer is missing or duplicated
    return append_error(e.message)
  end
  obtained_issuers.each do |issuer|
    unless OneLogin::RubySaml::Utils.uri_match?(issuer, settings.idp_entity_id)
      error_msg = "Doesn't match the issuer, expected: <#{settings.idp_entity_id}>, but was: <#{issuer}>"
      return append_error(error_msg)
    end
  end
  true
end
# Validates that the Session haven't expired (If the response was initialized with the :allowed_clock_drift option,
# this time validation is relaxed by the allowed_clock_drift value)
# If fails, the error is added to the errors array
# @param soft [Boolean] Unused inside the method; kept so the signature stays backward compatible
# @return [Boolean] True if the SessionNotOnOrAfter of the AuthnStatement is valid, otherwise (when expired) False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_session_expiration(soft = true)
  return true if session_expires_at.nil?
  now = Time.now.utc
  # The expiration instant is extended by the allowed clock drift
  unless (session_expires_at + allowed_clock_drift) > now
    error_msg = "The attributes have expired, based on the SessionNotOnOrAfter of the AuthnStatement of this Response"
    return append_error(error_msg)
  end
  true
end
# Validates if exists valid SubjectConfirmation (If the response was initialized with the :allowed_clock_drift option,
# timing validations are relaxed by the allowed_clock_drift value. If the response was initialized with the
# :skip_subject_confirmation option, this validation is skipped)
# There is also an optional Recipient check
# If fails, the error is added to the errors array
# @return [Boolean] True if exists a valid SubjectConfirmation, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_subject_confirmation
  return true if options[:skip_subject_confirmation]
  valid_subject_confirmation = false
  subject_confirmation_nodes = xpath_from_signed_assertion('/a:Subject/a:SubjectConfirmation')
  now = Time.now.utc
  subject_confirmation_nodes.each do |subject_confirmation|
    # Only the bearer confirmation method is supported
    if subject_confirmation.attributes.include? "Method" and subject_confirmation.attributes['Method'] != 'urn:oasis:names:tc:SAML:2.0:cm:bearer'
      next
    end
    confirmation_data_node = REXML::XPath.first(
      subject_confirmation,
      'a:SubjectConfirmationData',
      { "a" => ASSERTION }
    )
    next unless confirmation_data_node
    attrs = confirmation_data_node.attributes
    # Reject the node when InResponseTo, the time window (with drift) or the
    # Recipient does not match what this SP expects
    next if (attrs.include? "InResponseTo" and attrs['InResponseTo'] != in_response_to) ||
            (attrs.include? "NotOnOrAfter" and (parse_time(confirmation_data_node, "NotOnOrAfter") + allowed_clock_drift) <= now) ||
            (attrs.include? "NotBefore" and parse_time(confirmation_data_node, "NotBefore") > (now + allowed_clock_drift)) ||
            (attrs.include? "Recipient" and !options[:skip_recipient_check] and settings and attrs['Recipient'] != settings.assertion_consumer_service_url)
    # One acceptable SubjectConfirmation is enough
    valid_subject_confirmation = true
    break
  end
  if !valid_subject_confirmation
    error_msg = "A valid SubjectConfirmation was not found on this Response"
    return append_error(error_msg)
  end
  true
end
# Validates the NameID element
# @return [Boolean] True when the NameID is acceptable, otherwise False if soft=True
def validate_name_id
  if name_id_node.nil?
    # A missing NameID is only an error when the SP requires one
    if settings.security[:want_name_id]
      return append_error("No NameID element found in the assertion of the Response")
    end
  else
    if name_id.nil? || name_id.empty?
      return append_error("An empty NameID value found")
    end
    # When both are configured, SPNameQualifier must match the SP entityID
    unless settings.sp_entity_id.nil? || settings.sp_entity_id.empty? || name_id_spnamequalifier.nil? || name_id_spnamequalifier.empty?
      if name_id_spnamequalifier != settings.sp_entity_id
        return append_error("The SPNameQualifier value mistmatch the SP entityID value.")
      end
    end
  end
  true
end
# Validates the Signature
# @return [Boolean] True if not contains a Signature or if the Signature is valid, otherwise False if soft=True
# @raise [ValidationError] if soft == false and validation fails
#
def validate_signature
error_msg = "Invalid Signature on SAML Response"
# If the response contains the signature, and the assertion was encrypted, validate the original SAML Response
# otherwise, review if the decrypted assertion contains a signature
sig_elements = REXML::XPath.match(
document,
"/p:Response[@ID=$id]/ds:Signature",
{ "p" => PROTOCOL, "ds" => DSIG },
{ 'id' => document.signed_element_id }
)
# Validate against the original document when the Response element itself is
# signed, or when there is no decrypted document to fall back to.
use_original = sig_elements.size == 1 || decrypted_document.nil?
doc = use_original ? document : decrypted_document
# Check signature nodes
if sig_elements.nil? || sig_elements.size == 0
# No Response-level signature: look for an Assertion-level signature instead.
sig_elements = REXML::XPath.match(
doc,
"/p:Response/a:Assertion[@ID=$id]/ds:Signature",
{"p" => PROTOCOL, "a" => ASSERTION, "ds"=>DSIG},
{ 'id' => doc.signed_element_id }
)
end
# Exactly one Signature node is expected for the signed element.
if sig_elements.size != 1
if sig_elements.size == 0
append_error("Signed element id ##{doc.signed_element_id} is not found")
else
append_error("Signed element id ##{doc.signed_element_id} is found more than once")
end
return append_error(error_msg)
end
# Snapshot the error list so it can be restored when one of several
# configured certificates validates successfully.
old_errors = @errors.clone
idp_certs = settings.get_idp_cert_multi
if idp_certs.nil? || idp_certs[:signing].empty?
# Single-certificate path: validate with the configured cert/fingerprint.
opts = {}
opts[:fingerprint_alg] = settings.idp_cert_fingerprint_algorithm
idp_cert = settings.get_idp_cert
fingerprint = settings.get_fingerprint
opts[:cert] = idp_cert
if fingerprint && doc.validate_document(fingerprint, @soft, opts)
if settings.security[:check_idp_cert_expiration]
if OneLogin::RubySaml::Utils.is_cert_expired(idp_cert)
error_msg = "IdP x509 certificate expired"
return append_error(error_msg)
end
end
else
return append_error(error_msg)
end
else
# Multi-certificate path: the signature is accepted if any one of the
# configured signing certificates validates it.
valid = false
expired = false
idp_certs[:signing].each do |idp_cert|
valid = doc.validate_document_with_cert(idp_cert, true)
if valid
if settings.security[:check_idp_cert_expiration]
if OneLogin::RubySaml::Utils.is_cert_expired(idp_cert)
expired = true
end
end
# At least one certificate is valid, restore the old accumulated errors
@errors = old_errors
break
end
end
if expired
error_msg = "IdP x509 certificate expired"
return append_error(error_msg)
end
unless valid
# Remove duplicated errors
@errors = @errors.uniq
return append_error(error_msg)
end
end
true
end
# Memoized lookup of the assertion's NameID element. When the Subject
# carries an EncryptedID, it is decrypted first; otherwise the plain
# NameID element is returned (or nil when neither is present).
def name_id_node
  @name_id_node ||=
    begin
      encrypted = xpath_first_from_signed_assertion('/a:Subject/a:EncryptedID')
      encrypted ? decrypt_nameid(encrypted) : xpath_first_from_signed_assertion('/a:Subject/a:NameID')
    end
end
# Extracts the first element that matches the subelt (pattern).
# Searches an Assertion that is itself signed, then an Assertion whose
# Response parent is signed.
# @param subelt [String] The XPath pattern appended to the Assertion path
# @return [REXML::Element, nil] The first match, if any
#
def xpath_first_from_signed_assertion(subelt=nil)
  doc = decrypted_document.nil? ? document : decrypted_document
  namespaces = { "p" => PROTOCOL, "a" => ASSERTION }
  variables = { 'id' => doc.signed_element_id }

  found = REXML::XPath.first(
    doc,
    "/p:Response/a:Assertion[@ID=$id]#{subelt}",
    namespaces,
    variables
  )
  found || REXML::XPath.first(
    doc,
    "/p:Response[@ID=$id]/a:Assertion#{subelt}",
    namespaces,
    variables
  )
end
# Extracts every element that matches the subelt (pattern).
# Searches an Assertion that is itself signed, then an Assertion whose
# Response parent is signed, and concatenates both result sets.
# @param subelt [String] The XPath pattern appended to the Assertion path
# @return [Array<REXML::Element>] All matches (possibly empty)
#
def xpath_from_signed_assertion(subelt=nil)
  doc = decrypted_document.nil? ? document : decrypted_document
  namespaces = { "p" => PROTOCOL, "a" => ASSERTION }
  variables = { 'id' => doc.signed_element_id }

  matches = REXML::XPath.match(
    doc,
    "/p:Response/a:Assertion[@ID=$id]#{subelt}",
    namespaces,
    variables
  )
  matches.concat(REXML::XPath.match(
    doc,
    "/p:Response[@ID=$id]/a:Assertion#{subelt}",
    namespaces,
    variables
  ))
end
# Generates the decrypted_document
# @return [XMLSecurity::SignedDocument] The SAML Response with the assertion decrypted
# @raise [ValidationError] when settings are missing or no SP private key is available
#
def generate_decrypted_document
if settings.nil? || !settings.get_sp_key
raise ValidationError.new('An EncryptedAssertion found and no SP private key found on the settings to decrypt it. Be sure you provided the :settings parameter at the initialize method')
end
# Work on a deep copy so decryption never mutates the original document.
# Marshal at Ruby 1.8.7 throw an Exception
if RUBY_VERSION < "1.9"
# Re-parse from the raw response instead of Marshal-copying on 1.8.x.
document_copy = XMLSecurity::SignedDocument.new(response, errors)
else
document_copy = Marshal.load(Marshal.dump(document))
end
decrypt_assertion_from_document(document_copy)
end
# Obtains a SAML Response with the EncryptedAssertion element decrypted
# @param document_copy [XMLSecurity::SignedDocument] A copy of the original SAML Response with the encrypted assertion
# @return [XMLSecurity::SignedDocument] The SAML Response with the assertion decrypted
#
def decrypt_assertion_from_document(document_copy)
response_node = REXML::XPath.first(
document_copy,
"/p:Response/",
{ "p" => PROTOCOL }
)
# The EncryptedAssertion may appear with or without the assertion namespace prefix.
encrypted_assertion_node = REXML::XPath.first(
document_copy,
"(/p:Response/EncryptedAssertion/)|(/p:Response/a:EncryptedAssertion/)",
{ "p" => PROTOCOL, "a" => ASSERTION }
)
# Swap the encrypted node for its decrypted counterpart in place, then
# re-wrap the modified tree as a SignedDocument.
response_node.add(decrypt_assertion(encrypted_assertion_node))
encrypted_assertion_node.remove
XMLSecurity::SignedDocument.new(response_node.to_s)
end
# Decrypts an EncryptedAssertion element.
# @param encrypted_assertion_node [REXML::Element] The EncryptedAssertion element
# @return [REXML::Element] The decrypted Assertion element
#
def decrypt_assertion(encrypted_assertion_node)
  # Captures everything up to (and including) the closing Assertion tag,
  # with or without a namespace prefix.
  assertion_regex = /(.*<\/(\w+:)?Assertion>)/m
  decrypt_element(encrypted_assertion_node, assertion_regex)
end
# Decrypts an EncryptedID element
# @param encryptedid_node [REXML::Element] The EncryptedID element
# @return [REXML::Element] The decrypted NameID element
#
def decrypt_nameid(encryptedid_node)
decrypt_element(encryptedid_node, /(.*<\/(\w+:)?NameID>)/m)
end
# Decrypts an EncryptedAttribute element
# @param encryptedattribute_node [REXML::Element] The EncryptedAttribute element
# @return [REXML::Element] The decrypted Attribute element
#
def decrypt_attribute(encryptedattribute_node)
decrypt_element(encryptedattribute_node, /(.*<\/(\w+:)?Attribute>)/m)
end
# Decrypt an element
# @param encrypt_node [REXML::Element] The encrypted element
# @param rgrex [Regexp] Regex that captures the decrypted element (used to strip trailing noise)
# @return [REXML::Element] The decrypted element (first child of the wrapper node)
# @raise [ValidationError] when no SP private key is available
#
def decrypt_element(encrypt_node, rgrex)
if settings.nil? || !settings.get_sp_key
raise ValidationError.new('An ' + encrypt_node.name + ' found and no SP private key found on the settings to decrypt it')
end
# Decrypted EncryptedAttribute plaintext may reference the xsi namespace,
# so declare it on the wrapper node in that case.
if encrypt_node.name == 'EncryptedAttribute'
node_header = '<node xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">'
else
node_header = '<node xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion">'
end
elem_plaintext = OneLogin::RubySaml::Utils.decrypt_data(encrypt_node, settings.get_sp_key)
# If we get some problematic noise in the plaintext after decrypting.
# This quick regexp parse will grab only the Element and discard the noise.
elem_plaintext = elem_plaintext.match(rgrex)[0]
# To avoid namespace errors if saml namespace is not defined
# create a parent node first with the namespace defined
elem_plaintext = node_header + elem_plaintext + '</node>'
doc = REXML::Document.new(elem_plaintext)
doc.root[0]
end
# Parse the attribute of a given node in Time format
# @param node [REXML::Element, nil] The node
# @param attribute [String] The attribute name
# @return [Time, nil] The parsed value, or nil when the node or attribute is missing
#
def parse_time(node, attribute)
  value = node && node.attributes[attribute]
  Time.parse(value) if value
end
end
end
end
|
Chef::Log.info("Updating pip to the latest version")
# Upgrade pip in place using the hard-coded system pip path.
# NOTE(review): assumes pip lives at /usr/bin/pip — confirm on all platforms.
execute "/usr/bin/pip install -U pip" do
user "root"
action :run
end
Get the appropriate pip executable path before upgrading pip.
Chef::Log.info("Updating pip to the latest version")

# Locate pip at converge time. A plain Ruby local assigned inside the
# ruby_block is scoped to that block and is evaluated only at converge
# time, so the execute resource below (whose attributes are evaluated at
# compile time) would see nil. Stash the path in node.run_state and read
# it back lazily instead.
ruby_block "Get Pip executable" do
  block do
    node.run_state['pip_executable'] = `which pip`.strip
  end
  action :run
end

execute "upgrade pip" do
  # lazy defers interpolation until converge time, after the ruby_block ran.
  command lazy { "#{node.run_state['pip_executable']} install -U pip" }
  user "root"
  action :run
end
module OptimusPrime
  module Sources
    # Source that reads rows from a relational database through Sequel.
    class Rdbms < Source
      # Connects immediately and materializes the dataset handle for +query+.
      # @param dsn [String] Sequel connection string
      # @param query [String] identifier passed to Sequel's dataset lookup
      def initialize(dsn:, query:, **options)
        connection = Sequel.connect(dsn, **options)
        @db = connection
        @result = connection[query]
      end

      # Yields every row of the configured query in turn.
      def each
        @result.each { |row| yield row }
      end
    end
  end
end
Change the RDBMS source to store the query and build the Sequel dataset lazily on each iteration.
module OptimusPrime
module Sources
# Source that streams rows from a relational database through Sequel.
class Rdbms < Source
# @param dsn [String] Sequel connection string
# @param query [String] identifier passed to Sequel's dataset lookup
# NOTE(review): @db ||= is always a plain assignment in a fresh instance;
# the ||= only matters if initialize is re-invoked — confirm intent.
def initialize(dsn:, query:, **options)
@db ||= Sequel.connect(dsn, **options)
@query = query
end
# Yields each row of the query result. The dataset is rebuilt on every
# call (see #query), so each iteration re-runs the query.
def each
query.each do |row|
yield row
end
end
private
# Builds the Sequel dataset for @query on demand.
def query
@db[@query]
end
end
end
end
|
# PaperTrail::Model supplies the `has_paper_trail` macro, which wires
# ActiveRecord callbacks to record a Version row for every create, update
# and destroy of the declaring model.
module PaperTrail
module Model
def self.included(base)
base.send :extend, ClassMethods
end
module ClassMethods
# Declare this in your model to track every create, update, and destroy. Each version of
# the model is available in the `versions` association.
#
# Options:
# :ignore an array of attributes for which a new `Version` will not be created if only they change.
# :only inverse of `ignore` - a new `Version` will be created only for these attributes if supplied
# :meta a hash of extra data to store. You must add a column to the `versions` table for each key.
# Values are objects or procs (which are called with `self`, i.e. the model with the paper
# trail). See `PaperTrail::Controller.info_for_paper_trail` for how to store data from
# the controller.
def has_paper_trail(options = {})
# Lazily include the instance methods so we don't clutter up
# any more ActiveRecord models than we have to.
send :include, InstanceMethods
# The version this instance was reified from.
attr_accessor :version
cattr_accessor :ignore
self.ignore = ([options[:ignore]].flatten.compact || []).map &:to_s
cattr_accessor :only
self.only = ([options[:only]].flatten.compact || []).map &:to_s
cattr_accessor :meta
self.meta = options[:meta] || {}
# Indicates whether or not PaperTrail is active for this class.
# This is independent of whether PaperTrail is globally enabled or disabled.
cattr_accessor :paper_trail_active
self.paper_trail_active = true
has_many :versions, :as => :item, :order => 'created_at ASC, id ASC'
after_create :record_create
before_update :record_update
after_destroy :record_destroy
end
# Switches PaperTrail off for this class.
def paper_trail_off
self.paper_trail_active = false
end
# Switches PaperTrail on for this class.
def paper_trail_on
self.paper_trail_active = true
end
end
# Wrap the following methods in a module so we can include them only in the
# ActiveRecord models that declare `has_paper_trail`.
module InstanceMethods
# Returns true if this instance is the current, live one;
# returns false if this instance came from a previous version.
def live?
version.nil?
end
# Returns who put the object into its current state.
def originator
Version.with_item_keys(self.class.name, id).last.try :whodunnit
end
# Returns the object (not a Version) as it was at the given timestamp.
def version_at(timestamp, reify_options={})
# Because a version stores how its object looked *before* the change,
# we need to look for the first version created *after* the timestamp.
version = versions.after(timestamp).first
version ? version.reify(reify_options) : self
end
# Returns the object (not a Version) as it was most recently.
def previous_version
preceding_version = version ? version.previous : versions.last
preceding_version.try :reify
end
# Returns the object (not a Version) as it became next.
def next_version
# NOTE: if self (the item) was not reified from a version, i.e. it is the
# "live" item, we return nil. Perhaps we should return self instead?
subsequent_version = version ? version.next : nil
subsequent_version.reify if subsequent_version
end
private
# after_create callback: stores a 'create' Version (no :object snapshot,
# since there was no prior state to record).
def record_create
if switched_on?
versions.create merge_metadata(:event => 'create', :whodunnit => PaperTrail.whodunnit)
end
end
# before_update callback: builds (does not save) an 'update' Version so it
# is persisted together with the record itself.
def record_update
if switched_on? && changed_notably?
versions.build merge_metadata(:event => 'update',
:object => object_to_string(item_before_change),
:whodunnit => PaperTrail.whodunnit)
end
end
# after_destroy callback: stores a 'destroy' Version with the final state.
def record_destroy
if switched_on? and not new_record?
Version.create merge_metadata(:item => self,
:event => 'destroy',
:object => object_to_string(item_before_change),
:whodunnit => PaperTrail.whodunnit)
end
# Reload the association so the new destroy version is visible.
versions.send :load_target
end
# Merges model-level (:meta) metadata and controller-supplied info into +data+.
def merge_metadata(data)
# First we merge the model-level metadata in `meta`.
meta.each do |k,v|
data[k] =
if v.respond_to?(:call)
v.call(self)
elsif v.is_a?(Symbol) && respond_to?(v)
send(v)
else
v
end
end
# Second we merge any extra data from the controller (if available).
data.merge(PaperTrail.controller_info || {})
end
# Reconstructs the record as it looked before the pending changes by
# applying `changed_attributes` onto a clone.
# NOTE(review): `clone` is shallow, so mutable attribute values are shared
# with self — confirm this is acceptable for serialization.
def item_before_change
self.clone.tap do |previous|
previous.id = id
changed_attributes.each { |attr, before| previous[attr] = before }
end
end
# Serializes a record's attributes to YAML for the versions table.
def object_to_string(object)
object.attributes.to_yaml
end
def changed_notably?
notably_changed.any?
end
# Changed attributes that should trigger a new version, honoring :only/:ignore.
def notably_changed
self.class.only.empty? ? changed_and_not_ignored : (changed_and_not_ignored & self.class.only)
end
def changed_and_not_ignored
changed - self.class.ignore
end
# Returns `true` if PaperTrail is globally enabled and active for this class,
# `false` otherwise.
def switched_on?
PaperTrail.enabled? && self.class.paper_trail_active
end
end
end
end
Whitespace cleanup; no functional changes.
# PaperTrail::Model supplies the `has_paper_trail` macro, which wires
# ActiveRecord callbacks to record a Version row for every create, update
# and destroy of the declaring model.
module PaperTrail
module Model
def self.included(base)
base.send :extend, ClassMethods
end
module ClassMethods
# Declare this in your model to track every create, update, and destroy. Each version of
# the model is available in the `versions` association.
#
# Options:
# :ignore an array of attributes for which a new `Version` will not be created if only they change.
# :only inverse of `ignore` - a new `Version` will be created only for these attributes if supplied
# :meta a hash of extra data to store. You must add a column to the `versions` table for each key.
# Values are objects or procs (which are called with `self`, i.e. the model with the paper
# trail). See `PaperTrail::Controller.info_for_paper_trail` for how to store data from
# the controller.
def has_paper_trail(options = {})
# Lazily include the instance methods so we don't clutter up
# any more ActiveRecord models than we have to.
send :include, InstanceMethods
# The version this instance was reified from.
attr_accessor :version
cattr_accessor :ignore
self.ignore = ([options[:ignore]].flatten.compact || []).map &:to_s
cattr_accessor :only
self.only = ([options[:only]].flatten.compact || []).map &:to_s
cattr_accessor :meta
self.meta = options[:meta] || {}
# Indicates whether or not PaperTrail is active for this class.
# This is independent of whether PaperTrail is globally enabled or disabled.
cattr_accessor :paper_trail_active
self.paper_trail_active = true
has_many :versions, :as => :item, :order => 'created_at ASC, id ASC'
after_create :record_create
before_update :record_update
after_destroy :record_destroy
end
# Switches PaperTrail off for this class.
def paper_trail_off
self.paper_trail_active = false
end
# Switches PaperTrail on for this class.
def paper_trail_on
self.paper_trail_active = true
end
end
# Wrap the following methods in a module so we can include them only in the
# ActiveRecord models that declare `has_paper_trail`.
module InstanceMethods
# Returns true if this instance is the current, live one;
# returns false if this instance came from a previous version.
def live?
version.nil?
end
# Returns who put the object into its current state.
def originator
Version.with_item_keys(self.class.name, id).last.try :whodunnit
end
# Returns the object (not a Version) as it was at the given timestamp.
def version_at(timestamp, reify_options={})
# Because a version stores how its object looked *before* the change,
# we need to look for the first version created *after* the timestamp.
version = versions.after(timestamp).first
version ? version.reify(reify_options) : self
end
# Returns the object (not a Version) as it was most recently.
def previous_version
preceding_version = version ? version.previous : versions.last
preceding_version.try :reify
end
# Returns the object (not a Version) as it became next.
def next_version
# NOTE: if self (the item) was not reified from a version, i.e. it is the
# "live" item, we return nil. Perhaps we should return self instead?
subsequent_version = version ? version.next : nil
subsequent_version.reify if subsequent_version
end
private
# after_create callback: stores a 'create' Version (no :object snapshot,
# since there was no prior state to record).
def record_create
if switched_on?
versions.create merge_metadata(:event => 'create', :whodunnit => PaperTrail.whodunnit)
end
end
# before_update callback: builds (does not save) an 'update' Version so it
# is persisted together with the record itself.
def record_update
if switched_on? && changed_notably?
versions.build merge_metadata(:event => 'update',
:object => object_to_string(item_before_change),
:whodunnit => PaperTrail.whodunnit)
end
end
# after_destroy callback: stores a 'destroy' Version with the final state.
def record_destroy
if switched_on? and not new_record?
Version.create merge_metadata(:item => self,
:event => 'destroy',
:object => object_to_string(item_before_change),
:whodunnit => PaperTrail.whodunnit)
end
# Reload the association so the new destroy version is visible.
versions.send :load_target
end
# Merges model-level (:meta) metadata and controller-supplied info into +data+.
def merge_metadata(data)
# First we merge the model-level metadata in `meta`.
meta.each do |k,v|
data[k] =
if v.respond_to?(:call)
v.call(self)
elsif v.is_a?(Symbol) && respond_to?(v)
send(v)
else
v
end
end
# Second we merge any extra data from the controller (if available).
data.merge(PaperTrail.controller_info || {})
end
# Reconstructs the record as it looked before the pending changes by
# applying `changed_attributes` onto a clone.
# NOTE(review): `clone` is shallow, so mutable attribute values are shared
# with self — confirm this is acceptable for serialization.
def item_before_change
self.clone.tap do |previous|
previous.id = id
changed_attributes.each { |attr, before| previous[attr] = before }
end
end
# Serializes a record's attributes to YAML for the versions table.
def object_to_string(object)
object.attributes.to_yaml
end
def changed_notably?
notably_changed.any?
end
# Changed attributes that should trigger a new version, honoring :only/:ignore.
def notably_changed
self.class.only.empty? ? changed_and_not_ignored : (changed_and_not_ignored & self.class.only)
end
def changed_and_not_ignored
changed - self.class.ignore
end
# Returns `true` if PaperTrail is globally enabled and active for this class,
# `false` otherwise.
def switched_on?
PaperTrail.enabled? && self.class.paper_trail_active
end
end
end
end
|
require 'pedant/platform'
module Pedant
  # Platform implementation for multi-tenant Chef servers (e.g. Hosted Chef).
  # Adds organization creation/deletion, user and client provisioning, and
  # org-scoped API URL construction on top of the base Platform.
  class MultiTenantPlatform < Platform

    GLOBAL_OBJECTS = ['users', 'organizations']
    MAX_ATTEMPTS = 5

    attr_reader :test_org, :test_org_owner, :validate_org, :internal_account_url, :ldap, :ldap_testing

    def initialize(server, superuser_key_file, super_user_name='pivotal')
      super(server, superuser_key_file, super_user_name)
      @test_org = org_from_config
      @internal_account_url = Pedant::Config[:internal_account_url]
      @ldap = Pedant::Config[:ldap]
      @ldap_testing = Pedant::Config[:ldap_testing]
    end

    # Intelligently construct a complete API URL based on the
    # pre-configured server and platform information. URLs targeted for
    # multi-tenant platforms (i.e. Hosted Chef) prepend
    # "/organizations/#{org}" to the given path fragment, while
    # single-tenant targeted URLs (i.e., Open Source Chef and Private
    # Chef) do not.
    def api_url(path_fragment = '/', org=test_org)
      path_prefix = (map_to_default_orgname?(path_fragment) ? '' : "/organizations/#{org.name}")
      slash = path_fragment.start_with?('/') ? '' : '/'
      "#{server}#{path_prefix}#{slash}#{path_fragment}"
    end

    # Override org_name that is defined in the base platform
    def org_name
      test_org.name
    end

    # True when the request path should be served from the default org
    # rather than an explicit /organizations/<name> prefix.
    def map_to_default_orgname?(path_fragment)
      return false unless Pedant::Config.use_default_org # Don't even bother unless we are in default_orgname mode
      return false if path_fragment =~ /_acl/ # False if _acl appears anywhere
      return true if path_fragment =~ /^\/?(search|nodes|cookbooks|data|roles|sandboxes|environments|clients)/
      return false # Default to false
    end

    # Builds and caches every configured requestor (clients then users),
    # creating an accessor for each.
    def setup(requestors=Pedant::Config.requestors)
      requestors[:clients].each do |kind, client_hash|
        key = cache_key(kind, :client)

        # Replaced an `unless ... else` (an anti-pattern) with a plain if/else.
        requestor_cache[key] = if client_hash[:bogus]
                                 dummy_client(client_hash)
                               else
                                 client_from_config(client_hash)
                               end

        create_requestor_accessor(key)
      end

      requestors[:users].each do |kind, user_hash|
        user_hash[:admin] = !!user_hash[:admin] # Convert to true or false
        user_hash[:associate] = true if user_hash[:associate].nil? # default to associate

        user_from_config(user_hash).tap do |user|
          key = cache_key(kind, :user)

          requestor_cache[key] = user

          make_owner(user, @test_org) if user_hash[:admin]
          make_user(user, @test_org) if user_hash[:associate] and !user_hash[:admin]

          create_requestor_accessor(key)
        end
      end
    end

    def cleanup
      cleanup_requestors
      delete_org_from_config
    end

    # TODO: expose the entire payload as an input parameter
    def create_user(username, options = {})
      payload = {
        "username" => username,
        "email" => "#{username}@opscode.com",
        "first_name" => username,
        "last_name" => username,
        "display_name" => username,
        "password" => "foobar"
      }
      users_url = "#{@server}/users"

      r = post(users_url, @superuser, :payload => payload)
      if r.code == 409
        # User already exists: regenerate its key via PUT instead.
        puts "The user #{username} already exists... regenerating a key for it now"
        payload["private_key"] = true
        r = put("#{users_url}/#{username}", @superuser, :payload => payload)
      end

      private_key = parse(r)["private_key"]

      # The "admin" and "associate" options here are more of a metadata
      # than actually creating an admin or associating. This allows
      # Pedant tests to succeed even if the users config table has changed.
      Pedant::User.new(username, private_key, platform: self, preexisting: false, admin: options[:admin], associate: options[:associate])
    end

    def delete_user(user)
      if user.preexisting
        puts "Pedant did not create the user #{user.name}, and will not delete it"
      else
        puts "Deleting user #{user.name} ..."
        ## TODO: use api_url
        r = delete("#{@server}/users/#{user.name}", @superuser)
        if r.code != 200
          puts "Unexpected response #{r.code}: #{r}"
        end
      end
    end

    ################################################################################
    # Config-Aware Operations
    #
    # These operations for adding and deleting users respect
    # config settings; i.e. they will not create new users or orgs if
    # the config indicates they should already exist, and they won't
    # delete them unless Pedant created them in the first place.
    #
    ################################################################################

    def user_from_config(requestor_spec)
      name      = requestor_spec[:name]
      create_me = requestor_spec[:create_me]
      key_file  = requestor_spec[:key_file]
      associate = requestor_spec[:associate]
      admin     = requestor_spec[:admin]

      if create_me
        create_user(name, admin: admin, associate: associate).tap do |user|
          user.populate_dot_chef! if requestor_spec[:create_knife]
        end
      else
        Pedant::User.new(name, key_file, platform: self, preexisting: true)
      end
    end

    # def delete_user_from_config(user_key)
    #   user = Pedant::Config[:users][user_key]
    #   name = user[:name]
    #   if user[:create_me]
    #     delete_user(name)
    #   else
    #     puts "Pedant did not create user #{name}, and will not delete it"
    #   end
    # end

    def create_client(name)
      clientname = name.to_s
      puts "Creating client #{clientname}..."
      payload = { "name" => clientname }

      r = post(api_url('/clients'), @test_org.validator, :payload => payload)
      if r.code == 409
        # Client already exists: regenerate its key via PUT instead.
        puts "The client #{clientname} already exists... regenerating a key for it now"
        payload["private_key"] = true
        r = put(api_url("/clients/#{clientname}"), @test_org.validator, :payload => payload)
      end

      private_key = parse(r)["private_key"]
      Pedant::Client.new(clientname, private_key, platform: self)
    end

    def client_from_config(requestor_spec)
      name      = requestor_spec[:name]
      create_me = requestor_spec[:create_me]
      type      = requestor_spec[:type]
      key_file  = requestor_spec[:key_file]

      # Extract to after hooks
      if create_me
        create_client(name).tap do |client|
          client.populate_dot_chef! if requestor_spec[:create_knife]
        end
      else
        Pedant::Client.new(name, key_file, platform: self, preexisting: true, admin: false)
      end
    end

    def delete_client(client, org = self.test_org)
      puts "Deleting client #{client.name} ..."
      r = delete("#{@server}/organizations/#{org.name}/clients/#{client.name}", self.admin_user)
      if r.code != 200
        puts "Unexpected response #{r.code}: #{r}"
      end
    end

    ################################################################################
    # Multi-Tenant Platform Methods
    #
    # Organization and Authorization-related (e.g., groups) operations go here.
    #
    # TODO: Extract this into a separate class
    ################################################################################

    # TODO: Expose entire payload as an input parameter
    def create_org(orgname)
      payload = {
        "name" => orgname,
        "full_name" => orgname,
        "org_type" => "Business"
      }
      puts "Creating org #{orgname}"

      # Retry on 503s up to MAX_ATTEMPTS times.
      MAX_ATTEMPTS.times do |attempt|
        r = post("#{@server}/organizations", superuser, :payload => payload)

        # This re-assigns the variable 'r' and therefore can't be part of the case statement below
        if r.code == 409
          puts "The organization already exists! Regenerating validator key ..."
          r = post("#{Pedant::Config.account_server}/organizations/#{orgname}/_validator_key", superuser, {})
          raise "Bad error code #{r.code} from regenerating validator key: #{r}" unless r.code == 200
        end

        case r.code
        when 201, 200
          parsed = parse(r)
          # If we came here through the 409 codepath there won't be a client name so we're hardcoding it.
          validator_name = parsed["clientname"] || "#{orgname}-validator"
          validator_key = parsed["private_key"]

          validator = Pedant::Client.new(validator_name, validator_key)
          return Pedant::Organization.new(orgname, validator)
        when 503
          # Continue attempting by allowing the loop to continue
          puts "Failed attempting to contact #{@server} (#{attempt}/#{MAX_ATTEMPTS})"
        else
          raise "Bad error code #{r.code} from create org: #{r}"
        end
      end
      raise "Failed attempting to contact #{@server} #{MAX_ATTEMPTS} times"
    end

    def delete_org(orgname)
      puts "Deleting organization #{orgname} ..."
      r = delete("#{@server}/organizations/#{orgname}", superuser)
      if r.code != 200
        puts "Unexpected response #{r.code}: #{r}"
      end
    end

    # Associates +user+ with the org via the association_requests workflow:
    # the superuser issues an invite, which the user then accepts.
    def associate_user_with_org(orgname, user)
      puts "Associating user #{user.name} with org #{orgname} ..."
      payload = { "user" => user.name }
      association_requests_url = "#{@server}/organizations/#{orgname}/association_requests"
      r = post("#{association_requests_url}", superuser, :payload => payload)
      if r.code == 201 # Created
        association_id = parse(r)["uri"].split("/").last
        r = put("#{@server}/users/#{user.name}/association_requests/#{association_id}", user, :payload => { "response" => "accept" })
      elsif r.code == 409 && parse(r)["error"] == "The association already exists."
        # No problem!
      else
        raise "Bad response #{r.code} from association_requests: #{r}"
      end
    end

    def add_user_to_group(orgname, user, groupname)
      # Get the group information so we can fill in the POST with mostly existing information
      group_url = "#{@server}/organizations/#{orgname}/groups/#{groupname}"
      r = get(group_url, superuser)
      group = parse(r)

      # Insert the user into the given group
      if group["actors"].include?(user.name)
        puts "User #{user.name} is already in group #{orgname}/#{groupname}."
      else
        puts "Adding user #{user.name} to group #{orgname}/#{groupname} ..."
        payload = {:groupname=> groupname, :actors=>{"users"=> group["actors"], "groups" => group["groups"]}}
        payload[:actors]['users'].unshift(user.name)
        put(group_url, superuser, :payload => payload)
      end
    end

    # As though +user+ had created +org+ themselves
    def make_owner(user, org)
      associate_in_groups(user, org, ["admins", "billing-admins", "users"])
    end

    def make_user(user, org)
      associate_in_groups(user, org, ["users"])
    end

    # Helper function to associate a user with an org, and place the
    # user in the specified groups
    def associate_in_groups(user, org, groups)
      associate_user_with_org(org.name, user)
      groups.each do |group|
        add_user_to_group(org.name, user, group)
      end
    end

    ################################################################################
    # Config-Aware Operations
    #
    # These operations for adding and deleting users and orgs respect
    # config settings; i.e. they will not create new users or orgs if
    # the config indicates they should already exist, and they won't
    # delete them unless Pedant created them in the first place.
    #
    ################################################################################

    def pedant_orgname
      Pedant::Config.use_default_org ? Pedant::Config.default_orgname : Pedant::Config[:org][:name]
    end

    def org_from_config()
      org = Pedant::Config[:org]

      # If default_orgname is set, override the settings for org
      name = pedant_orgname

      if org[:create_me] || Pedant::Config.default_orgname
        @validate_org = true
        create_org(name)
      else
        key = org[:validator_key]
        puts "Using pre-created org. Skipping org creation validation tests."
        Pedant::Organization.new(name, key)
      end
    end

    def delete_org_from_config
      if Pedant.config[:org][:create_me] && Pedant.config[:delete_org]
        delete_org(pedant_orgname)
      else
        # Fixed garbled message (was "so will it not delete it").
        puts "Pedant did not create the org, so it will not delete it"
      end
    end

    # When this is defined, pedant will run this before running anything else.
    def before_configure_rspec
      validate_created_org(test_org) if validate_org
    end

    # Runs the org_creation spec suite against the freshly created org; tears
    # the org (and its owner) down and aborts the process on failure.
    def validate_created_org(org)
      puts "Validating Org Creation"

      @test_org_owner = create_user("#{org.name}_owner", associate: true, admin: true)
      requestor_cache[:owner] = @test_org_owner
      make_owner(self.test_org_owner, org)

      ::RSpec.configure do |c|
        c.treat_symbols_as_metadata_keys_with_true_values = true
        c.include Pedant::RSpec::Common
      end

      args = if Pedant.config.debug_org_creation
               Pedant.config.rspec_formatting_args
             else
               []
             end
      args.concat(Pedant::Gem.test_directories("org_creation"))

      if ::RSpec::Core::Runner.run(args) > 0
        delete_org_from_config
        delete_user(test_org_owner)
        puts "Error: unable to validate testing org"
        exit 2
      end

      # We need to reset RSpec after using it. Below are the hacks necessary for reset
      ::RSpec.reset
      ::RSpec.configuration.extend RSpecShared::Methods
    end
  end
end
Added the principals and runs endpoints to the default-org request mapping.
require 'pedant/platform'
module Pedant
class MultiTenantPlatform < Platform
GLOBAL_OBJECTS = ['users', 'organizations']
MAX_ATTEMPTS = 5
attr_reader :test_org, :test_org_owner, :validate_org, :internal_account_url, :ldap, :ldap_testing
def initialize(server, superuser_key_file, super_user_name='pivotal')
super(server, superuser_key_file, super_user_name)
@test_org = org_from_config
@internal_account_url = Pedant::Config[:internal_account_url]
@ldap = Pedant::Config[:ldap]
@ldap_testing = Pedant::Config[:ldap_testing]
end
# Intelligently construct a complete API URL based on the
# pre-configured server and platform information. URLs targeted for
# multi-tenant platforms (i.e. Hosted Chef) prepend
# "/organizations/#{org}" to the given path fragment, while
# single-tenant targeted URLs (i.e., Open Source Chef and Private
# Chef) do not.
def api_url(path_fragment = '/', org=test_org)
path_prefix = (map_to_default_orgname?(path_fragment) ? '' : "/organizations/#{org.name}")
slash = path_fragment.start_with?('/') ? '' : '/'
"#{server}#{path_prefix}#{slash}#{path_fragment}"
end
# Override org_name that is defined in the base platform
def org_name
test_org.name
end
def map_to_default_orgname?(path_fragment)
return false unless Pedant::Config.use_default_org # Don't even bother unless we are in default_orgname mode
return false if path_fragment =~ /_acl/ # False if _acl appears anywhere
return true if path_fragment =~ /^\/?(search|nodes|cookbooks|data|roles|sandboxes|environments|clients|principals|runs)/
return false # Default to false
end
def setup(requestors=Pedant::Config.requestors)
requestors[:clients].each do |kind, client_hash|
key = cache_key(kind, :client)
requestor_cache[key] = unless client_hash[:bogus]
client_from_config(client_hash)
else
dummy_client(client_hash)
end
create_requestor_accessor(key)
end
requestors[:users].each do |kind, user_hash|
user_hash[:admin] = !!user_hash[:admin] # Convert to true or false
user_hash[:associate] = true if user_hash[:associate].nil? # default to associate
user_from_config(user_hash).tap do |user|
key = cache_key(kind, :user)
requestor_cache[key] = user
make_owner(user, @test_org) if user_hash[:admin]
make_user(user, @test_org) if user_hash[:associate] and !user_hash[:admin]
create_requestor_accessor(key)
end
end
end
def cleanup
cleanup_requestors
delete_org_from_config
end
# TODO: expose the entire payload as an input parameter
def create_user(username, options = {})
payload = {
"username" => username,
"email" => "#{username}@opscode.com",
"first_name" => username,
"last_name" => username,
"display_name" => username,
"password" => "foobar"
}
users_url = "#{@server}/users"
r = post(users_url, @superuser, :payload => payload)
if r.code == 409
puts "The user #{username} already exists... regenerating a key for it now"
payload["private_key"] = true
r = put("#{users_url}/#{username}", @superuser, :payload => payload)
end
private_key = parse(r)["private_key"]
# The "admin" and "associate" options here are more of a metadata
# than actually creating an admin or associating. This allows
# Pedant tests to succeed even if the users config table has changed.
Pedant::User.new(username, private_key, platform: self, preexisting: false, admin: options[:admin], associate: options[:associate])
end
# Removes +user+ from the server — but only if Pedant created it;
# pre-existing users are left untouched. Non-200 responses are logged,
# not raised.
def delete_user(user)
  if user.preexisting
    puts "Pedant did not create the user #{user.name}, and will not delete it"
    return
  end

  puts "Deleting user #{user.name} ..."
  ## TODO: use api_url
  response = delete("#{@server}/users/#{user.name}", @superuser)
  puts "Unexpected response #{response.code}: #{response}" if response.code != 200
end
################################################################################
# Config-Aware Operations
#
# These operations for adding and deleting users respect
# config settings; i.e. they will not create new users or orgs if
# the config indicates they should already exist, and they won't
# delete them unless Pedant created them in the first place.
#
################################################################################
# Builds a Pedant::User from a requestor spec hash. When :create_me is
# set the user is created on the server (optionally populating a .chef
# directory); otherwise an existing user is wrapped with the configured
# key file.
def user_from_config(requestor_spec)
  name = requestor_spec[:name]

  unless requestor_spec[:create_me]
    return Pedant::User.new(name, requestor_spec[:key_file], platform: self, preexisting: true)
  end

  user = create_user(name, admin: requestor_spec[:admin], associate: requestor_spec[:associate])
  user.populate_dot_chef! if requestor_spec[:create_knife]
  user
end
# def delete_user_from_config(user_key)
# user = Pedant::Config[:users][user_key]
# name = user[:name]
# if user[:create_me]
# delete_user(name)
# else
# puts "Pedant did not create user #{name}, and will not delete it"
# end
# end
# Creates a client named +name+ in the test org (authenticated as the
# org's validator) and returns a Pedant::Client for it. If the client
# already exists (409), its key is regenerated instead.
def create_client(name)
  clientname = name.to_s
  puts "Creating client #{clientname}..."

  payload = { "name" => clientname }
  response = post(api_url('/clients'), @test_org.validator, :payload => payload)

  if response.code == 409
    puts "The client #{clientname} already exists... regenerating a key for it now"
    payload["private_key"] = true
    response = put(api_url("/clients/#{clientname}"), @test_org.validator, :payload => payload)
  end

  Pedant::Client.new(clientname, parse(response)["private_key"], platform: self)
end
# Builds a Pedant::Client from a requestor spec hash.
#
# When :create_me is set the client is created on the server (optionally
# populating a .chef directory); otherwise an existing client is wrapped
# with the configured key file.
#
# Note: the spec's :type entry was previously read into an unused local;
# that dead assignment has been removed.
def client_from_config(requestor_spec)
  name = requestor_spec[:name]
  create_me = requestor_spec[:create_me]
  key_file = requestor_spec[:key_file]

  # Extract to after hooks
  if create_me
    create_client(name).tap do |client|
      client.populate_dot_chef! if requestor_spec[:create_knife]
    end
  else
    Pedant::Client.new(name, key_file, platform: self, preexisting: true, admin: false)
  end
end
# Deletes +client+ from +org+ (defaults to the test org), acting as the
# admin user. Non-200 responses are logged, not raised.
def delete_client(client, org = self.test_org)
  puts "Deleting client #{client.name} ..."
  response = delete("#{@server}/organizations/#{org.name}/clients/#{client.name}", self.admin_user)
  puts "Unexpected response #{response.code}: #{response}" unless response.code == 200
end
################################################################################
# Multi-Tenant Platform Methods
#
# Organization and Authorization-related (e.g., groups) operations go here.
#
# TODO: Extract this into a separate class
################################################################################
# TODO: Expose entire payload as an input parameter
# Creates the organization +orgname+ as the superuser and returns a
# Pedant::Organization wrapping its validator client.
#
# If the org already exists (409) the validator key is regenerated
# instead. A 503 is retried up to MAX_ATTEMPTS times; any other status
# raises. Fix: the retry log previously printed the zero-based loop
# index ("0/MAX" on the first failure); it now reports one-based counts.
def create_org(orgname)
  payload = {
    "name" => orgname,
    "full_name" => orgname,
    "org_type" => "Business"
  }
  puts "Creating org #{orgname}"
  MAX_ATTEMPTS.times do |attempt|
    r = post("#{@server}/organizations", superuser, :payload => payload)
    # This re-assigns the variable 'r' and therefore can't be part of the case statement below
    if r.code == 409
      puts "The organization already exists! Regenerating validator key ..."
      r = post("#{Pedant::Config.account_server}/organizations/#{orgname}/_validator_key", superuser, {})
      raise "Bad error code #{r.code} from regenerating validator key: #{r}" unless r.code == 200
    end
    case r.code
    when 201, 200
      parsed = parse(r)
      # If we came here through the 409 codepath there won't be a client name so we're hardcoding it.
      validator_name = parsed["clientname"] || "#{orgname}-validator"
      validator_key = parsed["private_key"]
      validator = Pedant::Client.new(validator_name, validator_key)
      return Pedant::Organization.new(orgname, validator)
    when 503
      # Continue attempting by allowing the loop to continue.
      # +attempt+ is zero-based, so report it one-based for readability.
      puts "Failed attempting to contact #{@server} (#{attempt + 1}/#{MAX_ATTEMPTS})"
    else
      raise "Bad error code #{r.code} from create org: #{r}"
    end
  end
  raise "Failed attempting to contact #{@server} #{MAX_ATTEMPTS} times"
end
# Deletes the organization +orgname+ as the superuser. Non-200
# responses are logged, not raised.
def delete_org(orgname)
  puts "Deleting organization #{orgname} ..."
  response = delete("#{@server}/organizations/#{orgname}", superuser)
  puts "Unexpected response #{response.code}: #{response}" unless response.code == 200
end
# Invites +user+ into +orgname+ (as the superuser) and immediately
# accepts the invitation on the user's behalf. An already-existing
# association (409) is treated as success; anything else raises.
def associate_user_with_org(orgname, user)
  puts "Associating user #{user.name} with org #{orgname} ..."
  association_requests_url = "#{@server}/organizations/#{orgname}/association_requests"
  response = post("#{association_requests_url}", superuser, :payload => { "user" => user.name })

  if response.code == 201 # Created
    # Accept the freshly minted invite as the user themselves.
    association_id = parse(response)["uri"].split("/").last
    response = put("#{@server}/users/#{user.name}/association_requests/#{association_id}", user, :payload => { "response" => "accept" })
  elsif response.code == 409 && parse(response)["error"] == "The association already exists."
    # No problem!
  else
    raise "Bad response #{response.code} from association_requests: #{response}"
  end
end
# Adds +user+ to +groupname+ within +orgname+, preserving the group's
# existing actors and sub-groups. A no-op (with a log line) when the
# user is already a member.
def add_user_to_group(orgname, user, groupname)
  # Fetch the current group so the PUT can carry its existing membership.
  group_url = "#{@server}/organizations/#{orgname}/groups/#{groupname}"
  group = parse(get(group_url, superuser))

  if group["actors"].include?(user.name)
    puts "User #{user.name} is already in group #{orgname}/#{groupname}."
    return
  end

  puts "Adding user #{user.name} to group #{orgname}/#{groupname} ..."
  payload = { :groupname => groupname, :actors => { "users" => group["actors"], "groups" => group["groups"] } }
  payload[:actors]['users'].unshift(user.name)
  put(group_url, superuser, :payload => payload)
end
# Grants +user+ full ownership of +org+ — membership in the admins,
# billing-admins, and users groups — as though +user+ had created +org+
# themselves.
def make_owner(user, org)
associate_in_groups(user, org, ["admins", "billing-admins", "users"])
end
# Associates +user+ with +org+ as an ordinary (non-admin) member of the
# "users" group only.
def make_user(user, org)
associate_in_groups(user, org, ["users"])
end
# Associates +user+ with +org+, then places the user in each of the
# named +groups+ within that org.
def associate_in_groups(user, org, groups)
  associate_user_with_org(org.name, user)
  groups.each { |group| add_user_to_group(org.name, user, group) }
end
################################################################################
# Config-Aware Operations
#
# These operations for adding and deleting users and orgs respect
# config settings; i.e. they will not create new users or orgs if
# the config indicates they should already exist, and they won't
# delete them unless Pedant created them in the first place.
#
################################################################################
# The org name under test: the configured default org when running in
# default-org mode, otherwise the name from the :org config entry.
def pedant_orgname
  if Pedant::Config.use_default_org
    Pedant::Config.default_orgname
  else
    Pedant::Config[:org][:name]
  end
end
# Builds the test organization from config. When :create_me (or a
# default orgname) is set, the org is created on the server and org
# validation is flagged; otherwise the pre-existing org is wrapped with
# its configured validator key.
def org_from_config
  org = Pedant::Config[:org]
  # If default_orgname is set, override the settings for org
  name = pedant_orgname

  unless org[:create_me] || Pedant::Config.default_orgname
    puts "Using pre-created org. Skipping org creation validation tests."
    return Pedant::Organization.new(name, org[:validator_key])
  end

  @validate_org = true
  create_org(name)
end
# Deletes the test org, but only when Pedant created it and org
# deletion is enabled in the config; otherwise logs why it is skipped.
#
# Fix: corrected garbled word order in the skip message
# ("so will it not delete it" -> "so it will not delete it").
def delete_org_from_config
  if Pedant.config[:org][:create_me] && Pedant.config[:delete_org]
    delete_org(pedant_orgname)
  else
    puts "Pedant did not create the org, so it will not delete it"
  end
end
# When this is defined, pedant will run this before running anything else.
# Here it runs the org-creation validation suite first, but only when
# org validation was requested (i.e. the org was created by Pedant —
# see org_from_config).
def before_configure_rspec
validate_created_org(test_org) if validate_org
end
# Validates that org creation works end-to-end: creates an owner user
# for +org+, runs the "org_creation" spec suite through an embedded
# RSpec runner, and — if that suite fails — cleans up and aborts the
# whole process with exit status 2.
def validate_created_org(org)
puts "Validating Org Creation"
# The owner is created and cached up-front so the org_creation specs can
# act as an org admin.
@test_org_owner = create_user("#{org.name}_owner", associate: true, admin: true)
requestor_cache[:owner] = @test_org_owner
make_owner(self.test_org_owner, org)
::RSpec.configure do |c|
c.treat_symbols_as_metadata_keys_with_true_values = true
c.include Pedant::RSpec::Common
end
# Use verbose formatter arguments only when debugging org creation.
args = if Pedant.config.debug_org_creation
Pedant.config.rspec_formatting_args
else
[]
end
args.concat(Pedant::Gem.test_directories("org_creation"))
# A non-zero return from the runner means the validation specs failed;
# undo what we created before bailing out.
if ::RSpec::Core::Runner.run(args) > 0
delete_org_from_config
delete_user(test_org_owner)
puts "Error: unable to validate testing org"
exit 2
end
# We need to reset RSpec after using it. Below are the hacks necessary for reset
::RSpec.reset
::RSpec.configuration.extend RSpecShared::Methods
end
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
# Core gem metadata (name/version/authors), generated from the Rakefile.
s.name = "dynamic_fieldsets"
s.version = "0.1.15"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Jeremiah Hemphill", "Ethan Pemble", "John Carter"]
s.date = "2013-08-14"
s.description = "Dynamic fieldsets for rails controllers"
s.email = "jeremiah@cloudspace.com"
s.extra_rdoc_files = [
"README.rdoc"
]
# Complete file manifest shipped in the gem (generated by jeweler from
# the repository contents).
s.files = [
".rdebugrc",
".rspec",
"CHANGELOG",
"Gemfile",
"Gemfile.lock",
"MIT-LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"app/controllers/dynamic_fieldsets/fields_controller.rb",
"app/controllers/dynamic_fieldsets/fieldset_associators_controller.rb",
"app/controllers/dynamic_fieldsets/fieldset_children_controller.rb",
"app/controllers/dynamic_fieldsets/fieldsets_controller.rb",
"app/helpers/dynamic_fieldsets/fields_helper.rb",
"app/helpers/dynamic_fieldsets/fieldset_children_helper.rb",
"app/helpers/dynamic_fieldsets/nested_model_helper.rb",
"app/helpers/dynamic_fieldsets_helper.rb",
"app/models/dynamic_fieldsets.rb",
"app/models/dynamic_fieldsets/checkbox_field.rb",
"app/models/dynamic_fieldsets/date_field.rb",
"app/models/dynamic_fieldsets/datetime_field.rb",
"app/models/dynamic_fieldsets/dependency.rb",
"app/models/dynamic_fieldsets/dependency_clause.rb",
"app/models/dynamic_fieldsets/dependency_group.rb",
"app/models/dynamic_fieldsets/field.rb",
"app/models/dynamic_fieldsets/field_default.rb",
"app/models/dynamic_fieldsets/field_html_attribute.rb",
"app/models/dynamic_fieldsets/field_option.rb",
"app/models/dynamic_fieldsets/field_record.rb",
"app/models/dynamic_fieldsets/fieldset.rb",
"app/models/dynamic_fieldsets/fieldset_associator.rb",
"app/models/dynamic_fieldsets/fieldset_child.rb",
"app/models/dynamic_fieldsets/instruction_field.rb",
"app/models/dynamic_fieldsets/multiple_select_field.rb",
"app/models/dynamic_fieldsets/radio_field.rb",
"app/models/dynamic_fieldsets/select_field.rb",
"app/models/dynamic_fieldsets/text_field.rb",
"app/models/dynamic_fieldsets/textarea_field.rb",
"app/views/dynamic_fieldsets/fields/_disable_field_form.html.erb",
"app/views/dynamic_fieldsets/fields/_field_default_fields.html.erb",
"app/views/dynamic_fieldsets/fields/_field_html_attribute_fields.html.erb",
"app/views/dynamic_fieldsets/fields/_field_option_fields.html.erb",
"app/views/dynamic_fieldsets/fields/_form.html.erb",
"app/views/dynamic_fieldsets/fields/edit.html.erb",
"app/views/dynamic_fieldsets/fields/index.html.erb",
"app/views/dynamic_fieldsets/fields/new.html.erb",
"app/views/dynamic_fieldsets/fields/show.html.erb",
"app/views/dynamic_fieldsets/fieldset_associators/index.html.erb",
"app/views/dynamic_fieldsets/fieldset_associators/show.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_dependency_clause_fields.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_dependency_fields.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_dependency_group_fields.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_form.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/edit.html.erb",
"app/views/dynamic_fieldsets/fieldsets/_associate_child.html.erb",
"app/views/dynamic_fieldsets/fieldsets/_child.html.erb",
"app/views/dynamic_fieldsets/fieldsets/_form.html.erb",
"app/views/dynamic_fieldsets/fieldsets/children.html.erb",
"app/views/dynamic_fieldsets/fieldsets/edit.html.erb",
"app/views/dynamic_fieldsets/fieldsets/index.html.erb",
"app/views/dynamic_fieldsets/fieldsets/new.html.erb",
"app/views/dynamic_fieldsets/fieldsets/reorder.html.erb",
"app/views/dynamic_fieldsets/fieldsets/show.html.erb",
"app/views/dynamic_fieldsets/form_partials/_checkbox_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_date_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_datetime_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_input_footer.html.erb",
"app/views/dynamic_fieldsets/form_partials/_input_header.html.erb",
"app/views/dynamic_fieldsets/form_partials/_instruction_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_multiple_select_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_radio_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_select_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_text_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_textarea_field.html.erb",
"app/views/dynamic_fieldsets/shared/_fieldset_footer.html.erb",
"app/views/dynamic_fieldsets/shared/_fieldset_header.html.erb",
"app/views/dynamic_fieldsets/shared/_javascript_watcher.html.erb",
"app/views/dynamic_fieldsets/shared/_nested_model_javascript.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_incomplete.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_incomplete_footer.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_incomplete_header.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_instruction.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_multiple_answers.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_single_answer.html.erb",
"autotest/discover.rb",
# NOTE(review): ".routes.rb.swp" looks like an accidentally committed vim
# swap file being shipped in the gem — confirm and remove it from the
# repository (and regenerate via `rake gemspec`) rather than editing here.
"config/.routes.rb.swp",
"config/routes.rb",
"dynamic_fieldsets.gemspec",
"lib/dynamic_fieldsets.rb",
"lib/dynamic_fieldsets/config.rb",
"lib/dynamic_fieldsets/dynamic_fieldsets_in_model.rb",
"lib/dynamic_fieldsets/engine.rb",
"lib/dynamic_fieldsets/field_with_field_options.rb",
"lib/dynamic_fieldsets/field_with_multiple_answers.rb",
"lib/dynamic_fieldsets/field_with_single_answer.rb",
"lib/dynamic_fieldsets/railtie.rb",
"lib/generators/dynamic_fieldsets/controllers_generator.rb",
"lib/generators/dynamic_fieldsets/install_generator.rb",
"lib/generators/dynamic_fieldsets/templates/config.rb",
"lib/generators/dynamic_fieldsets/templates/migrations/install_migration.rb",
"lib/generators/dynamic_fieldsets/views_generator.rb",
"spec/dummy/Rakefile",
"spec/dummy/app/controllers/application_controller.rb",
"spec/dummy/app/controllers/information_forms_controller.rb",
"spec/dummy/app/helpers/application_helper.rb",
"spec/dummy/app/helpers/information_forms_helper.rb",
"spec/dummy/app/models/information_form.rb",
"spec/dummy/app/views/information_forms/_form.html.erb",
"spec/dummy/app/views/information_forms/dynamic_view.html.erb",
"spec/dummy/app/views/information_forms/edit.html.erb",
"spec/dummy/app/views/information_forms/index.html.erb",
"spec/dummy/app/views/information_forms/new.html.erb",
"spec/dummy/app/views/information_forms/show.html.erb",
"spec/dummy/app/views/layouts/application.html.erb",
"spec/dummy/config.ru",
"spec/dummy/config/application.rb",
"spec/dummy/config/boot.rb",
"spec/dummy/config/database.yml",
"spec/dummy/config/environment.rb",
"spec/dummy/config/environments/development.rb",
"spec/dummy/config/environments/production.rb",
"spec/dummy/config/environments/test.rb",
"spec/dummy/config/initializers/backtrace_silencers.rb",
"spec/dummy/config/initializers/dynamic_fieldsets.rb",
"spec/dummy/config/initializers/inflections.rb",
"spec/dummy/config/initializers/mime_types.rb",
"spec/dummy/config/initializers/secret_token.rb",
"spec/dummy/config/initializers/session_store.rb",
"spec/dummy/config/locales/en.yml",
"spec/dummy/config/routes.rb",
"spec/dummy/db/migrate/20110727210451_create_information_forms.rb",
"spec/dummy/db/migrate/20120213211033_create_dynamic_fieldsets_tables.rb",
"spec/dummy/db/schema.rb",
"spec/dummy/features/field.feature",
"spec/dummy/features/fieldset.feature",
"spec/dummy/features/fieldset_associator.feature",
"spec/dummy/features/fieldset_children.feature",
"spec/dummy/features/javascript_tests.feature",
"spec/dummy/features/step_definitions/debugging_steps.rb",
"spec/dummy/features/step_definitions/field_steps.rb",
"spec/dummy/features/step_definitions/fieldset_associator_steps.rb",
"spec/dummy/features/step_definitions/fieldset_children_steps.rb",
"spec/dummy/features/step_definitions/fieldset_steps.rb",
"spec/dummy/features/step_definitions/javascript_steps.rb",
"spec/dummy/features/step_definitions/web_steps.rb",
"spec/dummy/features/support/env.rb",
"spec/dummy/features/support/paths.rb",
"spec/dummy/features/support/selectors.rb",
"spec/dummy/public/404.html",
"spec/dummy/public/422.html",
"spec/dummy/public/500.html",
"spec/dummy/public/favicon.ico",
"spec/dummy/public/javascripts/application.js",
"spec/dummy/public/javascripts/jquery-1.6.2.min.js",
"spec/dummy/public/javascripts/jquery-ui-1.8.15.custom.min.js",
"spec/dummy/public/javascripts/jquery-ui-nestedSortable.js",
"spec/dummy/public/javascripts/jquery.min.js",
"spec/dummy/public/stylesheets/.gitkeep",
"spec/dummy/public/stylesheets/scaffold.css",
"spec/dummy/script/rails",
"spec/dynamic_fieldsets_helper_spec.rb",
"spec/dynamic_fieldsets_in_model_spec.rb",
"spec/dynamic_fieldsets_spec.rb",
"spec/field_with_field_options_spec.rb",
"spec/field_with_multiple_answers_spec.rb",
"spec/field_with_single_answer_spec.rb",
"spec/integration/navigation_spec.rb",
"spec/models/checkbox_field_spec.rb",
"spec/models/date_field_spec.rb",
"spec/models/datetime_field_spec.rb",
"spec/models/dependency_clause_spec.rb",
"spec/models/dependency_group_spec.rb",
"spec/models/dependency_spec.rb",
"spec/models/field_default_spec.rb",
"spec/models/field_html_attribute_spec.rb",
"spec/models/field_option_spec.rb",
"spec/models/field_record_spec.rb",
"spec/models/field_spec.rb",
"spec/models/fieldset_associator_spec.rb",
"spec/models/fieldset_child_spec.rb",
"spec/models/fieldset_spec.rb",
"spec/models/instruction_field_spec.rb",
"spec/models/multiple_select_field_spec.rb",
"spec/models/radio_field_spec.rb",
"spec/models/text_field_spec.rb",
"spec/models/textarea_field_spec.rb",
"spec/spec_helper.rb",
"spec/support/dependency_group_helper.rb",
"spec/support/dependency_helper.rb",
"spec/support/field_default_helper.rb",
"spec/support/field_helper.rb",
"spec/support/field_html_attribute_helper.rb",
"spec/support/field_option_helper.rb",
"spec/support/field_record_helper.rb",
"spec/support/fieldset_associator_helper.rb",
"spec/support/fieldset_child_helper.rb",
"spec/support/fieldset_helper.rb"
]
s.homepage = "http://github.com/jeremiahishere/dynamic_fieldsets"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.25"
s.summary = "Dynamic fieldsets for rails controllers"
# Dependency declarations, branched on the capabilities of the RubyGems
# version loading this spec (jeweler emits all three variants).
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
# RubyGems >= 1.2 distinguishes runtime vs. development dependencies.
s.add_runtime_dependency(%q<rails>, [">= 3.0.7"])
s.add_development_dependency(%q<capybara>, [">= 0.4.0"])
s.add_development_dependency(%q<sqlite3>, [">= 0"])
s.add_development_dependency(%q<ruby-debug19>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.3"])
s.add_development_dependency(%q<json>, [">= 0"])
s.add_development_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_development_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_development_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_development_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_development_dependency(%q<ZenTest>, [">= 0"])
s.add_development_dependency(%q<autotest-rails>, [">= 0"])
s.add_development_dependency(%q<cucumber>, [">= 0"])
s.add_development_dependency(%q<cucumber-rails>, [">= 0"])
s.add_development_dependency(%q<database_cleaner>, [">= 0"])
s.add_development_dependency(%q<yard>, ["~> 0.6.0"])
s.add_development_dependency(%q<ci_reporter>, [">= 0"])
else
# Older RubyGems: everything is a plain dependency.
s.add_dependency(%q<rails>, [">= 3.0.7"])
s.add_dependency(%q<capybara>, [">= 0.4.0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<ruby-debug19>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_dependency(%q<ZenTest>, [">= 0"])
s.add_dependency(%q<autotest-rails>, [">= 0"])
s.add_dependency(%q<cucumber>, [">= 0"])
s.add_dependency(%q<cucumber-rails>, [">= 0"])
s.add_dependency(%q<database_cleaner>, [">= 0"])
s.add_dependency(%q<yard>, ["~> 0.6.0"])
s.add_dependency(%q<ci_reporter>, [">= 0"])
end
else
# Pre-specification_version RubyGems: plain dependencies only.
s.add_dependency(%q<rails>, [">= 3.0.7"])
s.add_dependency(%q<capybara>, [">= 0.4.0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<ruby-debug19>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_dependency(%q<ZenTest>, [">= 0"])
s.add_dependency(%q<autotest-rails>, [">= 0"])
s.add_dependency(%q<cucumber>, [">= 0"])
s.add_dependency(%q<cucumber-rails>, [">= 0"])
s.add_dependency(%q<database_cleaner>, [">= 0"])
s.add_dependency(%q<yard>, ["~> 0.6.0"])
s.add_dependency(%q<ci_reporter>, [">= 0"])
end
end
Regenerate gemspec for version 0.1.16
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "dynamic_fieldsets"
s.version = "0.1.16"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Jeremiah Hemphill", "Ethan Pemble", "John Carter"]
s.date = "2013-08-15"
s.description = "Dynamic fieldsets for rails controllers"
s.email = "jeremiah@cloudspace.com"
s.extra_rdoc_files = [
"README.rdoc"
]
s.files = [
".rdebugrc",
".rspec",
"CHANGELOG",
"Gemfile",
"Gemfile.lock",
"MIT-LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"app/controllers/dynamic_fieldsets/fields_controller.rb",
"app/controllers/dynamic_fieldsets/fieldset_associators_controller.rb",
"app/controllers/dynamic_fieldsets/fieldset_children_controller.rb",
"app/controllers/dynamic_fieldsets/fieldsets_controller.rb",
"app/helpers/dynamic_fieldsets/fields_helper.rb",
"app/helpers/dynamic_fieldsets/fieldset_children_helper.rb",
"app/helpers/dynamic_fieldsets/nested_model_helper.rb",
"app/helpers/dynamic_fieldsets_helper.rb",
"app/models/dynamic_fieldsets.rb",
"app/models/dynamic_fieldsets/checkbox_field.rb",
"app/models/dynamic_fieldsets/date_field.rb",
"app/models/dynamic_fieldsets/datetime_field.rb",
"app/models/dynamic_fieldsets/dependency.rb",
"app/models/dynamic_fieldsets/dependency_clause.rb",
"app/models/dynamic_fieldsets/dependency_group.rb",
"app/models/dynamic_fieldsets/field.rb",
"app/models/dynamic_fieldsets/field_default.rb",
"app/models/dynamic_fieldsets/field_html_attribute.rb",
"app/models/dynamic_fieldsets/field_option.rb",
"app/models/dynamic_fieldsets/field_record.rb",
"app/models/dynamic_fieldsets/fieldset.rb",
"app/models/dynamic_fieldsets/fieldset_associator.rb",
"app/models/dynamic_fieldsets/fieldset_child.rb",
"app/models/dynamic_fieldsets/instruction_field.rb",
"app/models/dynamic_fieldsets/multiple_select_field.rb",
"app/models/dynamic_fieldsets/radio_field.rb",
"app/models/dynamic_fieldsets/select_field.rb",
"app/models/dynamic_fieldsets/text_field.rb",
"app/models/dynamic_fieldsets/textarea_field.rb",
"app/views/dynamic_fieldsets/fields/_disable_field_form.html.erb",
"app/views/dynamic_fieldsets/fields/_field_default_fields.html.erb",
"app/views/dynamic_fieldsets/fields/_field_html_attribute_fields.html.erb",
"app/views/dynamic_fieldsets/fields/_field_option_fields.html.erb",
"app/views/dynamic_fieldsets/fields/_form.html.erb",
"app/views/dynamic_fieldsets/fields/edit.html.erb",
"app/views/dynamic_fieldsets/fields/index.html.erb",
"app/views/dynamic_fieldsets/fields/new.html.erb",
"app/views/dynamic_fieldsets/fields/show.html.erb",
"app/views/dynamic_fieldsets/fieldset_associators/index.html.erb",
"app/views/dynamic_fieldsets/fieldset_associators/show.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_dependency_clause_fields.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_dependency_fields.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_dependency_group_fields.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_form.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/edit.html.erb",
"app/views/dynamic_fieldsets/fieldsets/_associate_child.html.erb",
"app/views/dynamic_fieldsets/fieldsets/_child.html.erb",
"app/views/dynamic_fieldsets/fieldsets/_form.html.erb",
"app/views/dynamic_fieldsets/fieldsets/children.html.erb",
"app/views/dynamic_fieldsets/fieldsets/edit.html.erb",
"app/views/dynamic_fieldsets/fieldsets/index.html.erb",
"app/views/dynamic_fieldsets/fieldsets/new.html.erb",
"app/views/dynamic_fieldsets/fieldsets/reorder.html.erb",
"app/views/dynamic_fieldsets/fieldsets/show.html.erb",
"app/views/dynamic_fieldsets/form_partials/_checkbox_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_date_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_datetime_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_input_footer.html.erb",
"app/views/dynamic_fieldsets/form_partials/_input_header.html.erb",
"app/views/dynamic_fieldsets/form_partials/_instruction_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_multiple_select_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_radio_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_select_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_text_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_textarea_field.html.erb",
"app/views/dynamic_fieldsets/shared/_fieldset_footer.html.erb",
"app/views/dynamic_fieldsets/shared/_fieldset_header.html.erb",
"app/views/dynamic_fieldsets/shared/_javascript_watcher.html.erb",
"app/views/dynamic_fieldsets/shared/_nested_model_javascript.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_incomplete.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_incomplete_footer.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_incomplete_header.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_instruction.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_multiple_answers.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_single_answer.html.erb",
"autotest/discover.rb",
"config/.routes.rb.swp",
"config/routes.rb",
"dynamic_fieldsets.gemspec",
"lib/dynamic_fieldsets.rb",
"lib/dynamic_fieldsets/config.rb",
"lib/dynamic_fieldsets/dynamic_fieldsets_in_model.rb",
"lib/dynamic_fieldsets/engine.rb",
"lib/dynamic_fieldsets/field_with_field_options.rb",
"lib/dynamic_fieldsets/field_with_multiple_answers.rb",
"lib/dynamic_fieldsets/field_with_single_answer.rb",
"lib/dynamic_fieldsets/railtie.rb",
"lib/generators/dynamic_fieldsets/controllers_generator.rb",
"lib/generators/dynamic_fieldsets/install_generator.rb",
"lib/generators/dynamic_fieldsets/templates/config.rb",
"lib/generators/dynamic_fieldsets/templates/migrations/install_migration.rb",
"lib/generators/dynamic_fieldsets/views_generator.rb",
"spec/dummy/Rakefile",
"spec/dummy/app/controllers/application_controller.rb",
"spec/dummy/app/controllers/information_forms_controller.rb",
"spec/dummy/app/helpers/application_helper.rb",
"spec/dummy/app/helpers/information_forms_helper.rb",
"spec/dummy/app/models/information_form.rb",
"spec/dummy/app/views/information_forms/_form.html.erb",
"spec/dummy/app/views/information_forms/dynamic_view.html.erb",
"spec/dummy/app/views/information_forms/edit.html.erb",
"spec/dummy/app/views/information_forms/index.html.erb",
"spec/dummy/app/views/information_forms/new.html.erb",
"spec/dummy/app/views/information_forms/show.html.erb",
"spec/dummy/app/views/layouts/application.html.erb",
"spec/dummy/config.ru",
"spec/dummy/config/application.rb",
"spec/dummy/config/boot.rb",
"spec/dummy/config/database.yml",
"spec/dummy/config/environment.rb",
"spec/dummy/config/environments/development.rb",
"spec/dummy/config/environments/production.rb",
"spec/dummy/config/environments/test.rb",
"spec/dummy/config/initializers/backtrace_silencers.rb",
"spec/dummy/config/initializers/dynamic_fieldsets.rb",
"spec/dummy/config/initializers/inflections.rb",
"spec/dummy/config/initializers/mime_types.rb",
"spec/dummy/config/initializers/secret_token.rb",
"spec/dummy/config/initializers/session_store.rb",
"spec/dummy/config/locales/en.yml",
"spec/dummy/config/routes.rb",
"spec/dummy/db/migrate/20110727210451_create_information_forms.rb",
"spec/dummy/db/migrate/20120213211033_create_dynamic_fieldsets_tables.rb",
"spec/dummy/db/schema.rb",
"spec/dummy/features/field.feature",
"spec/dummy/features/fieldset.feature",
"spec/dummy/features/fieldset_associator.feature",
"spec/dummy/features/fieldset_children.feature",
"spec/dummy/features/javascript_tests.feature",
"spec/dummy/features/step_definitions/debugging_steps.rb",
"spec/dummy/features/step_definitions/field_steps.rb",
"spec/dummy/features/step_definitions/fieldset_associator_steps.rb",
"spec/dummy/features/step_definitions/fieldset_children_steps.rb",
"spec/dummy/features/step_definitions/fieldset_steps.rb",
"spec/dummy/features/step_definitions/javascript_steps.rb",
"spec/dummy/features/step_definitions/web_steps.rb",
"spec/dummy/features/support/env.rb",
"spec/dummy/features/support/paths.rb",
"spec/dummy/features/support/selectors.rb",
"spec/dummy/public/404.html",
"spec/dummy/public/422.html",
"spec/dummy/public/500.html",
"spec/dummy/public/favicon.ico",
"spec/dummy/public/javascripts/application.js",
"spec/dummy/public/javascripts/jquery-1.6.2.min.js",
"spec/dummy/public/javascripts/jquery-ui-1.8.15.custom.min.js",
"spec/dummy/public/javascripts/jquery-ui-nestedSortable.js",
"spec/dummy/public/javascripts/jquery.min.js",
"spec/dummy/public/stylesheets/.gitkeep",
"spec/dummy/public/stylesheets/scaffold.css",
"spec/dummy/script/rails",
"spec/dynamic_fieldsets_helper_spec.rb",
"spec/dynamic_fieldsets_in_model_spec.rb",
"spec/dynamic_fieldsets_spec.rb",
"spec/field_with_field_options_spec.rb",
"spec/field_with_multiple_answers_spec.rb",
"spec/field_with_single_answer_spec.rb",
"spec/integration/navigation_spec.rb",
"spec/models/checkbox_field_spec.rb",
"spec/models/date_field_spec.rb",
"spec/models/datetime_field_spec.rb",
"spec/models/dependency_clause_spec.rb",
"spec/models/dependency_group_spec.rb",
"spec/models/dependency_spec.rb",
"spec/models/field_default_spec.rb",
"spec/models/field_html_attribute_spec.rb",
"spec/models/field_option_spec.rb",
"spec/models/field_record_spec.rb",
"spec/models/field_spec.rb",
"spec/models/fieldset_associator_spec.rb",
"spec/models/fieldset_child_spec.rb",
"spec/models/fieldset_spec.rb",
"spec/models/instruction_field_spec.rb",
"spec/models/multiple_select_field_spec.rb",
"spec/models/radio_field_spec.rb",
"spec/models/text_field_spec.rb",
"spec/models/textarea_field_spec.rb",
"spec/spec_helper.rb",
"spec/support/dependency_group_helper.rb",
"spec/support/dependency_helper.rb",
"spec/support/field_default_helper.rb",
"spec/support/field_helper.rb",
"spec/support/field_html_attribute_helper.rb",
"spec/support/field_option_helper.rb",
"spec/support/field_record_helper.rb",
"spec/support/fieldset_associator_helper.rb",
"spec/support/fieldset_child_helper.rb",
"spec/support/fieldset_helper.rb"
]
s.homepage = "http://github.com/jeremiahishere/dynamic_fieldsets"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.25"
s.summary = "Dynamic fieldsets for rails controllers"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rails>, [">= 3.0.7"])
s.add_development_dependency(%q<capybara>, [">= 0.4.0"])
s.add_development_dependency(%q<sqlite3>, [">= 0"])
s.add_development_dependency(%q<ruby-debug19>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.3"])
s.add_development_dependency(%q<json>, [">= 0"])
s.add_development_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_development_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_development_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_development_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_development_dependency(%q<ZenTest>, [">= 0"])
s.add_development_dependency(%q<autotest-rails>, [">= 0"])
s.add_development_dependency(%q<cucumber>, [">= 0"])
s.add_development_dependency(%q<cucumber-rails>, [">= 0"])
s.add_development_dependency(%q<database_cleaner>, [">= 0"])
s.add_development_dependency(%q<yard>, ["~> 0.6.0"])
s.add_development_dependency(%q<ci_reporter>, [">= 0"])
else
s.add_dependency(%q<rails>, [">= 3.0.7"])
s.add_dependency(%q<capybara>, [">= 0.4.0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<ruby-debug19>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_dependency(%q<ZenTest>, [">= 0"])
s.add_dependency(%q<autotest-rails>, [">= 0"])
s.add_dependency(%q<cucumber>, [">= 0"])
s.add_dependency(%q<cucumber-rails>, [">= 0"])
s.add_dependency(%q<database_cleaner>, [">= 0"])
s.add_dependency(%q<yard>, ["~> 0.6.0"])
s.add_dependency(%q<ci_reporter>, [">= 0"])
end
else
s.add_dependency(%q<rails>, [">= 3.0.7"])
s.add_dependency(%q<capybara>, [">= 0.4.0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<ruby-debug19>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_dependency(%q<ZenTest>, [">= 0"])
s.add_dependency(%q<autotest-rails>, [">= 0"])
s.add_dependency(%q<cucumber>, [">= 0"])
s.add_dependency(%q<cucumber-rails>, [">= 0"])
s.add_dependency(%q<database_cleaner>, [">= 0"])
s.add_dependency(%q<yard>, ["~> 0.6.0"])
s.add_dependency(%q<ci_reporter>, [">= 0"])
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "dynamic_fieldsets"
s.version = "0.1.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Jeremiah Hemphill", "Ethan Pemble", "John Carter"]
s.date = "2012-02-28"
s.description = "Dynamic fieldsets for rails controllers"
s.email = "jeremiah@cloudspace.com"
s.extra_rdoc_files = [
"README.rdoc"
]
s.files = [
".rdebugrc",
".rspec",
"CHANGELOG",
"Gemfile",
"Gemfile.lock",
"MIT-LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"app/controllers/dynamic_fieldsets/fields_controller.rb",
"app/controllers/dynamic_fieldsets/fieldset_associators_controller.rb",
"app/controllers/dynamic_fieldsets/fieldset_children_controller.rb",
"app/controllers/dynamic_fieldsets/fieldsets_controller.rb",
"app/helpers/dynamic_fieldsets/fields_helper.rb",
"app/helpers/dynamic_fieldsets/fieldset_children_helper.rb",
"app/helpers/dynamic_fieldsets/nested_model_helper.rb",
"app/helpers/dynamic_fieldsets_helper.rb",
"app/models/dynamic_fieldsets.rb",
"app/models/dynamic_fieldsets/checkbox_field.rb",
"app/models/dynamic_fieldsets/date_field.rb",
"app/models/dynamic_fieldsets/datetime_field.rb",
"app/models/dynamic_fieldsets/dependency.rb",
"app/models/dynamic_fieldsets/dependency_clause.rb",
"app/models/dynamic_fieldsets/dependency_group.rb",
"app/models/dynamic_fieldsets/field.rb",
"app/models/dynamic_fieldsets/field_default.rb",
"app/models/dynamic_fieldsets/field_html_attribute.rb",
"app/models/dynamic_fieldsets/field_option.rb",
"app/models/dynamic_fieldsets/field_record.rb",
"app/models/dynamic_fieldsets/fieldset.rb",
"app/models/dynamic_fieldsets/fieldset_associator.rb",
"app/models/dynamic_fieldsets/fieldset_child.rb",
"app/models/dynamic_fieldsets/instruction_field.rb",
"app/models/dynamic_fieldsets/multiple_select_field.rb",
"app/models/dynamic_fieldsets/radio_field.rb",
"app/models/dynamic_fieldsets/select_field.rb",
"app/models/dynamic_fieldsets/text_field.rb",
"app/models/dynamic_fieldsets/textarea_field.rb",
"app/views/dynamic_fieldsets/fields/_disable_field_form.html.erb",
"app/views/dynamic_fieldsets/fields/_field_default_fields.html.erb",
"app/views/dynamic_fieldsets/fields/_field_html_attribute_fields.html.erb",
"app/views/dynamic_fieldsets/fields/_field_option_fields.html.erb",
"app/views/dynamic_fieldsets/fields/_form.html.erb",
"app/views/dynamic_fieldsets/fields/edit.html.erb",
"app/views/dynamic_fieldsets/fields/index.html.erb",
"app/views/dynamic_fieldsets/fields/new.html.erb",
"app/views/dynamic_fieldsets/fields/show.html.erb",
"app/views/dynamic_fieldsets/fieldset_associators/index.html.erb",
"app/views/dynamic_fieldsets/fieldset_associators/show.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_dependency_clause_fields.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_dependency_fields.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_dependency_group_fields.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/_form.html.erb",
"app/views/dynamic_fieldsets/fieldset_children/edit.html.erb",
"app/views/dynamic_fieldsets/fieldsets/_associate_child.html.erb",
"app/views/dynamic_fieldsets/fieldsets/_child.html.erb",
"app/views/dynamic_fieldsets/fieldsets/_form.html.erb",
"app/views/dynamic_fieldsets/fieldsets/children.html.erb",
"app/views/dynamic_fieldsets/fieldsets/edit.html.erb",
"app/views/dynamic_fieldsets/fieldsets/index.html.erb",
"app/views/dynamic_fieldsets/fieldsets/new.html.erb",
"app/views/dynamic_fieldsets/fieldsets/reorder.html.erb",
"app/views/dynamic_fieldsets/fieldsets/show.html.erb",
"app/views/dynamic_fieldsets/form_partials/_checkbox_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_date_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_datetime_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_input_footer.html.erb",
"app/views/dynamic_fieldsets/form_partials/_input_header.html.erb",
"app/views/dynamic_fieldsets/form_partials/_instruction_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_multiple_select_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_radio_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_select_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_text_field.html.erb",
"app/views/dynamic_fieldsets/form_partials/_textarea_field.html.erb",
"app/views/dynamic_fieldsets/shared/_javascript_watcher.html.erb",
"app/views/dynamic_fieldsets/shared/_nested_model_javascript.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_incomplete.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_incomplete_footer.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_incomplete_header.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_instruction.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_multiple_answers.html.erb",
"app/views/dynamic_fieldsets/show_partials/_show_single_answer.html.erb",
"autotest/discover.rb",
"config/.routes.rb.swp",
"config/routes.rb",
"dynamic_fieldsets.gemspec",
"lib/dynamic_fieldsets.rb",
"lib/dynamic_fieldsets/config.rb",
"lib/dynamic_fieldsets/dynamic_fieldsets_in_model.rb",
"lib/dynamic_fieldsets/engine.rb",
"lib/dynamic_fieldsets/field_with_field_options.rb",
"lib/dynamic_fieldsets/field_with_multiple_answers.rb",
"lib/dynamic_fieldsets/field_with_single_answer.rb",
"lib/dynamic_fieldsets/railtie.rb",
"lib/generators/dynamic_fieldsets/controllers_generator.rb",
"lib/generators/dynamic_fieldsets/install_generator.rb",
"lib/generators/dynamic_fieldsets/templates/config.rb",
"lib/generators/dynamic_fieldsets/templates/migrations/install_migration.rb",
"lib/generators/dynamic_fieldsets/views_generator.rb",
"spec/dummy/Rakefile",
"spec/dummy/app/controllers/application_controller.rb",
"spec/dummy/app/controllers/information_forms_controller.rb",
"spec/dummy/app/helpers/application_helper.rb",
"spec/dummy/app/helpers/information_forms_helper.rb",
"spec/dummy/app/models/information_form.rb",
"spec/dummy/app/views/information_forms/_form.html.erb",
"spec/dummy/app/views/information_forms/dynamic_view.html.erb",
"spec/dummy/app/views/information_forms/edit.html.erb",
"spec/dummy/app/views/information_forms/index.html.erb",
"spec/dummy/app/views/information_forms/new.html.erb",
"spec/dummy/app/views/information_forms/show.html.erb",
"spec/dummy/app/views/layouts/application.html.erb",
"spec/dummy/config.ru",
"spec/dummy/config/application.rb",
"spec/dummy/config/boot.rb",
"spec/dummy/config/database.yml",
"spec/dummy/config/environment.rb",
"spec/dummy/config/environments/development.rb",
"spec/dummy/config/environments/production.rb",
"spec/dummy/config/environments/test.rb",
"spec/dummy/config/initializers/backtrace_silencers.rb",
"spec/dummy/config/initializers/dynamic_fieldsets.rb",
"spec/dummy/config/initializers/inflections.rb",
"spec/dummy/config/initializers/mime_types.rb",
"spec/dummy/config/initializers/secret_token.rb",
"spec/dummy/config/initializers/session_store.rb",
"spec/dummy/config/locales/en.yml",
"spec/dummy/config/routes.rb",
"spec/dummy/db/migrate/20110727210451_create_information_forms.rb",
"spec/dummy/db/migrate/20120213211033_create_dynamic_fieldsets_tables.rb",
"spec/dummy/db/schema.rb",
"spec/dummy/features/field.feature",
"spec/dummy/features/fieldset.feature",
"spec/dummy/features/fieldset_associator.feature",
"spec/dummy/features/fieldset_children.feature",
"spec/dummy/features/javascript_tests.feature",
"spec/dummy/features/step_definitions/debugging_steps.rb",
"spec/dummy/features/step_definitions/field_steps.rb",
"spec/dummy/features/step_definitions/fieldset_associator_steps.rb",
"spec/dummy/features/step_definitions/fieldset_children_steps.rb",
"spec/dummy/features/step_definitions/fieldset_steps.rb",
"spec/dummy/features/step_definitions/javascript_steps.rb",
"spec/dummy/features/step_definitions/web_steps.rb",
"spec/dummy/features/support/env.rb",
"spec/dummy/features/support/paths.rb",
"spec/dummy/features/support/selectors.rb",
"spec/dummy/public/404.html",
"spec/dummy/public/422.html",
"spec/dummy/public/500.html",
"spec/dummy/public/favicon.ico",
"spec/dummy/public/javascripts/application.js",
"spec/dummy/public/javascripts/jquery-1.6.2.min.js",
"spec/dummy/public/javascripts/jquery-ui-1.8.15.custom.min.js",
"spec/dummy/public/javascripts/jquery-ui-nestedSortable.js",
"spec/dummy/public/javascripts/jquery.min.js",
"spec/dummy/public/stylesheets/.gitkeep",
"spec/dummy/public/stylesheets/scaffold.css",
"spec/dummy/script/rails",
"spec/dynamic_fieldsets_helper_spec.rb",
"spec/dynamic_fieldsets_in_model_spec.rb",
"spec/dynamic_fieldsets_spec.rb",
"spec/field_with_field_options_spec.rb",
"spec/field_with_multiple_answers_spec.rb",
"spec/field_with_single_answer_spec.rb",
"spec/integration/navigation_spec.rb",
"spec/models/checkbox_field_spec.rb",
"spec/models/date_field_spec.rb",
"spec/models/datetime_field_spec.rb",
"spec/models/dependency_clause_spec.rb",
"spec/models/dependency_group_spec.rb",
"spec/models/dependency_spec.rb",
"spec/models/field_default_spec.rb",
"spec/models/field_html_attribute_spec.rb",
"spec/models/field_option_spec.rb",
"spec/models/field_record_spec.rb",
"spec/models/field_spec.rb",
"spec/models/fieldset_associator_spec.rb",
"spec/models/fieldset_child_spec.rb",
"spec/models/fieldset_spec.rb",
"spec/models/instruction_field_spec.rb",
"spec/models/multiple_select_field_spec.rb",
"spec/models/radio_field_spec.rb",
"spec/models/text_field_spec.rb",
"spec/models/textarea_field_spec.rb",
"spec/spec_helper.rb",
"spec/support/dependency_group_helper.rb",
"spec/support/dependency_helper.rb",
"spec/support/field_default_helper.rb",
"spec/support/field_helper.rb",
"spec/support/field_html_attribute_helper.rb",
"spec/support/field_option_helper.rb",
"spec/support/field_record_helper.rb",
"spec/support/fieldset_associator_helper.rb",
"spec/support/fieldset_child_helper.rb",
"spec/support/fieldset_helper.rb"
]
s.homepage = "http://github.com/jeremiahishere/dynamic_fieldsets"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.15"
s.summary = "Dynamic fieldsets for rails controllers"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rails>, ["~> 3.0.7"])
s.add_development_dependency(%q<capybara>, [">= 0.4.0"])
s.add_development_dependency(%q<sqlite3>, [">= 0"])
s.add_development_dependency(%q<ruby-debug19>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.3"])
s.add_development_dependency(%q<json>, [">= 0"])
s.add_development_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_development_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_development_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_development_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_development_dependency(%q<ZenTest>, [">= 0"])
s.add_development_dependency(%q<autotest-rails>, [">= 0"])
s.add_development_dependency(%q<cucumber>, [">= 0"])
s.add_development_dependency(%q<cucumber-rails>, [">= 0"])
s.add_development_dependency(%q<database_cleaner>, [">= 0"])
s.add_development_dependency(%q<yard>, ["~> 0.6.0"])
s.add_development_dependency(%q<ci_reporter>, [">= 0"])
else
s.add_dependency(%q<rails>, ["~> 3.0.7"])
s.add_dependency(%q<capybara>, [">= 0.4.0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<ruby-debug19>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_dependency(%q<ZenTest>, [">= 0"])
s.add_dependency(%q<autotest-rails>, [">= 0"])
s.add_dependency(%q<cucumber>, [">= 0"])
s.add_dependency(%q<cucumber-rails>, [">= 0"])
s.add_dependency(%q<database_cleaner>, [">= 0"])
s.add_dependency(%q<yard>, ["~> 0.6.0"])
s.add_dependency(%q<ci_reporter>, [">= 0"])
end
else
s.add_dependency(%q<rails>, ["~> 3.0.7"])
s.add_dependency(%q<capybara>, [">= 0.4.0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<ruby-debug19>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<rspec>, ["~> 2.6.0"])
s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
s.add_dependency(%q<ZenTest>, [">= 0"])
s.add_dependency(%q<autotest-rails>, [">= 0"])
s.add_dependency(%q<cucumber>, [">= 0"])
s.add_dependency(%q<cucumber-rails>, [">= 0"])
s.add_dependency(%q<database_cleaner>, [">= 0"])
s.add_dependency(%q<yard>, ["~> 0.6.0"])
s.add_dependency(%q<ci_reporter>, [">= 0"])
end
end
Regenerate gemspec for version 0.1.1
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for the dynamic_fieldsets gem, v0.1.1 (generated by jeweler from
# the Rakefile; regenerate with `rake gemspec` rather than editing by hand).
Gem::Specification.new do |s|
  # Gem identity and release metadata.
  s.name = "dynamic_fieldsets"
  s.version = "0.1.1"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Jeremiah Hemphill", "Ethan Pemble", "John Carter"]
  s.date = "2012-02-29"
  s.description = "Dynamic fieldsets for rails controllers"
  s.email = "jeremiah@cloudspace.com"
  # Extra files fed to RDoc in addition to the code itself.
  s.extra_rdoc_files = [
    "README.rdoc"
  ]
  # Complete manifest of files packaged into the gem, as captured from the
  # repository by jeweler at generation time.
  s.files = [
    ".rdebugrc",
    ".rspec",
    "CHANGELOG",
    "Gemfile",
    "Gemfile.lock",
    "MIT-LICENSE",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "app/controllers/dynamic_fieldsets/fields_controller.rb",
    "app/controllers/dynamic_fieldsets/fieldset_associators_controller.rb",
    "app/controllers/dynamic_fieldsets/fieldset_children_controller.rb",
    "app/controllers/dynamic_fieldsets/fieldsets_controller.rb",
    "app/helpers/dynamic_fieldsets/fields_helper.rb",
    "app/helpers/dynamic_fieldsets/fieldset_children_helper.rb",
    "app/helpers/dynamic_fieldsets/nested_model_helper.rb",
    "app/helpers/dynamic_fieldsets_helper.rb",
    "app/models/dynamic_fieldsets.rb",
    "app/models/dynamic_fieldsets/checkbox_field.rb",
    "app/models/dynamic_fieldsets/date_field.rb",
    "app/models/dynamic_fieldsets/datetime_field.rb",
    "app/models/dynamic_fieldsets/dependency.rb",
    "app/models/dynamic_fieldsets/dependency_clause.rb",
    "app/models/dynamic_fieldsets/dependency_group.rb",
    "app/models/dynamic_fieldsets/field.rb",
    "app/models/dynamic_fieldsets/field_default.rb",
    "app/models/dynamic_fieldsets/field_html_attribute.rb",
    "app/models/dynamic_fieldsets/field_option.rb",
    "app/models/dynamic_fieldsets/field_record.rb",
    "app/models/dynamic_fieldsets/fieldset.rb",
    "app/models/dynamic_fieldsets/fieldset_associator.rb",
    "app/models/dynamic_fieldsets/fieldset_child.rb",
    "app/models/dynamic_fieldsets/instruction_field.rb",
    "app/models/dynamic_fieldsets/multiple_select_field.rb",
    "app/models/dynamic_fieldsets/radio_field.rb",
    "app/models/dynamic_fieldsets/select_field.rb",
    "app/models/dynamic_fieldsets/text_field.rb",
    "app/models/dynamic_fieldsets/textarea_field.rb",
    "app/views/dynamic_fieldsets/fields/_disable_field_form.html.erb",
    "app/views/dynamic_fieldsets/fields/_field_default_fields.html.erb",
    "app/views/dynamic_fieldsets/fields/_field_html_attribute_fields.html.erb",
    "app/views/dynamic_fieldsets/fields/_field_option_fields.html.erb",
    "app/views/dynamic_fieldsets/fields/_form.html.erb",
    "app/views/dynamic_fieldsets/fields/edit.html.erb",
    "app/views/dynamic_fieldsets/fields/index.html.erb",
    "app/views/dynamic_fieldsets/fields/new.html.erb",
    "app/views/dynamic_fieldsets/fields/show.html.erb",
    "app/views/dynamic_fieldsets/fieldset_associators/index.html.erb",
    "app/views/dynamic_fieldsets/fieldset_associators/show.html.erb",
    "app/views/dynamic_fieldsets/fieldset_children/_dependency_clause_fields.html.erb",
    "app/views/dynamic_fieldsets/fieldset_children/_dependency_fields.html.erb",
    "app/views/dynamic_fieldsets/fieldset_children/_dependency_group_fields.html.erb",
    "app/views/dynamic_fieldsets/fieldset_children/_form.html.erb",
    "app/views/dynamic_fieldsets/fieldset_children/edit.html.erb",
    "app/views/dynamic_fieldsets/fieldsets/_associate_child.html.erb",
    "app/views/dynamic_fieldsets/fieldsets/_child.html.erb",
    "app/views/dynamic_fieldsets/fieldsets/_form.html.erb",
    "app/views/dynamic_fieldsets/fieldsets/children.html.erb",
    "app/views/dynamic_fieldsets/fieldsets/edit.html.erb",
    "app/views/dynamic_fieldsets/fieldsets/index.html.erb",
    "app/views/dynamic_fieldsets/fieldsets/new.html.erb",
    "app/views/dynamic_fieldsets/fieldsets/reorder.html.erb",
    "app/views/dynamic_fieldsets/fieldsets/show.html.erb",
    "app/views/dynamic_fieldsets/form_partials/_checkbox_field.html.erb",
    "app/views/dynamic_fieldsets/form_partials/_date_field.html.erb",
    "app/views/dynamic_fieldsets/form_partials/_datetime_field.html.erb",
    "app/views/dynamic_fieldsets/form_partials/_input_footer.html.erb",
    "app/views/dynamic_fieldsets/form_partials/_input_header.html.erb",
    "app/views/dynamic_fieldsets/form_partials/_instruction_field.html.erb",
    "app/views/dynamic_fieldsets/form_partials/_multiple_select_field.html.erb",
    "app/views/dynamic_fieldsets/form_partials/_radio_field.html.erb",
    "app/views/dynamic_fieldsets/form_partials/_select_field.html.erb",
    "app/views/dynamic_fieldsets/form_partials/_text_field.html.erb",
    "app/views/dynamic_fieldsets/form_partials/_textarea_field.html.erb",
    "app/views/dynamic_fieldsets/shared/_javascript_watcher.html.erb",
    "app/views/dynamic_fieldsets/shared/_nested_model_javascript.html.erb",
    "app/views/dynamic_fieldsets/show_partials/_show_incomplete.html.erb",
    "app/views/dynamic_fieldsets/show_partials/_show_incomplete_footer.html.erb",
    "app/views/dynamic_fieldsets/show_partials/_show_incomplete_header.html.erb",
    "app/views/dynamic_fieldsets/show_partials/_show_instruction.html.erb",
    "app/views/dynamic_fieldsets/show_partials/_show_multiple_answers.html.erb",
    "app/views/dynamic_fieldsets/show_partials/_show_single_answer.html.erb",
    "autotest/discover.rb",
    "config/.routes.rb.swp",
    "config/routes.rb",
    "dynamic_fieldsets.gemspec",
    "lib/dynamic_fieldsets.rb",
    "lib/dynamic_fieldsets/config.rb",
    "lib/dynamic_fieldsets/dynamic_fieldsets_in_model.rb",
    "lib/dynamic_fieldsets/engine.rb",
    "lib/dynamic_fieldsets/field_with_field_options.rb",
    "lib/dynamic_fieldsets/field_with_multiple_answers.rb",
    "lib/dynamic_fieldsets/field_with_single_answer.rb",
    "lib/dynamic_fieldsets/railtie.rb",
    "lib/generators/dynamic_fieldsets/controllers_generator.rb",
    "lib/generators/dynamic_fieldsets/install_generator.rb",
    "lib/generators/dynamic_fieldsets/templates/config.rb",
    "lib/generators/dynamic_fieldsets/templates/migrations/install_migration.rb",
    "lib/generators/dynamic_fieldsets/views_generator.rb",
    "spec/dummy/Rakefile",
    "spec/dummy/app/controllers/application_controller.rb",
    "spec/dummy/app/controllers/information_forms_controller.rb",
    "spec/dummy/app/helpers/application_helper.rb",
    "spec/dummy/app/helpers/information_forms_helper.rb",
    "spec/dummy/app/models/information_form.rb",
    "spec/dummy/app/views/information_forms/_form.html.erb",
    "spec/dummy/app/views/information_forms/dynamic_view.html.erb",
    "spec/dummy/app/views/information_forms/edit.html.erb",
    "spec/dummy/app/views/information_forms/index.html.erb",
    "spec/dummy/app/views/information_forms/new.html.erb",
    "spec/dummy/app/views/information_forms/show.html.erb",
    "spec/dummy/app/views/layouts/application.html.erb",
    "spec/dummy/config.ru",
    "spec/dummy/config/application.rb",
    "spec/dummy/config/boot.rb",
    "spec/dummy/config/database.yml",
    "spec/dummy/config/environment.rb",
    "spec/dummy/config/environments/development.rb",
    "spec/dummy/config/environments/production.rb",
    "spec/dummy/config/environments/test.rb",
    "spec/dummy/config/initializers/backtrace_silencers.rb",
    "spec/dummy/config/initializers/dynamic_fieldsets.rb",
    "spec/dummy/config/initializers/inflections.rb",
    "spec/dummy/config/initializers/mime_types.rb",
    "spec/dummy/config/initializers/secret_token.rb",
    "spec/dummy/config/initializers/session_store.rb",
    "spec/dummy/config/locales/en.yml",
    "spec/dummy/config/routes.rb",
    "spec/dummy/db/migrate/20110727210451_create_information_forms.rb",
    "spec/dummy/db/migrate/20120213211033_create_dynamic_fieldsets_tables.rb",
    "spec/dummy/db/schema.rb",
    "spec/dummy/features/field.feature",
    "spec/dummy/features/fieldset.feature",
    "spec/dummy/features/fieldset_associator.feature",
    "spec/dummy/features/fieldset_children.feature",
    "spec/dummy/features/javascript_tests.feature",
    "spec/dummy/features/step_definitions/debugging_steps.rb",
    "spec/dummy/features/step_definitions/field_steps.rb",
    "spec/dummy/features/step_definitions/fieldset_associator_steps.rb",
    "spec/dummy/features/step_definitions/fieldset_children_steps.rb",
    "spec/dummy/features/step_definitions/fieldset_steps.rb",
    "spec/dummy/features/step_definitions/javascript_steps.rb",
    "spec/dummy/features/step_definitions/web_steps.rb",
    "spec/dummy/features/support/env.rb",
    "spec/dummy/features/support/paths.rb",
    "spec/dummy/features/support/selectors.rb",
    "spec/dummy/public/404.html",
    "spec/dummy/public/422.html",
    "spec/dummy/public/500.html",
    "spec/dummy/public/favicon.ico",
    "spec/dummy/public/javascripts/application.js",
    "spec/dummy/public/javascripts/jquery-1.6.2.min.js",
    "spec/dummy/public/javascripts/jquery-ui-1.8.15.custom.min.js",
    "spec/dummy/public/javascripts/jquery-ui-nestedSortable.js",
    "spec/dummy/public/javascripts/jquery.min.js",
    "spec/dummy/public/stylesheets/.gitkeep",
    "spec/dummy/public/stylesheets/scaffold.css",
    "spec/dummy/script/rails",
    "spec/dynamic_fieldsets_helper_spec.rb",
    "spec/dynamic_fieldsets_in_model_spec.rb",
    "spec/dynamic_fieldsets_spec.rb",
    "spec/field_with_field_options_spec.rb",
    "spec/field_with_multiple_answers_spec.rb",
    "spec/field_with_single_answer_spec.rb",
    "spec/integration/navigation_spec.rb",
    "spec/models/checkbox_field_spec.rb",
    "spec/models/date_field_spec.rb",
    "spec/models/datetime_field_spec.rb",
    "spec/models/dependency_clause_spec.rb",
    "spec/models/dependency_group_spec.rb",
    "spec/models/dependency_spec.rb",
    "spec/models/field_default_spec.rb",
    "spec/models/field_html_attribute_spec.rb",
    "spec/models/field_option_spec.rb",
    "spec/models/field_record_spec.rb",
    "spec/models/field_spec.rb",
    "spec/models/fieldset_associator_spec.rb",
    "spec/models/fieldset_child_spec.rb",
    "spec/models/fieldset_spec.rb",
    "spec/models/instruction_field_spec.rb",
    "spec/models/multiple_select_field_spec.rb",
    "spec/models/radio_field_spec.rb",
    "spec/models/text_field_spec.rb",
    "spec/models/textarea_field_spec.rb",
    "spec/spec_helper.rb",
    "spec/support/dependency_group_helper.rb",
    "spec/support/dependency_helper.rb",
    "spec/support/field_default_helper.rb",
    "spec/support/field_helper.rb",
    "spec/support/field_html_attribute_helper.rb",
    "spec/support/field_option_helper.rb",
    "spec/support/field_record_helper.rb",
    "spec/support/fieldset_associator_helper.rb",
    "spec/support/fieldset_child_helper.rb",
    "spec/support/fieldset_helper.rb"
  ]
  s.homepage = "http://github.com/jeremiahishere/dynamic_fieldsets"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  # RubyGems version used when this spec was generated.
  s.rubygems_version = "1.8.15"
  s.summary = "Dynamic fieldsets for rails controllers"
  # Dependency declarations. Jeweler emits three equivalent branches so the
  # gemspec degrades gracefully on older RubyGems: modern RubyGems (>= 1.2)
  # distinguishes runtime from development dependencies; older versions only
  # understand plain add_dependency.
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      # Modern RubyGems: rails is the only runtime dependency; everything
      # else is needed only for developing/testing the gem itself.
      s.add_runtime_dependency(%q<rails>, ["~> 3.0.7"])
      s.add_development_dependency(%q<capybara>, [">= 0.4.0"])
      s.add_development_dependency(%q<sqlite3>, [">= 0"])
      s.add_development_dependency(%q<ruby-debug19>, [">= 0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_development_dependency(%q<json>, [">= 0"])
      s.add_development_dependency(%q<rspec>, ["~> 2.6.0"])
      s.add_development_dependency(%q<rspec-rails>, ["~> 2.6.1"])
      s.add_development_dependency(%q<shoulda>, ["~> 3.0.0"])
      s.add_development_dependency(%q<ruby_parser>, ["~> 2.3.1"])
      s.add_development_dependency(%q<ZenTest>, [">= 0"])
      s.add_development_dependency(%q<autotest-rails>, [">= 0"])
      s.add_development_dependency(%q<cucumber>, [">= 0"])
      s.add_development_dependency(%q<cucumber-rails>, [">= 0"])
      s.add_development_dependency(%q<database_cleaner>, [">= 0"])
      s.add_development_dependency(%q<yard>, ["~> 0.6.0"])
      s.add_development_dependency(%q<ci_reporter>, [">= 0"])
    else
      # RubyGems < 1.2: no runtime/development distinction available.
      s.add_dependency(%q<rails>, ["~> 3.0.7"])
      s.add_dependency(%q<capybara>, [">= 0.4.0"])
      s.add_dependency(%q<sqlite3>, [">= 0"])
      s.add_dependency(%q<ruby-debug19>, [">= 0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_dependency(%q<json>, [">= 0"])
      s.add_dependency(%q<rspec>, ["~> 2.6.0"])
      s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
      s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
      s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
      s.add_dependency(%q<ZenTest>, [">= 0"])
      s.add_dependency(%q<autotest-rails>, [">= 0"])
      s.add_dependency(%q<cucumber>, [">= 0"])
      s.add_dependency(%q<cucumber-rails>, [">= 0"])
      s.add_dependency(%q<database_cleaner>, [">= 0"])
      s.add_dependency(%q<yard>, ["~> 0.6.0"])
      s.add_dependency(%q<ci_reporter>, [">= 0"])
    end
  else
    # Very old RubyGems without specification_version support.
    s.add_dependency(%q<rails>, ["~> 3.0.7"])
    s.add_dependency(%q<capybara>, [">= 0.4.0"])
    s.add_dependency(%q<sqlite3>, [">= 0"])
    s.add_dependency(%q<ruby-debug19>, [">= 0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
    s.add_dependency(%q<json>, [">= 0"])
    s.add_dependency(%q<rspec>, ["~> 2.6.0"])
    s.add_dependency(%q<rspec-rails>, ["~> 2.6.1"])
    s.add_dependency(%q<shoulda>, ["~> 3.0.0"])
    s.add_dependency(%q<ruby_parser>, ["~> 2.3.1"])
    s.add_dependency(%q<ZenTest>, [">= 0"])
    s.add_dependency(%q<autotest-rails>, [">= 0"])
    s.add_dependency(%q<cucumber>, [">= 0"])
    s.add_dependency(%q<cucumber-rails>, [">= 0"])
    s.add_dependency(%q<database_cleaner>, [">= 0"])
    s.add_dependency(%q<yard>, ["~> 0.6.0"])
    s.add_dependency(%q<ci_reporter>, [">= 0"])
  end
end
|
# table_catalog | postgres_to_redshift
# table_schema | public
# table_name | films
# column_name | description
# ordinal_position | 2
# column_default |
# is_nullable | YES
# data_type | character varying
# character_maximum_length | 255
# character_octet_length | 1020
# numeric_precision |
# numeric_precision_radix |
# numeric_scale |
# datetime_precision |
# interval_type |
# interval_precision |
# character_set_catalog |
# character_set_schema |
# character_set_name |
# collation_catalog |
# collation_schema |
# collation_name |
# domain_catalog |
# domain_schema |
# domain_name |
# udt_catalog | postgres_to_redshift
# udt_schema | pg_catalog
# udt_name | varchar
# scope_catalog |
# scope_schema |
# scope_name |
# maximum_cardinality |
# dtd_identifier | 2
# is_self_referencing | NO
# is_identity | NO
# identity_generation |
# identity_start |
# identity_increment |
# identity_maximum |
# identity_minimum |
# identity_cycle |
# is_generated | NEVER
# generation_expression |
# is_updatable | YES
#
module PostgresToRedshift
  # Wraps a single row of PostgreSQL's information_schema.columns (see the
  # sample record in the header comment above) and renders the column for a
  # Redshift-bound COPY ... SELECT statement.
  #
  # Defined with the nested module form (instead of the compact
  # `class PostgresToRedshift::Column`) so loading this file does not raise
  # NameError when it is required before the parent module's main file.
  class Column
    # Raw information_schema.columns attributes (string-keyed hash).
    attr_accessor :attributes

    # Postgres types with no native Redshift equivalent are cast on export.
    # 65535 is Redshift's maximum VARCHAR length; MONEY becomes a
    # fixed-precision decimal.
    #
    # FIX: "jsonb" was missing from this map. Redshift cannot ingest jsonb
    # any more than it can json, so jsonb columns must also be flattened to
    # a VARCHAR during the copy.
    CAST_TYPES_FOR_COPY = {
      "text" => "CHARACTER VARYING(65535)",
      "json" => "CHARACTER VARYING(65535)",
      "jsonb" => "CHARACTER VARYING(65535)",
      "bytea" => "CHARACTER VARYING(65535)",
      "money" => "DECIMAL(19,2)",
      "oid" => "CHARACTER VARYING(65535)",
    }

    # attributes: one row from information_schema.columns.
    def initialize(attributes: )
      self.attributes = attributes
    end

    # Column name as reported by Postgres.
    def name
      attributes["column_name"]
    end

    # SELECT-list fragment for the COPY statement; wraps the column in a
    # CAST when its type has no direct Redshift equivalent.
    def name_for_copy
      if needs_type_cast?
        %Q[CAST("#{name}" AS #{data_type_for_copy}) AS #{name}]
      else
        %Q["#{name}"]
      end
    end

    # The column's Postgres-side data type.
    def data_type
      attributes["data_type"]
    end

    # Redshift-safe data type used on the export side (falls back to the
    # original type when no cast is required).
    def data_type_for_copy
      CAST_TYPES_FOR_COPY[data_type] || data_type
    end

    private

    # True when exporting this column requires an explicit CAST.
    def needs_type_cast?
      data_type != data_type_for_copy
    end
  end
end
Allow jsonb
# table_catalog | postgres_to_redshift
# table_schema | public
# table_name | films
# column_name | description
# ordinal_position | 2
# column_default |
# is_nullable | YES
# data_type | character varying
# character_maximum_length | 255
# character_octet_length | 1020
# numeric_precision |
# numeric_precision_radix |
# numeric_scale |
# datetime_precision |
# interval_type |
# interval_precision |
# character_set_catalog |
# character_set_schema |
# character_set_name |
# collation_catalog |
# collation_schema |
# collation_name |
# domain_catalog |
# domain_schema |
# domain_name |
# udt_catalog | postgres_to_redshift
# udt_schema | pg_catalog
# udt_name | varchar
# scope_catalog |
# scope_schema |
# scope_name |
# maximum_cardinality |
# dtd_identifier | 2
# is_self_referencing | NO
# is_identity | NO
# identity_generation |
# identity_start |
# identity_increment |
# identity_maximum |
# identity_minimum |
# identity_cycle |
# is_generated | NEVER
# generation_expression |
# is_updatable | YES
#
# Represents one column pulled from PostgreSQL's information_schema (see the
# sample record documented above) and knows the Redshift-safe way to select
# it during a COPY export.
class PostgresToRedshift::Column
  # Raw information_schema.columns attributes (string-keyed hash).
  attr_accessor :attributes

  # Postgres types that Redshift cannot ingest directly, mapped to the type
  # they are cast to on export. 65535 is Redshift's VARCHAR ceiling.
  CAST_TYPES_FOR_COPY = {
    "text"  => "CHARACTER VARYING(65535)",
    "json"  => "CHARACTER VARYING(65535)",
    "jsonb" => "CHARACTER VARYING(65535)",
    "bytea" => "CHARACTER VARYING(65535)",
    "money" => "DECIMAL(19,2)",
    "oid"   => "CHARACTER VARYING(65535)",
  }

  # attributes: one row from information_schema.columns.
  def initialize(attributes: )
    self.attributes = attributes
  end

  # Column name as reported by information_schema.
  def name
    attributes["column_name"]
  end

  # SELECT-list fragment for the COPY statement; wraps the column in a CAST
  # when its type has no direct Redshift equivalent.
  def name_for_copy
    return %Q["#{name}"] unless needs_type_cast?

    %Q[CAST("#{name}" AS #{data_type_for_copy}) AS #{name}]
  end

  # Postgres-side data type.
  def data_type
    attributes["data_type"]
  end

  # Type used on the Redshift side (falls back to the Postgres type).
  def data_type_for_copy
    CAST_TYPES_FOR_COPY[data_type] || data_type
  end

  private

  # True when exporting this column requires an explicit CAST.
  def needs_type_cast?
    data_type_for_copy != data_type
  end
end
|
require "nicoquery/api/tag_search_rss"
require "nicoquery/object_mapper/tag_search_rss"
require "nicoquery/object/movie"
module NicoQuery
  module Object
    # Result of a tag search: exposes the feed's metadata plus the list of
    # Movie objects built from the RSS items.
    class TagSearch
      attr_accessor :movies

      # Delegate feed-level metadata readers to the parsed RSS meta object.
      [
        'title',
        'url',
        'link',
        'description',
        'publish_date',
        'creator',
      ].each do |field_name|
        define_method(field_name) { @hash.meta.send field_name }
      end

      # All four keywords are required. The original signature
      # `def initialize(tag: tag, ...)` used each argument as its own
      # default — a circular argument reference, warned about and rejected
      # by modern Ruby; required keywords express the actual intent.
      def initialize(tag:, sort:, order:, page:)
        @movies = []
        # Fetch and parse the tag-search RSS feed.
        source = NicoQuery::Api::TagSearchRss.new(tag: tag, sort: sort, order: order, page: page).get
        @hash = NicoQuery::ObjectMapper::TagSearchRss.new source

        # Build a Movie per RSS item, seeding each with its feed data.
        @hash.items.map do |item|
          movie = NicoQuery::Object::Movie.new item.video_id
          movie.set_tag_search_rss_source item
          @movies.push movie
        end
      end
    end
  end
end
# <title>マイリスト to_test‐ニコニコ動画</title>
# <link>http://www.nicovideo.jp/mylist/38369702</link>
# <atom:link rel="self" type="application/rss+xml" href="http://www.nicovideo.jp/mylist/38369702?rss=2.0"/>
# <description></description>
# <pubDate>Sat, 17 Aug 2013 22:51:40 +0900</pubDate>
# <lastBuildDate>Sat, 17 Aug 2013 22:51:40 +0900</lastBuildDate>
# <generator>ニコニコ動画</generator>
# <dc:creator>うえおに</dc:creator>
# <language>ja-jp</language>
# <copyright>(c) niwango, inc. All rights reserved.</copyright>
# <docs>http://blogs.law.harvard.edu/tech/rss</docs>
Fix bug: add missing require of nicoquery/object/tag_search
require "nicoquery/api/tag_search_rss"
require "nicoquery/object/tag_search"
require "nicoquery/object_mapper/tag_search_rss"
require "nicoquery/object/movie"
module NicoQuery
  module Object
    # Result of a tag search: exposes the feed's metadata plus the list of
    # Movie objects built from the RSS items.
    class TagSearch
      attr_accessor :movies

      # Delegate feed-level metadata readers to the parsed RSS meta object.
      [
        'title',
        'url',
        'link',
        'description',
        'publish_date',
        'creator',
      ].each do |field_name|
        define_method(field_name) { @hash.meta.send field_name }
      end

      # All four keywords are required. The original signature
      # `def initialize(tag: tag, ...)` used each argument as its own
      # default — a circular argument reference, warned about and rejected
      # by modern Ruby; required keywords express the actual intent.
      def initialize(tag:, sort:, order:, page:)
        @movies = []
        # Fetch and parse the tag-search RSS feed.
        source = NicoQuery::Api::TagSearchRss.new(tag: tag, sort: sort, order: order, page: page).get
        @hash = NicoQuery::ObjectMapper::TagSearchRss.new source

        # Build a Movie per RSS item, seeding each with its feed data.
        @hash.items.map do |item|
          movie = NicoQuery::Object::Movie.new item.video_id
          movie.set_tag_search_rss_source item
          @movies.push movie
        end
      end
    end
  end
end
# <title>マイリスト to_test‐ニコニコ動画</title>
# <link>http://www.nicovideo.jp/mylist/38369702</link>
# <atom:link rel="self" type="application/rss+xml" href="http://www.nicovideo.jp/mylist/38369702?rss=2.0"/>
# <description></description>
# <pubDate>Sat, 17 Aug 2013 22:51:40 +0900</pubDate>
# <lastBuildDate>Sat, 17 Aug 2013 22:51:40 +0900</lastBuildDate>
# <generator>ニコニコ動画</generator>
# <dc:creator>うえおに</dc:creator>
# <language>ja-jp</language>
# <copyright>(c) niwango, inc. All rights reserved.</copyright>
# <docs>http://blogs.law.harvard.edu/tech/rss</docs>
|
#!/usr/bin/ruby
# json_stats_fetcher.rb - Publish Ostrich stats to Ganglia.
#
# The latest version is always available at:
# http://github.com/twitter/ostrich/blob/master/src/scripts/json_stats_fetcher.rb
#
require 'rubygems'
require 'getoptlong'
require 'socket'
require 'json'
require 'timeout'
require 'open-uri'
def valid_gmetric_name?(name)
  # Determines if a gmetric name is valid.
  #
  # Ganglia is very intolerant of metrics named with non-standard characters,
  # where non-standard contains most everything other than letters, numbers
  # and some common symbols.
  #
  # Anchored with \A/\z rather than ^/$: the latter match per-line, so a
  # name containing an embedded newline (e.g. "ok\n; rm -rf /") would pass
  # even though it is later interpolated into a shell command.
  #
  # Returns true if the metric is a valid gmetric name, otherwise false.
  if name =~ /\A[A-Za-z0-9_-]+\z/
    true
  else
    $stderr.puts "Metric <#{name}> contains invalid characters."
    false
  end
end
# Report a single metric, either by shelling out to gmetric or (dry-run
# mode) by printing it to stdout. Invalid names are skipped.
def report_metric(name, value, units)
  return unless valid_gmetric_name?(name)

  if $report_to_ganglia
    # Multi-argument Kernel#system bypasses the shell entirely, so values
    # and units coming from the (untrusted) stats JSON cannot inject shell
    # commands the way the old single-string form allowed.
    system("gmetric", "-t", "float",
           "-n", "#{$ganglia_prefix}#{name}",
           "-v", value.to_s,
           "-u", units.to_s,
           "-d", $stat_timeout.to_s)
  else
    puts "#{$ganglia_prefix}#{name}=#{value} #{units}"
  end
end
# Reporting configuration; overridden by the command-line flags parsed below.
$report_to_ganglia = true
$ganglia_prefix = ''
$stat_timeout = 86400
# Stats whose names match this pattern are skipped when reporting.
$pattern = /^x-/
hostname = "localhost"
port = 9989
use_web = false
# Print command-line help; takes the current port so the help text shows
# the effective default.
def usage(port)
  puts
  puts "usage: json_stats_fetcher.rb [options]"
  puts "options:"
  puts "    -n               say what I would report, but don't report it"
  puts "    -w               use web interface"
  puts "    -h <hostname>    connect to another host (default: localhost)"
  puts "    -i <pattern>     ignore all stats matching pattern (default: #{$pattern.inspect})"
  puts "    -p <port>        connect to another port (default: #{port})"
  puts "    -P <prefix>      optional prefix for ganglia names"
  puts
end
# Command-line option definitions and parsing; each flag updates the
# globals/locals configured above.
opts = GetoptLong.new(
  [ '--help', GetoptLong::NO_ARGUMENT ],
  [ '-n', GetoptLong::NO_ARGUMENT ],
  [ '-h', GetoptLong::REQUIRED_ARGUMENT ],
  [ '-i', GetoptLong::REQUIRED_ARGUMENT ],
  [ '-p', GetoptLong::REQUIRED_ARGUMENT ],
  [ '-P', GetoptLong::REQUIRED_ARGUMENT ],
  [ '-w', GetoptLong::NO_ARGUMENT ]
)

opts.each do |opt, arg|
  case opt
  when '--help'
    usage(port)
    exit 0
  when '-n'
    # Dry run: print metrics instead of calling gmetric.
    $report_to_ganglia = false
  when '-h'
    hostname = arg
  when '-i'
    $pattern = /#{arg}/
  when '-p'
    port = arg.to_i
  when '-P'
    $ganglia_prefix = arg
  when '-w'
    # Web mode implies the HTTP admin port.
    port = 9990
    use_web = true
  end
end
# Poor-man's singleton lock: refuse to run while the marker file from a
# previous (possibly stranded) run still exists.
# NOTE(review): exist?-then-create is racy, but adequate for a cron'd script.
stats_dir = "/tmp/stats-#{port}"
singleton_file = "#{stats_dir}/json_stats_fetcher_running"

Dir.mkdir(stats_dir) rescue nil

if File.exist?(singleton_file)
  puts "NOT RUNNING -- #{singleton_file} exists."
  puts "Kill other stranded stats checker processes and kill this file to resume."
  exit 1
end
File.open(singleton_file, "w") { |f| f.write("i am running.\n") }
begin
  Timeout::timeout(60) do
    # Fetch the stats dump, either over HTTP or the plain-text admin socket.
    # When actually reporting (not a dry run) we also ask the server to
    # reset its counters so each run reports a fresh interval.
    data = if use_web
      open("http://#{hostname}:#{port}/stats#{'?reset=1' if $report_to_ganglia}").read
    else
      socket = TCPSocket.new(hostname, port)
      socket.puts("stats/json#{' reset' if $report_to_ganglia}")
      socket.gets
    end

    stats = JSON.parse(data)

    report_metric("jvm_threads", stats["jvm"]["thread_count"], "threads")
    report_metric("jvm_daemon_threads", stats["jvm"]["thread_daemon_count"], "threads")
    report_metric("jvm_heap_used", stats["jvm"]["heap_used"], "bytes")
    report_metric("jvm_heap_max", stats["jvm"]["heap_max"], "bytes")

    # Honor the -i ignore pattern for counters and gauges, not just
    # timings — previously only the timings loop filtered on $pattern.
    stats["counters"].reject { |name, val| name =~ $pattern }.each do |name, value|
      report_metric(name, (value.to_i rescue 0), "items")
    end

    stats["gauges"].reject { |name, val| name =~ $pattern }.each do |name, value|
      report_metric(name, value, "value")
    end

    stats["timings"].reject { |name, val| name =~ $pattern }.each do |name, timing|
      report_metric(name, (timing["average"] || 0).to_f / 1000.0, "sec")
      report_metric("#{name}_stddev", (timing["standard_deviation"] || 0).to_f / 1000.0, "sec")
      [:p25, :p50, :p75, :p90, :p99, :p999, :p9999].map(&:to_s).each do |bucket|
        report_metric("#{name}_#{bucket}", (timing[bucket] || 0).to_f / 1000.0, "sec") if timing[bucket]
      end
    end
  end
ensure
  # Always release the singleton marker, even on timeout or error.
  File.unlink(singleton_file)
end
Don't report counters/gauges whose names match the ignore pattern
#!/usr/bin/ruby
# json_stats_fetcher.rb - Publish Ostrich stats to Ganglia.
#
# The latest version is always available at:
# http://github.com/twitter/ostrich/blob/master/src/scripts/json_stats_fetcher.rb
#
require 'rubygems'
require 'getoptlong'
require 'socket'
require 'json'
require 'timeout'
require 'open-uri'
def valid_gmetric_name?(name)
  # Determines if a gmetric name is valid.
  #
  # Ganglia is very intolerant of metrics named with non-standard characters,
  # where non-standard contains most everything other than letters, numbers
  # and some common symbols.
  #
  # Anchored with \A/\z rather than ^/$: the latter match per-line, so a
  # name containing an embedded newline (e.g. "ok\n; rm -rf /") would pass
  # even though it is later interpolated into a shell command.
  #
  # Returns true if the metric is a valid gmetric name, otherwise false.
  if name =~ /\A[A-Za-z0-9_-]+\z/
    true
  else
    $stderr.puts "Metric <#{name}> contains invalid characters."
    false
  end
end
# Report a single metric, either by shelling out to gmetric or (dry-run
# mode) by printing it to stdout. Invalid names are skipped.
def report_metric(name, value, units)
  return unless valid_gmetric_name?(name)

  if $report_to_ganglia
    # Multi-argument Kernel#system bypasses the shell entirely, so values
    # and units coming from the (untrusted) stats JSON cannot inject shell
    # commands the way the old single-string form allowed.
    system("gmetric", "-t", "float",
           "-n", "#{$ganglia_prefix}#{name}",
           "-v", value.to_s,
           "-u", units.to_s,
           "-d", $stat_timeout.to_s)
  else
    puts "#{$ganglia_prefix}#{name}=#{value} #{units}"
  end
end
# Reporting configuration; overridden by the command-line flags parsed below.
$report_to_ganglia = true
$ganglia_prefix = ''
$stat_timeout = 86400
# Stats whose names match this pattern are skipped when reporting.
$pattern = /^x-/
hostname = "localhost"
port = 9989
use_web = false
# Print command-line help; takes the current port so the help text shows
# the effective default.
def usage(port)
  puts
  puts "usage: json_stats_fetcher.rb [options]"
  puts "options:"
  puts "    -n               say what I would report, but don't report it"
  puts "    -w               use web interface"
  puts "    -h <hostname>    connect to another host (default: localhost)"
  puts "    -i <pattern>     ignore all stats matching pattern (default: #{$pattern.inspect})"
  puts "    -p <port>        connect to another port (default: #{port})"
  puts "    -P <prefix>      optional prefix for ganglia names"
  puts
end
# Command-line option definitions and parsing; each flag updates the
# globals/locals configured above.
opts = GetoptLong.new(
  [ '--help', GetoptLong::NO_ARGUMENT ],
  [ '-n', GetoptLong::NO_ARGUMENT ],
  [ '-h', GetoptLong::REQUIRED_ARGUMENT ],
  [ '-i', GetoptLong::REQUIRED_ARGUMENT ],
  [ '-p', GetoptLong::REQUIRED_ARGUMENT ],
  [ '-P', GetoptLong::REQUIRED_ARGUMENT ],
  [ '-w', GetoptLong::NO_ARGUMENT ]
)

opts.each do |opt, arg|
  case opt
  when '--help'
    usage(port)
    exit 0
  when '-n'
    # Dry run: print metrics instead of calling gmetric.
    $report_to_ganglia = false
  when '-h'
    hostname = arg
  when '-i'
    $pattern = /#{arg}/
  when '-p'
    port = arg.to_i
  when '-P'
    $ganglia_prefix = arg
  when '-w'
    # Web mode implies the HTTP admin port.
    port = 9990
    use_web = true
  end
end
# Poor-man's singleton lock: refuse to run while the marker file from a
# previous (possibly stranded) run still exists.
# NOTE(review): exist?-then-create is racy, but adequate for a cron'd script.
stats_dir = "/tmp/stats-#{port}"
singleton_file = "#{stats_dir}/json_stats_fetcher_running"

Dir.mkdir(stats_dir) rescue nil

if File.exist?(singleton_file)
  puts "NOT RUNNING -- #{singleton_file} exists."
  puts "Kill other stranded stats checker processes and kill this file to resume."
  exit 1
end
File.open(singleton_file, "w") { |f| f.write("i am running.\n") }
begin
  Timeout::timeout(60) do
    # Fetch the stats dump, either over HTTP or the plain-text admin socket.
    # When actually reporting (not a dry run) we also ask the server to
    # reset its counters so each run reports a fresh interval.
    data = if use_web
      open("http://#{hostname}:#{port}/stats#{'?reset=1' if $report_to_ganglia}").read
    else
      socket = TCPSocket.new(hostname, port)
      socket.puts("stats/json#{' reset' if $report_to_ganglia}")
      socket.gets
    end

    stats = JSON.parse(data)

    report_metric("jvm_threads", stats["jvm"]["thread_count"], "threads")
    report_metric("jvm_daemon_threads", stats["jvm"]["thread_daemon_count"], "threads")
    report_metric("jvm_heap_used", stats["jvm"]["heap_used"], "bytes")
    report_metric("jvm_heap_max", stats["jvm"]["heap_max"], "bytes")

    # Counters, gauges and timings all honor the -i ignore pattern.
    stats["counters"].reject { |name, val| name =~ $pattern }.each do |name, value|
      report_metric(name, (value.to_i rescue 0), "items")
    end

    stats["gauges"].reject { |name, val| name =~ $pattern }.each do |name, value|
      report_metric(name, value, "value")
    end

    # Timings are reported in milliseconds by the server; convert to seconds.
    stats["timings"].reject { |name, val| name =~ $pattern }.each do |name, timing|
      report_metric(name, (timing["average"] || 0).to_f / 1000.0, "sec")
      report_metric("#{name}_stddev", (timing["standard_deviation"] || 0).to_f / 1000.0, "sec")
      [:p25, :p50, :p75, :p90, :p99, :p999, :p9999].map(&:to_s).each do |bucket|
        report_metric("#{name}_#{bucket}", (timing[bucket] || 0).to_f / 1000.0, "sec") if timing[bucket]
      end
    end
  end
ensure
  # Always release the singleton marker, even on timeout or error.
  File.unlink(singleton_file)
end
|
module Nwmls
  # Mixin that turns a plain model class into an NWMLS listing backed by
  # ActiveModel, with attribute mappings supplied by the including class.
  module ActsAsNwmlsListing
    extend ActiveSupport::Concern

    # Y/N feed flags exposed as booleans below. The original list contained
    # :publish_to_internet twice; the duplicate is removed. Frozen because
    # it is only ever read.
    BOOLEAN_ATTRIBUTES =
      [
        :bus_line_nearby,
        :publish_to_internet,
        :senior_exemption,
        :show_address_to_public,
        :show_map_link_to_public,
      ].freeze

    module ClassMethods
      def acts_as_nwmls_listing(options = {})
        include ActiveModel::Model

        cattr_accessor :property_type
        self.property_type = options[:property_type]

        cattr_accessor :attribute_mappings
        self.attribute_mappings = options[:attribute_mappings]

        # All mapped attribute names, normalized to snake_case symbols.
        def self.attributes
          self.attribute_mappings.values.collect { |v| v.underscore.parameterize('_').to_sym }
        end

        # Attributes whose raw feed values are decoded specially.
        def self.encoded_attributes
          [:status]
        end

        # Attributes that get custom readers instead of plain attr_reader.
        def self.processed_attributes
          BOOLEAN_ATTRIBUTES + self.encoded_attributes
        end

        def self.readable_attributes
          self.attributes - self.processed_attributes
        end

        attr_writer(*self.attributes)
        attr_reader(*self.readable_attributes)

        # Scope finds to this class's property type when given a Hash.
        def self.find(conditions = {}, filters = [])
          if conditions.is_a?(Hash)
            conditions.merge!(:property_type => self.property_type)
          end
          super(conditions, filters)
        end
      end
    end

    # Translate the feed's 'Y'/'N' flags into true/false (nil when unset),
    # with a predicate alias for each.
    BOOLEAN_ATTRIBUTES.each do |method|
      define_method method do
        case instance_variable_get("@#{method}")
        when 'Y' then true
        when 'N' then false
        end
      end
      alias_method "#{method}?", method
    end
    # def publish_to_internet
    #   @publish_to_internet == 'Y'
    # end
    # alias_method :publish_to_internet?, :publish_to_internet
    #
  end
end
Remove duplicate :publish_to_internet entry from the BOOLEAN_ATTRIBUTES array
module Nwmls
  # Mixin that turns a plain model class into an NWMLS listing backed by
  # ActiveModel, with attribute mappings supplied by the including class.
  module ActsAsNwmlsListing
    extend ActiveSupport::Concern

    # Y/N feed flags exposed as booleans below (see the each loop at the
    # bottom of the module).
    BOOLEAN_ATTRIBUTES =
      [
        :bus_line_nearby,
        :publish_to_internet,
        :senior_exemption,
        :show_address_to_public,
        :show_map_link_to_public,
      ]

    module ClassMethods
      def acts_as_nwmls_listing(options = {})
        include ActiveModel::Model

        cattr_accessor :property_type
        self.property_type = options[:property_type]

        cattr_accessor :attribute_mappings
        self.attribute_mappings = options[:attribute_mappings]

        # All mapped attribute names, normalized to snake_case symbols.
        def self.attributes
          self.attribute_mappings.values.collect { |v| v.underscore.parameterize('_').to_sym }
        end

        # Attributes whose raw feed values are decoded specially.
        def self.encoded_attributes
          [:status]
        end

        # Attributes that get custom readers instead of plain attr_reader.
        def self.processed_attributes
          BOOLEAN_ATTRIBUTES + self.encoded_attributes
        end

        def self.readable_attributes
          self.attributes - self.processed_attributes
        end

        attr_writer(*self.attributes)
        attr_reader(*self.readable_attributes)

        # Scope finds to this class's property type when given a Hash.
        def self.find(conditions = {}, filters = [])
          if conditions.is_a?(Hash)
            conditions.merge!(:property_type => self.property_type)
          end
          super(conditions, filters)
        end
      end
    end

    # Translate the feed's 'Y'/'N' flags into true/false (nil when unset),
    # with a predicate alias for each.
    BOOLEAN_ATTRIBUTES.each do |method|
      define_method method do
        case instance_variable_get("@#{method}")
        when 'Y' then true
        when 'N' then false
        end
      end
      alias_method "#{method}?", method
    end
    # def publish_to_internet
    #   @publish_to_internet == 'Y'
    # end
    # alias_method :publish_to_internet?, :publish_to_internet
    #
  end
end
|
module PullRequest
  module Create
    # Gem version. Frozen so the shared constant string cannot be mutated.
    VERSION = "0.1.2".freeze
  end
end
:metal: Bump version to v1.0.0
module PullRequest
  module Create
    # Gem version. Frozen so the shared constant string cannot be mutated.
    VERSION = "1.0.0".freeze
  end
end
|
module ObjectPatch
  module Operations
    # JSON-Patch style "add" operation: writes @value at @path inside a
    # nested Hash/Array structure.
    class Add
      def initialize(patch_hash)
        @path = ObjectPatch::Pointer.decode(patch_hash.fetch("path"))
        @value = patch_hash.fetch("value", nil)
      end

      # Apply the add to source_hash in place and return it.
      def apply(source_hash)
        recursive_set(source_hash, @path, @value)
      end

      # Walk path into obj and set the final key to new_value.
      # Fixes from review:
      # * The recursive call passed the undefined name `test_value`
      #   (NameError on any nested path); it now forwards new_value.
      # * The Array guard raised whenever `obj.size >= key` — which also
      #   blew up with a comparison error for Hashes (Integer >= String)
      #   and contradicted the "-" append branch below. It now rejects
      #   only numeric indices past the end of the array.
      # * The Hash guard required the key to exist even at the final
      #   level, which would forbid adding new keys; it now only checks
      #   intermediate path segments.
      def recursive_set(obj, path, new_value)
        raise ArgumentError unless key = path.shift
        key_type = obj.class
        key = key.to_i if key_type == Array && key != "-"
        raise ArgumentError if key_type == Array && key != "-" && key > obj.size
        raise ArgumentError if key_type == Hash && !path.empty? && !obj.key?(key)

        if path.empty?
          if key == "-"
            obj.push(new_value)
          else
            obj[key] = new_value
          end
        else
          recursive_set(obj[key], path, new_value)
        end

        obj
      end
    end
  end
end
Removed unnecessary line
module ObjectPatch
  module Operations
    # JSON-Patch style "add" operation: writes @value at @path inside a
    # nested Hash/Array structure.
    class Add
      def initialize(patch_hash)
        @path = ObjectPatch::Pointer.decode(patch_hash.fetch("path"))
        @value = patch_hash.fetch("value", nil)
      end

      # Apply the add to source_hash in place and return it.
      def apply(source_hash)
        recursive_set(source_hash, @path, @value)
      end

      # Walk path into obj and set the final key to new_value.
      # NOTE(review): array indices are assumed to arrive already as
      # Integers from Pointer.decode (the String#to_i conversion was
      # removed upstream as unnecessary) — confirm against Pointer.
      # Fixes from review:
      # * The recursive call passed the undefined name `test_value`
      #   (NameError on any nested path); it now forwards new_value.
      # * The Array guard raised whenever `obj.size >= key` — which also
      #   blew up with a comparison error for Hashes (Integer >= String)
      #   and contradicted the "-" append branch below. It now rejects
      #   only numeric indices past the end of the array.
      # * The Hash guard required the key to exist even at the final
      #   level, which would forbid adding new keys; it now only checks
      #   intermediate path segments.
      def recursive_set(obj, path, new_value)
        raise ArgumentError unless key = path.shift
        key_type = obj.class
        raise ArgumentError if key_type == Array && key != "-" && key.to_i > obj.size
        raise ArgumentError if key_type == Hash && !path.empty? && !obj.key?(key)

        if path.empty?
          if key == "-"
            obj.push(new_value)
          else
            obj[key] = new_value
          end
        else
          recursive_set(obj[key], path, new_value)
        end

        obj
      end
    end
  end
end
|
module Octokit
class Client
# Methods for the Repositories API
#
# @see https://developer.github.com/v3/repos/
module Repositories
# Check if a repository exists
#
# @see https://developer.github.com/v3/repos/#get
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Boolean]
def repository?(repo, options = {})
  # Either exception means "no such repository" for this predicate.
  !!repository(repo, options)
rescue Octokit::InvalidRepository, Octokit::NotFound
  false
end
# Get a single repository
#
# @see https://developer.github.com/v3/repos/#get
# @see https://developer.github.com/v3/licenses/#get-a-repositorys-license
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Repository information
def repository(repo, options = {})
  # Simple GET against the repo's canonical API path.
  get(Repository.path(repo), options)
end
alias :repo :repository
# Edit a repository
#
# @see https://developer.github.com/v3/repos/#edit
# @param repo [String, Hash, Repository] A GitHub repository
# @param options [Hash] Repository information to update
# @option options [String] :name Name of the repo
# @option options [String] :description Description of the repo
# @option options [String] :homepage Home page of the repo
# @option options [String] :private `true` makes the repository private, and `false` makes it public.
# @option options [String] :has_issues `true` enables issues for this repo, `false` disables issues.
# @option options [String] :has_wiki `true` enables wiki for this repo, `false` disables wiki.
# @option options [Boolean] :is_template `true` makes the repository a template, `false` makes it not a template.
# @option options [String] :has_downloads `true` enables downloads for this repo, `false` disables downloads.
# @option options [String] :default_branch Update the default branch for this repository.
# @return [Sawyer::Resource] Repository information
def edit_repository(repo, options = {})
  repo = Repository.new(repo)
  # Work on a copy: the original wrote :name back into the caller's
  # options hash, mutating the argument as a side effect.
  opts = options.dup
  if opts.include? :is_template
    # Template repos need the preview media type.
    opts = ensure_api_media_type(:template_repositories, opts)
  end
  opts[:name] ||= repo.name
  patch "repos/#{repo}", opts
end
alias :edit :edit_repository
alias :update_repository :edit_repository
alias :update :edit_repository
# List user repositories
#
# If user is not supplied, repositories for the current
# authenticated user are returned.
#
# @note If the user provided is a GitHub organization, only the
# organization's public repositories will be listed. For retrieving
# organization repositories the {Organizations#organization_repositories}
# method should be used instead.
# @see https://developer.github.com/v3/repos/#list-your-repositories
# @see https://developer.github.com/v3/repos/#list-user-repositories
# @param user [Integer, String] Optional GitHub user login or id for which
# to list repos.
# @return [Array<Sawyer::Resource>] List of repositories
def repositories(user = nil, options = {})
  # nil user resolves to the authenticated user's repo path.
  paginate("#{User.path(user)}/repos", options)
end
alias :list_repositories :repositories
alias :list_repos :repositories
alias :repos :repositories
# List all repositories
#
# This provides a dump of every repository, in the order that they were
# created.
#
# @see https://developer.github.com/v3/repos/#list-all-public-repositories
#
# @param options [Hash] Optional options
# @option options [Integer] :since The integer ID of the last Repository
# that you’ve seen.
# @return [Array<Sawyer::Resource>] List of repositories.
def all_repositories(options = {})
  # Global public-repository dump, in creation order.
  paginate("repositories", options)
end
# Star a repository
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully starred
# @see https://developer.github.com/v3/activity/starring/#star-a-repository
def star(repo, options = {})
  # PUT on the starred endpoint; response status becomes the boolean.
  path = "user/starred/#{Repository.new(repo)}"
  boolean_from_response(:put, path, options)
end
# Unstar a repository
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully unstarred
# @see https://developer.github.com/v3/activity/starring/#unstar-a-repository
def unstar(repo, options = {})
  # DELETE on the starred endpoint; response status becomes the boolean.
  path = "user/starred/#{Repository.new(repo)}"
  boolean_from_response(:delete, path, options)
end
# Watch a repository
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully watched
# @deprecated Use #star instead
# @see https://developer.github.com/v3/activity/watching/#watch-a-repository-legacy
def watch(repo, options = {})
  # Legacy watch endpoint (deprecated in favor of #star).
  path = "user/watched/#{Repository.new(repo)}"
  boolean_from_response(:put, path, options)
end
# Unwatch a repository
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully unwatched
# @deprecated Use #unstar instead
# @see https://developer.github.com/v3/activity/watching/#stop-watching-a-repository-legacy
def unwatch(repo, options = {})
  # Legacy unwatch endpoint (deprecated in favor of #unstar).
  path = "user/watched/#{Repository.new(repo)}"
  boolean_from_response(:delete, path, options)
end
# Fork a repository
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Repository info for the new fork
# @see https://developer.github.com/v3/repos/forks/#create-a-fork
def fork(repo, options = {})
  # POST to the forks collection of the source repository.
  post("#{Repository.path(repo)}/forks", options)
end
# Create a repository for a user or organization
#
# @param name [String] Name of the new repo
# @option options [String] :description Description of the repo
# @option options [String] :homepage Home page of the repo
# @option options [String] :private `true` makes the repository private, and `false` makes it public.
# @option options [String] :has_issues `true` enables issues for this repo, `false` disables issues.
# @option options [String] :has_wiki `true` enables wiki for this repo, `false` disables wiki.
# @option options [Boolean] :is_template `true` makes this repo available as a template repository, `false` to prevent it.
# @option options [String] :has_downloads `true` enables downloads for this repo, `false` disables downloads.
# @option options [String] :organization Short name for the org under which to create the repo.
# @option options [Integer] :team_id The id of the team that will be granted access to this repository. This is only valid when creating a repo in an organization.
# @option options [Boolean] :auto_init `true` to create an initial commit with empty README. Default is `false`.
# @option options [String] :gitignore_template Desired language or platform .gitignore template to apply. Ignored if auto_init parameter is not provided.
# @return [Sawyer::Resource] Repository info for the new repository
# @see https://developer.github.com/v3/repos/#create
def create_repository(name, options = {})
  # Copy the caller's options before pulling :organization out of them.
  opts = options.dup
  organization = opts.delete(:organization)
  opts[:name] = name
  # Template repos need the preview media type.
  opts = ensure_api_media_type(:template_repositories, opts) if opts.include?(:is_template)

  if organization.nil?
    post 'user/repos', opts
  else
    post "#{Organization.path organization}/repos", opts
  end
end
alias :create_repo :create_repository
alias :create :create_repository
# Delete repository
#
# Note: If OAuth is used, 'delete_repo' scope is required
#
# @see https://developer.github.com/v3/repos/#delete-a-repository
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if repository was deleted
def delete_repository(repo, options = {})
  # DELETE on the repo path; response status becomes the boolean.
  boolean_from_response(:delete, Repository.path(repo), options)
end
alias :delete_repo :delete_repository
# Transfer repository
#
# Transfer a repository owned by your organization
#
# @see https://developer.github.com/v3/repos/#transfer-a-repository
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @param new_owner [String] The username or organization name the repository will be transferred to.
# @param options [Array<Integer>] :team_ids ID of the team or teams to add to the repository. Teams can only be added to organization-owned repositories.
# @return [Sawyer::Resource] Repository info for the transferred repository
def transfer_repository(repo, new_owner, options = {})
  # The transfer endpoint requires its preview media type.
  opts = ensure_api_media_type(:transfer_repository, options)
  post("#{Repository.path(repo)}/transfer", opts.merge(new_owner: new_owner))
end
alias :transfer_repo :transfer_repository
# Create a repository for a user or organization generated from a template repository
#
# @param repo [Integer, String, Hash, Repository] A GitHub template repository
# @param name [String] Name of the new repo
# @option options [String] :owner Organization or user who the new repository will belong to.
# @option options [String] :description Description of the repo
# @option options [String] :private `true` makes the repository private, and `false` makes it public.
# @option options [Boolean] :include_all_branches `true` copies all branches from the template repository, `false` (default) makes it only copy the master branch.
# @return [Sawyer::Resource] Repository info for the new repository
def create_repository_from_template(repo, name, options = {})
  # Merge into a copy: the original used merge!, writing :name back into
  # the caller's options hash as a side effect.
  opts = ensure_api_media_type(:template_repositories, options.merge(:name => name))
  post "#{Repository.path repo}/generate", opts
end
alias :create_repo_from_template :create_repository_from_template
# Hide a public repository
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Updated repository info
def set_private(repo, options = {})
  # GitHub API controls visibility via the :private attribute on edit.
  update_repository(repo, options.merge(:private => true))
end
# Unhide a private repository
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Updated repository info
def set_public(repo, options = {})
  # GitHub API controls visibility via the :private attribute on edit.
  update_repository(repo, options.merge(:private => false))
end
# Get deploy keys on a repo
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Array<Sawyer::Resource>] Array of hashes representing deploy keys.
# @see https://developer.github.com/v3/repos/keys/#list-deploy-keys
# @example
# @client.deploy_keys('octokit/octokit.rb')
# @example
# @client.list_deploy_keys('octokit/octokit.rb')
def deploy_keys(repo, options = {})
  # Paginated listing of the repo's deploy keys.
  paginate("#{Repository.path(repo)}/keys", options)
end
alias :list_deploy_keys :deploy_keys
# Get a single deploy key for a repo
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param id [Integer] Deploy key ID.
# @return [Sawyer::Resource] Deploy key.
# @see https://developer.github.com/v3/repos/keys/#get-a-deploy-key
# @example
# @client.deploy_key('octokit/octokit.rb', 8675309)
def deploy_key(repo, id, options = {})
  # Single deploy key lookup by numeric id.
  get("#{Repository.path(repo)}/keys/#{id}", options)
end
# Add deploy key to a repo
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param title [String] Title reference for the deploy key.
# @param key [String] Public key.
# @return [Sawyer::Resource] Hash representing newly added key.
# @see https://developer.github.com/v3/repos/keys/#add-a-new-deploy-key
# @example
# @client.add_deploy_key('octokit/octokit.rb', 'Staging server', 'ssh-rsa AAA...')
def add_deploy_key(repo, title, key, options = {})
  # title/key are folded into the request body alongside any extra options.
  post("#{Repository.path(repo)}/keys", options.merge(title: title, key: key))
end
# Edit a deploy key
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param id [Integer] Deploy key ID.
# @param options [Hash] Attributes to edit.
# @option title [String] Key title.
# @option key [String] Public key.
# @return [Sawyer::Resource] Updated deploy key.
# @deprecated This method is no longer supported in the API
# @see https://developer.github.com/changes/2014-02-24-finer-grained-scopes-for-ssh-keys/
# @see https://developer.github.com/v3/repos/keys/#edit-a-deploy-key
# @example Update the key for a deploy key.
# @client.edit_deploy_key('octokit/octokit.rb', 8675309, :key => 'ssh-rsa BBB...')
# @example
# @client.update_deploy_key('octokit/octokit.rb', 8675309, :title => 'Uber', :key => 'ssh-rsa BBB...'))
def edit_deploy_key(repo, id, options)
  # PATCH the key in place (endpoint deprecated upstream; see YARD above).
  patch("#{Repository.path(repo)}/keys/#{id}", options)
end
alias :update_deploy_key :edit_deploy_key
# Remove deploy key from a repo
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param id [Integer] Id of the deploy key to remove.
# @return [Boolean] True if key removed, false otherwise.
# @see https://developer.github.com/v3/repos/keys/#remove-a-deploy-key
# @example
# @client.remove_deploy_key('octokit/octokit.rb', 100000)
def remove_deploy_key(repo, id, options = {})
  # DELETE on the key path; response status becomes the boolean.
  boolean_from_response(:delete, "#{Repository.path(repo)}/keys/#{id}", options)
end
# List collaborators
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @option options [String] :affiliation Filters the return array by affiliation.
# Can be one of: <tt>outside</tt>, <tt>direct</tt>, or <tt>all</tt>.
# If not specified, defaults to <tt>all</tt>
# @return [Array<Sawyer::Resource>] Array of hashes representing collaborating users.
# @see https://developer.github.com/v3/repos/collaborators/#list-collaborators
# @example
# Octokit.collaborators('octokit/octokit.rb')
# @example
# Octokit.collabs('octokit/octokit.rb')
# @example
# @client.collabs('octokit/octokit.rb')
def collaborators(repo, options = {})
  # Paginated listing of the repo's collaborators.
  paginate("#{Repository.path(repo)}/collaborators", options)
end
alias :collabs :collaborators
# Add collaborator to repo
#
# This can also be used to update the permission of an existing collaborator
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param collaborator [String] Collaborator GitHub username to add.
# @option options [String] :permission The permission to grant the collaborator.
# Only valid on organization-owned repositories.
# Can be one of: <tt>pull</tt>, <tt>push</tt>, or <tt>admin</tt>.
# If not specified, defaults to <tt>push</tt>
# @return [Boolean] True if collaborator added, false otherwise.
# @see https://developer.github.com/v3/repos/collaborators/#add-user-as-a-collaborator
# @example
# @client.add_collaborator('octokit/octokit.rb', 'holman')
# @example
# @client.add_collab('octokit/octokit.rb', 'holman')
# @example Add a collaborator with admin permissions
# @client.add_collaborator('octokit/octokit.rb', 'holman', permission: 'admin')
def add_collaborator(repo, collaborator, options = {})
  # PUT adds (or updates the permission of) the collaborator.
  path = "#{Repository.path(repo)}/collaborators/#{collaborator}"
  boolean_from_response(:put, path, options)
end
alias :add_collab :add_collaborator
# Remove collaborator from repo.
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param collaborator [String] Collaborator GitHub username to remove.
# @return [Boolean] True if collaborator removed, false otherwise.
# @see https://developer.github.com/v3/repos/collaborators/#remove-user-as-a-collaborator
# @example
# @client.remove_collaborator('octokit/octokit.rb', 'holman')
# @example
# @client.remove_collab('octokit/octokit.rb', 'holman')
def remove_collaborator(repo, collaborator, options = {})
  # DELETE drops the collaborator; response status becomes the boolean.
  path = "#{Repository.path(repo)}/collaborators/#{collaborator}"
  boolean_from_response(:delete, path, options)
end
alias :remove_collab :remove_collaborator
# Checks if a user is a collaborator for a repo.
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param collaborator [String] Collaborator GitHub username to check.
# @return [Boolean] True if user is a collaborator, false otherwise.
# @see https://developer.github.com/v3/repos/collaborators/#check-if-a-user-is-a-collaborator
# @example
# @client.collaborator?('octokit/octokit.rb', 'holman')
def collaborator?(repo, collaborator, options = {})
  # GET returns 204/404; boolean_from_response maps that to true/false.
  path = "#{Repository.path(repo)}/collaborators/#{collaborator}"
  boolean_from_response(:get, path, options)
end
# Get a user's permission level for a repo.
#
# Requires authenticated client
#
# @return [Sawyer::Resource] Hash representing the user's permission level for the given repository
# @see https://developer.github.com/v3/repos/collaborators/#review-a-users-permission-level
# @example
# @client.permission_level('octokit/octokit.rb', 'lizzhale')
def permission_level(repo, collaborator, options = {})
  # Fetch the collaborator's effective permission on the repo.
  get("#{Repository.path(repo)}/collaborators/#{collaborator}/permission", options)
end
# List teams for a repo
#
# Requires authenticated client that is an owner or collaborator of the repo.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing teams.
# @see https://developer.github.com/v3/repos/#list-teams
# @example
# @client.repository_teams('octokit/pengwynn')
# @example
# @client.repo_teams('octokit/pengwynn')
# @example
# @client.teams('octokit/pengwynn')
def repository_teams(repo, options = {})
  # Paginated listing of teams with access to the repo.
  paginate("#{Repository.path(repo)}/teams", options)
end
alias :repo_teams :repository_teams
alias :teams :repository_teams
# Fetch every topic attached to a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Sawyer::Resource] representing the topics for given repo
# @see https://developer.github.com/v3/repos/#list-all-topics-for-a-repository
# @example List topics for octokit/octokit.rb
#   Octokit.topics('octokit/octokit.rb')
def topics(repo, options = {})
  # Topics require an opt-in preview media type header.
  media_opts = ensure_api_media_type(:topics, options)
  paginate "#{Repository.path repo}/topics", media_opts
end
# Replace the entire set of topics on a repository.
#
# Requires an authenticated client.
#
# @param repo [Integer, String, Repository, Hash] A Github repository
# @param names [Array] The topics the repository should have; pass an empty array to clear all topics.
# @return [Sawyer::Resource] representing the replaced topics for given repo
# @see https://developer.github.com/v3/repos/#replace-all-topics-for-a-repository
# @example Replace topics for octokit/octokit.rb
#   client.replace_all_topics('octokit/octokit.rb', ['octocat', 'atom', 'electron', 'API'])
# @example Clear all topics for octokit/octokit.rb
#   client.replace_all_topics('octokit/octokit.rb', [])
def replace_all_topics(repo, names, options = {})
  # Topics require an opt-in preview media type header.
  media_opts = ensure_api_media_type(:topics, options)
  put "#{Repository.path repo}/topics", media_opts.merge(:names => names)
end
# List contributors to a repo
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param anon [Boolean] Set true to include anonymous contributors.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/repos/#list-contributors
# @example
#   Octokit.contributors('octokit/octokit.rb', true)
# @example
#   Octokit.contribs('octokit/octokit.rb')
# @example
#   @client.contribs('octokit/octokit.rb')
def contributors(repo, anon = nil, options = {})
  # Work on a copy so the caller's options hash is not mutated.
  opts = options.dup
  opts[:anon] = 1 if anon.to_s[/1|true/]
  paginate "#{Repository.path repo}/contributors", opts
end
alias :contribs :contributors
# List the users who have starred a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/activity/starring/#list-stargazers
# @example
#   Octokit.stargazers('octokit/octokit.rb')
def stargazers(repo, options = {})
  path = "#{Repository.path repo}/stargazers"
  paginate path, options
end
# @deprecated Use {#stargazers} instead
#
# List the users watching a repository (legacy endpoint).
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/repos/watching/#list-watchers
# @example
#   Octokit.watchers('octokit/octokit.rb')
def watchers(repo, options = {})
  path = "#{Repository.path repo}/watchers"
  paginate path, options
end
# List the forks of a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing repos.
# @see https://developer.github.com/v3/repos/forks/#list-forks
# @example
#   Octokit.forks('octokit/octokit.rb')
# @example
#   Octokit.network('octokit/octokit.rb')
def forks(repo, options = {})
  path = "#{Repository.path repo}/forks"
  paginate path, options
end
alias :network :forks
# List the programming languages used in a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of Hashes representing languages.
# @see https://developer.github.com/v3/repos/#list-languages
# @example
#   Octokit.languages('octokit/octokit.rb')
def languages(repo, options = {})
  path = "#{Repository.path repo}/languages"
  paginate path, options
end
# List the tags of a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing tags.
# @see https://developer.github.com/v3/repos/#list-tags
# @example
#   Octokit.tags('octokit/octokit.rb')
def tags(repo, options = {})
  path = "#{Repository.path repo}/tags"
  paginate path, options
end
# List the branches of a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing branches.
# @see https://developer.github.com/v3/repos/#list-branches
# @example
#   Octokit.branches('octokit/octokit.rb')
def branches(repo, options = {})
  path = "#{Repository.path repo}/branches"
  paginate path, options
end
# Fetch a single branch from a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param branch [String] Branch name
# @return [Sawyer::Resource] The branch requested, if it exists
# @see https://developer.github.com/v3/repos/#get-branch
# @example Get branch 'master' from octokit/octokit.rb
#   Octokit.branch("octokit/octokit.rb", "master")
def branch(repo, branch, options = {})
  path = "#{Repository.path repo}/branches/#{branch}"
  get path, options
end
alias :get_branch :branch
# Enable protection on a single branch of a repository.
#
# Requires an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param branch [String] Branch name
# @option options [Hash] :required_status_checks If not null, the following keys are required:
#   <tt>:enforce_admins [boolean] Enforce required status checks for repository administrators.</tt>
#   <tt>:strict [boolean] Require branches to be up to date before merging.</tt>
#   <tt>:contexts [Array] The list of status checks to require in order to merge into this branch</tt>
#
# @option options [Hash] :restrictions If not null, the following keys are required:
#   <tt>:users [Array] The list of user logins with push access</tt>
#   <tt>:teams [Array] The list of team slugs with push access</tt>.
#
#   Teams and users restrictions are only available for organization-owned repositories.
# @return [Sawyer::Resource] The protected branch
# @see https://developer.github.com/v3/repos/#enabling-and-disabling-branch-protection
# @example
#   @client.protect_branch('octokit/octokit.rb', 'master', foo)
def protect_branch(repo, branch, options = {})
  opts = ensure_api_media_type(:branch_protection, options)
  # The API expects both keys to be present in the payload, even when null.
  [:restrictions, :required_status_checks].each { |key| opts[key] ||= nil }
  put "#{Repository.path repo}/branches/#{branch}/protection", opts
end
# Fetch the protection summary for a branch.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param branch [String] Branch name
# @return [Sawyer::Resource, nil] Branch protection summary or nil if the branch
#   is not protected
# @see https://developer.github.com/v3/repos/branches/#get-branch-protection
# @example
#   @client.branch_protection('octokit/octokit.rb', 'master')
def branch_protection(repo, branch, options = {})
  opts = ensure_api_media_type(:branch_protection, options)
  get "#{Repository.path repo}/branches/#{branch}/protection", opts
rescue Octokit::BranchNotProtected
  # An unprotected branch is an expected state, not an error.
  nil
end
# Disable protection on a single branch of a repository.
#
# Requires an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param branch [String] Branch name
# @return [Sawyer::Resource] The unprotected branch
# @see https://developer.github.com/v3/repos/#enabling-and-disabling-branch-protection
# @example
#   @client.unprotect_branch('octokit/octokit.rb', 'master')
def unprotect_branch(repo, branch, options = {})
  opts = ensure_api_media_type(:branch_protection, options)
  path = "#{Repository.path repo}/branches/#{branch}/protection"
  boolean_from_response :delete, path, opts
end
# List the users who can be assigned to issues in a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/issues/assignees/#list-assignees
# @example
#   Octokit.repository_assignees('octokit/octokit.rb')
# @example
#   Octokit.repo_assignees('octokit/octokit.rb')
def repository_assignees(repo, options = {})
  path = "#{Repository.path repo}/assignees"
  paginate path, options
end
alias :repo_assignees :repository_assignees
# Check whether a particular user can be assigned to issues in a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param assignee [String] User login to check
# @return [Boolean] True if assignable on project, false otherwise.
# @see https://developer.github.com/v3/issues/assignees/#check-assignee
# @example
#   Octokit.check_assignee('octokit/octokit.rb', 'andrew')
def check_assignee(repo, assignee, options = {})
  path = "#{Repository.path repo}/assignees/#{assignee}"
  boolean_from_response :get, path, options
end
# List the users subscribed to notifications for a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of users watching.
# @see https://developer.github.com/v3/activity/watching/#list-watchers
# @example
#   @client.subscribers("octokit/octokit.rb")
def subscribers(repo, options = {})
  path = "#{Repository.path repo}/subscribers"
  paginate path, options
end
# Fetch the authenticated user's subscription to a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Sawyer::Resource] Repository subscription.
# @see https://developer.github.com/v3/activity/watching/#get-a-repository-subscription
# @example
#   @client.subscription("octokit/octokit.rb")
def subscription(repo, options = {})
  path = "#{Repository.path repo}/subscription"
  get path, options
end
# Update the authenticated user's subscription to a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param options [Hash]
#
# @option options [Boolean] :subscribed Determines if notifications
#   should be received from this repository.
# @option options [Boolean] :ignored Determines if all notifications
#   should be blocked from this repository.
# @return [Sawyer::Resource] Updated repository subscription.
# @see https://developer.github.com/v3/activity/watching/#set-a-repository-subscription
# @example Subscribe to notifications for a repository
#   @client.update_subscription("octokit/octokit.rb", {subscribed: true})
def update_subscription(repo, options = {})
  path = "#{Repository.path repo}/subscription"
  put path, options
end
# Remove the authenticated user's subscription to a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Boolean] True if subscription deleted, false otherwise.
# @see https://developer.github.com/v3/activity/watching/#delete-a-repository-subscription
# @example
#   @client.delete_subscription("octokit/octokit.rb")
def delete_subscription(repo, options = {})
  path = "#{Repository.path repo}/subscription"
  boolean_from_response :delete, path, options
end
end
end
end
Fixes the docs link for 'PATCH /repos/:owner/:repo'.
The link https://developer.github.com/v3/repos/#edit no longer exists;
it has been replaced by https://developer.github.com/v3/repos/#update-a-repository.
module Octokit
class Client
# Methods for the Repositories API
#
# @see https://developer.github.com/v3/repos/
module Repositories
# Check whether a repository exists.
#
# @see https://developer.github.com/v3/repos/#get
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Boolean] true when the repository can be fetched, false otherwise
def repository?(repo, options = {})
  !!repository(repo, options)
rescue Octokit::InvalidRepository, Octokit::NotFound
  # Both a malformed identifier and a missing repo mean "does not exist".
  false
end
# Fetch a single repository.
#
# @see https://developer.github.com/v3/repos/#get
# @see https://developer.github.com/v3/licenses/#get-a-repositorys-license
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Repository information
def repository(repo, options = {})
  path = Repository.path(repo)
  get path, options
end
alias :repo :repository
# Edit a repository
#
# @see https://developer.github.com/v3/repos/#update-a-repository
# @param repo [String, Hash, Repository] A GitHub repository
# @param options [Hash] Repository information to update
# @option options [String] :name Name of the repo
# @option options [String] :description Description of the repo
# @option options [String] :homepage Home page of the repo
# @option options [String] :private `true` makes the repository private, and `false` makes it public.
# @option options [String] :has_issues `true` enables issues for this repo, `false` disables issues.
# @option options [String] :has_wiki `true` enables wiki for this repo, `false` disables wiki.
# @option options [Boolean] :is_template `true` makes the repository a template, `false` makes it not a template.
# @option options [String] :has_downloads `true` enables downloads for this repo, `false` disables downloads.
# @option options [String] :default_branch Update the default branch for this repository.
# @return [Sawyer::Resource] Repository information
def edit_repository(repo, options = {})
  repo = Repository.new(repo)
  # Work on a copy so the caller's options hash is not mutated below.
  opts = options.dup
  if opts.include? :is_template
    # Template repositories are behind a preview media type.
    opts = ensure_api_media_type(:template_repositories, opts)
  end
  # The API requires :name; default it to the current name.
  opts[:name] ||= repo.name
  patch "repos/#{repo}", opts
end
alias :edit :edit_repository
alias :update_repository :edit_repository
alias :update :edit_repository
# List a user's repositories.
#
# When no user is supplied, repositories for the currently
# authenticated user are returned.
#
# @note If the user provided is a GitHub organization, only the
#   organization's public repositories will be listed. For retrieving
#   organization repositories the {Organizations#organization_repositories}
#   method should be used instead.
# @see https://developer.github.com/v3/repos/#list-your-repositories
# @see https://developer.github.com/v3/repos/#list-user-repositories
# @param user [Integer, String] Optional GitHub user login or id for which
#   to list repos.
# @return [Array<Sawyer::Resource>] List of repositories
def repositories(user=nil, options = {})
  path = "#{User.path user}/repos"
  paginate path, options
end
alias :list_repositories :repositories
alias :list_repos :repositories
alias :repos :repositories
# List every public repository, in creation order.
#
# This provides a dump of every repository, in the order that they were
# created.
#
# @see https://developer.github.com/v3/repos/#list-all-public-repositories
#
# @param options [Hash] Optional options
# @option options [Integer] :since The integer ID of the last Repository
#   that you’ve seen.
# @return [Array<Sawyer::Resource>] List of repositories.
def all_repositories(options = {})
  path = 'repositories'
  paginate path, options
end
# Star a repository on behalf of the authenticated user.
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully starred
# @see https://developer.github.com/v3/activity/starring/#star-a-repository
def star(repo, options = {})
  path = "user/starred/#{Repository.new(repo)}"
  boolean_from_response :put, path, options
end
# Remove the authenticated user's star from a repository.
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully unstarred
# @see https://developer.github.com/v3/activity/starring/#unstar-a-repository
def unstar(repo, options = {})
  path = "user/starred/#{Repository.new(repo)}"
  boolean_from_response :delete, path, options
end
# Watch a repository (legacy endpoint).
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully watched
# @deprecated Use #star instead
# @see https://developer.github.com/v3/activity/watching/#watch-a-repository-legacy
def watch(repo, options = {})
  path = "user/watched/#{Repository.new(repo)}"
  boolean_from_response :put, path, options
end
# Stop watching a repository (legacy endpoint).
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully unwatched
# @deprecated Use #unstar instead
# @see https://developer.github.com/v3/activity/watching/#stop-watching-a-repository-legacy
def unwatch(repo, options = {})
  path = "user/watched/#{Repository.new(repo)}"
  boolean_from_response :delete, path, options
end
# Create a fork of a repository for the authenticated user.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Repository info for the new fork
# @see https://developer.github.com/v3/repos/forks/#create-a-fork
def fork(repo, options = {})
  path = "#{Repository.path repo}/forks"
  post path, options
end
# Create a repository for a user or organization
#
# @param name [String] Name of the new repo
# @option options [String] :description Description of the repo
# @option options [String] :homepage Home page of the repo
# @option options [String] :private `true` makes the repository private, and `false` makes it public.
# @option options [String] :has_issues `true` enables issues for this repo, `false` disables issues.
# @option options [String] :has_wiki `true` enables wiki for this repo, `false` disables wiki.
# @option options [Boolean] :is_template `true` makes this repo available as a template repository, `false` to prevent it.
# @option options [String] :has_downloads `true` enables downloads for this repo, `false` disables downloads.
# @option options [String] :organization Short name for the org under which to create the repo.
# @option options [Integer] :team_id The id of the team that will be granted access to this repository. This is only valid when creating a repo in an organization.
# @option options [Boolean] :auto_init `true` to create an initial commit with empty README. Default is `false`.
# @option options [String] :gitignore_template Desired language or platform .gitignore template to apply. Ignored if auto_init parameter is not provided.
# @return [Sawyer::Resource] Repository info for the new repository
# @see https://developer.github.com/v3/repos/#create
def create_repository(name, options = {})
  # Copy so the caller's hash is untouched; :organization routes the request.
  opts = options.dup
  organization = opts.delete :organization
  opts[:name] = name
  if opts.include? :is_template
    # Template repositories are behind a preview media type.
    opts = ensure_api_media_type(:template_repositories, opts)
  end
  if organization.nil?
    post 'user/repos', opts
  else
    post "#{Organization.path organization}/repos", opts
  end
end
alias :create_repo :create_repository
alias :create :create_repository
# Delete a repository.
#
# Note: If OAuth is used, 'delete_repo' scope is required
#
# @see https://developer.github.com/v3/repos/#delete-a-repository
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if repository was deleted
def delete_repository(repo, options = {})
  path = Repository.path(repo)
  boolean_from_response :delete, path, options
end
alias :delete_repo :delete_repository
# Transfer a repository to a new owner.
#
# Transfer a repository owned by your organization
#
# @see https://developer.github.com/v3/repos/#transfer-a-repository
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @param new_owner [String] The username or organization name the repository will be transferred to.
# @option options [Array<Integer>] :team_ids ID of the team or teams to add to the repository. Teams can only be added to organization-owned repositories.
# @return [Sawyer::Resource] Repository info for the transferred repository
def transfer_repository(repo, new_owner, options = {})
  # Repository transfer is behind a preview media type.
  opts = ensure_api_media_type(:transfer_repository, options)
  post "#{Repository.path repo}/transfer", opts.merge({ new_owner: new_owner })
end
alias :transfer_repo :transfer_repository
# Create a repository for a user or organization generated from a template repository
#
# @param repo [Integer, String, Hash, Repository] A GitHub template repository
# @param name [String] Name of the new repo
# @option options [String] :owner Organization or user who the new repository will belong to.
# @option options [String] :description Description of the repo
# @option options [String] :private `true` makes the repository private, and `false` makes it public.
# @option options [Boolean] :include_all_branches `true` copies all branches from the template repository, `false` (default) makes it only copy the master branch.
# @return [Sawyer::Resource] Repository info for the new repository
def create_repository_from_template(repo, name, options = {})
  # Use a non-destructive merge so the caller's options hash is not mutated.
  opts = ensure_api_media_type(:template_repositories, options.merge(:name => name))
  post "#{Repository.path repo}/generate", opts
end
alias :create_repo_from_template :create_repository_from_template
# Make a public repository private.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Updated repository info
def set_private(repo, options = {})
  # GitHub Api for setting private updated to use private attr, rather than public
  update_repository repo, options.merge({ :private => true })
end
# Make a private repository public.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Updated repository info
def set_public(repo, options = {})
  # GitHub Api for setting private updated to use private attr, rather than public
  update_repository repo, options.merge({ :private => false })
end
# List the deploy keys on a repository.
#
# Requires an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Array<Sawyer::Resource>] Array of hashes representing deploy keys.
# @see https://developer.github.com/v3/repos/keys/#list-deploy-keys
# @example
#   @client.deploy_keys('octokit/octokit.rb')
def deploy_keys(repo, options = {})
  path = "#{Repository.path repo}/keys"
  paginate path, options
end
alias :list_deploy_keys :deploy_keys
# Fetch a single deploy key from a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param id [Integer] Deploy key ID.
# @return [Sawyer::Resource] Deploy key.
# @see https://developer.github.com/v3/repos/keys/#get-a-deploy-key
# @example
#   @client.deploy_key('octokit/octokit.rb', 8675309)
def deploy_key(repo, id, options={})
  path = "#{Repository.path repo}/keys/#{id}"
  get path, options
end
# Add a deploy key to a repository.
#
# Requires an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param title [String] Title reference for the deploy key.
# @param key [String] Public key.
# @return [Sawyer::Resource] Hash representing newly added key.
# @see https://developer.github.com/v3/repos/keys/#add-a-new-deploy-key
# @example
#   @client.add_deploy_key('octokit/octokit.rb', 'Staging server', 'ssh-rsa AAA...')
def add_deploy_key(repo, title, key, options = {})
  payload = options.merge(:title => title, :key => key)
  post "#{Repository.path repo}/keys", payload
end
# Edit an existing deploy key.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param id [Integer] Deploy key ID.
# @param options [Hash] Attributes to edit.
# @option title [String] Key title.
# @option key [String] Public key.
# @return [Sawyer::Resource] Updated deploy key.
# @deprecated This method is no longer supported in the API
# @see https://developer.github.com/changes/2014-02-24-finer-grained-scopes-for-ssh-keys/
# @see https://developer.github.com/v3/repos/keys/#edit-a-deploy-key
# @example Update the key for a deploy key.
#   @client.edit_deploy_key('octokit/octokit.rb', 8675309, :key => 'ssh-rsa BBB...')
def edit_deploy_key(repo, id, options)
  path = "#{Repository.path repo}/keys/#{id}"
  patch path, options
end
alias :update_deploy_key :edit_deploy_key
# Remove a deploy key from a repository.
#
# Requires an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param id [Integer] Id of the deploy key to remove.
# @return [Boolean] True if key removed, false otherwise.
# @see https://developer.github.com/v3/repos/keys/#remove-a-deploy-key
# @example
#   @client.remove_deploy_key('octokit/octokit.rb', 100000)
def remove_deploy_key(repo, id, options = {})
  path = "#{Repository.path repo}/keys/#{id}"
  boolean_from_response :delete, path, options
end
# List the collaborators on a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @option options [String] :affiliation Filters the return array by affiliation.
#   Can be one of: <tt>outside</tt>, <tt>direct</tt>, or <tt>all</tt>.
#   If not specified, defaults to <tt>all</tt>
# @return [Array<Sawyer::Resource>] Array of hashes representing collaborating users.
# @see https://developer.github.com/v3/repos/collaborators/#list-collaborators
# @example
#   Octokit.collaborators('octokit/octokit.rb')
# @example
#   Octokit.collabs('octokit/octokit.rb')
def collaborators(repo, options = {})
  path = "#{Repository.path repo}/collaborators"
  paginate path, options
end
alias :collabs :collaborators
# Add a collaborator to a repository.
#
# This can also be used to update the permission of an existing collaborator
#
# Requires an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param collaborator [String] Collaborator GitHub username to add.
# @option options [String] :permission The permission to grant the collaborator.
#   Only valid on organization-owned repositories.
#   Can be one of: <tt>pull</tt>, <tt>push</tt>, or <tt>admin</tt>.
#   If not specified, defaults to <tt>push</tt>
# @return [Boolean] True if collaborator added, false otherwise.
# @see https://developer.github.com/v3/repos/collaborators/#add-user-as-a-collaborator
# @example
#   @client.add_collaborator('octokit/octokit.rb', 'holman')
# @example Add a collaborator with admin permissions
#   @client.add_collaborator('octokit/octokit.rb', 'holman', permission: 'admin')
def add_collaborator(repo, collaborator, options = {})
  path = "#{Repository.path repo}/collaborators/#{collaborator}"
  boolean_from_response :put, path, options
end
alias :add_collab :add_collaborator
# Remove a collaborator from a repository.
#
# Requires an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param collaborator [String] Collaborator GitHub username to remove.
# @return [Boolean] True if collaborator removed, false otherwise.
# @see https://developer.github.com/v3/repos/collaborators/#remove-user-as-a-collaborator
# @example
#   @client.remove_collaborator('octokit/octokit.rb', 'holman')
def remove_collaborator(repo, collaborator, options = {})
  path = "#{Repository.path repo}/collaborators/#{collaborator}"
  boolean_from_response :delete, path, options
end
alias :remove_collab :remove_collaborator
# Check whether a given user is a collaborator on a repository.
#
# Requires an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param collaborator [String] Collaborator GitHub username to check.
# @return [Boolean] True if user is a collaborator, false otherwise.
# @see https://developer.github.com/v3/repos/collaborators/#check-if-a-user-is-a-collaborator
# @example
#   @client.collaborator?('octokit/octokit.rb', 'holman')
def collaborator?(repo, collaborator, options={})
  path = "#{Repository.path repo}/collaborators/#{collaborator}"
  boolean_from_response :get, path, options
end
# Look up the permission level a user has on a repository.
#
# Requires an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param collaborator [String] GitHub username whose permission to check.
# @return [Sawyer::Resource] Hash representing the user's permission level for the given repository
# @see https://developer.github.com/v3/repos/collaborators/#review-a-users-permission-level
# @example
#   @client.permission_level('octokit/octokit.rb', 'lizzhale')
def permission_level(repo, collaborator, options={})
  path = "#{Repository.path repo}/collaborators/#{collaborator}/permission"
  get path, options
end
# List the teams that have access to a repository.
#
# Requires an authenticated client that is an owner or collaborator of the repo.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing teams.
# @see https://developer.github.com/v3/repos/#list-teams
# @example
#   @client.repository_teams('octokit/pengwynn')
# @example
#   @client.repo_teams('octokit/pengwynn')
# @example
#   @client.teams('octokit/pengwynn')
def repository_teams(repo, options = {})
  path = "#{Repository.path repo}/teams"
  paginate path, options
end
alias :repo_teams :repository_teams
alias :teams :repository_teams
# Fetch every topic attached to a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Sawyer::Resource] representing the topics for given repo
# @see https://developer.github.com/v3/repos/#list-all-topics-for-a-repository
# @example List topics for octokit/octokit.rb
#   Octokit.topics('octokit/octokit.rb')
def topics(repo, options = {})
  # Topics require an opt-in preview media type header.
  media_opts = ensure_api_media_type(:topics, options)
  paginate "#{Repository.path repo}/topics", media_opts
end
# Replace the entire set of topics on a repository.
#
# Requires an authenticated client.
#
# @param repo [Integer, String, Repository, Hash] A Github repository
# @param names [Array] The topics the repository should have; pass an empty array to clear all topics.
# @return [Sawyer::Resource] representing the replaced topics for given repo
# @see https://developer.github.com/v3/repos/#replace-all-topics-for-a-repository
# @example Replace topics for octokit/octokit.rb
#   client.replace_all_topics('octokit/octokit.rb', ['octocat', 'atom', 'electron', 'API'])
# @example Clear all topics for octokit/octokit.rb
#   client.replace_all_topics('octokit/octokit.rb', [])
def replace_all_topics(repo, names, options = {})
  # Topics require an opt-in preview media type header.
  media_opts = ensure_api_media_type(:topics, options)
  put "#{Repository.path repo}/topics", media_opts.merge(:names => names)
end
# List contributors to a repo
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param anon [Boolean] Set true to include anonymous contributors.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/repos/#list-contributors
# @example
#   Octokit.contributors('octokit/octokit.rb', true)
# @example
#   Octokit.contribs('octokit/octokit.rb')
# @example
#   @client.contribs('octokit/octokit.rb')
def contributors(repo, anon = nil, options = {})
  # Work on a copy so the caller's options hash is not mutated.
  opts = options.dup
  opts[:anon] = 1 if anon.to_s[/1|true/]
  paginate "#{Repository.path repo}/contributors", opts
end
alias :contribs :contributors
# List the users who have starred a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/activity/starring/#list-stargazers
# @example
#   Octokit.stargazers('octokit/octokit.rb')
def stargazers(repo, options = {})
  path = "#{Repository.path repo}/stargazers"
  paginate path, options
end
# @deprecated Use {#stargazers} instead
#
# List the users watching a repository (legacy endpoint).
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/repos/watching/#list-watchers
# @example
#   Octokit.watchers('octokit/octokit.rb')
def watchers(repo, options = {})
  path = "#{Repository.path repo}/watchers"
  paginate path, options
end
# List the forks of a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing repos.
# @see https://developer.github.com/v3/repos/forks/#list-forks
# @example
#   Octokit.forks('octokit/octokit.rb')
# @example
#   Octokit.network('octokit/octokit.rb')
def forks(repo, options = {})
  path = "#{Repository.path repo}/forks"
  paginate path, options
end
alias :network :forks
# List the programming languages used in a repository.
#
# Requires an authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of Hashes representing languages.
# @see https://developer.github.com/v3/repos/#list-languages
# @example
#   Octokit.languages('octokit/octokit.rb')
def languages(repo, options = {})
  path = "#{Repository.path repo}/languages"
  paginate path, options
end
# Fetch the tags of a repository.
#
# Private repositories require an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param options [Hash] Optional request parameters.
# @return [Array<Sawyer::Resource>] Array of hashes representing tags.
# @see https://developer.github.com/v3/repos/#list-tags
# @example
#   Octokit.tags('octokit/octokit.rb')
# @example
#   @client.tags('octokit/octokit.rb')
def tags(repo, options = {})
  path = "#{Repository.path repo}/tags"
  paginate(path, options)
end
# Fetch the branches of a repository.
#
# Private repositories require an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param options [Hash] Optional request parameters.
# @return [Array<Sawyer::Resource>] Array of hashes representing branches.
# @see https://developer.github.com/v3/repos/#list-branches
# @example
#   Octokit.branches('octokit/octokit.rb')
# @example
#   @client.branches('octokit/octokit.rb')
def branches(repo, options = {})
  path = "#{Repository.path repo}/branches"
  paginate(path, options)
end
# Fetch a single branch of a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param branch [String] Branch name
# @param options [Hash] Optional request parameters.
# @return [Sawyer::Resource] The branch requested, if it exists
# @see https://developer.github.com/v3/repos/#get-branch
# @example Get branch 'master` from octokit/octokit.rb
#   Octokit.branch("octokit/octokit.rb", "master")
def branch(repo, branch, options = {})
  path = "#{Repository.path repo}/branches/#{branch}"
  get(path, options)
end
alias :get_branch :branch
# Enable protection on a single branch of a repository.
#
# Requires authenticated client
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param branch [String] Branch name
# @option options [Hash] :required_status_checks If not null, the following keys are required:
#   <tt>:enforce_admins [boolean] Enforce required status checks for repository administrators.</tt>
#   <tt>:strict [boolean] Require branches to be up to date before merging.</tt>
#   <tt>:contexts [Array] The list of status checks to require in order to merge into this branch</tt>
#
# @option options [Hash] :restrictions If not null, the following keys are required:
#   <tt>:users [Array] The list of user logins with push access</tt>
#   <tt>:teams [Array] The list of team slugs with push access</tt>.
#
#   Teams and users restrictions are only available for organization-owned repositories.
# @return [Sawyer::Resource] The protected branch
# @see https://developer.github.com/v3/repos/#enabling-and-disabling-branch-protection
# @example
#   @client.protect_branch('octokit/octokit.rb', 'master', foo)
def protect_branch(repo, branch, options = {})
  payload = ensure_api_media_type(:branch_protection, options)
  # The API requires these keys to be present, even when explicitly null.
  payload[:restrictions] ||= nil
  payload[:required_status_checks] ||= nil
  put("#{Repository.path repo}/branches/#{branch}/protection", payload)
end
# Fetch the protection summary for a branch.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param branch [String] Branch name
# @param options [Hash] Optional request parameters.
# @return [Sawyer::Resource, nil] Branch protection summary or nil if the branch
#   is not protected
# @see https://developer.github.com/v3/repos/branches/#get-branch-protection
# @example
#   @client.branch_protection('octokit/octokit.rb', 'master')
def branch_protection(repo, branch, options = {})
  opts = ensure_api_media_type(:branch_protection, options)
  get("#{Repository.path repo}/branches/#{branch}/protection", opts)
rescue Octokit::BranchNotProtected
  # An unprotected branch is a normal, expected state — signal it with nil.
  nil
end
# Disable protection on a single branch of a repository.
#
# Requires authenticated client
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param branch [String] Branch name
# @param options [Hash] Optional request parameters.
# @return [Sawyer::Resource] The unprotected branch
# @see https://developer.github.com/v3/repos/#enabling-and-disabling-branch-protection
# @example
#   @client.unprotect_branch('octokit/octokit.rb', 'master')
def unprotect_branch(repo, branch, options = {})
  opts = ensure_api_media_type(:branch_protection, options)
  path = "#{Repository.path repo}/branches/#{branch}/protection"
  boolean_from_response(:delete, path, opts)
end
# Fetch the users that issues in a repository can be assigned to.
#
# Private repositories require an authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param options [Hash] Optional request parameters.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/issues/assignees/#list-assignees
# @example
#   Octokit.repository_assignees('octokit/octokit.rb')
# @example
#   Octokit.repo_assignees('octokit/octokit.rb')
# @example
#   @client.repository_assignees('octokit/octokit.rb')
def repository_assignees(repo, options = {})
  path = "#{Repository.path repo}/assignees"
  paginate(path, options)
end
alias :repo_assignees :repository_assignees
# Determine whether a given user can be assigned issues in a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param assignee [String] User login to check
# @param options [Hash] Optional request parameters.
# @return [Boolean] True if assignable on project, false otherwise.
# @see https://developer.github.com/v3/issues/assignees/#check-assignee
# @example
#   Octokit.check_assignee('octokit/octokit.rb', 'andrew')
def check_assignee(repo, assignee, options = {})
  path = "#{Repository.path repo}/assignees/#{assignee}"
  boolean_from_response(:get, path, options)
end
# Fetch the users subscribed to notifications for a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param options [Hash] Optional request parameters.
# @return [Array<Sawyer::Resource>] Array of users watching.
# @see https://developer.github.com/v3/activity/watching/#list-watchers
# @example
#   @client.subscribers("octokit/octokit.rb")
def subscribers(repo, options = {})
  path = "#{Repository.path repo}/subscribers"
  paginate(path, options)
end
# Fetch the authenticated user's subscription to a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param options [Hash] Optional request parameters.
# @return [Sawyer::Resource] Repository subscription.
# @see https://developer.github.com/v3/activity/watching/#get-a-repository-subscription
# @example
#   @client.subscription("octokit/octokit.rb")
def subscription(repo, options = {})
  path = "#{Repository.path repo}/subscription"
  get(path, options)
end
# Set or change the authenticated user's subscription to a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param options [Hash]
#
# @option options [Boolean] :subscribed Determines if notifications
#   should be received from this repository.
# @option options [Boolean] :ignored Deterimines if all notifications
#   should be blocked from this repository.
# @return [Sawyer::Resource] Updated repository subscription.
# @see https://developer.github.com/v3/activity/watching/#set-a-repository-subscription
# @example Subscribe to notifications for a repository
#   @client.update_subscription("octokit/octokit.rb", {subscribed: true})
def update_subscription(repo, options = {})
  path = "#{Repository.path repo}/subscription"
  put(path, options)
end
# Remove the authenticated user's subscription to a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param options [Hash] Optional request parameters.
# @return [Boolean] True if subscription deleted, false otherwise.
# @see https://developer.github.com/v3/activity/watching/#delete-a-repository-subscription
#
# @example
#   @client.delete_subscription("octokit/octokit.rb")
def delete_subscription(repo, options = {})
  path = "#{Repository.path repo}/subscription"
  boolean_from_response(:delete, path, options)
end
end
end
end
|
require 'rspec'
require 'tempfile'
require 'rspec/core/rake_task'
require 'bosh/dev/bat_helper'
require 'bosh/dev/sandbox/nginx'
require 'bosh/dev/sandbox/workspace'
require 'common/thread_pool'
require 'parallel_tests/tasks'
# Rake tasks for running BOSH unit, integration, external and system (BATs) specs.
namespace :spec do
  namespace :integration do
    desc 'Run BOSH integration tests against a local sandbox'
    task :agent => :install_dependencies do
      # Build the Go agent binary before exercising the integration suite.
      sh('go/src/github.com/cloudfoundry/bosh-agent/bin/build')
      run_integration_specs
    end
    desc 'Install BOSH integration test dependencies (currently Nginx)'
    task :install_dependencies do
      unless ENV['SKIP_NGINX'] == 'true'
        nginx = Bosh::Dev::Sandbox::Nginx.new
        nginx.install
      end
    end
    # Clean the sandbox workspace and run spec/integration, optionally in parallel.
    def run_integration_specs
      Bosh::Dev::Sandbox::Workspace.clean
      # NUM_GROUPS overrides the process count; on Travis default to 4.
      num_processes = ENV['NUM_GROUPS']
      num_processes ||= ENV['TRAVIS'] ? 4 : nil
      options = {}
      options[:count] = num_processes if num_processes
      options[:group] = ENV['GROUP'] if ENV['GROUP']
      puts 'Launching parallel execution of spec/integration'
      run_in_parallel('spec/integration', options)
    end
    # Shell out to parallel_test (or plain rspec when SPEC_PATH is set).
    # Proxies are cleared so local sandbox traffic is not routed externally.
    def run_in_parallel(test_path, options={})
      spec_path = ENV['SPEC_PATH']
      count = " -n #{options[:count]}" unless options[:count].to_s.empty?
      group = " --only-group #{options[:group]}" unless options[:group].to_s.empty?
      command = begin
        if spec_path
          "https_proxy= http_proxy= bundle exec rspec #{spec_path}"
        else
          "https_proxy= http_proxy= bundle exec parallel_test '#{test_path}'#{count}#{group} --group-by filesize --type rspec -o '--format documentation'"
        end
      end
      puts command
      abort unless system(command)
    end
  end
  task :integration => %w(spec:integration:agent)
  namespace :unit do
    desc 'Run unit tests for each BOSH component gem in parallel'
    task ruby_gems: %w(rubocop) do
      trap('INT') { exit }
      # Every top-level directory with a spec/ folder is a testable component.
      # NOTE: File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
      builds = Dir['*'].select { |f| File.directory?(f) && File.exist?("#{f}/spec") }
      builds -= %w(bat)
      cpi_builds = builds.select { |f| File.directory?(f) && f.end_with?("_cpi") }
      spec_logs = Dir.mktmpdir
      puts "Logging spec results in #{spec_logs}"
      max_threads = ENV.fetch('BOSH_MAX_THREADS', 10).to_i
      null_logger = Logging::Logger.new('Ignored')
      Bosh::ThreadPool.new(max_threads: max_threads, logger: null_logger).wrap do |pool|
        builds.each do |build|
          pool.process do
            log_file = "#{spec_logs}/#{build}.log"
            # CPI gems only run their unit specs; everything else runs spec/.
            rspec_files = cpi_builds.include?(build) ? "spec/unit/" : "spec/"
            rspec_cmd = "rspec --tty --backtrace -c -f p #{rspec_files}"
            # inject command name so coverage results for each component don't clobber others
            if system({'BOSH_BUILD_NAME' => build}, "cd #{build} && #{rspec_cmd} > #{log_file} 2>&1")
              puts "----- BEGIN #{build}"
              puts "             #{rspec_cmd}"
              print File.read(log_file)
              puts "----- END #{build}\n\n"
            else
              raise("#{build} failed to build unit tests: #{File.read(log_file)}")
            end
          end
        end
        pool.wait
      end
    end
    task(:agent) do
      # Do not use exec because this task is part of other tasks
      sh('cd go/src/github.com/cloudfoundry/bosh-agent/ && bin/test-unit')
    end
  end
  task :unit => %w(spec:unit:ruby_gems spec:unit:agent)
  namespace :external do
    desc 'AWS bootstrap CLI can provision and destroy resources'
    RSpec::Core::RakeTask.new(:aws_bootstrap) do |t|
      t.pattern = 'spec/external/aws_bootstrap_spec.rb'
      t.rspec_opts = %w(--format documentation --color)
    end
  end
  namespace :system do
    desc 'Run system (BATs) tests (deploys microbosh)'
    task :micro, [:infrastructure_name, :hypervisor_name, :operating_system_name, :operating_system_version, :net_type, :agent_name, :light, :disk_format] do |_, args|
      Bosh::Dev::BatHelper.for_rake_args(args).deploy_microbosh_and_run_bats
    end
    desc 'Run system (BATs) tests (uses existing microbosh)'
    task :existing_micro, [:infrastructure_name, :hypervisor_name, :operating_system_name, :operating_system_version, :net_type, :agent_name, :light, :disk_format] do |_, args|
      Bosh::Dev::BatHelper.for_rake_args(args).run_bats
    end
    desc 'Deploy microbosh for system (BATs) tests'
    task :deploy_micro, [:infrastructure_name, :hypervisor_name, :operating_system_name, :operating_system_version, :net_type, :agent_name, :light, :disk_format] do |_, args|
      Bosh::Dev::BatHelper.for_rake_args(args).deploy_bats_microbosh
    end
  end
end
desc 'Run unit and integration specs'
task :spec => %w(spec:unit spec:integration)
Add retries to the nginx installation in the integration specs.
Blob syncing is flaky; the install is now retried up to 3 times before failing.
[#98764200](https://www.pivotaltracker.com/story/show/98764200)
Signed-off-by: Tim Hausler <0e2fe13acea22d58959d12c030372eeff089f09b@pivotal.io>
require 'rspec'
require 'tempfile'
require 'rspec/core/rake_task'
require 'bosh/dev/bat_helper'
require 'bosh/dev/sandbox/nginx'
require 'bosh/dev/sandbox/workspace'
require 'common/thread_pool'
require 'parallel_tests/tasks'
# Rake tasks for running BOSH unit, integration, external and system (BATs) specs.
namespace :spec do
  namespace :integration do
    desc 'Run BOSH integration tests against a local sandbox'
    task :agent => :install_dependencies do
      # Build the Go agent binary before exercising the integration suite.
      sh('go/src/github.com/cloudfoundry/bosh-agent/bin/build')
      run_integration_specs
    end
    desc 'Install BOSH integration test dependencies (currently Nginx)'
    task :install_dependencies do
      unless ENV['SKIP_NGINX'] == 'true'
        nginx = Bosh::Dev::Sandbox::Nginx.new
        # Blob syncing during install is flaky; retry up to 3 times before
        # surfacing the failure.
        retries = 3
        begin
          nginx.install
        rescue
          retries -= 1
          retry if retries > 0
          raise
        end
      end
    end
    # Clean the sandbox workspace and run spec/integration, optionally in parallel.
    def run_integration_specs
      Bosh::Dev::Sandbox::Workspace.clean
      # NUM_GROUPS overrides the process count; on Travis default to 4.
      num_processes = ENV['NUM_GROUPS']
      num_processes ||= ENV['TRAVIS'] ? 4 : nil
      options = {}
      options[:count] = num_processes if num_processes
      options[:group] = ENV['GROUP'] if ENV['GROUP']
      puts 'Launching parallel execution of spec/integration'
      run_in_parallel('spec/integration', options)
    end
    # Shell out to parallel_test (or plain rspec when SPEC_PATH is set).
    # Proxies are cleared so local sandbox traffic is not routed externally.
    def run_in_parallel(test_path, options={})
      spec_path = ENV['SPEC_PATH']
      count = " -n #{options[:count]}" unless options[:count].to_s.empty?
      group = " --only-group #{options[:group]}" unless options[:group].to_s.empty?
      command = begin
        if spec_path
          "https_proxy= http_proxy= bundle exec rspec #{spec_path}"
        else
          "https_proxy= http_proxy= bundle exec parallel_test '#{test_path}'#{count}#{group} --group-by filesize --type rspec -o '--format documentation'"
        end
      end
      puts command
      abort unless system(command)
    end
  end
  task :integration => %w(spec:integration:agent)
  namespace :unit do
    desc 'Run unit tests for each BOSH component gem in parallel'
    task ruby_gems: %w(rubocop) do
      trap('INT') { exit }
      # Every top-level directory with a spec/ folder is a testable component.
      # NOTE: File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
      builds = Dir['*'].select { |f| File.directory?(f) && File.exist?("#{f}/spec") }
      builds -= %w(bat)
      cpi_builds = builds.select { |f| File.directory?(f) && f.end_with?("_cpi") }
      spec_logs = Dir.mktmpdir
      puts "Logging spec results in #{spec_logs}"
      max_threads = ENV.fetch('BOSH_MAX_THREADS', 10).to_i
      null_logger = Logging::Logger.new('Ignored')
      Bosh::ThreadPool.new(max_threads: max_threads, logger: null_logger).wrap do |pool|
        builds.each do |build|
          pool.process do
            log_file = "#{spec_logs}/#{build}.log"
            # CPI gems only run their unit specs; everything else runs spec/.
            rspec_files = cpi_builds.include?(build) ? "spec/unit/" : "spec/"
            rspec_cmd = "rspec --tty --backtrace -c -f p #{rspec_files}"
            # inject command name so coverage results for each component don't clobber others
            if system({'BOSH_BUILD_NAME' => build}, "cd #{build} && #{rspec_cmd} > #{log_file} 2>&1")
              puts "----- BEGIN #{build}"
              puts "             #{rspec_cmd}"
              print File.read(log_file)
              puts "----- END #{build}\n\n"
            else
              raise("#{build} failed to build unit tests: #{File.read(log_file)}")
            end
          end
        end
        pool.wait
      end
    end
    task(:agent) do
      # Do not use exec because this task is part of other tasks
      sh('cd go/src/github.com/cloudfoundry/bosh-agent/ && bin/test-unit')
    end
  end
  task :unit => %w(spec:unit:ruby_gems spec:unit:agent)
  namespace :external do
    desc 'AWS bootstrap CLI can provision and destroy resources'
    RSpec::Core::RakeTask.new(:aws_bootstrap) do |t|
      t.pattern = 'spec/external/aws_bootstrap_spec.rb'
      t.rspec_opts = %w(--format documentation --color)
    end
  end
  namespace :system do
    desc 'Run system (BATs) tests (deploys microbosh)'
    task :micro, [:infrastructure_name, :hypervisor_name, :operating_system_name, :operating_system_version, :net_type, :agent_name, :light, :disk_format] do |_, args|
      Bosh::Dev::BatHelper.for_rake_args(args).deploy_microbosh_and_run_bats
    end
    desc 'Run system (BATs) tests (uses existing microbosh)'
    task :existing_micro, [:infrastructure_name, :hypervisor_name, :operating_system_name, :operating_system_version, :net_type, :agent_name, :light, :disk_format] do |_, args|
      Bosh::Dev::BatHelper.for_rake_args(args).run_bats
    end
    desc 'Deploy microbosh for system (BATs) tests'
    task :deploy_micro, [:infrastructure_name, :hypervisor_name, :operating_system_name, :operating_system_version, :net_type, :agent_name, :light, :disk_format] do |_, args|
      Bosh::Dev::BatHelper.for_rake_args(args).deploy_bats_microbosh
    end
  end
end
desc 'Run unit and integration specs'
task :spec => %w(spec:unit spec:integration)
|
require 'omniauth-oauth2'
module OmniAuth
module Strategies
# OmniAuth OAuth2 strategy for oschina.net.
#
# SECURITY FIX: all endpoints were previously plain http, which would leak
# authorization codes and access tokens in transit; they now use https.
class Oschina < OmniAuth::Strategies::OAuth2
  option :client_options, {
    :site => 'https://www.oschina.net',
    :authorize_url => 'https://www.oschina.net/action/oauth2/authorize',
    :token_url => 'https://www.oschina.net/action/oauth2/token',
    :profile_url => 'https://www.oschina.net/action/oauth2/user'
  }
  def request_phase
    super
  end
  # Forward selected request params into the authorize redirect.
  def authorize_params
    super.tap do |params|
      %w[scope client_options].each do |v|
        if request.params[v]
          params[v.to_sym] = request.params[v]
          # to support omniauth-oauth2's auto csrf protection
          # NOTE(review): v is never 'state' here since only scope/client_options
          # are iterated — this branch looks dead; confirm intent.
          session['omniauth.state'] = params[:state] if v == 'state'
        end
      end
    end
  end
  uid { raw_info['id'].to_s }
  info do
    {
      'nickname' => raw_info['name'],
      'email' => email,
      'name' => raw_info['name'],
      'avatar' => raw_info['avatar'],
      'blog' => raw_info['url'],
    }
  end
  extra do
    {:raw_info => raw_info}
  end
  # Fetch and memoize the user profile. The token is sent as an
  # 'access_token' query parameter, as the oschina API expects.
  def raw_info
    access_token.options[:param_name] = 'access_token'
    access_token.options[:mode] = :query
    @raw_info ||= access_token.get('action/oauth2/user').parsed
  end
  def email
    raw_info['email']
  end
  # Truthy when the granted scope includes user data access.
  def email_access_allowed?
    options['scope'] =~ /user/
  end
end
end
end
OmniAuth.config.add_camelization 'oschina', 'Oschina'
Switch the OAuth2 authorize, token, and profile endpoint URLs from http to https.
require 'omniauth-oauth2'
module OmniAuth
module Strategies
# OmniAuth OAuth2 strategy for oschina.net.
#
# SECURITY FIX: :site was still http while the other endpoints were https.
# raw_info builds its request relative to :site with the access_token as a
# query parameter, so the token was sent over plain http. Now https throughout.
class Oschina < OmniAuth::Strategies::OAuth2
  option :client_options, {
    :site => 'https://www.oschina.net',
    :authorize_url => 'https://www.oschina.net/action/oauth2/authorize',
    :token_url => 'https://www.oschina.net/action/oauth2/token',
    :profile_url => 'https://www.oschina.net/action/oauth2/user'
  }
  def request_phase
    super
  end
  # Forward selected request params into the authorize redirect.
  def authorize_params
    super.tap do |params|
      %w[scope client_options].each do |v|
        if request.params[v]
          params[v.to_sym] = request.params[v]
          # to support omniauth-oauth2's auto csrf protection
          # NOTE(review): v is never 'state' here since only scope/client_options
          # are iterated — this branch looks dead; confirm intent.
          session['omniauth.state'] = params[:state] if v == 'state'
        end
      end
    end
  end
  uid { raw_info['id'].to_s }
  info do
    {
      'nickname' => raw_info['name'],
      'email' => email,
      'name' => raw_info['name'],
      'avatar' => raw_info['avatar'],
      'blog' => raw_info['url'],
    }
  end
  extra do
    {:raw_info => raw_info}
  end
  # Fetch and memoize the user profile. The token is sent as an
  # 'access_token' query parameter, as the oschina API expects.
  def raw_info
    access_token.options[:param_name] = 'access_token'
    access_token.options[:mode] = :query
    @raw_info ||= access_token.get('action/oauth2/user').parsed
  end
  def email
    raw_info['email']
  end
  # Truthy when the granted scope includes user data access.
  def email_access_allowed?
    options['scope'] =~ /user/
  end
end
end
end
OmniAuth.config.add_camelization 'oschina', 'Oschina'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.