CombinedText stringlengths 4 3.42M |
|---|
module ManagerRefresh::SaveCollection
  module Saver
    # Helpers that assemble a PostgreSQL INSERT ... ON CONFLICT (upsert)
    # statement for a batch of rows belonging to one inventory collection.
    module SqlHelperUpsert
      # Builds ON CONFLICT UPDATE updating branch for one column identified by the passed key
      #
      # @param key [Symbol] key that is column name
      # @return [String] SQL clause for upserting one column
      def build_insert_set_cols(key)
        "#{quote_column_name(key)} = EXCLUDED.#{quote_column_name(key)}"
      end

      # Builds the whole batch INSERT statement.
      #
      # @param all_attribute_keys [Array<Symbol>] Array of all columns we will be saving into each table row
      # @param hashes [Array<Hash>] data used for building a batch insert sql query
      # @param mode [Symbol] Mode for saving, allowed values are [:full, :partial], :full is when we save all
      #   columns of a row, :partial is when we save only few columns, so a partial row.
      # @param on_conflict [Symbol, NilClass] defines behavior on conflict with unique index constraint, allowed values
      #   are :do_update, :do_nothing, nil
      # @param column_name [Symbol, NilClass] column being saved; required when mode is :partial and
      #   on_conflict is :do_update
      # @return [String] the complete SQL INSERT statement
      def build_insert_query(all_attribute_keys, hashes, on_conflict: nil, mode:, column_name: nil)
        _log.debug("Building insert query for #{inventory_collection} of size #{inventory_collection.size}...")

        # Cache the connection for the batch
        connection = get_connection
        # Ignore versioning columns that are set separately
        ignore_cols = mode == :partial ? [:resource_timestamp, :resource_version] : []
        # Make sure we don't send a primary_key for INSERT in any form, it could break PG sequencer
        all_attribute_keys_array = all_attribute_keys.to_a - [primary_key.to_s, primary_key.to_sym] - ignore_cols

        insert_query = insert_query_insert_values(hashes, all_attribute_keys_array, connection)
        insert_query += insert_query_on_conflict_behavior(all_attribute_keys, on_conflict, mode, ignore_cols, column_name)
        insert_query += insert_query_returning

        _log.debug("Building insert query for #{inventory_collection} of size #{inventory_collection.size}...Complete")
        insert_query
      end

      private

      # Builds the INSERT INTO ... VALUES ... portion: one quoted tuple per row hash.
      def insert_query_insert_values(hashes, all_attribute_keys_array, connection)
        values = hashes.map do |hash|
          "(#{all_attribute_keys_array.map { |x| quote(connection, hash[x], x) }.join(",")})"
        end.join(",")

        col_names = all_attribute_keys_array.map { |x| quote_column_name(x) }.join(",")

        <<-SQL
          INSERT INTO #{q_table_name} (#{col_names})
            VALUES
              #{values}
        SQL
      end

      # Builds the ON CONFLICT portion; returns "" unless the collection
      # supports parallel-safe saving (only then do we upsert).
      def insert_query_on_conflict_behavior(all_attribute_keys, on_conflict, mode, ignore_cols, column_name)
        return "" unless inventory_collection.parallel_safe?

        insert_query_on_conflict = insert_query_on_conflict_do(on_conflict)
        if on_conflict == :do_update
          insert_query_on_conflict += insert_query_on_conflict_update(all_attribute_keys, mode, ignore_cols, column_name)
        end
        insert_query_on_conflict
      end

      # DO NOTHING, or the ON CONFLICT (<unique cols>) [WHERE ...] DO UPDATE prefix.
      def insert_query_on_conflict_do(on_conflict)
        if on_conflict == :do_nothing
          <<-SQL
            ON CONFLICT DO NOTHING
          SQL
        elsif on_conflict == :do_update
          index_where_condition = unique_index_for(unique_index_keys).where
          where_to_sql = index_where_condition ? "WHERE #{index_where_condition}" : ""

          <<-SQL
            ON CONFLICT (#{unique_index_columns.map { |x| quote_column_name(x) }.join(",")}) #{where_to_sql}
            DO
              UPDATE
          SQL
        end
      end

      # SET clause of the DO UPDATE branch plus, when a version/timestamp
      # column is present, a WHERE guard so newer stored data is kept.
      def insert_query_on_conflict_update(all_attribute_keys, mode, ignore_cols, column_name)
        if mode == :partial
          # Partial saves maintain the versions bookkeeping columns through the
          # specialized clauses below, not through the generic SET.
          ignore_cols += [:resource_timestamps, :resource_timestamps_max, :resource_versions, :resource_versions_max]
        end
        ignore_cols += [:created_on, :created_at] # Lets not change created for the update clause

        # If there is not version attribute, the update part will be ignored below
        version_attribute = if supports_remote_data_timestamp?(all_attribute_keys)
                              :resource_timestamp
                            elsif supports_remote_data_version?(all_attribute_keys)
                              :resource_version
                            end

        # TODO(lsmola) should we add :deleted => false to the update clause? That should handle a reconnect, without a
        # a need to list :deleted anywhere in the parser. We just need to check that a model has the :deleted attribute
        query = <<-SQL
          SET #{(all_attribute_keys - ignore_cols).map { |key| build_insert_set_cols(key) }.join(", ")}
        SQL

        # This conditional will make sure we are avoiding rewriting new data by old data. But we want it only when
        # remote_data_timestamp is a part of the data.
        query += insert_query_on_conflict_update_mode(mode, version_attribute, column_name) if version_attribute
        query
      end

      # Picks the version guard matching the save mode (:full or :partial).
      def insert_query_on_conflict_update_mode(mode, version_attribute, column_name)
        if mode == :full
          full_update_condition(version_attribute)
        elsif mode == :partial
          raise "Column name must be provided" unless column_name
          partial_update_condition(version_attribute, column_name)
        end
      end

      # :full-mode guard: update only when the incoming (EXCLUDED) version or
      # timestamp is newer than both the stored full value and the newest
      # recorded partial value; also resets the partial bookkeeping columns.
      def full_update_condition(attr_full)
        attr_partial = attr_full.to_s.pluralize # Changes resource_version/timestamp to resource_versions/timestamps
        attr_partial_max = "#{attr_partial}_max"

        # Quote the column names
        attr_full = quote_column_name(attr_full)
        attr_partial = quote_column_name(attr_partial)
        attr_partial_max = quote_column_name(attr_partial_max)

        <<-SQL
          , #{attr_partial} = '{}', #{attr_partial_max} = NULL
          WHERE EXCLUDED.#{attr_full} IS NULL OR (
            (#{q_table_name}.#{attr_full} IS NULL OR EXCLUDED.#{attr_full} > #{q_table_name}.#{attr_full}) AND
            (#{q_table_name}.#{attr_partial_max} IS NULL OR EXCLUDED.#{attr_full} >= #{q_table_name}.#{attr_partial_max})
          )
        SQL
      end

      # :partial-mode guard: merge this column's version/timestamp into the
      # versions jsonb and raise the _max column, but only when the incoming
      # value is newer than what is already recorded for the column.
      def partial_update_condition(attr_full, column_name)
        attr_partial = attr_full.to_s.pluralize # Changes resource_version/timestamp to resource_versions/timestamps
        attr_partial_max = "#{attr_partial}_max"

        # Cast matching the column type (timestamps vs. integer versions).
        cast = if attr_full == :resource_timestamp
                 "timestamp"
               elsif attr_full == :resource_version
                 "integer"
               end

        # Quote the column names
        attr_full = quote_column_name(attr_full)
        attr_partial = quote_column_name(attr_partial)
        attr_partial_max = quote_column_name(attr_partial_max)

        column_name = get_connection.quote_string(column_name.to_s)
        q_table_name = get_connection.quote_table_name(table_name)

        <<-SQL
          #{insert_query_set_jsonb_version(cast, attr_partial, attr_partial_max, column_name)}
          , #{attr_partial_max} = greatest(#{q_table_name}.#{attr_partial_max}::#{cast}, EXCLUDED.#{attr_partial_max}::#{cast})
          WHERE EXCLUDED.#{attr_partial_max} IS NULL OR (
            (#{q_table_name}.#{attr_full} IS NULL OR EXCLUDED.#{attr_partial_max} > #{q_table_name}.#{attr_full}) AND (
              (#{q_table_name}.#{attr_partial}->>'#{column_name}')::#{cast} IS NULL OR
              EXCLUDED.#{attr_partial_max}::#{cast} > (#{q_table_name}.#{attr_partial}->>'#{column_name}')::#{cast}
            )
          )
        SQL
      end

      # SET fragment storing this column's version under its key in the jsonb.
      # BUG FIX: integer versions used to be written wrapped in quotes
      # ('{"col": "1"}'), turning them into JSON strings; store them unquoted
      # so they remain jsonb numbers and compare numerically.
      def insert_query_set_jsonb_version(cast, attr_partial, attr_partial_max, column_name)
        if cast == "integer"
          # If we have integer value, we don't want to encapsulate the value in ""
          <<-SQL
            , #{attr_partial} = #{q_table_name}.#{attr_partial} || ('{"#{column_name}": ' || EXCLUDED.#{attr_partial_max}::#{cast} || '}')::jsonb
          SQL
        else
          <<-SQL
            , #{attr_partial} = #{q_table_name}.#{attr_partial} || ('{"#{column_name}": "' || EXCLUDED.#{attr_partial_max}::#{cast} || '"}')::jsonb
          SQL
        end
      end

      # RETURNING clause: id plus the unique-index columns (and, for upserts,
      # the internal timestamps).
      def insert_query_returning
        <<-SQL
          RETURNING "id",#{unique_index_columns.map { |x| quote_column_name(x) }.join(",")}
                    #{insert_query_returning_timestamps}
        SQL
      end

      # Internal created/updated timestamps let callers tell inserts apart
      # from updates; only returned for parallel-safe (upsert) collections.
      def insert_query_returning_timestamps
        if inventory_collection.parallel_safe?
          # For upsert, we'll return also created and updated timestamps, so we can recognize what was created and what
          # updated
          if inventory_collection.internal_timestamp_columns.present?
            <<-SQL
              , #{inventory_collection.internal_timestamp_columns.map { |x| quote_column_name(x) }.join(",")}
            SQL
          end
        else
          ""
        end
      end
    end
  end
end
Store integer in resource_versions jsonb
Store integer in resource_versions jsonb
module ManagerRefresh::SaveCollection
module Saver
# Helpers that assemble a PostgreSQL INSERT ... ON CONFLICT (upsert)
# statement for a batch of rows belonging to one inventory collection.
module SqlHelperUpsert
# Builds ON CONFLICT UPDATE updating branch for one column identified by the passed key
#
# @param key [Symbol] key that is column name
# @return [String] SQL clause for upserting one column
def build_insert_set_cols(key)
"#{quote_column_name(key)} = EXCLUDED.#{quote_column_name(key)}"
end
# Builds the whole batch INSERT statement.
#
# @param all_attribute_keys [Array<Symbol>] Array of all columns we will be saving into each table row
# @param hashes [Array<Hash>] data used for building a batch insert sql query
# @param mode [Symbol] Mode for saving, allowed values are [:full, :partial], :full is when we save all
# columns of a row, :partial is when we save only few columns, so a partial row.
# @param on_conflict [Symbol, NilClass] defines behavior on conflict with unique index constraint, allowed values
# are :do_update, :do_nothing, nil
# @param column_name [Symbol, NilClass] column being saved; required when mode is :partial and
# on_conflict is :do_update
# @return [String] the complete SQL INSERT statement
def build_insert_query(all_attribute_keys, hashes, on_conflict: nil, mode:, column_name: nil)
_log.debug("Building insert query for #{inventory_collection} of size #{inventory_collection.size}...")
# Cache the connection for the batch
connection = get_connection
# Ignore versioning columns that are set separately
ignore_cols = mode == :partial ? [:resource_timestamp, :resource_version] : []
# Make sure we don't send a primary_key for INSERT in any form, it could break PG sequencer
all_attribute_keys_array = all_attribute_keys.to_a - [primary_key.to_s, primary_key.to_sym] - ignore_cols
insert_query = insert_query_insert_values(hashes, all_attribute_keys_array, connection)
insert_query += insert_query_on_conflict_behavior(all_attribute_keys, on_conflict, mode, ignore_cols, column_name)
insert_query += insert_query_returning
_log.debug("Building insert query for #{inventory_collection} of size #{inventory_collection.size}...Complete")
insert_query
end
private
# Builds the INSERT INTO ... VALUES ... portion: one quoted tuple per row hash.
def insert_query_insert_values(hashes, all_attribute_keys_array, connection)
values = hashes.map do |hash|
"(#{all_attribute_keys_array.map { |x| quote(connection, hash[x], x) }.join(",")})"
end.join(",")
col_names = all_attribute_keys_array.map { |x| quote_column_name(x) }.join(",")
<<-SQL
INSERT INTO #{q_table_name} (#{col_names})
VALUES
#{values}
SQL
end
# Builds the ON CONFLICT portion; returns "" unless the collection supports
# parallel-safe saving (only then do we upsert).
def insert_query_on_conflict_behavior(all_attribute_keys, on_conflict, mode, ignore_cols, column_name)
return "" unless inventory_collection.parallel_safe?
insert_query_on_conflict = insert_query_on_conflict_do(on_conflict)
if on_conflict == :do_update
insert_query_on_conflict += insert_query_on_conflict_update(all_attribute_keys, mode, ignore_cols, column_name)
end
insert_query_on_conflict
end
# DO NOTHING, or the ON CONFLICT (<unique cols>) [WHERE ...] DO UPDATE prefix.
def insert_query_on_conflict_do(on_conflict)
if on_conflict == :do_nothing
<<-SQL
ON CONFLICT DO NOTHING
SQL
elsif on_conflict == :do_update
index_where_condition = unique_index_for(unique_index_keys).where
where_to_sql = index_where_condition ? "WHERE #{index_where_condition}" : ""
<<-SQL
ON CONFLICT (#{unique_index_columns.map { |x| quote_column_name(x) }.join(",")}) #{where_to_sql}
DO
UPDATE
SQL
end
end
# SET clause of the DO UPDATE branch plus, when a version/timestamp column is
# present, a WHERE guard so newer stored data is never overwritten by old data.
def insert_query_on_conflict_update(all_attribute_keys, mode, ignore_cols, column_name)
if mode == :partial
# Partial saves maintain the versions bookkeeping columns through the
# specialized clauses below, not through the generic SET.
ignore_cols += [:resource_timestamps, :resource_timestamps_max, :resource_versions, :resource_versions_max]
end
ignore_cols += [:created_on, :created_at] # Lets not change created for the update clause
# If there is not version attribute, the update part will be ignored below
version_attribute = if supports_remote_data_timestamp?(all_attribute_keys)
:resource_timestamp
elsif supports_remote_data_version?(all_attribute_keys)
:resource_version
end
# TODO(lsmola) should we add :deleted => false to the update clause? That should handle a reconnect, without a
# a need to list :deleted anywhere in the parser. We just need to check that a model has the :deleted attribute
query = <<-SQL
SET #{(all_attribute_keys - ignore_cols).map { |key| build_insert_set_cols(key) }.join(", ")}
SQL
# This conditional will make sure we are avoiding rewriting new data by old data. But we want it only when
# remote_data_timestamp is a part of the data.
query += insert_query_on_conflict_update_mode(mode, version_attribute, column_name) if version_attribute
query
end
# Picks the version guard matching the save mode (:full or :partial).
def insert_query_on_conflict_update_mode(mode, version_attribute, column_name)
if mode == :full
full_update_condition(version_attribute)
elsif mode == :partial
raise "Column name must be provided" unless column_name
partial_update_condition(version_attribute, column_name)
end
end
# :full-mode guard: update only when the incoming (EXCLUDED) version/timestamp
# is newer than both the stored full value and the newest recorded partial
# value; also resets the partial bookkeeping columns.
def full_update_condition(attr_full)
attr_partial = attr_full.to_s.pluralize # Changes resource_version/timestamp to resource_versions/timestamps
attr_partial_max = "#{attr_partial}_max"
# Quote the column names
attr_full = quote_column_name(attr_full)
attr_partial = quote_column_name(attr_partial)
attr_partial_max = quote_column_name(attr_partial_max)
<<-SQL
, #{attr_partial} = '{}', #{attr_partial_max} = NULL
WHERE EXCLUDED.#{attr_full} IS NULL OR (
(#{q_table_name}.#{attr_full} IS NULL OR EXCLUDED.#{attr_full} > #{q_table_name}.#{attr_full}) AND
(#{q_table_name}.#{attr_partial_max} IS NULL OR EXCLUDED.#{attr_full} >= #{q_table_name}.#{attr_partial_max})
)
SQL
end
# :partial-mode guard: merge this column's version/timestamp into the versions
# jsonb and raise the _max column, but only when the incoming value is newer
# than what is already recorded for this column.
def partial_update_condition(attr_full, column_name)
attr_partial = attr_full.to_s.pluralize # Changes resource_version/timestamp to resource_versions/timestamps
attr_partial_max = "#{attr_partial}_max"
# Cast matching the column type (timestamps vs. integer versions).
cast = if attr_full == :resource_timestamp
"timestamp"
elsif attr_full == :resource_version
"integer"
end
# Quote the column names
attr_full = quote_column_name(attr_full)
attr_partial = quote_column_name(attr_partial)
attr_partial_max = quote_column_name(attr_partial_max)
column_name = get_connection.quote_string(column_name.to_s)
q_table_name = get_connection.quote_table_name(table_name)
<<-SQL
#{insert_query_set_jsonb_version(cast, attr_partial, attr_partial_max, column_name)}
, #{attr_partial_max} = greatest(#{q_table_name}.#{attr_partial_max}::#{cast}, EXCLUDED.#{attr_partial_max}::#{cast})
WHERE EXCLUDED.#{attr_partial_max} IS NULL OR (
(#{q_table_name}.#{attr_full} IS NULL OR EXCLUDED.#{attr_partial_max} > #{q_table_name}.#{attr_full}) AND (
(#{q_table_name}.#{attr_partial}->>'#{column_name}')::#{cast} IS NULL OR
EXCLUDED.#{attr_partial_max}::#{cast} > (#{q_table_name}.#{attr_partial}->>'#{column_name}')::#{cast}
)
)
SQL
end
# SET fragment storing this column's version under its key in the jsonb map;
# integers are written unquoted so they remain jsonb numbers.
def insert_query_set_jsonb_version(cast, attr_partial, attr_partial_max, column_name)
if cast == "integer"
# If we have integer value, we don't want to encapsulate the value in ""
<<-SQL
, #{attr_partial} = #{q_table_name}.#{attr_partial} || ('{"#{column_name}": ' || EXCLUDED.#{attr_partial_max}::#{cast} || '}')::jsonb
SQL
else
<<-SQL
, #{attr_partial} = #{q_table_name}.#{attr_partial} || ('{"#{column_name}": "' || EXCLUDED.#{attr_partial_max}::#{cast} || '"}')::jsonb
SQL
end
end
# RETURNING clause: id plus the unique-index columns (and, for upserts, the
# internal timestamps).
def insert_query_returning
<<-SQL
RETURNING "id",#{unique_index_columns.map { |x| quote_column_name(x) }.join(",")}
#{insert_query_returning_timestamps}
SQL
end
# Internal created/updated timestamps let callers tell inserts apart from
# updates; only returned for parallel-safe (upsert) collections.
def insert_query_returning_timestamps
if inventory_collection.parallel_safe?
# For upsert, we'll return also created and updated timestamps, so we can recognize what was created and what
# updated
if inventory_collection.internal_timestamp_columns.present?
<<-SQL
, #{inventory_collection.internal_timestamp_columns.map { |x| quote_column_name(x) }.join(",")}
SQL
end
else
""
end
end
end
end
end
|
# Usage:
#
# ❯ bundle exec ruby script/bench_reading_events.rb
# Creating 10000 events
# Took 42.35533199999918 to create events
# Took 4.9821800000027 to read all events
# ^ results from running on a 2016 MacBook
# Usage:
#
# ❯ bundle exec ruby script/bench_reading_events.rb
# Creating 10000 events
# Took 42.35533199999918 to create events
# Took 4.9821800000027 to read all events
# ^ results from running on a 2016 MacBook
require 'benchmark'
require 'securerandom'
require 'sequel'
require 'event_sourcery/postgres'

# Build the database URI; BOXEN_POSTGRESQL_URL can point at another server.
database_uri = ENV.fetch('BOXEN_POSTGRESQL_URL') { 'postgres://127.0.0.1:5432/' }.dup
database_uri << 'event_sourcery_test'
db = Sequel.connect(database_uri)

EventSourcery.configure do |config|
  config.postgres.event_store_database = db
  config.postgres.projections_database = db
  config.logger.level = :fatal
end

# Drops any leftover tables and recreates the event store schema.
def create_events_schema(pg_connection)
  pg_connection.execute 'drop table if exists events'
  pg_connection.execute 'drop table if exists aggregates'
  EventSourcery::Postgres::Schema.create_event_store(db: pg_connection)
end

event_store = EventSourcery::Postgres.config.event_store

EVENT_TYPES = %i(
  item_added
  item_removed
  item_starred
).freeze

# Builds one event of a random type against the given aggregate.
def new_event(uuid)
  EventSourcery::Event.new(type: EVENT_TYPES.sample,
                           aggregate_id: uuid,
                           body: { 'something' => 'simple' })
end

create_events_schema(db)

NUM_EVENTS = 10_000
puts "Creating #{NUM_EVENTS} events"
write_seconds = Benchmark.realtime do
  aggregate_id = SecureRandom.uuid
  NUM_EVENTS.times { event_store.sink(new_event(aggregate_id)) }
end
puts "Took #{write_seconds} to create events"

seen_events_count = 0
read_seconds = Benchmark.realtime do
  event_store.subscribe(from_id: 0, subscription_master: EventSourcery::EventStore::SignalHandlingSubscriptionMaster.new) do |events|
    seen_events_count += events.count
    throw :stop if seen_events_count >= NUM_EVENTS
  end
end
puts "Took #{read_seconds} to read all events"
inline another function
# Usage:
#
# ❯ bundle exec ruby script/bench_reading_events.rb
# Creating 10000 events
# Took 42.35533199999918 to create events
# Took 4.9821800000027 to read all events
# ^ results from running on a 2016 MacBook
require 'benchmark'
require 'securerandom'
require 'sequel'
require 'event_sourcery/postgres'

# Connect to the test database (override host via BOXEN_POSTGRESQL_URL).
uri = ENV.fetch('BOXEN_POSTGRESQL_URL') { 'postgres://127.0.0.1:5432/' }.dup
uri << 'event_sourcery_test'
connection = Sequel.connect(uri)

EventSourcery.configure do |config|
  config.postgres.event_store_database = connection
  config.postgres.projections_database = connection
  config.logger.level = :fatal
end

# Recreates the event store tables from scratch.
def create_events_schema(pg_connection)
  pg_connection.execute 'drop table if exists events'
  pg_connection.execute 'drop table if exists aggregates'
  EventSourcery::Postgres::Schema.create_event_store(db: pg_connection)
end

event_store = EventSourcery::Postgres.config.event_store

EVENT_TYPES = %i(
  item_added
  item_removed
  item_starred
).freeze

# Builds one event of a random type for the given aggregate.
def new_event(uuid)
  EventSourcery::Event.new(type: EVENT_TYPES.sample,
                           aggregate_id: uuid,
                           body: { 'something' => 'simple' })
end

create_events_schema(connection)

NUM_EVENTS = 10_000
puts "Creating #{NUM_EVENTS} events"
elapsed = Benchmark.realtime do
  aggregate_id = SecureRandom.uuid
  NUM_EVENTS.times do
    event_store.sink(new_event(aggregate_id))
  end
end
puts "Took #{elapsed} to create events"

seen_events_count = 0
elapsed = Benchmark.realtime do
  event_store.subscribe(from_id: 0, subscription_master: EventSourcery::EventStore::SignalHandlingSubscriptionMaster.new) do |events|
    seen_events_count += events.count
    throw :stop if seen_events_count >= NUM_EVENTS
  end
end
puts "Took #{elapsed} to read all events"
|
started danny-search algorithm
# One node per distinct character: remembers every location the character
# occurs at and which nodes sit to its left and right there.
class Node
  attr_accessor :char, :locations

  def initialize(char = nil)
    self.char = char
    # Maps location (Integer index) => [left_node, right_node].
    # Unknown locations yield a fresh empty Array (not stored in the hash).
    self.locations = Hash.new { Array.new }
  end

  # All positions at which this character occurs.
  def occurances
    self.locations.keys
  end

  # Neighbouring [left, right] nodes recorded at the given location.
  # BUG FIX: this used to call `self.locations(location)` — invoking the
  # zero-arg reader with an argument raises ArgumentError. Index the hash.
  def nodes_at(location)
    self.locations[location]
  end

  # Records the neighbours of this character at +location+.
  def insert(location, leftNode, rightNode)
    self.locations[location] = [leftNode, rightNode]
  end
end
# Indexes a text so each character knows its positions and its neighbours.
# (Work in progress — see the "danny-search" commit note; the search side
# is still stubbed out.)
class Search
  attr_accessor :my_words, :chars

  def initialize(str)
    self.my_words = str
    # One Node per distinct character, created lazily on first access.
    self.chars = Hash.new { |hash, key| hash[key] = Node.new(key) }
  end

  # Records, for every position i, the character there plus its left/right
  # neighbour nodes.
  def buildMap
    (0...self.my_words.length).each do |i|
      c = self.my_words[i]
      # BUG FIX: `my_words[i - 1]` wrapped around to the *last* character
      # when i == 0 (Ruby negative indexing). The first character has no
      # left neighbour — use nil, mirroring how the right edge yields nil.
      l = i.zero? ? nil : self.my_words[i - 1]
      r = self.my_words[i + 1]
      self.chars[c].insert(i, self.chars[l], self.chars[r])
    end
  end

  # Intended recursive match step — not implemented yet.
  def searchNode(node, location, str)
  end

  # Currently only collects the positions of the first character of +str+.
  def search(str)
    node = self.chars[str[0]]
    locations = node.occurances
  end
end
# Smoke-test driver: index a sample sentence so it can be searched.
s = Search.new("Hello world, I am some text that is going to be used for searching")
s.buildMap
|
require 'bunny'
module Acapi
  # Process-wide facade over an AMQP request/response channel, held in a
  # single shared instance switched by .boot! / .disable!.
  class Requestor
    # Null object used when messaging is disabled; every request yields an
    # empty response triple.
    class DoNothingRequestor
      def request(*args)
        ["", "", {}]
      end

      def reconnect!
      end

      def disconnect!
      end
    end

    # Sends requests over a live Bunny (RabbitMQ) connection.
    class AmqpRequestor
      def initialize(uri, conn)
        @uri = uri
        @connection = conn
      end

      # Issues a synchronous request and returns the decoded response.
      def request(req_name, payload)
        requestor = ::Acapi::Amqp::Requestor.new(@connection)
        req_time = Time.now
        msg = ::Acapi::Amqp::OutMessage.new(req_name, req_time, req_time, nil, payload)
        in_msg = ::Acapi::Amqp::InMessage.new(*requestor.request(*msg.to_request_properties))
        in_msg.to_response
      end

      # Tears down and re-establishes the broker connection.
      def reconnect!
        disconnect!
        @connection = Bunny.new(@uri)
        @connection.start
      end

      def disconnect!
        @connection.close
      end
    end

    # Swaps in the null requestor, closing any live connection first.
    # BUG FIX: the guard read `@instance` — a never-assigned class-level
    # instance variable that is always nil — instead of `@@instance`, so an
    # existing connection was never disconnected here.
    def self.disable!
      if defined?(@@instance) && !@@instance.nil?
        @@instance.disconnect!
      end
      @@instance = DoNothingRequestor.new
    end

    # Opens a fresh Bunny connection and installs a live requestor.
    # Same `@instance` -> `@@instance` fix as in .disable!.
    def self.boot!(uri)
      if defined?(@@instance) && !@@instance.nil?
        @@instance.disconnect!
      end
      conn = Bunny.new(uri)
      conn.start
      @@instance = AmqpRequestor.new(uri, conn)
    end
  end
end
Adding direct requestor.
require 'bunny'
module Acapi
  # Process-wide facade over an AMQP request/response channel, held in a
  # single shared instance switched by .boot! / .disable! and used directly
  # through the .request class method.
  class Requestor
    # Null object used when messaging is disabled; every request yields an
    # empty response triple.
    class DoNothingRequestor
      def request(*args)
        ["", "", {}]
      end

      def reconnect!
      end

      def disconnect!
      end
    end

    # Sends requests over a live Bunny (RabbitMQ) connection.
    class AmqpRequestor
      def initialize(uri, conn)
        @uri = uri
        @connection = conn
      end

      # Issues a synchronous request and returns the decoded response.
      def request(req_name, payload)
        requestor = ::Acapi::Amqp::Requestor.new(@connection)
        req_time = Time.now
        msg = ::Acapi::Amqp::OutMessage.new(req_name, req_time, req_time, nil, payload)
        in_msg = ::Acapi::Amqp::InMessage.new(*requestor.request(*msg.to_request_properties))
        in_msg.to_response
      end

      # Tears down and re-establishes the broker connection.
      def reconnect!
        disconnect!
        @connection = Bunny.new(@uri)
        @connection.start
      end

      def disconnect!
        @connection.close
      end
    end

    # Swaps in the null requestor, closing any live connection first.
    # BUG FIX: the guard read `@instance` — a never-assigned class-level
    # instance variable that is always nil — instead of `@@instance`, so an
    # existing connection was never disconnected here.
    def self.disable!
      if defined?(@@instance) && !@@instance.nil?
        @@instance.disconnect!
      end
      @@instance = DoNothingRequestor.new
    end

    # Opens a fresh Bunny connection and installs a live requestor.
    # Same `@instance` -> `@@instance` fix as in .disable!.
    def self.boot!(uri)
      if defined?(@@instance) && !@@instance.nil?
        @@instance.disconnect!
      end
      conn = Bunny.new(uri)
      conn.start
      @@instance = AmqpRequestor.new(uri, conn)
    end

    # Delegates a request to whichever requestor is currently installed.
    def self.request(req_name, payload)
      @@instance.request(req_name, payload)
    end
  end
end
|
module UUIDTools
class UUID
# monkey-patch Friendly::UUID to serialize UUIDs to MySQL
# MySQL binary-literal form (x'...'); `raw` is the UUID's 16-byte binary
# representation provided by the uuidtools gem.
def quoted_id
s = raw.unpack("H*")[0]
"x'#{s}'"
end
# JSON form: upper-case 32-char hex digest (no dashes).
def as_json(options = nil)
hexdigest.upcase
end
# URL-parameter form matches the JSON form.
def to_param
hexdigest.upcase
end
end
end
module Arel
module Visitors
# Teach each relevant Arel visitor to emit UUID values as the quoted
# binary literal produced by UUIDTools::UUID#quoted_id (added above by the
# monkey-patch); visitor methods are dispatched by class name.
class DepthFirst < Arel::Visitors::Visitor
def visit_UUIDTools_UUID(o)
o.quoted_id
end
end
class MySQL < Arel::Visitors::ToSql
def visit_UUIDTools_UUID(o)
o.quoted_id
end
end
class SQLite < Arel::Visitors::ToSql
def visit_UUIDTools_UUID(o)
o.quoted_id
end
end
end
end
module ActiveUUID
# Converts between UUID objects and their raw 16-byte binary storage form.
class UUIDSerializer
# Stored value -> UUID: passes UUIDs through, parses strings, keeps nil.
# @raise [TypeError] for any other input type
def load(binary)
case binary
when UUIDTools::UUID
binary
when String
parse_string(binary)
when nil
nil
else
raise TypeError, "the given type cannot be serialized"
end
end
# UUID/String/nil -> raw 16-byte binary for storage.
# @raise [TypeError] for any other input type
def dump(uuid)
case uuid
when UUIDTools::UUID
uuid.raw
when String
parse_string(uuid).raw
when nil
nil
else
raise TypeError, "the given type cannot be serialized"
end
end
private
# Accepts 36-char dashed, 32-char plain-hex, or raw binary strings.
def parse_string str
if str.length == 36
UUIDTools::UUID.parse str
elsif str.length == 32
UUIDTools::UUID.parse_hexdigest str
else
UUIDTools::UUID.parse_raw str
end
end
end
# Mixin: on inclusion, declares :id as a UUID-serialized attribute.
module UUID
extend ActiveSupport::Concern
included do
uuids :id
end
module ClassMethods
# Attributes registered via .natural_key, or nil when none declared.
def natural_key_attributes
@_activeuuid_natural_key_attributes
end
def natural_key(*attributes)
@_activeuuid_natural_key_attributes = attributes
end
# Registers each attribute to serialize through UUIDSerializer.
def uuids(*attributes)
attributes.each do |attribute|
serialize attribute.intern, ActiveUUID::UUIDSerializer.new
#class_eval <<-eos
# # def #{@association_name}
# # @_#{@association_name} ||= self.class.associations[:#{@association_name}].new_proxy(self)
# # end
#eos
end
end
end
module InstanceMethods
end
end
end
Generate UUID attributes in a before_create callback
module UUIDTools
class UUID
# monkey-patch Friendly::UUID to serialize UUIDs to MySQL
# MySQL binary-literal form (x'...'); `raw` is the UUID's 16-byte binary
# representation provided by the uuidtools gem.
def quoted_id
s = raw.unpack("H*")[0]
"x'#{s}'"
end
# JSON form: upper-case 32-char hex digest (no dashes).
def as_json(options = nil)
hexdigest.upcase
end
# URL-parameter form matches the JSON form.
def to_param
hexdigest.upcase
end
end
end
module Arel
module Visitors
# Teach each relevant Arel visitor to emit UUID values as the quoted
# binary literal produced by UUIDTools::UUID#quoted_id (added above by the
# monkey-patch); visitor methods are dispatched by class name.
class DepthFirst < Arel::Visitors::Visitor
def visit_UUIDTools_UUID(o)
o.quoted_id
end
end
class MySQL < Arel::Visitors::ToSql
def visit_UUIDTools_UUID(o)
o.quoted_id
end
end
class SQLite < Arel::Visitors::ToSql
def visit_UUIDTools_UUID(o)
o.quoted_id
end
end
end
end
module ActiveUUID
# Converts between UUID objects and their raw 16-byte binary storage form.
class UUIDSerializer
# Stored value -> UUID: passes UUIDs through, parses strings, keeps nil.
# @raise [TypeError] for any other input type
def load(binary)
case binary
when UUIDTools::UUID
binary
when String
parse_string(binary)
when nil
nil
else
raise TypeError, "the given type cannot be serialized"
end
end
# UUID/String/nil -> raw 16-byte binary for storage.
# @raise [TypeError] for any other input type
def dump(uuid)
case uuid
when UUIDTools::UUID
uuid.raw
when String
parse_string(uuid).raw
when nil
nil
else
raise TypeError, "the given type cannot be serialized"
end
end
private
# Accepts 36-char dashed, 32-char plain-hex, or raw binary strings.
def parse_string str
if str.length == 36
UUIDTools::UUID.parse str
elsif str.length == 32
UUIDTools::UUID.parse_hexdigest str
else
UUIDTools::UUID.parse_raw str
end
end
end
# Mixin: on inclusion, declares :id as a UUID attribute and installs a
# before_create hook that fills in any UUIDs left blank.
module UUID
extend ActiveSupport::Concern
included do
uuids :id
before_create :generate_uuids_if_needed
end
module ClassMethods
# Attributes registered via .natural_key, or nil when none declared.
def natural_key_attributes
@_activeuuid_natural_key_attributes
end
def natural_key(*attributes)
@_activeuuid_natural_key_attributes = attributes
end
# Symbols of the attributes registered via .uuids (set below).
def uuid_attributes
@_activeuuid_attributes
end
# Registers each attribute to serialize through UUIDSerializer and
# remembers the list for generate_uuids_if_needed.
def uuids(*attributes)
@_activeuuid_attributes = attributes.collect(&:intern).each do |attribute|
serialize attribute.intern, ActiveUUID::UUIDSerializer.new
end
#class_eval <<-eos
# # def #{@association_name}
# # @_#{@association_name} ||= self.class.associations[:#{@association_name}].new_proxy(self)
# # end
#eos
end
end
# Deterministic SHA1-based UUID built from the declared natural key
# attributes, or a random UUID when no natural key was declared.
def create_uuid
if nka = self.class.natural_key_attributes
# TODO if all the attributes return nil you might want to warn about this
chained = nka.collect{|a| self.send(a).to_s}.join("-")
UUIDTools::UUID.sha1_create(UUIDTools::UUID_OID_NAMESPACE, chained)
else
UUIDTools::UUID.random_create
end
end
# before_create hook: fills every still-blank UUID attribute.
def generate_uuids_if_needed
self.class.uuid_attributes.each do |attr|
self.send("#{attr}=", create_uuid) unless self.send(attr)
end
end
end
end
|
require "ad_hoc_template/version"
require "ad_hoc_template/parser"
require "ad_hoc_template/record_reader"
require "ad_hoc_template/default_tag_formatter"
require "ad_hoc_template/pseudohiki_formatter"
require "ad_hoc_template/entry_format_generator"
module AdHocTemplate
  # Walks a parsed template tree and fills its tags in from a data record,
  # using the visitor pattern (nodes call back into #visit via #accept).
  class DataLoader
    # Formats +template+ with +record+; an Array of records yields the
    # concatenation of one rendering per record.
    def self.format(template, record, tag_formatter=DefaultTagFormatter.new)
      if record.kind_of? Array
        return format_multi_records(template, record, tag_formatter)
      end
      new(record, tag_formatter).format(template)
    end

    def self.format_multi_records(template, records,
                                  tag_formatter=DefaultTagFormatter.new)
      records.map do |record|
        new(record, tag_formatter).format(template)
      end.join
    end

    def initialize(record, tag_formatter=DefaultTagFormatter.new)
      @record = record
      @tag_formatter = tag_formatter
    end

    # Visitor entry point: dispatches on node type, most specific first.
    def visit(tree)
      case tree
      when Parser::IterationTagNode
        format_iteration_tag(tree)
      when Parser::FallbackTagNode
        ''.freeze
      when Parser::TagNode
        format_tag(tree)
      when Parser::Leaf
        tree.join
      else
        tree.map {|node| node.accept(self) }
      end
    end

    # Renders an iteration tag once per sub-record, falling back to the
    # tag's fallback nodes when a record assigns no values.
    def format_iteration_tag(tag_node)
      sub_records = @record[tag_node.type]||[@record]
      tag_node = cast(tag_node)
      fallback_nodes = tag_node.select {|sub_node| sub_node.kind_of? Parser::FallbackTagNode }

      sub_records.map do |record|
        if tag_node.contains_any_value_assigned_tag_node?(record)
          data_loader = AdHocTemplate::DataLoader.new(record, @tag_formatter)
          tag_node.map {|leaf| leaf.accept(data_loader) }.join
        elsif not fallback_nodes.empty?
          # BUG FIX: this used to reassign the captured `fallback_nodes`
          # local in place, so from the second fallback record onward the
          # already-cast nodes were cast again. The helper works on its own
          # locals and leaves the captured list untouched.
          format_fallback_tags(fallback_nodes, record)
        else
          "".freeze
        end
      end
    end

    def format_tag(tag_node)
      leafs = tag_node.map {|leaf| leaf.accept(self) }
      @tag_formatter.format(tag_node.type, leafs.join.strip, @record)
    end

    def format(tree)
      tree.accept(self).join
    end

    private

    # Re-wraps a node (or node list) as +node_type+ without mutating it.
    def cast(node, node_type=Parser::TagNode)
      node_type.new.concat(node.clone)
    end

    # Renders the fallback tag nodes for one record.
    def format_fallback_tags(fallback_nodes, record)
      data_loader = AdHocTemplate::DataLoader.new(record, @tag_formatter)
      fallback_nodes = fallback_nodes.map {|node| cast(node, Parser::IterationTagNode) }
      fallback_nodes = cast(fallback_nodes)
      fallback_nodes.map {|leaf| leaf.accept(data_loader) }
    end
  end

  # One-shot convenience API: parse the template, read the record, format.
  def self.render(record_data, template, tag_type=:default, data_format=:default,
                  tag_formatter=DefaultTagFormatter.new)
    tree = Parser.parse(template, tag_type)
    record = RecordReader.read_record(record_data, data_format)
    DataLoader.format(tree, record, tag_formatter)
  end
end
refactoring of DataLoader#format_iteration_tag: extract method DataLoader#format_fallback_tags
require "ad_hoc_template/version"
require "ad_hoc_template/parser"
require "ad_hoc_template/record_reader"
require "ad_hoc_template/default_tag_formatter"
require "ad_hoc_template/pseudohiki_formatter"
require "ad_hoc_template/entry_format_generator"
module AdHocTemplate
# Walks a parsed template tree and fills its tags in from a data record,
# using the visitor pattern (nodes call back into #visit via #accept).
class DataLoader
# Formats +template+ with +record+; an Array of records yields the
# concatenation of one rendering per record.
def self.format(template, record, tag_formatter=DefaultTagFormatter.new)
if record.kind_of? Array
return format_multi_records(template, record, tag_formatter)
end
new(record, tag_formatter).format(template)
end
def self.format_multi_records(template, records,
tag_formatter=DefaultTagFormatter.new)
records.map do |record|
new(record, tag_formatter).format(template)
end.join
end
def initialize(record, tag_formatter=DefaultTagFormatter.new)
@record = record
@tag_formatter = tag_formatter
end
# Visitor entry point: dispatches on node type, most specific first.
def visit(tree)
case tree
when Parser::IterationTagNode
format_iteration_tag(tree)
when Parser::FallbackTagNode
''.freeze
when Parser::TagNode
format_tag(tree)
when Parser::Leaf
tree.join
else
tree.map {|node| node.accept(self) }
end
end
# Renders an iteration tag once per sub-record, falling back to the tag's
# fallback nodes when a record assigns no values.
def format_iteration_tag(tag_node)
sub_records = @record[tag_node.type]||[@record]
tag_node = cast(tag_node)
fallback_nodes = tag_node.select {|sub_node| sub_node.kind_of? Parser::FallbackTagNode }
sub_records.map do |record|
if tag_node.contains_any_value_assigned_tag_node?(record)
data_loader = AdHocTemplate::DataLoader.new(record, @tag_formatter)
tag_node.map {|leaf| leaf.accept(data_loader) }.join
elsif not fallback_nodes.empty?
format_fallback_tags(fallback_nodes, record)
else
"".freeze
end
end
end
def format_tag(tag_node)
leafs = tag_node.map {|leaf| leaf.accept(self) }
@tag_formatter.format(tag_node.type, leafs.join.strip, @record)
end
def format(tree)
tree.accept(self).join
end
private
# Re-wraps a node (or node list) as +node_type+ without mutating it.
def cast(node, node_type=Parser::TagNode)
node_type.new.concat(node.clone)
end
# Renders the fallback tag nodes for one record; works on its own locals so
# the caller's node list is never mutated between records.
def format_fallback_tags(fallback_nodes, record)
data_loader = AdHocTemplate::DataLoader.new(record, @tag_formatter)
fallback_nodes = fallback_nodes.map {|node| cast(node, Parser::IterationTagNode) }
fallback_nodes = cast(fallback_nodes)
fallback_nodes.map {|leaf| leaf.accept(data_loader) }
end
end
# One-shot convenience API: parse the template, read the record, format.
def self.render(record_data, template, tag_type=:default, data_format=:default,
tag_formatter=DefaultTagFormatter.new)
tree = Parser.parse(template, tag_type)
record = RecordReader.read_record(record_data, data_format)
DataLoader.format(tree, record, tag_formatter)
end
end
|
# frozen_string_literal: true
# encoding:utf-8
#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
require "addressable/version"
require "addressable/idna"
require "public_suffix"
##
# Addressable is a library for processing links and URIs.
module Addressable
##
# This is an implementation of a URI parser based on
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>,
# <a href="http://www.ietf.org/rfc/rfc3987.txt">RFC 3987</a>.
class URI
##
# Raised if something other than a uri is supplied.
# Inherits from StandardError, so a bare +rescue+ will catch it.
class InvalidURIError < StandardError
end
##
# Container for the character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
# Each constant is a backslash-escaped character set, ready to be
# interpolated directly into a Regexp character class.
module CharacterClasses
ALPHA = "a-zA-Z"
DIGIT = "0-9"
GEN_DELIMS = "\\:\\/\\?\\#\\[\\]\\@"
SUB_DELIMS = "\\!\\$\\&\\'\\(\\)\\*\\+\\,\\;\\="
RESERVED = GEN_DELIMS + SUB_DELIMS
UNRESERVED = ALPHA + DIGIT + "\\-\\.\\_\\~"
PCHAR = UNRESERVED + SUB_DELIMS + "\\:\\@"
SCHEME = ALPHA + DIGIT + "\\-\\+\\."
HOST = UNRESERVED + SUB_DELIMS + "\\[\\:\\]"
AUTHORITY = PCHAR
PATH = PCHAR + "\\/"
QUERY = PCHAR + "\\/\\?"
FRAGMENT = PCHAR + "\\/\\?"
end
SLASH = '/'
EMPTY_STR = ''
# Component-splitting regex from RFC 3986 Appendix B; every component is
# optional, so it matches any string.
URIREGEX = /^(([^:\/?#]+):)?(\/\/([^\/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?$/
# Well-known default port for each scheme.
PORT_MAPPING = {
"http" => 80,
"https" => 443,
"ftp" => 21,
"tftp" => 69,
"sftp" => 22,
"ssh" => 22,
"svn+ssh" => 22,
"telnet" => 23,
"nntp" => 119,
"gopher" => 70,
"wais" => 210,
"ldap" => 389,
"prospero" => 1525
}
##
# Returns a URI object based on the parsed string.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI string to parse.
# No parsing is performed if the object is already an
# <code>Addressable::URI</code>.
#
# @return [Addressable::URI] The parsed URI.
def self.parse(uri)
# If we were given nil, return nil.
return nil unless uri
# If a URI object is passed, just return itself.
return uri.dup if uri.kind_of?(self)
# If a URI object of the Ruby standard library variety is passed,
# convert it to a string, then parse the string.
# We do the check this way because we don't want to accidentally
# cause a missing constant exception to be thrown.
if uri.class.name =~ /^URI\b/
uri = uri.to_s
end
# Otherwise, convert to a String
begin
uri = uri.to_str
rescue TypeError, NoMethodError
raise TypeError, "Can't convert #{uri.class} into String."
end if not uri.is_a? String
# This Regexp supplied as an example in RFC 3986, and it works great.
# Since every component in URIREGEX is optional, it matches any String
# and scan[0] is always present.
scan = uri.scan(URIREGEX)
fragments = scan[0]
scheme = fragments[1]
authority = fragments[3]
path = fragments[4]
query = fragments[6]
fragment = fragments[8]
user = nil
password = nil
host = nil
port = nil
if authority != nil
# The Regexp above doesn't split apart the authority.
userinfo = authority[/^([^\[\]]*)@/, 1]
if userinfo != nil
user = userinfo.strip[/^([^:]*):?/, 1]
password = userinfo.strip[/:(.*)$/, 1]
end
# Strip the userinfo prefix and the trailing :port to leave the host;
# the [^\[\]] / [^:@\[\]] classes keep IPv6 literals like [::1] intact.
host = authority.sub(
/^([^\[\]]*)@/, EMPTY_STR
).sub(
/:([^:@\[\]]*?)$/, EMPTY_STR
)
port = authority[/:([^:@\[\]]*?)$/, 1]
end
# Treat a bare trailing colon ("host:") as no port at all.
if port == EMPTY_STR
port = nil
end
return new(
:scheme => scheme,
:user => user,
:password => password,
:host => host,
:port => port,
:path => path,
:query => query,
:fragment => fragment
)
end
##
# Converts an input to a URI. The input does not have to be a valid
# URI — the method will use heuristics to guess what URI was intended.
# This is not standards-compliant, merely user-friendly.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI string to parse.
# No parsing is performed if the object is already an
# <code>Addressable::URI</code>.
# @param [Hash] hints
# A <code>Hash</code> of hints to the heuristic parser.
# Defaults to <code>{:scheme => "http"}</code>.
#
# @return [Addressable::URI] The parsed URI.
def self.heuristic_parse(uri, hints={})
# If we were given nil, return nil.
return nil unless uri
# If a URI object is passed, just return itself.
return uri.dup if uri.kind_of?(self)
# If a URI object of the Ruby standard library variety is passed,
# convert it to a string, then parse the string.
# We do the check this way because we don't want to accidentally
# cause a missing constant exception to be thrown.
if uri.class.name =~ /^URI\b/
uri = uri.to_s
end
if !uri.respond_to?(:to_str)
raise TypeError, "Can't convert #{uri.class} into String."
end
# Otherwise, convert to a String
uri = uri.to_str.dup.strip
hints = {
:scheme => "http"
}.merge(hints)
case uri
when /^http:\//i
uri.sub!(/^http:\/+/i, "http://")
when /^https:\//i
uri.sub!(/^https:\/+/i, "https://")
when /^feed:\/+http:\//i
uri.sub!(/^feed:\/+http:\/+/i, "feed:http://")
when /^feed:\//i
uri.sub!(/^feed:\/+/i, "feed://")
when %r[^file:/{4}]i
uri.sub!(%r[^file:/+]i, "file:////")
when %r[^file://localhost/]i
uri.sub!(%r[^file://localhost/+]i, "file:///")
when %r[^file:/+]i
uri.sub!(%r[^file:/+]i, "file:///")
when /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/
uri.sub!(/^/, hints[:scheme] + "://")
when /\A\d+\..*:\d+\z/
uri = "#{hints[:scheme]}://#{uri}"
end
match = uri.match(URIREGEX)
fragments = match.captures
authority = fragments[3]
if authority && authority.length > 0
new_authority = authority.tr('\\', '/').gsub(' ', '%20')
# NOTE: We want offset 4, not 3!
offset = match.offset(4)
uri = uri.dup
uri[offset[0]...offset[1]] = new_authority
end
parsed = self.parse(uri)
if parsed.scheme =~ /^[^\/?#\.]+\.[^\/?#]+$/
parsed = self.parse(hints[:scheme] + "://" + uri)
end
if parsed.path.include?(".")
new_host = parsed.path[/^([^\/]+\.[^\/]*)/, 1]
if new_host
parsed.defer_validation do
new_path = parsed.path.sub(
Regexp.new("^" + Regexp.escape(new_host)), EMPTY_STR)
parsed.host = new_host
parsed.path = new_path
parsed.scheme = hints[:scheme] unless parsed.scheme
end
end
end
return parsed
end
##
# Converts a path to a file scheme URI. If the path supplied is
# relative, it will be returned as a relative URI. If the path supplied
# is actually a non-file URI, it will parse the URI as if it had been
# parsed with <code>Addressable::URI.parse</code>. Handles all of the
# various Microsoft-specific formats for specifying paths.
#
# @param [String, Addressable::URI, #to_str] path
# Typically a <code>String</code> path to a file or directory, but
# will return a sensible return value if an absolute URI is supplied
# instead.
#
# @return [Addressable::URI]
# The parsed file scheme URI or the original URI if some other URI
# scheme was provided.
#
# @example
# base = Addressable::URI.convert_path("/absolute/path/")
# uri = Addressable::URI.convert_path("relative/path")
# (base + uri).to_s
# #=> "file:///absolute/path/relative/path"
#
# Addressable::URI.convert_path(
# "c:\\windows\\My Documents 100%20\\foo.txt"
# ).to_s
# #=> "file:///c:/windows/My%20Documents%20100%20/foo.txt"
#
# Addressable::URI.convert_path("http://example.com/").to_s
# #=> "http://example.com/"
def self.convert_path(path)
# If we were given nil, return nil.
return nil unless path
# If a URI object is passed, just return itself.
return path if path.kind_of?(self)
if !path.respond_to?(:to_str)
raise TypeError, "Can't convert #{path.class} into String."
end
# Otherwise, convert to a String
path = path.to_str.strip
path.sub!(/^file:\/?\/?/, EMPTY_STR) if path =~ /^file:\/?\/?/
path = SLASH + path if path =~ /^([a-zA-Z])[\|:]/
uri = self.parse(path)
if uri.scheme == nil
# Adjust windows-style uris
uri.path.sub!(/^\/?([a-zA-Z])[\|:][\\\/]/) do
"/#{$1.downcase}:/"
end
uri.path.tr!('\\', SLASH)
if File.exist?(uri.path) &&
File.stat(uri.path).directory?
uri.path.sub!(/\/$/, EMPTY_STR)
uri.path = uri.path + '/'
end
# If the path is absolute, set the scheme and host.
if uri.path.start_with?(SLASH)
uri.scheme = "file"
uri.host = EMPTY_STR
end
uri.normalize!
end
return uri
end
##
# Joins several URIs together.
#
# @param [String, Addressable::URI, #to_str] *uris
# The URIs to join.
#
# @return [Addressable::URI] The joined URI.
#
# @example
# base = "http://example.com/"
# uri = Addressable::URI.parse("relative/path")
# Addressable::URI.join(base, uri)
# #=> #<Addressable::URI:0xcab390 URI:http://example.com/relative/path>
def self.join(*uris)
uri_objects = uris.collect do |uri|
if !uri.respond_to?(:to_str)
raise TypeError, "Can't convert #{uri.class} into String."
end
uri.kind_of?(self) ? uri : self.parse(uri.to_str)
end
result = uri_objects.shift.dup
for uri in uri_objects
result.join!(uri)
end
return result
end
##
# Percent encodes a URI component.
#
# @param [String, #to_str] component The URI component to encode.
#
# @param [String, Regexp] character_class
# The characters which are not percent encoded. If a <code>String</code>
# is passed, the <code>String</code> must be formatted as a regular
# expression character class. (Do not include the surrounding square
# brackets.) For example, <code>"b-zB-Z0-9"</code> would cause
# everything but the letters 'b' through 'z' and the numbers '0' through
# '9' to be percent encoded. If a <code>Regexp</code> is passed, the
# value <code>/[^b-zB-Z0-9]/</code> would have the same effect. A set of
# useful <code>String</code> values may be found in the
# <code>Addressable::URI::CharacterClasses</code> module. The default
# value is the reserved plus unreserved character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# @param [Regexp] upcase_encoded
# A string of characters that may already be percent encoded, and whose
# encodings should be upcased. This allows normalization of percent
# encodings for characters not included in the
# <code>character_class</code>.
#
# @return [String] The encoded component.
#
# @example
# Addressable::URI.encode_component("simple/example", "b-zB-Z0-9")
# => "simple%2Fex%61mple"
# Addressable::URI.encode_component("simple/example", /[^b-zB-Z0-9]/)
# => "simple%2Fex%61mple"
# Addressable::URI.encode_component(
# "simple/example", Addressable::URI::CharacterClasses::UNRESERVED
# )
# => "simple%2Fexample"
def self.encode_component(component, character_class=
CharacterClasses::RESERVED + CharacterClasses::UNRESERVED,
upcase_encoded='')
return nil if component.nil?
begin
if component.kind_of?(Symbol) ||
component.kind_of?(Numeric) ||
component.kind_of?(TrueClass) ||
component.kind_of?(FalseClass)
component = component.to_s
else
component = component.to_str
end
rescue TypeError, NoMethodError
raise TypeError, "Can't convert #{component.class} into String."
end if !component.is_a? String
if ![String, Regexp].include?(character_class.class)
raise TypeError,
"Expected String or Regexp, got #{character_class.inspect}"
end
if character_class.kind_of?(String)
character_class = /[^#{character_class}]/
end
# We can't perform regexps on invalid UTF sequences, but
# here we need to, so switch to ASCII.
component = component.dup
component.force_encoding(Encoding::ASCII_8BIT)
# Avoiding gsub! because there are edge cases with frozen strings
component = component.gsub(character_class) do |sequence|
(sequence.unpack('C*').map { |c| "%" + ("%02x" % c).upcase }).join
end
if upcase_encoded.length > 0
component = component.gsub(/%(#{upcase_encoded.chars.map do |char|
char.unpack('C*').map { |c| '%02x' % c }.join
end.join('|')})/i) { |s| s.upcase }
end
return component
end
class << self
alias_method :encode_component, :encode_component
end
##
# Unencodes any percent encoded characters within a URI component.
# This method may be used for unencoding either components or full URIs,
# however, it is recommended to use the <code>unencode_component</code>
# alias when unencoding components.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI or component to unencode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @param [String] leave_encoded
# A string of characters to leave encoded. If a percent encoded character
# in this list is encountered then it will remain percent encoded.
#
# @return [String, Addressable::URI]
# The unencoded component or URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.unencode(uri, return_type=String, leave_encoded='')
return nil if uri.nil?
begin
uri = uri.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{uri.class} into String."
end if !uri.is_a? String
if ![String, ::Addressable::URI].include?(return_type)
raise TypeError,
"Expected Class (String or Addressable::URI), " +
"got #{return_type.inspect}"
end
uri = uri.dup
# Seriously, only use UTF-8. I'm really not kidding!
uri.force_encoding("utf-8")
leave_encoded = leave_encoded.dup.force_encoding("utf-8")
result = uri.gsub(/%[0-9a-f]{2}/iu) do |sequence|
c = sequence[1..3].to_i(16).chr
c.force_encoding("utf-8")
leave_encoded.include?(c) ? sequence : c
end
result.force_encoding("utf-8")
if return_type == String
return result
elsif return_type == ::Addressable::URI
return ::Addressable::URI.parse(result)
end
end
class << self
alias_method :unescape, :unencode
alias_method :unencode_component, :unencode
alias_method :unescape_component, :unencode
end
##
# Normalizes the encoding of a URI component.
#
# @param [String, #to_str] component The URI component to encode.
#
# @param [String, Regexp] character_class
# The characters which are not percent encoded. If a <code>String</code>
# is passed, the <code>String</code> must be formatted as a regular
# expression character class. (Do not include the surrounding square
# brackets.) For example, <code>"b-zB-Z0-9"</code> would cause
# everything but the letters 'b' through 'z' and the numbers '0'
# through '9' to be percent encoded. If a <code>Regexp</code> is passed,
# the value <code>/[^b-zB-Z0-9]/</code> would have the same effect. A
# set of useful <code>String</code> values may be found in the
# <code>Addressable::URI::CharacterClasses</code> module. The default
# value is the reserved plus unreserved character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# @param [String] leave_encoded
# When <code>character_class</code> is a <code>String</code> then
# <code>leave_encoded</code> is a string of characters that should remain
# percent encoded while normalizing the component; if they appear percent
# encoded in the original component, then they will be upcased ("%2f"
# normalized to "%2F") but otherwise left alone.
#
# @return [String] The normalized component.
#
# @example
# Addressable::URI.normalize_component("simpl%65/%65xampl%65", "b-zB-Z")
# => "simple%2Fex%61mple"
# Addressable::URI.normalize_component(
# "simpl%65/%65xampl%65", /[^b-zB-Z]/
# )
# => "simple%2Fex%61mple"
# Addressable::URI.normalize_component(
# "simpl%65/%65xampl%65",
# Addressable::URI::CharacterClasses::UNRESERVED
# )
# => "simple%2Fexample"
# Addressable::URI.normalize_component(
# "one%20two%2fthree%26four",
# "0-9a-zA-Z &/",
# "/"
# )
# => "one two%2Fthree&four"
def self.normalize_component(component, character_class=
CharacterClasses::RESERVED + CharacterClasses::UNRESERVED,
leave_encoded='')
return nil if component.nil?
begin
component = component.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{component.class} into String."
end if !component.is_a? String
if ![String, Regexp].include?(character_class.class)
raise TypeError,
"Expected String or Regexp, got #{character_class.inspect}"
end
if character_class.kind_of?(String)
leave_re = if leave_encoded.length > 0
character_class = "#{character_class}%" unless character_class.include?('%')
"|%(?!#{leave_encoded.chars.map do |char|
seq = char.unpack('C*').map { |c| '%02x' % c }.join
[seq.upcase, seq.downcase]
end.flatten.join('|')})"
end
character_class = /[^#{character_class}]#{leave_re}/
end
# We can't perform regexps on invalid UTF sequences, but
# here we need to, so switch to ASCII.
component = component.dup
component.force_encoding(Encoding::ASCII_8BIT)
unencoded = self.unencode_component(component, String, leave_encoded)
begin
encoded = self.encode_component(
Addressable::IDNA.unicode_normalize_kc(unencoded),
character_class,
leave_encoded
)
rescue ArgumentError
encoded = self.encode_component(unencoded)
end
encoded.force_encoding(Encoding::UTF_8)
return encoded
end
##
# Percent encodes any special characters in the URI.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI to encode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @return [String, Addressable::URI]
# The encoded URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.encode(uri, return_type=String)
return nil if uri.nil?
begin
uri = uri.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{uri.class} into String."
end if !uri.is_a? String
if ![String, ::Addressable::URI].include?(return_type)
raise TypeError,
"Expected Class (String or Addressable::URI), " +
"got #{return_type.inspect}"
end
uri_object = uri.kind_of?(self) ? uri : self.parse(uri)
encoded_uri = Addressable::URI.new(
:scheme => self.encode_component(uri_object.scheme,
Addressable::URI::CharacterClasses::SCHEME),
:authority => self.encode_component(uri_object.authority,
Addressable::URI::CharacterClasses::AUTHORITY),
:path => self.encode_component(uri_object.path,
Addressable::URI::CharacterClasses::PATH),
:query => self.encode_component(uri_object.query,
Addressable::URI::CharacterClasses::QUERY),
:fragment => self.encode_component(uri_object.fragment,
Addressable::URI::CharacterClasses::FRAGMENT)
)
if return_type == String
return encoded_uri.to_s
elsif return_type == ::Addressable::URI
return encoded_uri
end
end
class << self
alias_method :escape, :encode
end
##
# Normalizes the encoding of a URI. Characters within a hostname are
# not percent encoded to allow for internationalized domain names.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI to encode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @return [String, Addressable::URI]
# The encoded URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.normalized_encode(uri, return_type=String)
begin
uri = uri.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{uri.class} into String."
end if !uri.is_a? String
if ![String, ::Addressable::URI].include?(return_type)
raise TypeError,
"Expected Class (String or Addressable::URI), " +
"got #{return_type.inspect}"
end
uri_object = uri.kind_of?(self) ? uri : self.parse(uri)
components = {
:scheme => self.unencode_component(uri_object.scheme),
:user => self.unencode_component(uri_object.user),
:password => self.unencode_component(uri_object.password),
:host => self.unencode_component(uri_object.host),
:port => (uri_object.port.nil? ? nil : uri_object.port.to_s),
:path => self.unencode_component(uri_object.path),
:query => self.unencode_component(uri_object.query),
:fragment => self.unencode_component(uri_object.fragment)
}
components.each do |key, value|
if value != nil
begin
components[key] =
Addressable::IDNA.unicode_normalize_kc(value.to_str)
rescue ArgumentError
# Likely a malformed UTF-8 character, skip unicode normalization
components[key] = value.to_str
end
end
end
encoded_uri = Addressable::URI.new(
:scheme => self.encode_component(components[:scheme],
Addressable::URI::CharacterClasses::SCHEME),
:user => self.encode_component(components[:user],
Addressable::URI::CharacterClasses::UNRESERVED),
:password => self.encode_component(components[:password],
Addressable::URI::CharacterClasses::UNRESERVED),
:host => components[:host],
:port => components[:port],
:path => self.encode_component(components[:path],
Addressable::URI::CharacterClasses::PATH),
:query => self.encode_component(components[:query],
Addressable::URI::CharacterClasses::QUERY),
:fragment => self.encode_component(components[:fragment],
Addressable::URI::CharacterClasses::FRAGMENT)
)
if return_type == String
return encoded_uri.to_s
elsif return_type == ::Addressable::URI
return encoded_uri
end
end
##
# Encodes a set of key/value pairs according to the rules for the
# <code>application/x-www-form-urlencoded</code> MIME type.
#
# @param [#to_hash, #to_ary] form_values
# The form values to encode.
#
# @param [TrueClass, FalseClass] sort
# Sort the key/value pairs prior to encoding.
# Defaults to <code>false</code>.
#
# @return [String]
# The encoded value.
def self.form_encode(form_values, sort=false)
if form_values.respond_to?(:to_hash)
form_values = form_values.to_hash.to_a
elsif form_values.respond_to?(:to_ary)
form_values = form_values.to_ary
else
raise TypeError, "Can't convert #{form_values.class} into Array."
end
form_values = form_values.inject([]) do |accu, (key, value)|
if value.kind_of?(Array)
value.each do |v|
accu << [key.to_s, v.to_s]
end
else
accu << [key.to_s, value.to_s]
end
accu
end
if sort
# Useful for OAuth and optimizing caching systems
form_values = form_values.sort
end
escaped_form_values = form_values.map do |(key, value)|
# Line breaks are CRLF pairs
[
self.encode_component(
key.gsub(/(\r\n|\n|\r)/, "\r\n"),
CharacterClasses::UNRESERVED
).gsub("%20", "+"),
self.encode_component(
value.gsub(/(\r\n|\n|\r)/, "\r\n"),
CharacterClasses::UNRESERVED
).gsub("%20", "+")
]
end
return escaped_form_values.map do |(key, value)|
"#{key}=#{value}"
end.join("&")
end
##
# Decodes a <code>String</code> according to the rules for the
# <code>application/x-www-form-urlencoded</code> MIME type.
#
# @param [String, #to_str] encoded_value
# The form values to decode.
#
# @return [Array]
# The decoded values.
# This is not a <code>Hash</code> because of the possibility for
# duplicate keys.
def self.form_unencode(encoded_value)
if !encoded_value.respond_to?(:to_str)
raise TypeError, "Can't convert #{encoded_value.class} into String."
end
encoded_value = encoded_value.to_str
split_values = encoded_value.split("&").map do |pair|
pair.split("=", 2)
end
return split_values.map do |(key, value)|
[
key ? self.unencode_component(
key.gsub("+", "%20")).gsub(/(\r\n|\n|\r)/, "\n") : nil,
value ? (self.unencode_component(
value.gsub("+", "%20")).gsub(/(\r\n|\n|\r)/, "\n")) : nil
]
end
end
##
# Creates a new uri object from component parts.
#
# @option [String, #to_str] scheme The scheme component.
# @option [String, #to_str] user The user component.
# @option [String, #to_str] password The password component.
# @option [String, #to_str] userinfo
# The userinfo component. If this is supplied, the user and password
# components must be omitted.
# @option [String, #to_str] host The host component.
# @option [String, #to_str] port The port component.
# @option [String, #to_str] authority
# The authority component. If this is supplied, the user, password,
# userinfo, host, and port components must be omitted.
# @option [String, #to_str] path The path component.
# @option [String, #to_str] query The query component.
# @option [String, #to_str] fragment The fragment component.
#
# @return [Addressable::URI] The constructed URI object.
def initialize(options={})
if options.has_key?(:authority)
if (options.keys & [:userinfo, :user, :password, :host, :port]).any?
raise ArgumentError,
"Cannot specify both an authority and any of the components " +
"within the authority."
end
end
if options.has_key?(:userinfo)
if (options.keys & [:user, :password]).any?
raise ArgumentError,
"Cannot specify both a userinfo and either the user or password."
end
end
self.defer_validation do
# Bunch of crazy logic required because of the composite components
# like userinfo and authority.
self.scheme = options[:scheme] if options[:scheme]
self.user = options[:user] if options[:user]
self.password = options[:password] if options[:password]
self.userinfo = options[:userinfo] if options[:userinfo]
self.host = options[:host] if options[:host]
self.port = options[:port] if options[:port]
self.authority = options[:authority] if options[:authority]
self.path = options[:path] if options[:path]
self.query = options[:query] if options[:query]
self.query_values = options[:query_values] if options[:query_values]
self.fragment = options[:fragment] if options[:fragment]
end
self.to_s
end
##
# Freeze URI, initializing instance variables.
#
# @return [Addressable::URI] The frozen URI object.
def freeze
self.normalized_scheme
self.normalized_user
self.normalized_password
self.normalized_userinfo
self.normalized_host
self.normalized_port
self.normalized_authority
self.normalized_site
self.normalized_path
self.normalized_query
self.normalized_fragment
self.hash
super
end
##
# The scheme component for this URI.
#
# @return [String] The scheme component.
def scheme
return defined?(@scheme) ? @scheme : nil
end
##
# The scheme component for this URI, normalized.
#
# @return [String] The scheme component, normalized.
def normalized_scheme
return nil unless self.scheme
@normalized_scheme ||= begin
if self.scheme =~ /^\s*ssh\+svn\s*$/i
"svn+ssh".dup
else
Addressable::URI.normalize_component(
self.scheme.strip.downcase,
Addressable::URI::CharacterClasses::SCHEME
)
end
end
# All normalized values should be UTF-8
@normalized_scheme.force_encoding(Encoding::UTF_8) if @normalized_scheme
@normalized_scheme
end
##
# Sets the scheme component for this URI.
#
# @param [String, #to_str] new_scheme The new scheme component.
def scheme=(new_scheme)
if new_scheme && !new_scheme.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_scheme.class} into String."
elsif new_scheme
new_scheme = new_scheme.to_str
end
if new_scheme && new_scheme !~ /\A[a-z][a-z0-9\.\+\-]*\z/i
raise InvalidURIError, "Invalid scheme format: #{new_scheme}"
end
@scheme = new_scheme
@scheme = nil if @scheme.to_s.strip.empty?
# Reset dependent values
remove_instance_variable(:@normalized_scheme) if defined?(@normalized_scheme)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The user component for this URI.
#
# @return [String] The user component.
def user
return defined?(@user) ? @user : nil
end
##
# The user component for this URI, normalized.
#
# @return [String] The user component, normalized.
def normalized_user
return nil unless self.user
return @normalized_user if defined?(@normalized_user)
@normalized_user ||= begin
if normalized_scheme =~ /https?/ && self.user.strip.empty? &&
(!self.password || self.password.strip.empty?)
nil
else
Addressable::URI.normalize_component(
self.user.strip,
Addressable::URI::CharacterClasses::UNRESERVED
)
end
end
# All normalized values should be UTF-8
@normalized_user.force_encoding(Encoding::UTF_8) if @normalized_user
@normalized_user
end
##
# Sets the user component for this URI.
#
# @param [String, #to_str] new_user The new user component.
def user=(new_user)
if new_user && !new_user.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_user.class} into String."
end
@user = new_user ? new_user.to_str : nil
# You can't have a nil user with a non-nil password
if password != nil
@user = EMPTY_STR if @user.nil?
end
# Reset dependent values
remove_instance_variable(:@userinfo) if defined?(@userinfo)
remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
remove_instance_variable(:@authority) if defined?(@authority)
remove_instance_variable(:@normalized_user) if defined?(@normalized_user)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The password component for this URI.
#
# @return [String] The password component.
def password
return defined?(@password) ? @password : nil
end
##
# The password component for this URI, normalized.
#
# @return [String] The password component, normalized.
def normalized_password
return nil unless self.password
return @normalized_password if defined?(@normalized_password)
@normalized_password ||= begin
if self.normalized_scheme =~ /https?/ && self.password.strip.empty? &&
(!self.user || self.user.strip.empty?)
nil
else
Addressable::URI.normalize_component(
self.password.strip,
Addressable::URI::CharacterClasses::UNRESERVED
)
end
end
# All normalized values should be UTF-8
if @normalized_password
@normalized_password.force_encoding(Encoding::UTF_8)
end
@normalized_password
end
##
# Sets the password component for this URI.
#
# @param [String, #to_str] new_password The new password component.
def password=(new_password)
if new_password && !new_password.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_password.class} into String."
end
@password = new_password ? new_password.to_str : nil
# You can't have a nil user with a non-nil password
@password ||= nil
@user ||= nil
if @password != nil
@user = EMPTY_STR if @user.nil?
end
# Reset dependent values
remove_instance_variable(:@userinfo) if defined?(@userinfo)
remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
remove_instance_variable(:@authority) if defined?(@authority)
remove_instance_variable(:@normalized_password) if defined?(@normalized_password)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The userinfo component for this URI.
# Combines the user and password components.
#
# @return [String] The userinfo component.
def userinfo
current_user = self.user
current_password = self.password
(current_user || current_password) && @userinfo ||= begin
if current_user && current_password
"#{current_user}:#{current_password}"
elsif current_user && !current_password
"#{current_user}"
end
end
end
##
# The userinfo component for this URI, normalized.
#
# @return [String] The userinfo component, normalized.
def normalized_userinfo
return nil unless self.userinfo
return @normalized_userinfo if defined?(@normalized_userinfo)
@normalized_userinfo ||= begin
current_user = self.normalized_user
current_password = self.normalized_password
if !current_user && !current_password
nil
elsif current_user && current_password
"#{current_user}:#{current_password}".dup
elsif current_user && !current_password
"#{current_user}".dup
end
end
# All normalized values should be UTF-8
if @normalized_userinfo
@normalized_userinfo.force_encoding(Encoding::UTF_8)
end
@normalized_userinfo
end
##
# Sets the userinfo component for this URI.
#
# @param [String, #to_str] new_userinfo The new userinfo component.
def userinfo=(new_userinfo)
if new_userinfo && !new_userinfo.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_userinfo.class} into String."
end
new_user, new_password = if new_userinfo
[
new_userinfo.to_str.strip[/^(.*):/, 1],
new_userinfo.to_str.strip[/:(.*)$/, 1]
]
else
[nil, nil]
end
# Password assigned first to ensure validity in case of nil
self.password = new_password
self.user = new_user
# Reset dependent values
remove_instance_variable(:@authority) if defined?(@authority)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The host component for this URI.
#
# @return [String] The host component.
def host
return defined?(@host) ? @host : nil
end
##
# The host component for this URI, normalized.
#
# @return [String] The host component, normalized.
def normalized_host
return nil unless self.host
@normalized_host ||= begin
if !self.host.strip.empty?
result = ::Addressable::IDNA.to_ascii(
URI.unencode_component(self.host.strip.downcase)
)
if result =~ /[^\.]\.$/
# Single trailing dots are unnecessary.
result = result[0...-1]
end
result = Addressable::URI.normalize_component(
result,
CharacterClasses::HOST)
result
else
EMPTY_STR.dup
end
end
# All normalized values should be UTF-8
@normalized_host.force_encoding(Encoding::UTF_8) if @normalized_host
@normalized_host
end
##
# Sets the host component for this URI.
#
# @param [String, #to_str] new_host The new host component.
def host=(new_host)
if new_host && !new_host.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_host.class} into String."
end
@host = new_host ? new_host.to_str : nil
# Reset dependent values
remove_instance_variable(:@authority) if defined?(@authority)
remove_instance_variable(:@normalized_host) if defined?(@normalized_host)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# This method is same as URI::Generic#host except
# brackets for IPv6 (and 'IPvFuture') addresses are removed.
#
# @see Addressable::URI#host
#
# @return [String] The hostname for this URI.
def hostname
v = self.host
/\A\[(.*)\]\z/ =~ v ? $1 : v
end
##
# This method is same as URI::Generic#host= except
# the argument can be a bare IPv6 address (or 'IPvFuture').
#
# @see Addressable::URI#host=
#
# @param [String, #to_str] new_hostname The new hostname for this URI.
def hostname=(new_hostname)
if new_hostname &&
(new_hostname.respond_to?(:ipv4?) || new_hostname.respond_to?(:ipv6?))
new_hostname = new_hostname.to_s
elsif new_hostname && !new_hostname.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_hostname.class} into String."
end
v = new_hostname ? new_hostname.to_str : nil
v = "[#{v}]" if /\A\[.*\]\z/ !~ v && /:/ =~ v
self.host = v
end
##
# Returns the top-level domain for this host.
#
# @example
# Addressable::URI.parse("www.example.co.uk").tld # => "co.uk"
def tld
PublicSuffix.parse(self.host, ignore_private: true).tld
end
##
# Sets the top-level domain for this URI.
#
# @param [String, #to_str] new_tld The new top-level domain.
def tld=(new_tld)
replaced_tld = domain.sub(/#{tld}\z/, new_tld)
self.host = PublicSuffix::Domain.new(replaced_tld).to_s
end
##
# Returns the public suffix domain for this host.
#
# @example
# Addressable::URI.parse("www.example.co.uk").domain # => "example.co.uk"
def domain
PublicSuffix.domain(self.host, ignore_private: true)
end
##
# The authority component for this URI.
# Combines the user, password, host, and port components.
# Returns nil when there is no host; memoized in @authority.
#
# @return [String] The authority component.
def authority
  return nil unless self.host
  @authority ||= begin
    buffer = String.new
    buffer << "#{self.userinfo}@" unless self.userinfo.nil?
    buffer << self.host
    buffer << ":#{self.port}" unless self.port.nil?
    buffer
  end
end
##
# The authority component for this URI, normalized.
#
# @return [String] The authority component, normalized.
def normalized_authority
  return nil unless self.authority
  @normalized_authority ||= begin
    buffer = String.new
    buffer << "#{self.normalized_userinfo}@" unless self.normalized_userinfo.nil?
    buffer << self.normalized_host
    buffer << ":#{self.normalized_port}" unless self.normalized_port.nil?
    buffer
  end
  # All normalized values should be UTF-8.
  @normalized_authority.force_encoding(Encoding::UTF_8) if @normalized_authority
  @normalized_authority
end
##
# Sets the authority component for this URI by splitting it into
# userinfo (user/password), host, and port and invoking each
# sub-component setter in turn.
#
# @param [String, #to_str] new_authority The new authority component.
def authority=(new_authority)
  if new_authority
    if !new_authority.respond_to?(:to_str)
      raise TypeError, "Can't convert #{new_authority.class} into String."
    end
    new_authority = new_authority.to_str
    # Userinfo is everything before an '@'; the [^\[\]] guard keeps the
    # match from reaching into a bracketed IP-literal host.
    new_userinfo = new_authority[/^([^\[\]]*)@/, 1]
    if new_userinfo
      # User is up to the first ':', password is everything after it.
      new_user = new_userinfo.strip[/^([^:]*):?/, 1]
      new_password = new_userinfo.strip[/:(.*)$/, 1]
    end
    # Host is what remains after stripping userinfo and trailing ":port".
    new_host = new_authority.sub(
      /^([^\[\]]*)@/, EMPTY_STR
    ).sub(
      /:([^:@\[\]]*?)$/, EMPTY_STR
    )
    new_port =
      new_authority[/:([^:@\[\]]*?)$/, 1]
  end
  # Password assigned first to ensure validity in case of nil.
  # `defined?` distinguishes "authority was nil" (clear everything) from
  # components merely absent in the parse.
  self.password = defined?(new_password) ? new_password : nil
  self.user = defined?(new_user) ? new_user : nil
  self.host = defined?(new_host) ? new_host : nil
  self.port = defined?(new_port) ? new_port : nil
  # Reset dependent values
  remove_instance_variable(:@userinfo) if defined?(@userinfo)
  remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# The origin for this URI, serialized to ASCII, as per
# RFC 6454, section 6.2. Returns the string "null" when either the
# scheme or the authority is missing.
#
# @return [String] The serialized origin.
def origin
  return "null" unless self.scheme && self.authority
  serialized = "#{self.normalized_scheme}://#{self.normalized_host}"
  serialized += ":#{self.normalized_port}" if self.normalized_port
  serialized
end
##
# Sets the origin for this URI, serialized to ASCII, as per
# RFC 6454, section 6.2. This assignment will reset the `userinfo`
# component.
#
# @param [String, #to_str] new_origin The new origin component.
# @raise [InvalidURIError] if the origin omits the scheme or the host.
def origin=(new_origin)
  if new_origin
    if !new_origin.respond_to?(:to_str)
      raise TypeError, "Can't convert #{new_origin.class} into String."
    end
    new_origin = new_origin.to_str
    new_scheme = new_origin[/^([^:\/?#]+):\/\//, 1]
    unless new_scheme
      raise InvalidURIError, 'An origin cannot omit the scheme.'
    end
    new_host = new_origin[/:\/\/([^\/?#:]+)/, 1]
    unless new_host
      raise InvalidURIError, 'An origin cannot omit the host.'
    end
    new_port = new_origin[/:([^:@\[\]\/]*?)$/, 1]
  end
  # `defined?` distinguishes "origin was nil" (clear every component)
  # from components merely absent in the parsed origin string.
  self.scheme = defined?(new_scheme) ? new_scheme : nil
  self.host = defined?(new_host) ? new_host : nil
  self.port = defined?(new_port) ? new_port : nil
  self.userinfo = nil
  # Reset dependent values
  remove_instance_variable(:@userinfo) if defined?(@userinfo)
  remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
  remove_instance_variable(:@authority) if defined?(@authority)
  remove_instance_variable(:@normalized_authority) if defined?(@normalized_authority)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
# Returns an array of known ip-based schemes. These schemes typically
# use a similar URI form:
# <code>//<user>:<password>@<host>:<port>/<url-path></code>
def self.ip_based_schemes
  self.port_mapping.keys
end
# Returns a hash of common IP-based schemes and their default port
# numbers. Adding new schemes to this hash, as necessary, will allow
# for better URI normalization.
#
# @return [Hash] Scheme names mapped to their default port numbers.
def self.port_mapping
  PORT_MAPPING
end
##
# The port component for this URI.
# This is the port number actually given in the URI. This does not
# infer port numbers from default values.
#
# @return [Integer] The port component, or nil when none was given.
def port
  return @port if defined?(@port)
  nil
end
##
# The port component for this URI, normalized.
# A port equal to the scheme's default is normalized away to nil.
#
# @return [Integer] The port component, normalized.
def normalized_port
  return nil unless self.port
  return @normalized_port if defined?(@normalized_port)
  @normalized_port ||= begin
    scheme_default = URI.port_mapping[self.normalized_scheme]
    scheme_default == self.port ? nil : self.port
  end
end
##
# Sets the port component for this URI.
#
# @param [String, Integer, #to_s] new_port The new port component.
# @raise [InvalidURIError] if the value is not a string of digits or
#   carries an invalid encoding.
def port=(new_port)
  if new_port != nil && new_port.respond_to?(:to_str)
    # A string port may arrive percent-encoded; decode before parsing.
    new_port = Addressable::URI.unencode_component(new_port.to_str)
  end
  if new_port.respond_to?(:valid_encoding?) && !new_port.valid_encoding?
    raise InvalidURIError, "Invalid encoding in port"
  end
  if new_port != nil && !(new_port.to_s =~ /^\d+$/)
    raise InvalidURIError,
      "Invalid port number: #{new_port.inspect}"
  end
  @port = new_port.to_s.to_i
  # nil.to_s.to_i and "0" both yield 0; treat 0 as "no port given".
  @port = nil if @port == 0
  # Reset dependent values
  remove_instance_variable(:@authority) if defined?(@authority)
  remove_instance_variable(:@normalized_port) if defined?(@normalized_port)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# The inferred port component for this URI.
# This method will normalize to the default port for the URI's scheme if
# the port isn't explicitly specified in the URI.
#
# @return [Integer] The inferred port component.
def inferred_port
  explicit = self.port.to_i
  explicit.zero? ? self.default_port : explicit
end
##
# The default port for this URI's scheme.
# This method will always returns the default port for the URI's scheme
# regardless of the presence of an explicit port in the URI.
#
# @return [Integer] The default port.
def default_port
  return nil unless self.scheme
  URI.port_mapping[self.scheme.strip.downcase]
end
##
# The combination of components that represent a site.
# Combines the scheme, user, password, host, and port components.
# Primarily useful for HTTP and HTTPS.
#
# For example, <code>"http://example.com/path?query"</code> would have a
# <code>site</code> value of <code>"http://example.com"</code>.
#
# @return [String] The components that identify a site.
def site
  return nil unless self.scheme || self.authority
  @site ||= begin
    buffer = "".dup
    buffer << "#{self.scheme}:" unless self.scheme.nil?
    buffer << "//#{self.authority}" unless self.authority.nil?
    buffer
  end
end
##
# The normalized combination of components that represent a site.
# Combines the scheme, user, password, host, and port components.
# Primarily useful for HTTP and HTTPS.
#
# For example, <code>"http://example.com/path?query"</code> would have a
# <code>site</code> value of <code>"http://example.com"</code>.
#
# @return [String] The normalized components that identify a site.
def normalized_site
  return nil unless self.site
  @normalized_site ||= begin
    buffer = "".dup
    buffer << "#{self.normalized_scheme}:" unless self.normalized_scheme.nil?
    buffer << "//#{self.normalized_authority}" unless self.normalized_authority.nil?
    buffer
  end
  # All normalized values should be UTF-8.
  @normalized_site.force_encoding(Encoding::UTF_8) if @normalized_site
  @normalized_site
end
##
# Sets the site value for this URI.
# A nil value clears both the scheme and the authority.
#
# @param [String, #to_str] new_site The new site value.
def site=(new_site)
  unless new_site
    self.scheme = nil
    self.authority = nil
    return
  end
  unless new_site.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_site.class} into String."
  end
  new_site = new_site.to_str
  # These two regular expressions derived from the primary parsing
  # expression
  self.scheme = new_site[/^(?:([^:\/?#]+):)?(?:\/\/(?:[^\/?#]*))?$/, 1]
  self.authority = new_site[
    /^(?:(?:[^:\/?#]+):)?(?:\/\/([^\/?#]*))?$/, 1
  ]
end
##
# The path component for this URI.
# Unlike the other components, an unset path reads as the empty string,
# never nil.
#
# @return [String] The path component.
def path
  return @path if defined?(@path)
  EMPTY_STR
end
# Matches a relative path whose first segment contains a colon
# (e.g. "seg:ment/rest"), which would be ambiguous with a scheme.
NORMPATH = /^(?!\/)[^\/:]*:.*$/
##
# The path component for this URI, normalized: each segment is
# percent-normalized, dot-segments are removed, and the result is
# memoized in @normalized_path.
#
# @return [String] The path component, normalized.
def normalized_path
  @normalized_path ||= begin
    path = self.path.to_s
    if self.scheme == nil && path =~ NORMPATH
      # Relative paths with colons in the first segment are ambiguous.
      path = path.sub(":", "%2F")
    end
    # String#split(delimeter, -1) uses the more strict splitting behavior
    # found by default in Python.
    result = path.strip.split(SLASH, -1).map do |segment|
      Addressable::URI.normalize_component(
        segment,
        Addressable::URI::CharacterClasses::PCHAR
      )
    end.join(SLASH)
    result = URI.normalize_path(result)
    # An empty path under these hierarchical schemes normalizes to "/".
    if result.empty? &&
      ["http", "https", "ftp", "tftp"].include?(self.normalized_scheme)
      result = SLASH.dup
    end
    result
  end
  # All normalized values should be UTF-8
  @normalized_path.force_encoding(Encoding::UTF_8) if @normalized_path
  @normalized_path
end
##
# Sets the path component for this URI.
# A non-empty relative path cannot coexist with a host, so a leading
# slash is added in that case.
#
# @param [String, #to_str] new_path The new path component.
def path=(new_path)
  if new_path && !new_path.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_path.class} into String."
  end
  @path = (new_path || EMPTY_STR).to_str
  if !@path.empty? && @path[0..0] != SLASH && host != nil
    @path = "/#{@path}"
  end
  # Invalidate derived state
  remove_instance_variable(:@normalized_path) if defined?(@normalized_path)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate
end
##
# The basename, if any, of the file in the path component, with any
# trailing ";params" suffix removed.
#
# @return [String] The path's basename.
def basename
  # Path cannot be nil, so File.basename is always safe here.
  name = File.basename(self.path)
  name.sub(/;[^\/]*$/, EMPTY_STR)
end
##
# The extname, if any, of the file in the path component.
# Empty string if there is no extension.
#
# @return [String] The path's extname.
def extname
  return nil unless self.path
  File.extname(self.basename)
end
##
# The query component for this URI.
#
# @return [String] The query component, or nil when unset.
def query
  return @query if defined?(@query)
  nil
end
##
# The query component for this URI, normalized.
#
# NOTE(review): the memoized value ignores +flags+ on subsequent calls,
# so mixing e.g. normalized_query and normalized_query(:sorted) on one
# instance returns whichever was computed first — confirm intended.
#
# @return [String] The query component, normalized.
def normalized_query(*flags)
  return nil unless self.query
  return @normalized_query if defined?(@normalized_query)
  @normalized_query ||= begin
    modified_query_class = Addressable::URI::CharacterClasses::QUERY.dup
    # Make sure possible key-value pair delimiters are escaped. The two
    # calls are deliberately NOT chained: String#sub! returns nil when no
    # substitution occurs, so chaining would raise NoMethodError if the
    # character class ever stopped containing one of the delimiters.
    modified_query_class.sub!("\\&", "")
    modified_query_class.sub!("\\;", "")
    pairs = (self.query || "").split("&", -1)
    pairs.sort! if flags.include?(:sorted)
    component = pairs.map do |pair|
      Addressable::URI.normalize_component(pair, modified_query_class, "+")
    end.join("&")
    component == "" ? nil : component
  end
  # All normalized values should be UTF-8
  @normalized_query.force_encoding(Encoding::UTF_8) if @normalized_query
  @normalized_query
end
##
# Sets the query component for this URI.
#
# @param [String, #to_str] new_query The new query component.
def query=(new_query)
  if new_query && !new_query.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_query.class} into String."
  end
  @query = new_query && new_query.to_str
  # Invalidate derived state
  remove_instance_variable(:@normalized_query) if defined?(@normalized_query)
  remove_composite_values
end
##
# Converts the query component to a Hash value.
#
# @param [Class] return_type The return type desired. Value must be either
#   `Hash` or `Array`.
#
# @return [Hash, Array, nil] The query string parsed as a Hash or Array
#   or nil if the query string is blank.
#
# @example
#   Addressable::URI.parse("?one=1&two=2&three=3").query_values
#   #=> {"one" => "1", "two" => "2", "three" => "3"}
#   Addressable::URI.parse("?one=two&one=three").query_values(Array)
#   #=> [["one", "two"], ["one", "three"]]
#   Addressable::URI.parse("?one=two&one=three").query_values(Hash)
#   #=> {"one" => "three"}
#   Addressable::URI.parse("?").query_values
#   #=> {}
#   Addressable::URI.parse("").query_values
#   #=> nil
def query_values(return_type=Hash)
  empty_accumulator = Array == return_type ? [] : {}
  if return_type != Hash && return_type != Array
    raise ArgumentError, "Invalid return type. Must be Hash or Array."
  end
  return nil if self.query == nil
  # Empty pairs ("a&&b") are dropped by the compact; a pair with no "="
  # yields a single-element array, i.e. a valueless key.
  split_query = self.query.split("&").map do |pair|
    pair.split("=", 2) if pair && !pair.empty?
  end.compact
  return split_query.inject(empty_accumulator.dup) do |accu, pair|
    # I'd rather use key/value identifiers instead of array lookups,
    # but in this case I really want to maintain the exact pair structure,
    # so it's best to make all changes in-place.
    pair[0] = URI.unencode_component(pair[0])
    if pair[1].respond_to?(:to_str)
      # I loathe the fact that I have to do this. Stupid HTML 4.01.
      # Treating '+' as a space was just an unbelievably bad idea.
      # There was nothing wrong with '%20'!
      # If it ain't broke, don't fix it!
      pair[1] = URI.unencode_component(pair[1].to_str.tr("+", " "))
    end
    if return_type == Hash
      # Hash mode: later occurrences of a key overwrite earlier ones.
      accu[pair[0]] = pair[1]
    else
      accu << pair
    end
    accu
  end
end
##
# Sets the query component for this URI from a Hash object.
# An empty Hash or Array will result in an empty query string.
#
# @param [Hash, #to_hash, Array] new_query_values The new query values.
#
# @example
#   uri.query_values = {:a => "a", :b => ["c", "d", "e"]}
#   uri.query
#   # => "a=a&b=c&b=d&b=e"
#   uri.query_values = [['a', 'a'], ['b', 'c'], ['b', 'd'], ['b', 'e']]
#   uri.query
#   # => "a=a&b=c&b=d&b=e"
#   uri.query_values = [['a', 'a'], ['b', ['c', 'd', 'e']]]
#   uri.query
#   # => "a=a&b=c&b=d&b=e"
#   uri.query_values = [['flag'], ['key', 'value']]
#   uri.query
#   # => "flag&key=value"
def query_values=(new_query_values)
  if new_query_values == nil
    self.query = nil
    return nil
  end
  if !new_query_values.is_a?(Array)
    if !new_query_values.respond_to?(:to_hash)
      raise TypeError,
        "Can't convert #{new_query_values.class} into Hash."
    end
    new_query_values = new_query_values.to_hash
    # Symbol keys are stringified so sorting/encoding behaves uniformly.
    new_query_values = new_query_values.map do |key, value|
      key = key.to_s if key.kind_of?(Symbol)
      [key, value]
    end
    # Useful default for OAuth and caching.
    # Only to be used for non-Array inputs. Arrays should preserve order.
    new_query_values.sort!
  end
  # new_query_values have form [['key1', 'value1'], ['key2', 'value2']]
  buffer = "".dup
  new_query_values.each do |key, value|
    encoded_key = URI.encode_component(
      key, CharacterClasses::UNRESERVED
    )
    if value == nil
      # A nil value serializes as a bare, valueless key ("flag&...").
      buffer << "#{encoded_key}&"
    elsif value.kind_of?(Array)
      # An Array value repeats the key once per element.
      value.each do |sub_value|
        encoded_value = URI.encode_component(
          sub_value, CharacterClasses::UNRESERVED
        )
        buffer << "#{encoded_key}=#{encoded_value}&"
      end
    else
      encoded_value = URI.encode_component(
        value, CharacterClasses::UNRESERVED
      )
      buffer << "#{encoded_key}=#{encoded_value}&"
    end
  end
  # chop drops the trailing "&" appended by the loop above.
  self.query = buffer.chop
end
##
# The HTTP request URI for this URI. This is the path and the
# query string; nil for absolute non-HTTP(S) URIs.
#
# @return [String] The request URI required for an HTTP request.
def request_uri
  return nil if self.absolute? && self.scheme !~ /^https?$/i
  path_part = self.path.empty? ? SLASH : self.path
  query_part = self.query ? "?#{self.query}" : EMPTY_STR
  path_part + query_part
end
##
# Sets the HTTP request URI for this URI, splitting it into path and
# query components.
#
# @param [String, #to_str] new_request_uri The new HTTP request URI.
# @raise [InvalidURIError] when called on an absolute non-HTTP(S) URI.
def request_uri=(new_request_uri)
  if !new_request_uri.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_request_uri.class} into String."
  end
  if self.absolute? && self.scheme !~ /^https?$/i
    raise InvalidURIError,
      "Cannot set an HTTP request URI for a non-HTTP URI."
  end
  new_request_uri = new_request_uri.to_str
  # Everything before the first '?' is the path. The previous pattern
  # (/^([^\?]*)\?(?:.*)$/) only matched when a '?' was present, so a
  # request URI without a query string silently lost its path and
  # collapsed to "/".
  path_component = new_request_uri[/^([^\?]*)/, 1]
  query_component = new_request_uri[/^(?:[^\?]*)\?(.*)$/, 1]
  path_component = path_component.to_s
  path_component = (!path_component.empty? ? path_component : SLASH)
  self.path = path_component
  self.query = query_component
  # Reset dependent values
  remove_composite_values
end
##
# The fragment component for this URI.
#
# @return [String] The fragment component, or nil when unset.
def fragment
  return @fragment if defined?(@fragment)
  nil
end
##
# The fragment component for this URI, normalized.
# An empty fragment normalizes to nil; the result is memoized.
#
# @return [String] The fragment component, normalized.
def normalized_fragment
  return nil unless self.fragment
  return @normalized_fragment if defined?(@normalized_fragment)
  @normalized_fragment ||= begin
    normalized = Addressable::URI.normalize_component(
      self.fragment,
      Addressable::URI::CharacterClasses::FRAGMENT
    )
    normalized.empty? ? nil : normalized
  end
  # All normalized values should be UTF-8.
  @normalized_fragment.force_encoding(Encoding::UTF_8) if @normalized_fragment
  @normalized_fragment
end
##
# Sets the fragment component for this URI.
#
# @param [String, #to_str] new_fragment The new fragment component.
def fragment=(new_fragment)
  if new_fragment && !new_fragment.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_fragment.class} into String."
  end
  @fragment = new_fragment && new_fragment.to_str
  # Invalidate derived state
  remove_instance_variable(:@normalized_fragment) if defined?(@normalized_fragment)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate
end
##
# Determines if the scheme indicates an IP-based protocol.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the scheme indicates an IP-based protocol.
#   <code>false</code> otherwise.
def ip_based?
  return false unless self.scheme
  URI.ip_based_schemes.include?(self.scheme.strip.downcase)
end
##
# Determines if the URI is relative (i.e. has no scheme).
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URI is relative. <code>false</code>
#   otherwise.
def relative?
  self.scheme.nil?
end
##
# Determines if the URI is absolute (i.e. has a scheme).
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URI is absolute. <code>false</code>
#   otherwise.
def absolute?
  !self.relative?
end
##
# Joins two URIs together, following the reference-resolution algorithm
# of RFC 3986, section 5.2.2.
#
# @param [String, Addressable::URI, #to_str] uri The URI to join with.
#
# @return [Addressable::URI] The joined URI.
def join(uri)
  if !uri.respond_to?(:to_str)
    raise TypeError, "Can't convert #{uri.class} into String."
  end
  if !uri.kind_of?(URI)
    # Otherwise, convert to a String, then parse.
    uri = URI.parse(uri.to_str)
  end
  # An empty reference resolves to the base URI itself.
  if uri.to_s.empty?
    return self.dup
  end
  joined_scheme = nil
  joined_user = nil
  joined_password = nil
  joined_host = nil
  joined_port = nil
  joined_path = nil
  joined_query = nil
  joined_fragment = nil
  # Section 5.2.2 of RFC 3986
  if uri.scheme != nil
    # Absolute reference: take every component from it.
    joined_scheme = uri.scheme
    joined_user = uri.user
    joined_password = uri.password
    joined_host = uri.host
    joined_port = uri.port
    joined_path = URI.normalize_path(uri.path)
    joined_query = uri.query
  else
    if uri.authority != nil
      # Network-path reference: take everything but the scheme.
      joined_user = uri.user
      joined_password = uri.password
      joined_host = uri.host
      joined_port = uri.port
      joined_path = URI.normalize_path(uri.path)
      joined_query = uri.query
    else
      if uri.path == nil || uri.path.empty?
        # Empty path: keep the base path; the reference's query wins
        # only when present.
        joined_path = self.path
        if uri.query != nil
          joined_query = uri.query
        else
          joined_query = self.query
        end
      else
        if uri.path[0..0] == SLASH
          # Absolute-path reference replaces the base path entirely.
          joined_path = URI.normalize_path(uri.path)
        else
          # Relative-path reference: merge with the base path.
          base_path = self.path.dup
          base_path = EMPTY_STR if base_path == nil
          base_path = URI.normalize_path(base_path)
          # Section 5.2.3 of RFC 3986
          #
          # Removes the right-most path segment from the base path.
          if base_path.include?(SLASH)
            base_path.sub!(/\/[^\/]+$/, SLASH)
          else
            base_path = EMPTY_STR
          end
          # If the base path is empty and an authority segment has been
          # defined, use a base path of SLASH
          if base_path.empty? && self.authority != nil
            base_path = SLASH
          end
          joined_path = URI.normalize_path(base_path + uri.path)
        end
        joined_query = uri.query
      end
      # Authority components always come from the base here.
      joined_user = self.user
      joined_password = self.password
      joined_host = self.host
      joined_port = self.port
    end
    joined_scheme = self.scheme
  end
  # The fragment always comes from the reference.
  joined_fragment = uri.fragment
  return self.class.new(
    :scheme => joined_scheme,
    :user => joined_user,
    :password => joined_password,
    :host => joined_host,
    :port => joined_port,
    :path => joined_path,
    :query => joined_query,
    :fragment => joined_fragment
  )
end
# Operator form of #join, enabling `base + relative`.
alias_method :+, :join
##
# Destructive form of <code>join</code>: replaces this URI's state with
# the result of joining.
#
# @param [String, Addressable::URI, #to_str] uri The URI to join with.
#
# @return [Addressable::URI] The joined URI.
#
# @see Addressable::URI#join
def join!(uri)
  replace_self(join(uri))
end
##
# Merges a URI with a <code>Hash</code> of components.
# This method has different behavior from <code>join</code>. Any
# components present in the <code>hash</code> parameter will override the
# original components. The path component is not treated specially.
#
# @param [Hash, Addressable::URI, #to_hash] hash The components to merge with.
#
# @return [Addressable::URI] The merged URI.
#
# @raise [ArgumentError] if a composite component (:authority, :userinfo)
#   is combined with one of its sub-components.
#
# @see Hash#merge
def merge(hash)
  if !hash.respond_to?(:to_hash)
    raise TypeError, "Can't convert #{hash.class} into Hash."
  end
  hash = hash.to_hash
  # :authority subsumes userinfo/user/password/host/port; both at once
  # would be ambiguous.
  if hash.has_key?(:authority)
    if (hash.keys & [:userinfo, :user, :password, :host, :port]).any?
      raise ArgumentError,
        "Cannot specify both an authority and any of the components " +
        "within the authority."
    end
  end
  if hash.has_key?(:userinfo)
    if (hash.keys & [:user, :password]).any?
      raise ArgumentError,
        "Cannot specify both a userinfo and either the user or password."
    end
  end
  uri = self.class.new
  # Assignments are deferred because intermediate states (e.g. a port
  # without a host yet) would fail validation.
  uri.defer_validation do
    # Bunch of crazy logic required because of the composite components
    # like userinfo and authority.
    uri.scheme =
      hash.has_key?(:scheme) ? hash[:scheme] : self.scheme
    if hash.has_key?(:authority)
      uri.authority =
        hash.has_key?(:authority) ? hash[:authority] : self.authority
    end
    if hash.has_key?(:userinfo)
      uri.userinfo =
        hash.has_key?(:userinfo) ? hash[:userinfo] : self.userinfo
    end
    if !hash.has_key?(:userinfo) && !hash.has_key?(:authority)
      uri.user =
        hash.has_key?(:user) ? hash[:user] : self.user
      uri.password =
        hash.has_key?(:password) ? hash[:password] : self.password
    end
    if !hash.has_key?(:authority)
      uri.host =
        hash.has_key?(:host) ? hash[:host] : self.host
      uri.port =
        hash.has_key?(:port) ? hash[:port] : self.port
    end
    uri.path =
      hash.has_key?(:path) ? hash[:path] : self.path
    uri.query =
      hash.has_key?(:query) ? hash[:query] : self.query
    uri.fragment =
      hash.has_key?(:fragment) ? hash[:fragment] : self.fragment
  end
  return uri
end
##
# Destructive form of <code>merge</code>: replaces this URI's state with
# the merged result.
#
# @param [Hash, Addressable::URI, #to_hash] uri The components to merge with.
#
# @return [Addressable::URI] The merged URI.
#
# @see Addressable::URI#merge
def merge!(uri)
  replace_self(merge(uri))
end
##
# Returns the shortest normalized relative form of this URI that uses the
# supplied URI as a base for resolution. Returns an absolute URI if
# necessary. This is effectively the opposite of <code>route_to</code>.
#
# @param [String, Addressable::URI, #to_str] uri The URI to route from.
#
# @return [Addressable::URI]
#   The normalized relative URI that is equivalent to the original URI.
#
# @raise [ArgumentError] if either URI is relative.
def route_from(uri)
  uri = URI.parse(uri).normalize
  normalized_self = self.normalize
  if normalized_self.relative?
    raise ArgumentError, "Expected absolute URI, got: #{self.to_s}"
  end
  if uri.relative?
    raise ArgumentError, "Expected absolute URI, got: #{uri.to_s}"
  end
  # Identical URIs route to a bare fragment reference.
  if normalized_self == uri
    return Addressable::URI.parse("##{normalized_self.fragment}")
  end
  components = normalized_self.to_hash
  # Strip each component that matches the base, outermost first; stop
  # at the first mismatch.
  if normalized_self.scheme == uri.scheme
    components[:scheme] = nil
    if normalized_self.authority == uri.authority
      components[:user] = nil
      components[:password] = nil
      components[:host] = nil
      components[:port] = nil
      if normalized_self.path == uri.path
        components[:path] = nil
        if normalized_self.query == uri.query
          components[:query] = nil
        end
      else
        # Paths differ: express this path relative to the base path by
        # dropping the shared segment prefix and backing out with "..".
        if uri.path != SLASH and components[:path]
          self_splitted_path = split_path(components[:path])
          uri_splitted_path = split_path(uri.path)
          self_dir = self_splitted_path.shift
          uri_dir = uri_splitted_path.shift
          while !self_splitted_path.empty? && !uri_splitted_path.empty? and self_dir == uri_dir
            self_dir = self_splitted_path.shift
            uri_dir = uri_splitted_path.shift
          end
          components[:path] = (uri_splitted_path.fill('..') + [self_dir] + self_splitted_path).join(SLASH)
        end
      end
    end
  end
  # Avoid network-path references.
  if components[:host] != nil
    components[:scheme] = normalized_self.scheme
  end
  return Addressable::URI.new(
    :scheme => components[:scheme],
    :user => components[:user],
    :password => components[:password],
    :host => components[:host],
    :port => components[:port],
    :path => components[:path],
    :query => components[:query],
    :fragment => components[:fragment]
  )
end
##
# Returns the shortest normalized relative form of the supplied URI that
# uses this URI as a base for resolution. Returns an absolute URI if
# necessary. This is effectively the opposite of <code>route_from</code>.
#
# @param [String, Addressable::URI, #to_str] uri The URI to route to.
#
# @return [Addressable::URI]
#   The normalized relative URI that is equivalent to the supplied URI.
def route_to(uri)
  URI.parse(uri).route_from(self)
end
##
# Returns a normalized URI object.
#
# NOTE: This method does not attempt to fully conform to specifications.
# It exists largely to correct other people's failures to read the
# specifications, and also to deal with caching issues since several
# different URIs may represent the same resource and should not be
# cached multiple times.
#
# @return [Addressable::URI] The normalized URI.
def normalize
  # Special exception for the frequently misused feed: URI scheme —
  # unwrap the embedded http URI and normalize that instead.
  if normalized_scheme == "feed" && self.to_s =~ /^feed:\/*http:\/*/
    return URI.parse(self.to_s[/^feed:\/*(http:\/*.*)/, 1]).normalize
  end
  self.class.new(
    :scheme => normalized_scheme,
    :authority => normalized_authority,
    :path => normalized_path,
    :query => normalized_query,
    :fragment => normalized_fragment
  )
end
##
# Destructively normalizes this URI object.
#
# @return [Addressable::URI] The normalized URI.
#
# @see Addressable::URI#normalize
def normalize!
  replace_self(normalize)
end
##
# Creates a URI suitable for display to users. If semantic attacks are
# likely, the application should try to detect these and warn the user.
# See <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>,
# section 7.6 for more information.
#
# @return [Addressable::URI] A URI suitable for display purposes.
def display_uri
  shown = self.normalize
  # Convert any punycode host back to its Unicode representation.
  shown.host = ::Addressable::IDNA.to_unicode(shown.host)
  shown
end
##
# Returns <code>true</code> if the URI objects are equal. This method
# normalizes both URIs before doing the comparison, and allows comparison
# against <code>Strings</code>.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URIs are equivalent, <code>false</code>
#   otherwise.
def ===(uri)
  other_string =
    if uri.respond_to?(:normalize)
      uri.normalize.to_s
    else
      begin
        ::Addressable::URI.parse(uri).normalize.to_s
      rescue InvalidURIError, TypeError
        # Unparseable operands are simply not equivalent.
        return false
      end
    end
  self.normalize.to_s == other_string
end
##
# Returns <code>true</code> if the URI objects are equal. This method
# normalizes both URIs before doing the comparison.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URIs are equivalent, <code>false</code>
#   otherwise.
def ==(uri)
  uri.kind_of?(URI) && self.normalize.to_s == uri.normalize.to_s
end
##
# Returns <code>true</code> if the URI objects are equal. This method
# does NOT normalize either URI before doing the comparison.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URIs are equivalent, <code>false</code>
#   otherwise.
def eql?(uri)
  uri.kind_of?(URI) && self.to_s == uri.to_s
end
##
# A hash value that will make a URI equivalent to its normalized
# form (consistent with #eql?, which compares serialized strings).
#
# @return [Integer] A hash of the URI.
def hash
  @hash ||= -self.to_s.hash
end
##
# Clones the URI object, duplicating each mutable (String) component so
# the copy shares no state with the original.
#
# @return [Addressable::URI] The cloned URI.
def dup
  self.class.new(
    :scheme => (scheme && scheme.dup),
    :user => (user && user.dup),
    :password => (password && password.dup),
    :host => (host && host.dup),
    :port => port,
    :path => (path && path.dup),
    :query => (query && query.dup),
    :fragment => (fragment && fragment.dup)
  )
end
##
# Omits components from a URI.
#
# @param [Symbol] *components The components to be omitted.
#
# @return [Addressable::URI] The URI with components omitted.
#
# @example
#   uri = Addressable::URI.parse("http://example.com/path?query")
#   #=> #<Addressable::URI:0xcc5e7a URI:http://example.com/path?query>
#   uri.omit(:scheme, :authority)
#   #=> #<Addressable::URI:0xcc4d86 URI:/path?query>
def omit(*components)
  allowed = [
    :scheme, :user, :password, :userinfo, :host, :port, :authority,
    :path, :query, :fragment
  ]
  invalid_components = components - allowed
  unless invalid_components.empty?
    raise ArgumentError,
      "Invalid component names: #{invalid_components.inspect}."
  end
  result = self.dup
  # Defer validation: nil-ing one component at a time may pass through
  # transiently invalid states.
  result.defer_validation do
    components.each do |component|
      result.send(:"#{component}=", nil)
    end
    result.user = result.normalized_user
  end
  result
end
##
# Destructive form of omit.
#
# @param [Symbol] *components The components to be omitted.
#
# @return [Addressable::URI] The URI with components omitted.
#
# @see Addressable::URI#omit
def omit!(*components)
  replace_self(omit(*components))
end
##
# Determines if the URI serializes to an empty string.
#
# @return [TrueClass, FalseClass]
#   Returns <code>true</code> if empty, <code>false</code> otherwise.
def empty?
  self.to_s.empty?
end
##
# Converts the URI to a <code>String</code>. The assembled string is
# memoized in @uri_string; component setters invalidate it via
# remove_composite_values.
#
# @return [String] The URI's <code>String</code> representation.
# @raise [InvalidURIError] if the URI has no scheme and a relative path
#   whose first segment contains a colon (indistinguishable from a
#   scheme when re-parsed).
def to_s
  if self.scheme == nil && self.path != nil && !self.path.empty? &&
    self.path =~ NORMPATH
    raise InvalidURIError,
      "Cannot assemble URI string with ambiguous path: '#{self.path}'"
  end
  @uri_string ||= begin
    uri_string = String.new
    uri_string << "#{self.scheme}:" if self.scheme != nil
    uri_string << "//#{self.authority}" if self.authority != nil
    uri_string << self.path.to_s
    uri_string << "?#{self.query}" if self.query != nil
    uri_string << "##{self.fragment}" if self.fragment != nil
    uri_string.force_encoding(Encoding::UTF_8)
    uri_string
  end
end
##
# URIs are glorified <code>Strings</code>; defining #to_str allows
# implicit conversion to String wherever one is expected.
alias_method :to_str, :to_s
##
# Returns a Hash of the URI components.
#
# @return [Hash] The URI as a <code>Hash</code> of components.
def to_hash
  {
    :scheme => scheme,
    :user => user,
    :password => password,
    :host => host,
    :port => port,
    :path => path,
    :query => query,
    :fragment => fragment
  }
end
##
# Returns a <code>String</code> representation of the URI object's state.
#
# @return [String] The URI object's state, as a <code>String</code>.
def inspect
  format("#<%s:%#0x URI:%s>", URI.to_s, object_id, to_s)
end
##
# This method allows you to make several changes to a URI simultaneously,
# which separately would cause validation errors, but in conjunction,
# are valid. The URI will be revalidated as soon as the entire block has
# been executed.
#
# @param [Proc] block
#   A set of operations to perform on a given URI.
# @raise [LocalJumpError] if no block is supplied.
def defer_validation(&block)
  raise LocalJumpError, "No block given." unless block
  @validation_deferred = true
  begin
    block.call()
  ensure
    # Previously an exception inside the block left @validation_deferred
    # set, silently disabling all future validation on this instance.
    @validation_deferred = false
  end
  validate
  return nil
end
protected
SELF_REF = '.'
PARENT = '..'
# Rule names refer to the steps of RFC 3986 section 5.2.4
# ("Remove Dot Segments"); used by URI.normalize_path below.
RULE_2A = /\/\.\/|\/\.$/
RULE_2B_2C = /\/([^\/]*)\/\.\.\/|\/([^\/]*)\/\.\.$/
RULE_2D = /^\.\.?\/?/
# Non-standard rule: strips leading "/.." and "/." segments.
RULE_PREFIXED_PARENT = /^\/\.\.?\/|^(\/\.\.?)+\/?$/
##
# Resolves paths to their simplest form. Implements "Remove Dot
# Segments" (RFC 3986, section 5.2.4) by applying regexp rewrites
# repeatedly until a fixed point is reached.
#
# @param [String] path The path to normalize.
#
# @return [String] The normalized path.
def self.normalize_path(path)
  # Section 5.2.4 of RFC 3986
  return nil if path.nil?
  normalized_path = path.dup
  begin
    # Each gsub! returns nil when nothing changed; `mod ||=` records
    # whether any rule fired this pass.
    mod = nil
    # Rule 2a: collapse "/./" and a trailing "/.".
    mod ||= normalized_path.gsub!(RULE_2A, SLASH)
    pair = normalized_path.match(RULE_2B_2C)
    parent, current = pair[1], pair[2] if pair
    # Rules 2b/2c: drop "<segment>/.." pairs, but never when the segment
    # itself is "." or ".." (those are handled by the other rules).
    if pair && ((parent != SELF_REF && parent != PARENT) ||
        (current != SELF_REF && current != PARENT))
      mod ||= normalized_path.gsub!(
        Regexp.new(
          "/#{Regexp.escape(parent.to_s)}/\\.\\./|" +
          "(/#{Regexp.escape(current.to_s)}/\\.\\.$)"
        ), SLASH
      )
    end
    # Rule 2d: remove leading "." / ".." segments.
    mod ||= normalized_path.gsub!(RULE_2D, EMPTY_STR)
    # Non-standard, removes prefixed dotted segments from path.
    mod ||= normalized_path.gsub!(RULE_PREFIXED_PARENT, SLASH)
  end until mod.nil?
  return normalized_path
end
##
# Ensures that the URI is valid. Called by every component setter;
# a no-op while validation is deferred via #defer_validation.
#
# @raise [InvalidURIError] if any cross-component constraint is violated.
def validate
  return if !!@validation_deferred
  # An IP-based scheme needs at least a host or a path to be meaningful.
  if self.scheme != nil && self.ip_based? &&
      (self.host == nil || self.host.empty?) &&
      (self.path == nil || self.path.empty?)
    raise InvalidURIError,
      "Absolute URI missing hierarchical segment: '#{self.to_s}'"
  end
  if self.host == nil
    # Port, user, or password make no sense without a host.
    if self.port != nil ||
        self.user != nil ||
        self.password != nil
      raise InvalidURIError, "Hostname not supplied: '#{self.to_s}'"
    end
  end
  if self.path != nil && !self.path.empty? && self.path[0..0] != SLASH &&
      self.authority != nil
    raise InvalidURIError,
      "Cannot have a relative path with an authority set: '#{self.to_s}'"
  end
  # "//" at the start of a path would be parsed back as an authority.
  if self.path != nil && !self.path.empty? &&
      self.path[0..1] == SLASH + SLASH && self.authority == nil
    raise InvalidURIError,
      "Cannot have a path with two leading slashes " +
      "without an authority set: '#{self.to_s}'"
  end
  unreserved = CharacterClasses::UNRESERVED
  sub_delims = CharacterClasses::SUB_DELIMS
  # Reject forbidden characters in the host; a bracketed (IP-literal)
  # host may only contain unreserved / sub-delims / ":" characters.
  if !self.host.nil? && (self.host =~ /[<>{}\/\\\?\#\@"[[:space:]]]/ ||
      (self.host[/^\[(.*)\]$/, 1] != nil && self.host[/^\[(.*)\]$/, 1] !~
      Regexp.new("^[#{unreserved}#{sub_delims}:]*$")))
    raise InvalidURIError, "Invalid character in host: '#{self.host.to_s}'"
  end
  return nil
end
##
# Replaces the internal state of self with the specified URI's state.
# Used in destructive operations to avoid massive code repetition.
#
# @param [Addressable::URI] uri The URI to replace <code>self</code> with.
#
# @return [Addressable::URI] <code>self</code>.
def replace_self(uri)
# Reset dependent values
instance_variables.each do |var|
if instance_variable_defined?(var) && var != :@validation_deferred
remove_instance_variable(var)
end
end
@scheme = uri.scheme
@user = uri.user
@password = uri.password
@host = uri.host
@port = uri.port
@path = uri.path
@query = uri.query
@fragment = uri.fragment
return self
end
##
# Splits path string with "/" (slash).
# It is considered that there is empty string after last slash when
# path ends with slash.
#
# @param [String] path The path to split.
#
# @return [Array<String>] An array of parts of path.
def split_path(path)
splitted = path.split(SLASH)
splitted << EMPTY_STR if path.end_with? SLASH
splitted
end
##
# Resets composite values for the entire URI
#
# @api private
def remove_composite_values
remove_instance_variable(:@uri_string) if defined?(@uri_string)
remove_instance_variable(:@hash) if defined?(@hash)
end
end
end
# NOTE(review): stray lint note ("use double-quoted string") left in the file;
# commented out so it cannot be parsed as code.
# frozen_string_literal: true
# encoding:utf-8
#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
require "addressable/version"
require "addressable/idna"
require "public_suffix"
##
# Addressable is a library for processing links and URIs.
module Addressable
##
# This is an implementation of a URI parser based on
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>,
# <a href="http://www.ietf.org/rfc/rfc3987.txt">RFC 3987</a>.
class URI
##
# Raised if something other than a uri is supplied.
class InvalidURIError < StandardError
end
##
# Container for the character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
module CharacterClasses
ALPHA = "a-zA-Z"
DIGIT = "0-9"
GEN_DELIMS = "\\:\\/\\?\\#\\[\\]\\@"
SUB_DELIMS = "\\!\\$\\&\\'\\(\\)\\*\\+\\,\\;\\="
RESERVED = GEN_DELIMS + SUB_DELIMS
UNRESERVED = ALPHA + DIGIT + "\\-\\.\\_\\~"
PCHAR = UNRESERVED + SUB_DELIMS + "\\:\\@"
SCHEME = ALPHA + DIGIT + "\\-\\+\\."
HOST = UNRESERVED + SUB_DELIMS + "\\[\\:\\]"
AUTHORITY = PCHAR
PATH = PCHAR + "\\/"
QUERY = PCHAR + "\\/\\?"
FRAGMENT = PCHAR + "\\/\\?"
end
SLASH = '/'
EMPTY_STR = ''
URIREGEX = /^(([^:\/?#]+):)?(\/\/([^\/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?$/
PORT_MAPPING = {
"http" => 80,
"https" => 443,
"ftp" => 21,
"tftp" => 69,
"sftp" => 22,
"ssh" => 22,
"svn+ssh" => 22,
"telnet" => 23,
"nntp" => 119,
"gopher" => 70,
"wais" => 210,
"ldap" => 389,
"prospero" => 1525
}
##
# Returns a URI object based on the parsed string.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI string to parse.
# No parsing is performed if the object is already an
# <code>Addressable::URI</code>.
#
# @return [Addressable::URI] The parsed URI.
def self.parse(uri)
# If we were given nil, return nil.
return nil unless uri
# If a URI object is passed, just return itself.
return uri.dup if uri.kind_of?(self)
# If a URI object of the Ruby standard library variety is passed,
# convert it to a string, then parse the string.
# We do the check this way because we don't want to accidentally
# cause a missing constant exception to be thrown.
if uri.class.name =~ /^URI\b/
uri = uri.to_s
end
# Otherwise, convert to a String
begin
uri = uri.to_str
rescue TypeError, NoMethodError
raise TypeError, "Can't convert #{uri.class} into String."
end if not uri.is_a? String
# This Regexp supplied as an example in RFC 3986, and it works great.
scan = uri.scan(URIREGEX)
fragments = scan[0]
scheme = fragments[1]
authority = fragments[3]
path = fragments[4]
query = fragments[6]
fragment = fragments[8]
user = nil
password = nil
host = nil
port = nil
if authority != nil
# The Regexp above doesn't split apart the authority.
userinfo = authority[/^([^\[\]]*)@/, 1]
if userinfo != nil
user = userinfo.strip[/^([^:]*):?/, 1]
password = userinfo.strip[/:(.*)$/, 1]
end
host = authority.sub(
/^([^\[\]]*)@/, EMPTY_STR
).sub(
/:([^:@\[\]]*?)$/, EMPTY_STR
)
port = authority[/:([^:@\[\]]*?)$/, 1]
end
if port == EMPTY_STR
port = nil
end
return new(
:scheme => scheme,
:user => user,
:password => password,
:host => host,
:port => port,
:path => path,
:query => query,
:fragment => fragment
)
end
##
# Converts an input to a URI. The input does not have to be a valid
# URI — the method will use heuristics to guess what URI was intended.
# This is not standards-compliant, merely user-friendly.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI string to parse.
# No parsing is performed if the object is already an
# <code>Addressable::URI</code>.
# @param [Hash] hints
# A <code>Hash</code> of hints to the heuristic parser.
# Defaults to <code>{:scheme => "http"}</code>.
#
# @return [Addressable::URI] The parsed URI.
def self.heuristic_parse(uri, hints={})
# If we were given nil, return nil.
return nil unless uri
# If a URI object is passed, just return itself.
return uri.dup if uri.kind_of?(self)
# If a URI object of the Ruby standard library variety is passed,
# convert it to a string, then parse the string.
# We do the check this way because we don't want to accidentally
# cause a missing constant exception to be thrown.
if uri.class.name =~ /^URI\b/
uri = uri.to_s
end
if !uri.respond_to?(:to_str)
raise TypeError, "Can't convert #{uri.class} into String."
end
# Otherwise, convert to a String
uri = uri.to_str.dup.strip
hints = {
:scheme => "http"
}.merge(hints)
case uri
when /^http:\//i
uri.sub!(/^http:\/+/i, "http://")
when /^https:\//i
uri.sub!(/^https:\/+/i, "https://")
when /^feed:\/+http:\//i
uri.sub!(/^feed:\/+http:\/+/i, "feed:http://")
when /^feed:\//i
uri.sub!(/^feed:\/+/i, "feed://")
when %r[^file:/{4}]i
uri.sub!(%r[^file:/+]i, "file:////")
when %r[^file://localhost/]i
uri.sub!(%r[^file://localhost/+]i, "file:///")
when %r[^file:/+]i
uri.sub!(%r[^file:/+]i, "file:///")
when /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/
uri.sub!(/^/, hints[:scheme] + "://")
when /\A\d+\..*:\d+\z/
uri = "#{hints[:scheme]}://#{uri}"
end
match = uri.match(URIREGEX)
fragments = match.captures
authority = fragments[3]
if authority && authority.length > 0
new_authority = authority.tr("\\", "/").gsub(" ", "%20")
# NOTE: We want offset 4, not 3!
offset = match.offset(4)
uri = uri.dup
uri[offset[0]...offset[1]] = new_authority
end
parsed = self.parse(uri)
if parsed.scheme =~ /^[^\/?#\.]+\.[^\/?#]+$/
parsed = self.parse(hints[:scheme] + "://" + uri)
end
if parsed.path.include?(".")
new_host = parsed.path[/^([^\/]+\.[^\/]*)/, 1]
if new_host
parsed.defer_validation do
new_path = parsed.path.sub(
Regexp.new("^" + Regexp.escape(new_host)), EMPTY_STR)
parsed.host = new_host
parsed.path = new_path
parsed.scheme = hints[:scheme] unless parsed.scheme
end
end
end
return parsed
end
##
# Converts a path to a file scheme URI. If the path supplied is
# relative, it will be returned as a relative URI. If the path supplied
# is actually a non-file URI, it will parse the URI as if it had been
# parsed with <code>Addressable::URI.parse</code>. Handles all of the
# various Microsoft-specific formats for specifying paths.
#
# @param [String, Addressable::URI, #to_str] path
# Typically a <code>String</code> path to a file or directory, but
# will return a sensible return value if an absolute URI is supplied
# instead.
#
# @return [Addressable::URI]
# The parsed file scheme URI or the original URI if some other URI
# scheme was provided.
#
# @example
# base = Addressable::URI.convert_path("/absolute/path/")
# uri = Addressable::URI.convert_path("relative/path")
# (base + uri).to_s
# #=> "file:///absolute/path/relative/path"
#
# Addressable::URI.convert_path(
# "c:\\windows\\My Documents 100%20\\foo.txt"
# ).to_s
# #=> "file:///c:/windows/My%20Documents%20100%20/foo.txt"
#
# Addressable::URI.convert_path("http://example.com/").to_s
# #=> "http://example.com/"
def self.convert_path(path)
# If we were given nil, return nil.
return nil unless path
# If a URI object is passed, just return itself.
return path if path.kind_of?(self)
if !path.respond_to?(:to_str)
raise TypeError, "Can't convert #{path.class} into String."
end
# Otherwise, convert to a String
path = path.to_str.strip
path.sub!(/^file:\/?\/?/, EMPTY_STR) if path =~ /^file:\/?\/?/
path = SLASH + path if path =~ /^([a-zA-Z])[\|:]/
uri = self.parse(path)
if uri.scheme == nil
# Adjust windows-style uris
uri.path.sub!(/^\/?([a-zA-Z])[\|:][\\\/]/) do
"/#{$1.downcase}:/"
end
uri.path.tr!("\\", SLASH)
if File.exist?(uri.path) &&
File.stat(uri.path).directory?
uri.path.sub!(/\/$/, EMPTY_STR)
uri.path = uri.path + '/'
end
# If the path is absolute, set the scheme and host.
if uri.path.start_with?(SLASH)
uri.scheme = "file"
uri.host = EMPTY_STR
end
uri.normalize!
end
return uri
end
##
# Joins several URIs together.
#
# @param [String, Addressable::URI, #to_str] *uris
# The URIs to join.
#
# @return [Addressable::URI] The joined URI.
#
# @example
# base = "http://example.com/"
# uri = Addressable::URI.parse("relative/path")
# Addressable::URI.join(base, uri)
# #=> #<Addressable::URI:0xcab390 URI:http://example.com/relative/path>
def self.join(*uris)
uri_objects = uris.collect do |uri|
if !uri.respond_to?(:to_str)
raise TypeError, "Can't convert #{uri.class} into String."
end
uri.kind_of?(self) ? uri : self.parse(uri.to_str)
end
result = uri_objects.shift.dup
for uri in uri_objects
result.join!(uri)
end
return result
end
##
# Percent encodes a URI component.
#
# @param [String, #to_str] component The URI component to encode.
#
# @param [String, Regexp] character_class
# The characters which are not percent encoded. If a <code>String</code>
# is passed, the <code>String</code> must be formatted as a regular
# expression character class. (Do not include the surrounding square
# brackets.) For example, <code>"b-zB-Z0-9"</code> would cause
# everything but the letters 'b' through 'z' and the numbers '0' through
# '9' to be percent encoded. If a <code>Regexp</code> is passed, the
# value <code>/[^b-zB-Z0-9]/</code> would have the same effect. A set of
# useful <code>String</code> values may be found in the
# <code>Addressable::URI::CharacterClasses</code> module. The default
# value is the reserved plus unreserved character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# @param [Regexp] upcase_encoded
# A string of characters that may already be percent encoded, and whose
# encodings should be upcased. This allows normalization of percent
# encodings for characters not included in the
# <code>character_class</code>.
#
# @return [String] The encoded component.
#
# @example
# Addressable::URI.encode_component("simple/example", "b-zB-Z0-9")
# => "simple%2Fex%61mple"
# Addressable::URI.encode_component("simple/example", /[^b-zB-Z0-9]/)
# => "simple%2Fex%61mple"
# Addressable::URI.encode_component(
# "simple/example", Addressable::URI::CharacterClasses::UNRESERVED
# )
# => "simple%2Fexample"
def self.encode_component(component, character_class=
CharacterClasses::RESERVED + CharacterClasses::UNRESERVED,
upcase_encoded='')
return nil if component.nil?
begin
if component.kind_of?(Symbol) ||
component.kind_of?(Numeric) ||
component.kind_of?(TrueClass) ||
component.kind_of?(FalseClass)
component = component.to_s
else
component = component.to_str
end
rescue TypeError, NoMethodError
raise TypeError, "Can't convert #{component.class} into String."
end if !component.is_a? String
if ![String, Regexp].include?(character_class.class)
raise TypeError,
"Expected String or Regexp, got #{character_class.inspect}"
end
if character_class.kind_of?(String)
character_class = /[^#{character_class}]/
end
# We can't perform regexps on invalid UTF sequences, but
# here we need to, so switch to ASCII.
component = component.dup
component.force_encoding(Encoding::ASCII_8BIT)
# Avoiding gsub! because there are edge cases with frozen strings
component = component.gsub(character_class) do |sequence|
(sequence.unpack('C*').map { |c| "%" + ("%02x" % c).upcase }).join
end
if upcase_encoded.length > 0
component = component.gsub(/%(#{upcase_encoded.chars.map do |char|
char.unpack('C*').map { |c| '%02x' % c }.join
end.join('|')})/i) { |s| s.upcase }
end
return component
end
class << self
alias_method :encode_component, :encode_component
end
##
# Unencodes any percent encoded characters within a URI component.
# This method may be used for unencoding either components or full URIs,
# however, it is recommended to use the <code>unencode_component</code>
# alias when unencoding components.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI or component to unencode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @param [String] leave_encoded
# A string of characters to leave encoded. If a percent encoded character
# in this list is encountered then it will remain percent encoded.
#
# @return [String, Addressable::URI]
# The unencoded component or URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.unencode(uri, return_type=String, leave_encoded='')
return nil if uri.nil?
begin
uri = uri.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{uri.class} into String."
end if !uri.is_a? String
if ![String, ::Addressable::URI].include?(return_type)
raise TypeError,
"Expected Class (String or Addressable::URI), " +
"got #{return_type.inspect}"
end
uri = uri.dup
# Seriously, only use UTF-8. I'm really not kidding!
uri.force_encoding("utf-8")
leave_encoded = leave_encoded.dup.force_encoding("utf-8")
result = uri.gsub(/%[0-9a-f]{2}/iu) do |sequence|
c = sequence[1..3].to_i(16).chr
c.force_encoding("utf-8")
leave_encoded.include?(c) ? sequence : c
end
result.force_encoding("utf-8")
if return_type == String
return result
elsif return_type == ::Addressable::URI
return ::Addressable::URI.parse(result)
end
end
class << self
alias_method :unescape, :unencode
alias_method :unencode_component, :unencode
alias_method :unescape_component, :unencode
end
##
# Normalizes the encoding of a URI component.
#
# @param [String, #to_str] component The URI component to encode.
#
# @param [String, Regexp] character_class
# The characters which are not percent encoded. If a <code>String</code>
# is passed, the <code>String</code> must be formatted as a regular
# expression character class. (Do not include the surrounding square
# brackets.) For example, <code>"b-zB-Z0-9"</code> would cause
# everything but the letters 'b' through 'z' and the numbers '0'
# through '9' to be percent encoded. If a <code>Regexp</code> is passed,
# the value <code>/[^b-zB-Z0-9]/</code> would have the same effect. A
# set of useful <code>String</code> values may be found in the
# <code>Addressable::URI::CharacterClasses</code> module. The default
# value is the reserved plus unreserved character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# @param [String] leave_encoded
# When <code>character_class</code> is a <code>String</code> then
# <code>leave_encoded</code> is a string of characters that should remain
# percent encoded while normalizing the component; if they appear percent
# encoded in the original component, then they will be upcased ("%2f"
# normalized to "%2F") but otherwise left alone.
#
# @return [String] The normalized component.
#
# @example
# Addressable::URI.normalize_component("simpl%65/%65xampl%65", "b-zB-Z")
# => "simple%2Fex%61mple"
# Addressable::URI.normalize_component(
# "simpl%65/%65xampl%65", /[^b-zB-Z]/
# )
# => "simple%2Fex%61mple"
# Addressable::URI.normalize_component(
# "simpl%65/%65xampl%65",
# Addressable::URI::CharacterClasses::UNRESERVED
# )
# => "simple%2Fexample"
# Addressable::URI.normalize_component(
# "one%20two%2fthree%26four",
# "0-9a-zA-Z &/",
# "/"
# )
# => "one two%2Fthree&four"
def self.normalize_component(component, character_class=
CharacterClasses::RESERVED + CharacterClasses::UNRESERVED,
leave_encoded='')
return nil if component.nil?
begin
component = component.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{component.class} into String."
end if !component.is_a? String
if ![String, Regexp].include?(character_class.class)
raise TypeError,
"Expected String or Regexp, got #{character_class.inspect}"
end
if character_class.kind_of?(String)
leave_re = if leave_encoded.length > 0
character_class = "#{character_class}%" unless character_class.include?('%')
"|%(?!#{leave_encoded.chars.map do |char|
seq = char.unpack('C*').map { |c| '%02x' % c }.join
[seq.upcase, seq.downcase]
end.flatten.join('|')})"
end
character_class = /[^#{character_class}]#{leave_re}/
end
# We can't perform regexps on invalid UTF sequences, but
# here we need to, so switch to ASCII.
component = component.dup
component.force_encoding(Encoding::ASCII_8BIT)
unencoded = self.unencode_component(component, String, leave_encoded)
begin
encoded = self.encode_component(
Addressable::IDNA.unicode_normalize_kc(unencoded),
character_class,
leave_encoded
)
rescue ArgumentError
encoded = self.encode_component(unencoded)
end
encoded.force_encoding(Encoding::UTF_8)
return encoded
end
##
# Percent encodes any special characters in the URI.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI to encode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @return [String, Addressable::URI]
# The encoded URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.encode(uri, return_type=String)
return nil if uri.nil?
begin
uri = uri.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{uri.class} into String."
end if !uri.is_a? String
if ![String, ::Addressable::URI].include?(return_type)
raise TypeError,
"Expected Class (String or Addressable::URI), " +
"got #{return_type.inspect}"
end
uri_object = uri.kind_of?(self) ? uri : self.parse(uri)
encoded_uri = Addressable::URI.new(
:scheme => self.encode_component(uri_object.scheme,
Addressable::URI::CharacterClasses::SCHEME),
:authority => self.encode_component(uri_object.authority,
Addressable::URI::CharacterClasses::AUTHORITY),
:path => self.encode_component(uri_object.path,
Addressable::URI::CharacterClasses::PATH),
:query => self.encode_component(uri_object.query,
Addressable::URI::CharacterClasses::QUERY),
:fragment => self.encode_component(uri_object.fragment,
Addressable::URI::CharacterClasses::FRAGMENT)
)
if return_type == String
return encoded_uri.to_s
elsif return_type == ::Addressable::URI
return encoded_uri
end
end
class << self
alias_method :escape, :encode
end
##
# Normalizes the encoding of a URI. Characters within a hostname are
# not percent encoded to allow for internationalized domain names.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI to encode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @return [String, Addressable::URI]
# The encoded URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.normalized_encode(uri, return_type=String)
begin
uri = uri.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{uri.class} into String."
end if !uri.is_a? String
if ![String, ::Addressable::URI].include?(return_type)
raise TypeError,
"Expected Class (String or Addressable::URI), " +
"got #{return_type.inspect}"
end
uri_object = uri.kind_of?(self) ? uri : self.parse(uri)
components = {
:scheme => self.unencode_component(uri_object.scheme),
:user => self.unencode_component(uri_object.user),
:password => self.unencode_component(uri_object.password),
:host => self.unencode_component(uri_object.host),
:port => (uri_object.port.nil? ? nil : uri_object.port.to_s),
:path => self.unencode_component(uri_object.path),
:query => self.unencode_component(uri_object.query),
:fragment => self.unencode_component(uri_object.fragment)
}
components.each do |key, value|
if value != nil
begin
components[key] =
Addressable::IDNA.unicode_normalize_kc(value.to_str)
rescue ArgumentError
# Likely a malformed UTF-8 character, skip unicode normalization
components[key] = value.to_str
end
end
end
encoded_uri = Addressable::URI.new(
:scheme => self.encode_component(components[:scheme],
Addressable::URI::CharacterClasses::SCHEME),
:user => self.encode_component(components[:user],
Addressable::URI::CharacterClasses::UNRESERVED),
:password => self.encode_component(components[:password],
Addressable::URI::CharacterClasses::UNRESERVED),
:host => components[:host],
:port => components[:port],
:path => self.encode_component(components[:path],
Addressable::URI::CharacterClasses::PATH),
:query => self.encode_component(components[:query],
Addressable::URI::CharacterClasses::QUERY),
:fragment => self.encode_component(components[:fragment],
Addressable::URI::CharacterClasses::FRAGMENT)
)
if return_type == String
return encoded_uri.to_s
elsif return_type == ::Addressable::URI
return encoded_uri
end
end
##
# Encodes a set of key/value pairs according to the rules for the
# <code>application/x-www-form-urlencoded</code> MIME type.
#
# @param [#to_hash, #to_ary] form_values
# The form values to encode.
#
# @param [TrueClass, FalseClass] sort
# Sort the key/value pairs prior to encoding.
# Defaults to <code>false</code>.
#
# @return [String]
# The encoded value.
def self.form_encode(form_values, sort=false)
if form_values.respond_to?(:to_hash)
form_values = form_values.to_hash.to_a
elsif form_values.respond_to?(:to_ary)
form_values = form_values.to_ary
else
raise TypeError, "Can't convert #{form_values.class} into Array."
end
form_values = form_values.inject([]) do |accu, (key, value)|
if value.kind_of?(Array)
value.each do |v|
accu << [key.to_s, v.to_s]
end
else
accu << [key.to_s, value.to_s]
end
accu
end
if sort
# Useful for OAuth and optimizing caching systems
form_values = form_values.sort
end
escaped_form_values = form_values.map do |(key, value)|
# Line breaks are CRLF pairs
[
self.encode_component(
key.gsub(/(\r\n|\n|\r)/, "\r\n"),
CharacterClasses::UNRESERVED
).gsub("%20", "+"),
self.encode_component(
value.gsub(/(\r\n|\n|\r)/, "\r\n"),
CharacterClasses::UNRESERVED
).gsub("%20", "+")
]
end
return escaped_form_values.map do |(key, value)|
"#{key}=#{value}"
end.join("&")
end
##
# Decodes a <code>String</code> according to the rules for the
# <code>application/x-www-form-urlencoded</code> MIME type.
#
# @param [String, #to_str] encoded_value
# The form values to decode.
#
# @return [Array]
# The decoded values.
# This is not a <code>Hash</code> because of the possibility for
# duplicate keys.
def self.form_unencode(encoded_value)
if !encoded_value.respond_to?(:to_str)
raise TypeError, "Can't convert #{encoded_value.class} into String."
end
encoded_value = encoded_value.to_str
split_values = encoded_value.split("&").map do |pair|
pair.split("=", 2)
end
return split_values.map do |(key, value)|
[
key ? self.unencode_component(
key.gsub("+", "%20")).gsub(/(\r\n|\n|\r)/, "\n") : nil,
value ? (self.unencode_component(
value.gsub("+", "%20")).gsub(/(\r\n|\n|\r)/, "\n")) : nil
]
end
end
##
# Creates a new uri object from component parts.
#
# @option [String, #to_str] scheme The scheme component.
# @option [String, #to_str] user The user component.
# @option [String, #to_str] password The password component.
# @option [String, #to_str] userinfo
# The userinfo component. If this is supplied, the user and password
# components must be omitted.
# @option [String, #to_str] host The host component.
# @option [String, #to_str] port The port component.
# @option [String, #to_str] authority
# The authority component. If this is supplied, the user, password,
# userinfo, host, and port components must be omitted.
# @option [String, #to_str] path The path component.
# @option [String, #to_str] query The query component.
# @option [String, #to_str] fragment The fragment component.
#
# @return [Addressable::URI] The constructed URI object.
def initialize(options={})
if options.has_key?(:authority)
if (options.keys & [:userinfo, :user, :password, :host, :port]).any?
raise ArgumentError,
"Cannot specify both an authority and any of the components " +
"within the authority."
end
end
if options.has_key?(:userinfo)
if (options.keys & [:user, :password]).any?
raise ArgumentError,
"Cannot specify both a userinfo and either the user or password."
end
end
self.defer_validation do
# Bunch of crazy logic required because of the composite components
# like userinfo and authority.
self.scheme = options[:scheme] if options[:scheme]
self.user = options[:user] if options[:user]
self.password = options[:password] if options[:password]
self.userinfo = options[:userinfo] if options[:userinfo]
self.host = options[:host] if options[:host]
self.port = options[:port] if options[:port]
self.authority = options[:authority] if options[:authority]
self.path = options[:path] if options[:path]
self.query = options[:query] if options[:query]
self.query_values = options[:query_values] if options[:query_values]
self.fragment = options[:fragment] if options[:fragment]
end
self.to_s
end
##
# Freeze URI, initializing instance variables.
#
# @return [Addressable::URI] The frozen URI object.
def freeze
self.normalized_scheme
self.normalized_user
self.normalized_password
self.normalized_userinfo
self.normalized_host
self.normalized_port
self.normalized_authority
self.normalized_site
self.normalized_path
self.normalized_query
self.normalized_fragment
self.hash
super
end
##
# The scheme component for this URI.
#
# @return [String] The scheme component.
def scheme
return defined?(@scheme) ? @scheme : nil
end
##
# The scheme component for this URI, normalized.
#
# @return [String] The scheme component, normalized.
def normalized_scheme
return nil unless self.scheme
@normalized_scheme ||= begin
if self.scheme =~ /^\s*ssh\+svn\s*$/i
"svn+ssh".dup
else
Addressable::URI.normalize_component(
self.scheme.strip.downcase,
Addressable::URI::CharacterClasses::SCHEME
)
end
end
# All normalized values should be UTF-8
@normalized_scheme.force_encoding(Encoding::UTF_8) if @normalized_scheme
@normalized_scheme
end
##
# Sets the scheme component for this URI.
#
# @param [String, #to_str] new_scheme The new scheme component.
def scheme=(new_scheme)
if new_scheme && !new_scheme.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_scheme.class} into String."
elsif new_scheme
new_scheme = new_scheme.to_str
end
if new_scheme && new_scheme !~ /\A[a-z][a-z0-9\.\+\-]*\z/i
raise InvalidURIError, "Invalid scheme format: #{new_scheme}"
end
@scheme = new_scheme
@scheme = nil if @scheme.to_s.strip.empty?
# Reset dependent values
remove_instance_variable(:@normalized_scheme) if defined?(@normalized_scheme)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The user component for this URI.
#
# @return [String] The user component.
def user
return defined?(@user) ? @user : nil
end
##
# The user component for this URI, normalized.
#
# @return [String] The user component, normalized.
def normalized_user
return nil unless self.user
return @normalized_user if defined?(@normalized_user)
@normalized_user ||= begin
if normalized_scheme =~ /https?/ && self.user.strip.empty? &&
(!self.password || self.password.strip.empty?)
nil
else
Addressable::URI.normalize_component(
self.user.strip,
Addressable::URI::CharacterClasses::UNRESERVED
)
end
end
# All normalized values should be UTF-8
@normalized_user.force_encoding(Encoding::UTF_8) if @normalized_user
@normalized_user
end
##
# Sets the user component for this URI.
#
# @param [String, #to_str] new_user The new user component.
def user=(new_user)
if new_user && !new_user.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_user.class} into String."
end
@user = new_user ? new_user.to_str : nil
# You can't have a nil user with a non-nil password
if password != nil
@user = EMPTY_STR if @user.nil?
end
# Reset dependent values
remove_instance_variable(:@userinfo) if defined?(@userinfo)
remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
remove_instance_variable(:@authority) if defined?(@authority)
remove_instance_variable(:@normalized_user) if defined?(@normalized_user)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The password component for this URI.
#
# @return [String] The password component.
def password
return defined?(@password) ? @password : nil
end
##
# The password component for this URI, normalized.
#
# @return [String] The password component, normalized.
def normalized_password
return nil unless self.password
return @normalized_password if defined?(@normalized_password)
@normalized_password ||= begin
if self.normalized_scheme =~ /https?/ && self.password.strip.empty? &&
(!self.user || self.user.strip.empty?)
nil
else
Addressable::URI.normalize_component(
self.password.strip,
Addressable::URI::CharacterClasses::UNRESERVED
)
end
end
# All normalized values should be UTF-8
if @normalized_password
@normalized_password.force_encoding(Encoding::UTF_8)
end
@normalized_password
end
##
# Sets the password component for this URI.
#
# @param [String, #to_str] new_password The new password component.
def password=(new_password)
if new_password && !new_password.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_password.class} into String."
end
@password = new_password ? new_password.to_str : nil
# You can't have a nil user with a non-nil password
@password ||= nil
@user ||= nil
if @password != nil
@user = EMPTY_STR if @user.nil?
end
# Reset dependent values
remove_instance_variable(:@userinfo) if defined?(@userinfo)
remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
remove_instance_variable(:@authority) if defined?(@authority)
remove_instance_variable(:@normalized_password) if defined?(@normalized_password)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The userinfo component for this URI.
# Combines the user and password components.
#
# NOTE: precedence matters below — the expression parses as
# `(current_user || current_password) && (@userinfo ||= ...)`, so when
# both components are absent the method returns nil and caches nothing.
# A password without a user also yields nil (no else branch).
#
# @return [String, nil] The userinfo component.
def userinfo
  current_user = self.user
  current_password = self.password
  (current_user || current_password) && @userinfo ||= begin
    if current_user && current_password
      "#{current_user}:#{current_password}"
    elsif current_user && !current_password
      "#{current_user}"
    end
  end
end
##
# The userinfo component for this URI, normalized.
#
# Built from the already-normalized user and password. The +defined?+
# guard allows a computed +nil+ to be cached.
#
# @return [String, nil] The userinfo component, normalized.
def normalized_userinfo
  return nil unless self.userinfo
  return @normalized_userinfo if defined?(@normalized_userinfo)
  @normalized_userinfo ||= begin
    current_user = self.normalized_user
    current_password = self.normalized_password
    if !current_user && !current_password
      nil
    elsif current_user && current_password
      "#{current_user}:#{current_password}".dup
    elsif current_user && !current_password
      "#{current_user}".dup
    end
  end
  # All normalized values should be UTF-8
  if @normalized_userinfo
    @normalized_userinfo.force_encoding(Encoding::UTF_8)
  end
  @normalized_userinfo
end
##
# Sets the userinfo component for this URI.
# Splits the value at the first ":" into user and password parts.
#
# @param [String, #to_str] new_userinfo The new userinfo component.
#
# @raise [TypeError] if the argument cannot be coerced to a String.
def userinfo=(new_userinfo)
  if new_userinfo && !new_userinfo.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_userinfo.class} into String."
  end
  new_user = nil
  new_password = nil
  if new_userinfo
    stripped = new_userinfo.to_str.strip
    new_user = stripped[/^(.*):/, 1]
    new_password = stripped[/:(.*)$/, 1]
  end
  # Password assigned first to ensure validity in case of nil
  self.password = new_password
  self.user = new_user
  # Reset dependent values
  remove_instance_variable(:@authority) if defined?(@authority)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate
end
##
# The host component for this URI.
#
# @return [String, nil] The raw host, or +nil+ if never assigned.
def host
  return nil unless defined?(@host)
  @host
end
##
# The host component for this URI, normalized.
#
# IDN hosts are converted to their ASCII (punycode) form, a single
# trailing dot is dropped, and the result is percent-normalized.
# A whitespace-only host normalizes to the empty string.
#
# @return [String] The host component, normalized.
def normalized_host
  return nil unless self.host
  @normalized_host ||= begin
    if !self.host.strip.empty?
      result = ::Addressable::IDNA.to_ascii(
        URI.unencode_component(self.host.strip.downcase)
      )
      if result =~ /[^\.]\.$/
        # Single trailing dots are unnecessary.
        result = result[0...-1]
      end
      result = Addressable::URI.normalize_component(
        result,
        CharacterClasses::HOST)
      result
    else
      EMPTY_STR.dup
    end
  end
  # All normalized values should be UTF-8
  @normalized_host.force_encoding(Encoding::UTF_8) if @normalized_host
  @normalized_host
end
##
# Sets the host component for this URI.
#
# @param [String, #to_str] new_host The new host component.
#
# @raise [TypeError] if the argument cannot be coerced to a String.
def host=(new_host)
  if new_host && !new_host.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_host.class} into String."
  end
  @host = new_host ? new_host.to_str : nil
  # Invalidate caches derived from the host.
  [:@authority, :@normalized_host].each do |cached|
    remove_instance_variable(cached) if instance_variable_defined?(cached)
  end
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate
end
##
# This method is same as URI::Generic#host except
# brackets for IPv6 (and 'IPvFuture') addresses are removed.
#
# @see Addressable::URI#host
#
# @return [String] The hostname for this URI.
def hostname
  raw = self.host
  bracketed = raw && raw.match(/\A\[(.*)\]\z/)
  bracketed ? bracketed[1] : raw
end
##
# This method is same as URI::Generic#host= except
# the argument can be a bare IPv6 address (or 'IPvFuture').
#
# @see Addressable::URI#host=
#
# @param [String, #to_str] new_hostname The new hostname for this URI.
def hostname=(new_hostname)
  if new_hostname &&
      (new_hostname.respond_to?(:ipv4?) || new_hostname.respond_to?(:ipv6?))
    # IPAddr-like objects (duck-typed on #ipv4?/#ipv6?) are serialized
    # to a plain string first.
    new_hostname = new_hostname.to_s
  elsif new_hostname && !new_hostname.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_hostname.class} into String."
  end
  v = new_hostname ? new_hostname.to_str : nil
  # Wrap bare IPv6/IPvFuture literals (they contain ":") in brackets.
  # NOTE: `!~` and `=~` are nil-safe, so a nil v falls through untouched.
  v = "[#{v}]" if /\A\[.*\]\z/ !~ v && /:/ =~ v
  self.host = v
end
##
# Returns the top-level domain for this host.
#
# @example
#   Addressable::URI.parse("www.example.co.uk").tld # => "co.uk"
def tld
  parsed = PublicSuffix.parse(self.host, ignore_private: true)
  parsed.tld
end
##
# Sets the top-level domain for this URI.
#
# @param [String, #to_str] new_tld The new top-level domain.
def tld=(new_tld)
  # Escape the current TLD so regex metacharacters (notably ".") match
  # literally; an unescaped "co.uk" would also match e.g. "coxuk".
  replaced_tld = domain.sub(/#{Regexp.escape(tld)}\z/, new_tld)
  self.host = PublicSuffix::Domain.new(replaced_tld).to_s
end
##
# Returns the public suffix domain for this host.
#
# @example
#   Addressable::URI.parse("www.example.co.uk").domain # => "example.co.uk"
def domain
  host_name = self.host
  PublicSuffix.domain(host_name, ignore_private: true)
end
##
# The authority component for this URI.
# Combines the user, password, host, and port components.
#
# NOTE: the expression parses as `host && (@authority ||= ...)`, so a
# URI without a host returns nil and caches nothing.
#
# @return [String, nil] The authority component.
def authority
  self.host && @authority ||= begin
    authority = String.new
    if self.userinfo != nil
      authority << "#{self.userinfo}@"
    end
    authority << self.host
    if self.port != nil
      authority << ":#{self.port}"
    end
    authority
  end
end
##
# The authority component for this URI, normalized.
# Built from the normalized userinfo, host, and port.
#
# @return [String, nil] The authority component, normalized.
def normalized_authority
  return nil unless self.authority
  @normalized_authority ||= begin
    authority = String.new
    if self.normalized_userinfo != nil
      authority << "#{self.normalized_userinfo}@"
    end
    authority << self.normalized_host
    if self.normalized_port != nil
      authority << ":#{self.normalized_port}"
    end
    authority
  end
  # All normalized values should be UTF-8
  if @normalized_authority
    @normalized_authority.force_encoding(Encoding::UTF_8)
  end
  @normalized_authority
end
##
# Sets the authority component for this URI.
#
# The string is split into userinfo, host, and port with regular
# expressions; assignment order matters (password before user, host
# before port) so intermediate states stay valid.
#
# @param [String, #to_str] new_authority The new authority component.
def authority=(new_authority)
  if new_authority
    if !new_authority.respond_to?(:to_str)
      raise TypeError, "Can't convert #{new_authority.class} into String."
    end
    new_authority = new_authority.to_str
    # Everything before "@" is userinfo; the [^\[\]] class keeps
    # bracketed IPv6 hosts from being mistaken for userinfo.
    new_userinfo = new_authority[/^([^\[\]]*)@/, 1]
    if new_userinfo
      new_user = new_userinfo.strip[/^([^:]*):?/, 1]
      new_password = new_userinfo.strip[/:(.*)$/, 1]
    end
    # Strip the userinfo prefix and the port suffix to isolate the host.
    new_host = new_authority.sub(
      /^([^\[\]]*)@/, EMPTY_STR
    ).sub(
      /:([^:@\[\]]*?)$/, EMPTY_STR
    )
    new_port =
      new_authority[/:([^:@\[\]]*?)$/, 1]
  end
  # Password assigned first to ensure validity in case of nil
  self.password = defined?(new_password) ? new_password : nil
  self.user = defined?(new_user) ? new_user : nil
  self.host = defined?(new_host) ? new_host : nil
  self.port = defined?(new_port) ? new_port : nil
  # Reset dependent values
  remove_instance_variable(:@userinfo) if defined?(@userinfo)
  remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# The origin for this URI, serialized to ASCII, as per
# RFC 6454, section 6.2. URIs without both a scheme and an authority
# serialize to the literal string "null".
#
# @return [String] The serialized origin.
def origin
  return "null" unless self.scheme && self.authority
  serialized = "#{self.normalized_scheme}://#{self.normalized_host}"
  # normalized_port is nil for the scheme's default port, which is
  # omitted from the origin serialization.
  serialized += ":#{self.normalized_port}" if self.normalized_port
  serialized
end
##
# Sets the origin for this URI, serialized to ASCII, as per
# RFC 6454, section 6.2. This assignment will reset the `userinfo`
# component.
#
# @param [String, #to_str] new_origin The new origin component.
#
# @raise [TypeError] if the argument cannot be coerced to a String.
# @raise [InvalidURIError] if the origin lacks a scheme or host.
def origin=(new_origin)
  if new_origin
    if !new_origin.respond_to?(:to_str)
      raise TypeError, "Can't convert #{new_origin.class} into String."
    end
    new_origin = new_origin.to_str
    new_scheme = new_origin[/^([^:\/?#]+):\/\//, 1]
    unless new_scheme
      raise InvalidURIError, 'An origin cannot omit the scheme.'
    end
    new_host = new_origin[/:\/\/([^\/?#:]+)/, 1]
    unless new_host
      raise InvalidURIError, 'An origin cannot omit the host.'
    end
    new_port = new_origin[/:([^:@\[\]\/]*?)$/, 1]
  end
  # defined? guards: the locals only exist when new_origin was given.
  self.scheme = defined?(new_scheme) ? new_scheme : nil
  self.host = defined?(new_host) ? new_host : nil
  self.port = defined?(new_port) ? new_port : nil
  self.userinfo = nil
  # Reset dependent values
  remove_instance_variable(:@userinfo) if defined?(@userinfo)
  remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
  remove_instance_variable(:@authority) if defined?(@authority)
  remove_instance_variable(:@normalized_authority) if defined?(@normalized_authority)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# Returns an array of known ip-based schemes. These schemes typically
# use a similar URI form:
# <code>//<user>:<password>@<host>:<port>/<url-path></code>
#
# @return [Array<String>] The scheme names with registered default ports.
def self.ip_based_schemes
  port_mapping.keys
end
##
# Returns a hash of common IP-based schemes and their default port
# numbers. Adding new schemes to this hash, as necessary, will allow
# for better URI normalization (default ports are dropped by
# #normalized_port).
#
# @return [Hash] Mapping of scheme name to default port number.
def self.port_mapping
  PORT_MAPPING
end
##
# The port component for this URI.
# This is the port number actually given in the URI. This does not
# infer port numbers from default values.
#
# @return [Integer, nil] The port component, or +nil+ if never assigned.
def port
  return nil unless defined?(@port)
  @port
end
##
# The port component for this URI, normalized.
#
# Returns nil when the port equals the scheme's registered default,
# since such a port carries no information. The +defined?+ guard lets
# that nil result be cached after the first computation.
#
# @return [Integer, nil] The port component, normalized.
def normalized_port
  return nil unless self.port
  return @normalized_port if defined?(@normalized_port)
  @normalized_port ||= begin
    if URI.port_mapping[self.normalized_scheme] == self.port
      nil
    else
      self.port
    end
  end
end
##
# Sets the port component for this URI.
#
# @param [String, Integer, #to_s] new_port The new port component.
#
# @raise [InvalidURIError] if the value has invalid encoding or is not
#   a non-negative decimal integer.
def port=(new_port)
  if new_port != nil && new_port.respond_to?(:to_str)
    new_port = Addressable::URI.unencode_component(new_port.to_str)
  end
  if new_port.respond_to?(:valid_encoding?) && !new_port.valid_encoding?
    raise InvalidURIError, "Invalid encoding in port"
  end
  # \A and \z anchor the whole string; the previous ^/$ anchors matched
  # per-line and would accept values like "80\njunk".
  if new_port != nil && !(new_port.to_s =~ /\A\d+\z/)
    raise InvalidURIError,
      "Invalid port number: #{new_port.inspect}"
  end
  @port = new_port.to_s.to_i
  # An absent or zero port is stored as nil.
  @port = nil if @port == 0
  # Reset dependent values
  remove_instance_variable(:@authority) if defined?(@authority)
  remove_instance_variable(:@normalized_port) if defined?(@normalized_port)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# The inferred port component for this URI.
# This method will normalize to the default port for the URI's scheme if
# the port isn't explicitly specified in the URI.
#
# @return [Integer] The inferred port component.
def inferred_port
  explicit = self.port.to_i
  # nil.to_i == 0, so an absent port falls back to the scheme default.
  explicit.zero? ? self.default_port : explicit
end
##
# The default port for this URI's scheme.
# This method will always returns the default port for the URI's scheme
# regardless of the presence of an explicit port in the URI.
#
# @return [Integer, nil] The default port, or +nil+ for unknown schemes.
def default_port
  return nil unless self.scheme
  URI.port_mapping[self.scheme.strip.downcase]
end
##
# The combination of components that represent a site.
# Combines the scheme, user, password, host, and port components.
# Primarily useful for HTTP and HTTPS.
#
# For example, <code>"http://example.com/path?query"</code> would have a
# <code>site</code> value of <code>"http://example.com"</code>.
#
# NOTE: parses as `(scheme || authority) && (@site ||= ...)` — returns
# nil (caching nothing) when neither component is present.
#
# @return [String, nil] The components that identify a site.
def site
  (self.scheme || self.authority) && @site ||= begin
    site_string = "".dup
    site_string << "#{self.scheme}:" if self.scheme != nil
    site_string << "//#{self.authority}" if self.authority != nil
    site_string
  end
end
##
# The normalized combination of components that represent a site.
# Combines the scheme, user, password, host, and port components.
# Primarily useful for HTTP and HTTPS.
#
# For example, <code>"http://example.com/path?query"</code> would have a
# <code>site</code> value of <code>"http://example.com"</code>.
#
# @return [String, nil] The normalized components that identify a site.
def normalized_site
  return nil unless self.site
  @normalized_site ||= begin
    site_string = "".dup
    if self.normalized_scheme != nil
      site_string << "#{self.normalized_scheme}:"
    end
    if self.normalized_authority != nil
      site_string << "//#{self.normalized_authority}"
    end
    site_string
  end
  # All normalized values should be UTF-8
  @normalized_site.force_encoding(Encoding::UTF_8) if @normalized_site
  @normalized_site
end
##
# Sets the site value for this URI.
# Assigning nil clears both the scheme and the authority.
#
# @param [String, #to_str] new_site The new site value.
#
# @raise [TypeError] if the argument cannot be coerced to a String.
def site=(new_site)
  if new_site
    if !new_site.respond_to?(:to_str)
      raise TypeError, "Can't convert #{new_site.class} into String."
    end
    new_site = new_site.to_str
    # These two regular expressions derived from the primary parsing
    # expression
    self.scheme = new_site[/^(?:([^:\/?#]+):)?(?:\/\/(?:[^\/?#]*))?$/, 1]
    self.authority = new_site[
      /^(?:(?:[^:\/?#]+):)?(?:\/\/([^\/?#]*))?$/, 1
    ]
  else
    self.scheme = nil
    self.authority = nil
  end
end
##
# The path component for this URI.
#
# @return [String] The path component; never +nil+, defaults to "".
def path
  return EMPTY_STR unless defined?(@path)
  @path
end
# Matches a relative path whose first segment contains a colon (e.g.
# "rel:path"), which is ambiguous with a scheme and must be escaped.
NORMPATH = /^(?!\/)[^\/:]*:.*$/
##
# The path component for this URI, normalized.
#
# Each segment is percent-normalized, dot-segments are collapsed, and
# http(s)/ftp/tftp URIs get "/" in place of an empty path.
#
# @return [String] The path component, normalized.
def normalized_path
  @normalized_path ||= begin
    path = self.path.to_s
    if self.scheme == nil && path =~ NORMPATH
      # Relative paths with colons in the first segment are ambiguous.
      path = path.sub(":", "%2F")
    end
    # String#split(delimeter, -1) uses the more strict splitting behavior
    # found by default in Python.
    result = path.strip.split(SLASH, -1).map do |segment|
      Addressable::URI.normalize_component(
        segment,
        Addressable::URI::CharacterClasses::PCHAR
      )
    end.join(SLASH)
    result = URI.normalize_path(result)
    if result.empty? &&
        ["http", "https", "ftp", "tftp"].include?(self.normalized_scheme)
      result = SLASH.dup
    end
    result
  end
  # All normalized values should be UTF-8
  @normalized_path.force_encoding(Encoding::UTF_8) if @normalized_path
  @normalized_path
end
##
# Sets the path component for this URI.
#
# @param [String, #to_str] new_path The new path component.
#
# @raise [TypeError] if the argument cannot be coerced to a String.
def path=(new_path)
  if new_path && !new_path.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_path.class} into String."
  end
  @path = (new_path || EMPTY_STR).to_str
  # A URI with a host must use an absolute path; prepend the missing "/".
  unless @path.empty? || @path[0..0] == SLASH || host.nil?
    @path = "/#{@path}"
  end
  # Reset dependent values
  remove_instance_variable(:@normalized_path) if defined?(@normalized_path)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate
end
##
# The basename, if any, of the file in the path component.
# Any trailing ";params" suffix is stripped. Path cannot be nil.
#
# @return [String] The path's basename.
def basename
  file_name = File.basename(self.path)
  file_name.sub(/;[^\/]*$/, EMPTY_STR)
end
##
# The extname, if any, of the file in the path component.
# Empty string if there is no extension.
#
# @return [String, nil] The path's extname.
def extname
  return nil unless self.path
  base = self.basename
  File.extname(base)
end
##
# The query component for this URI.
#
# @return [String, nil] The query component, or +nil+ if never assigned.
def query
  return nil unless defined?(@query)
  @query
end
##
# The query component for this URI, normalized.
#
# @param [Array<Symbol>] flags Pass +:sorted+ to sort the key=value
#   pairs before joining.
#
# NOTE(review): the result is memoized without regard to +flags+, so a
# later call with different flags returns the first cached form.
#
# @return [String, nil] The query component, normalized.
def normalized_query(*flags)
  return nil unless self.query
  return @normalized_query if defined?(@normalized_query)
  @normalized_query ||= begin
    modified_query_class = Addressable::URI::CharacterClasses::QUERY.dup
    # Make sure possible key-value pair delimiters are escaped.
    modified_query_class.sub!("\\&", "").sub!("\\;", "")
    pairs = (self.query || "").split("&", -1)
    pairs.sort! if flags.include?(:sorted)
    component = pairs.map do |pair|
      Addressable::URI.normalize_component(pair, modified_query_class, "+")
    end.join("&")
    component == "" ? nil : component
  end
  # All normalized values should be UTF-8
  @normalized_query.force_encoding(Encoding::UTF_8) if @normalized_query
  @normalized_query
end
##
# Sets the query component for this URI.
#
# @param [String, #to_str] new_query The new query component.
#
# @raise [TypeError] if the argument cannot be coerced to a String.
def query=(new_query)
  if new_query && !new_query.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_query.class} into String."
  end
  @query = new_query ? new_query.to_str : nil
  # Invalidate the cached normalized form.
  remove_instance_variable(:@normalized_query) if instance_variable_defined?(:@normalized_query)
  remove_composite_values
end
##
# Converts the query component to a Hash value.
#
# @param [Class] return_type The return type desired. Value must be either
#   `Hash` or `Array`.
#
# @return [Hash, Array, nil] The query string parsed as a Hash or Array
#   or nil if the query string is blank.
#
# @raise [ArgumentError] if return_type is neither Hash nor Array.
#
# @example
#   Addressable::URI.parse("?one=1&two=2&three=3").query_values
#   #=> {"one" => "1", "two" => "2", "three" => "3"}
#   Addressable::URI.parse("?one=two&one=three").query_values(Array)
#   #=> [["one", "two"], ["one", "three"]]
#   Addressable::URI.parse("?one=two&one=three").query_values(Hash)
#   #=> {"one" => "three"}
#   Addressable::URI.parse("?").query_values
#   #=> {}
#   Addressable::URI.parse("").query_values
#   #=> nil
def query_values(return_type=Hash)
  empty_accumulator = Array == return_type ? [] : {}
  if return_type != Hash && return_type != Array
    raise ArgumentError, "Invalid return type. Must be Hash or Array."
  end
  return nil if self.query == nil
  # Each pair is split at the first "=" only; a bare "flag" (no "=")
  # produces a nil value.
  split_query = self.query.split("&").map do |pair|
    pair.split("=", 2) if pair && !pair.empty?
  end.compact
  return split_query.inject(empty_accumulator.dup) do |accu, pair|
    # I'd rather use key/value identifiers instead of array lookups,
    # but in this case I really want to maintain the exact pair structure,
    # so it's best to make all changes in-place.
    pair[0] = URI.unencode_component(pair[0])
    if pair[1].respond_to?(:to_str)
      # I loathe the fact that I have to do this. Stupid HTML 4.01.
      # Treating '+' as a space was just an unbelievably bad idea.
      # There was nothing wrong with '%20'!
      # If it ain't broke, don't fix it!
      pair[1] = URI.unencode_component(pair[1].to_str.tr("+", " "))
    end
    if return_type == Hash
      accu[pair[0]] = pair[1]
    else
      accu << pair
    end
    accu
  end
end
##
# Sets the query component for this URI from a Hash object.
# An empty Hash or Array will result in an empty query string.
#
# @param [Hash, #to_hash, Array] new_query_values The new query values.
#
# @raise [TypeError] if the argument is neither an Array nor Hash-like.
#
# @example
#   uri.query_values = {:a => "a", :b => ["c", "d", "e"]}
#   uri.query
#   # => "a=a&b=c&b=d&b=e"
#   uri.query_values = [['a', 'a'], ['b', 'c'], ['b', 'd'], ['b', 'e']]
#   uri.query
#   # => "a=a&b=c&b=d&b=e"
#   uri.query_values = [['a', 'a'], ['b', ['c', 'd', 'e']]]
#   uri.query
#   # => "a=a&b=c&b=d&b=e"
#   uri.query_values = [['flag'], ['key', 'value']]
#   uri.query
#   # => "flag&key=value"
def query_values=(new_query_values)
  if new_query_values == nil
    self.query = nil
    return nil
  end
  if !new_query_values.is_a?(Array)
    if !new_query_values.respond_to?(:to_hash)
      raise TypeError,
        "Can't convert #{new_query_values.class} into Hash."
    end
    new_query_values = new_query_values.to_hash
    # Symbol keys are stringified so sorting below cannot raise.
    new_query_values = new_query_values.map do |key, value|
      key = key.to_s if key.kind_of?(Symbol)
      [key, value]
    end
    # Useful default for OAuth and caching.
    # Only to be used for non-Array inputs. Arrays should preserve order.
    new_query_values.sort!
  end
  # new_query_values have form [['key1', 'value1'], ['key2', 'value2']]
  buffer = "".dup
  new_query_values.each do |key, value|
    encoded_key = URI.encode_component(
      key, CharacterClasses::UNRESERVED
    )
    if value == nil
      # A nil value serializes as a bare "flag" with no "=".
      buffer << "#{encoded_key}&"
    elsif value.kind_of?(Array)
      # Array values repeat the key once per element.
      value.each do |sub_value|
        encoded_value = URI.encode_component(
          sub_value, CharacterClasses::UNRESERVED
        )
        buffer << "#{encoded_key}=#{encoded_value}&"
      end
    else
      encoded_value = URI.encode_component(
        value, CharacterClasses::UNRESERVED
      )
      buffer << "#{encoded_key}=#{encoded_value}&"
    end
  end
  # chop drops the trailing "&".
  self.query = buffer.chop
end
##
# The HTTP request URI for this URI. This is the path and the
# query string.
#
# @return [String, nil] The request URI required for an HTTP request,
#   or +nil+ for absolute non-HTTP(S) URIs.
def request_uri
  return nil if self.absolute? && self.scheme !~ /^https?$/i
  path_part = self.path.empty? ? SLASH : self.path
  query_part = self.query ? "?#{self.query}" : EMPTY_STR
  path_part + query_part
end
##
# Sets the HTTP request URI for this URI.
# Splits the value into path and query at the first "?".
#
# @param [String, #to_str] new_request_uri The new HTTP request URI.
#
# @raise [TypeError] if the argument cannot be coerced to a String.
# @raise [InvalidURIError] if this URI is absolute but not HTTP(S).
def request_uri=(new_request_uri)
  if !new_request_uri.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_request_uri.class} into String."
  end
  if self.absolute? && self.scheme !~ /^https?$/i
    raise InvalidURIError,
      "Cannot set an HTTP request URI for a non-HTTP URI."
  end
  new_request_uri = new_request_uri.to_str
  # Both regexes return nil when there is no "?" in the string.
  path_component = new_request_uri[/^([^\?]*)\?(?:.*)$/, 1]
  query_component = new_request_uri[/^(?:[^\?]*)\?(.*)$/, 1]
  path_component = path_component.to_s
  path_component = (!path_component.empty? ? path_component : SLASH)
  self.path = path_component
  self.query = query_component
  # Reset dependent values
  remove_composite_values
end
##
# The fragment component for this URI.
#
# @return [String, nil] The fragment component, or +nil+ if never assigned.
def fragment
  return nil unless defined?(@fragment)
  @fragment
end
##
# The fragment component for this URI, normalized.
#
# An empty fragment normalizes to nil; the +defined?+ guard lets that
# nil be cached.
#
# @return [String, nil] The fragment component, normalized.
def normalized_fragment
  return nil unless self.fragment
  return @normalized_fragment if defined?(@normalized_fragment)
  @normalized_fragment ||= begin
    component = Addressable::URI.normalize_component(
      self.fragment,
      Addressable::URI::CharacterClasses::FRAGMENT
    )
    component == "" ? nil : component
  end
  # All normalized values should be UTF-8
  if @normalized_fragment
    @normalized_fragment.force_encoding(Encoding::UTF_8)
  end
  @normalized_fragment
end
##
# Sets the fragment component for this URI.
#
# @param [String, #to_str] new_fragment The new fragment component.
#
# @raise [TypeError] if the argument cannot be coerced to a String.
def fragment=(new_fragment)
  if new_fragment && !new_fragment.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_fragment.class} into String."
  end
  @fragment = new_fragment ? new_fragment.to_str : nil
  # Invalidate the cached normalized form.
  remove_instance_variable(:@normalized_fragment) if instance_variable_defined?(:@normalized_fragment)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate
end
##
# Determines if the scheme indicates an IP-based protocol.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the scheme indicates an IP-based protocol.
#   <code>false</code> otherwise.
def ip_based?
  return false unless self.scheme
  URI.ip_based_schemes.include?(self.scheme.strip.downcase)
end
##
# Determines if the URI is relative, i.e. it has no scheme.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URI is relative. <code>false</code>
#   otherwise.
def relative?
  self.scheme.nil?
end
##
# Determines if the URI is absolute, i.e. it has a scheme.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URI is absolute. <code>false</code>
#   otherwise.
def absolute?
  !self.relative?
end
##
# Joins two URIs together, implementing the reference resolution
# algorithm of RFC 3986, section 5.2.2, with +self+ as the base URI.
#
# @param [String, Addressable::URI, #to_str] The URI to join with.
#
# @return [Addressable::URI] The joined URI.
#
# @raise [TypeError] if the argument cannot be coerced to a String.
def join(uri)
  if !uri.respond_to?(:to_str)
    raise TypeError, "Can't convert #{uri.class} into String."
  end
  if !uri.kind_of?(URI)
    # Otherwise, convert to a String, then parse.
    uri = URI.parse(uri.to_str)
  end
  if uri.to_s.empty?
    return self.dup
  end
  joined_scheme = nil
  joined_user = nil
  joined_password = nil
  joined_host = nil
  joined_port = nil
  joined_path = nil
  joined_query = nil
  joined_fragment = nil
  # Section 5.2.2 of RFC 3986
  if uri.scheme != nil
    # Reference has its own scheme: take everything from the reference.
    joined_scheme = uri.scheme
    joined_user = uri.user
    joined_password = uri.password
    joined_host = uri.host
    joined_port = uri.port
    joined_path = URI.normalize_path(uri.path)
    joined_query = uri.query
  else
    if uri.authority != nil
      # Reference has an authority: keep only the base scheme.
      joined_user = uri.user
      joined_password = uri.password
      joined_host = uri.host
      joined_port = uri.port
      joined_path = URI.normalize_path(uri.path)
      joined_query = uri.query
    else
      if uri.path == nil || uri.path.empty?
        # Empty reference path: keep the base path, prefer the
        # reference's query when present.
        joined_path = self.path
        if uri.query != nil
          joined_query = uri.query
        else
          joined_query = self.query
        end
      else
        if uri.path[0..0] == SLASH
          # Absolute reference path replaces the base path entirely.
          joined_path = URI.normalize_path(uri.path)
        else
          # Relative reference path: merge with the base path.
          base_path = self.path.dup
          base_path = EMPTY_STR if base_path == nil
          base_path = URI.normalize_path(base_path)
          # Section 5.2.3 of RFC 3986
          #
          # Removes the right-most path segment from the base path.
          if base_path.include?(SLASH)
            base_path.sub!(/\/[^\/]+$/, SLASH)
          else
            base_path = EMPTY_STR
          end
          # If the base path is empty and an authority segment has been
          # defined, use a base path of SLASH
          if base_path.empty? && self.authority != nil
            base_path = SLASH
          end
          joined_path = URI.normalize_path(base_path + uri.path)
        end
        joined_query = uri.query
      end
      joined_user = self.user
      joined_password = self.password
      joined_host = self.host
      joined_port = self.port
    end
    joined_scheme = self.scheme
  end
  # The fragment always comes from the reference.
  joined_fragment = uri.fragment
  return self.class.new(
    :scheme => joined_scheme,
    :user => joined_user,
    :password => joined_password,
    :host => joined_host,
    :port => joined_port,
    :path => joined_path,
    :query => joined_query,
    :fragment => joined_fragment
  )
end
# `base + reference` is equivalent to `base.join(reference)`.
alias_method :+, :join
##
# Destructive form of <code>join</code>.
#
# @param [String, Addressable::URI, #to_str] The URI to join with.
#
# @return [Addressable::URI] The joined URI.
#
# @see Addressable::URI#join
def join!(uri)
  joined = self.join(uri)
  replace_self(joined)
end
##
# Merges a URI with a <code>Hash</code> of components.
# This method has different behavior from <code>join</code>. Any
# components present in the <code>hash</code> parameter will override the
# original components. The path component is not treated specially.
#
# @param [Hash, Addressable::URI, #to_hash] The components to merge with.
#
# @return [Addressable::URI] The merged URI.
#
# @raise [TypeError] if the argument is not Hash-like.
# @raise [ArgumentError] if composite keys conflict with their parts
#   (:authority vs :userinfo/:user/:password/:host/:port, or
#   :userinfo vs :user/:password).
#
# @see Hash#merge
def merge(hash)
  if !hash.respond_to?(:to_hash)
    raise TypeError, "Can't convert #{hash.class} into Hash."
  end
  hash = hash.to_hash
  if hash.has_key?(:authority)
    if (hash.keys & [:userinfo, :user, :password, :host, :port]).any?
      raise ArgumentError,
        "Cannot specify both an authority and any of the components " +
        "within the authority."
    end
  end
  if hash.has_key?(:userinfo)
    if (hash.keys & [:user, :password]).any?
      raise ArgumentError,
        "Cannot specify both a userinfo and either the user or password."
    end
  end
  uri = self.class.new
  # defer_validation postpones the validity check until all components
  # are assigned, since intermediate states may be invalid.
  uri.defer_validation do
    # Bunch of crazy logic required because of the composite components
    # like userinfo and authority.
    uri.scheme =
      hash.has_key?(:scheme) ? hash[:scheme] : self.scheme
    if hash.has_key?(:authority)
      uri.authority =
        hash.has_key?(:authority) ? hash[:authority] : self.authority
    end
    if hash.has_key?(:userinfo)
      uri.userinfo =
        hash.has_key?(:userinfo) ? hash[:userinfo] : self.userinfo
    end
    if !hash.has_key?(:userinfo) && !hash.has_key?(:authority)
      uri.user =
        hash.has_key?(:user) ? hash[:user] : self.user
      uri.password =
        hash.has_key?(:password) ? hash[:password] : self.password
    end
    if !hash.has_key?(:authority)
      uri.host =
        hash.has_key?(:host) ? hash[:host] : self.host
      uri.port =
        hash.has_key?(:port) ? hash[:port] : self.port
    end
    uri.path =
      hash.has_key?(:path) ? hash[:path] : self.path
    uri.query =
      hash.has_key?(:query) ? hash[:query] : self.query
    uri.fragment =
      hash.has_key?(:fragment) ? hash[:fragment] : self.fragment
  end
  return uri
end
##
# Destructive form of <code>merge</code>.
#
# @param [Hash, Addressable::URI, #to_hash] The components to merge with.
#
# @return [Addressable::URI] The merged URI.
#
# @see Addressable::URI#merge
def merge!(uri)
  merged = self.merge(uri)
  replace_self(merged)
end
##
# Returns the shortest normalized relative form of this URI that uses the
# supplied URI as a base for resolution. Returns an absolute URI if
# necessary. This is effectively the opposite of <code>route_to</code>.
#
# @param [String, Addressable::URI, #to_str] uri The URI to route from.
#
# @return [Addressable::URI]
#   The normalized relative URI that is equivalent to the original URI.
#
# @raise [ArgumentError] if either URI is relative.
def route_from(uri)
  uri = URI.parse(uri).normalize
  normalized_self = self.normalize
  if normalized_self.relative?
    raise ArgumentError, "Expected absolute URI, got: #{self.to_s}"
  end
  if uri.relative?
    raise ArgumentError, "Expected absolute URI, got: #{uri.to_s}"
  end
  # Identical URIs route to a bare fragment reference.
  if normalized_self == uri
    return Addressable::URI.parse("##{normalized_self.fragment}")
  end
  components = normalized_self.to_hash
  # Drop each component that matches the base, outermost first; stop at
  # the first difference.
  if normalized_self.scheme == uri.scheme
    components[:scheme] = nil
    if normalized_self.authority == uri.authority
      components[:user] = nil
      components[:password] = nil
      components[:host] = nil
      components[:port] = nil
      if normalized_self.path == uri.path
        components[:path] = nil
        if normalized_self.query == uri.query
          components[:query] = nil
        end
      else
        if uri.path != SLASH and components[:path]
          self_splitted_path = split_path(components[:path])
          uri_splitted_path = split_path(uri.path)
          self_dir = self_splitted_path.shift
          uri_dir = uri_splitted_path.shift
          # Discard the shared leading segments, then climb out of the
          # remaining base directories with "..".
          while !self_splitted_path.empty? && !uri_splitted_path.empty? and self_dir == uri_dir
            self_dir = self_splitted_path.shift
            uri_dir = uri_splitted_path.shift
          end
          components[:path] = (uri_splitted_path.fill('..') + [self_dir] + self_splitted_path).join(SLASH)
        end
      end
    end
  end
  # Avoid network-path references.
  if components[:host] != nil
    components[:scheme] = normalized_self.scheme
  end
  return Addressable::URI.new(
    :scheme => components[:scheme],
    :user => components[:user],
    :password => components[:password],
    :host => components[:host],
    :port => components[:port],
    :path => components[:path],
    :query => components[:query],
    :fragment => components[:fragment]
  )
end
##
# Returns the shortest normalized relative form of the supplied URI that
# uses this URI as a base for resolution. Returns an absolute URI if
# necessary. This is effectively the opposite of <code>route_from</code>.
#
# @param [String, Addressable::URI, #to_str] uri The URI to route to.
#
# @return [Addressable::URI]
#   The normalized relative URI that is equivalent to the supplied URI.
def route_to(uri)
  URI.parse(uri).route_from(self)
end
##
# Returns a normalized URI object.
#
# NOTE: This method does not attempt to fully conform to specifications.
# It exists largely to correct other people's failures to read the
# specifications, and also to deal with caching issues since several
# different URIs may represent the same resource and should not be
# cached multiple times.
#
# @return [Addressable::URI] The normalized URI.
def normalize
  # This is a special exception for the frequently misused feed
  # URI scheme.
  if normalized_scheme == "feed"
    # "feed:http://..." and "feed://http://..." unwrap to the inner
    # http URI, normalized recursively.
    if self.to_s =~ /^feed:\/*http:\/*/
      return URI.parse(
        self.to_s[/^feed:\/*(http:\/*.*)/, 1]
      ).normalize
    end
  end
  return self.class.new(
    :scheme => normalized_scheme,
    :authority => normalized_authority,
    :path => normalized_path,
    :query => normalized_query,
    :fragment => normalized_fragment
  )
end
##
# Destructively normalizes this URI object.
#
# @return [Addressable::URI] The normalized URI.
#
# @see Addressable::URI#normalize
def normalize!
  normalized = self.normalize
  replace_self(normalized)
end
##
# Creates a URI suitable for display to users. If semantic attacks are
# likely, the application should try to detect these and warn the user.
# See <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>,
# section 7.6 for more information.
#
# @return [Addressable::URI] A URI suitable for display purposes.
def display_uri
  shown = self.normalize
  # Convert the punycode host back to its Unicode representation.
  shown.host = ::Addressable::IDNA.to_unicode(shown.host)
  shown
end
##
# Returns <code>true</code> if the URI objects are equal. This method
# normalizes both URIs before doing the comparison, and allows comparison
# against <code>Strings</code>.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URIs are equivalent, <code>false</code>
#   otherwise.
def ===(uri)
  other_string =
    if uri.respond_to?(:normalize)
      uri.normalize.to_s
    else
      begin
        ::Addressable::URI.parse(uri).normalize.to_s
      rescue InvalidURIError, TypeError
        # Unparseable values can never be equivalent.
        return false
      end
    end
  self.normalize.to_s == other_string
end
##
# Returns <code>true</code> if the URI objects are equal. This method
# normalizes both URIs before doing the comparison.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URIs are equivalent, <code>false</code>
#   otherwise.
def ==(uri)
  uri.kind_of?(URI) && self.normalize.to_s == uri.normalize.to_s
end
##
# Returns <code>true</code> if the URI objects are equal. This method
# does NOT normalize either URI before doing the comparison.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URIs are equivalent, <code>false</code>
#   otherwise.
def eql?(uri)
  uri.kind_of?(URI) && self.to_s == uri.to_s
end
##
# A hash value that will make a URI equivalent to its normalized
# form.
#
# @return [Integer] A hash of the URI.
def hash
  # Negated so the value differs from the plain String hash.
  @hash ||= -self.to_s.hash
end
##
# Clones the URI object.
#
# @return [Addressable::URI] The cloned URI.
def dup
duplicated_uri = self.class.new(
:scheme => self.scheme ? self.scheme.dup : nil,
:user => self.user ? self.user.dup : nil,
:password => self.password ? self.password.dup : nil,
:host => self.host ? self.host.dup : nil,
:port => self.port,
:path => self.path ? self.path.dup : nil,
:query => self.query ? self.query.dup : nil,
:fragment => self.fragment ? self.fragment.dup : nil
)
return duplicated_uri
end
##
# Omits components from a URI.
#
# @param [Symbol] *components The components to be omitted.
#
# @return [Addressable::URI] The URI with components omitted.
#
# @example
# uri = Addressable::URI.parse("http://example.com/path?query")
# #=> #<Addressable::URI:0xcc5e7a URI:http://example.com/path?query>
# uri.omit(:scheme, :authority)
# #=> #<Addressable::URI:0xcc4d86 URI:/path?query>
def omit(*components)
invalid_components = components - [
:scheme, :user, :password, :userinfo, :host, :port, :authority,
:path, :query, :fragment
]
unless invalid_components.empty?
raise ArgumentError,
"Invalid component names: #{invalid_components.inspect}."
end
duplicated_uri = self.dup
duplicated_uri.defer_validation do
components.each do |component|
duplicated_uri.send((component.to_s + "=").to_sym, nil)
end
duplicated_uri.user = duplicated_uri.normalized_user
end
duplicated_uri
end
##
# Destructive form of omit.
#
# @param [Symbol] *components The components to be omitted.
#
# @return [Addressable::URI] The URI with components omitted.
#
# @see Addressable::URI#omit
def omit!(*components)
replace_self(self.omit(*components))
end
##
# Determines if the URI is an empty string.
#
# @return [TrueClass, FalseClass]
# Returns <code>true</code> if empty, <code>false</code> otherwise.
def empty?
return self.to_s.empty?
end
##
# Converts the URI to a <code>String</code>.
#
# @return [String] The URI's <code>String</code> representation.
def to_s
if self.scheme == nil && self.path != nil && !self.path.empty? &&
self.path =~ NORMPATH
raise InvalidURIError,
"Cannot assemble URI string with ambiguous path: '#{self.path}'"
end
@uri_string ||= begin
uri_string = String.new
uri_string << "#{self.scheme}:" if self.scheme != nil
uri_string << "//#{self.authority}" if self.authority != nil
uri_string << self.path.to_s
uri_string << "?#{self.query}" if self.query != nil
uri_string << "##{self.fragment}" if self.fragment != nil
uri_string.force_encoding(Encoding::UTF_8)
uri_string
end
end
##
# URI's are glorified <code>Strings</code>. Allow implicit conversion.
alias_method :to_str, :to_s
##
# Returns a Hash of the URI components.
#
# @return [Hash] The URI as a <code>Hash</code> of components.
def to_hash
return {
:scheme => self.scheme,
:user => self.user,
:password => self.password,
:host => self.host,
:port => self.port,
:path => self.path,
:query => self.query,
:fragment => self.fragment
}
end
##
# Returns a <code>String</code> representation of the URI object's state.
#
# @return [String] The URI object's state, as a <code>String</code>.
def inspect
sprintf("#<%s:%#0x URI:%s>", URI.to_s, self.object_id, self.to_s)
end
##
# This method allows you to make several changes to a URI simultaneously,
# which separately would cause validation errors, but in conjunction,
# are valid. The URI will be revalidated as soon as the entire block has
# been executed.
#
# @param [Proc] block
# A set of operations to perform on a given URI.
def defer_validation(&block)
raise LocalJumpError, "No block given." unless block
@validation_deferred = true
block.call()
@validation_deferred = false
validate
return nil
end
protected
SELF_REF = '.'
PARENT = '..'
RULE_2A = /\/\.\/|\/\.$/
RULE_2B_2C = /\/([^\/]*)\/\.\.\/|\/([^\/]*)\/\.\.$/
RULE_2D = /^\.\.?\/?/
RULE_PREFIXED_PARENT = /^\/\.\.?\/|^(\/\.\.?)+\/?$/
##
# Resolves paths to their simplest form.
#
# @param [String] path The path to normalize.
#
# @return [String] The normalized path.
def self.normalize_path(path)
# Section 5.2.4 of RFC 3986
return nil if path.nil?
normalized_path = path.dup
begin
mod = nil
mod ||= normalized_path.gsub!(RULE_2A, SLASH)
pair = normalized_path.match(RULE_2B_2C)
parent, current = pair[1], pair[2] if pair
if pair && ((parent != SELF_REF && parent != PARENT) ||
(current != SELF_REF && current != PARENT))
mod ||= normalized_path.gsub!(
Regexp.new(
"/#{Regexp.escape(parent.to_s)}/\\.\\./|" +
"(/#{Regexp.escape(current.to_s)}/\\.\\.$)"
), SLASH
)
end
mod ||= normalized_path.gsub!(RULE_2D, EMPTY_STR)
# Non-standard, removes prefixed dotted segments from path.
mod ||= normalized_path.gsub!(RULE_PREFIXED_PARENT, SLASH)
end until mod.nil?
return normalized_path
end
##
# Ensures that the URI is valid.
def validate
return if !!@validation_deferred
if self.scheme != nil && self.ip_based? &&
(self.host == nil || self.host.empty?) &&
(self.path == nil || self.path.empty?)
raise InvalidURIError,
"Absolute URI missing hierarchical segment: '#{self.to_s}'"
end
if self.host == nil
if self.port != nil ||
self.user != nil ||
self.password != nil
raise InvalidURIError, "Hostname not supplied: '#{self.to_s}'"
end
end
if self.path != nil && !self.path.empty? && self.path[0..0] != SLASH &&
self.authority != nil
raise InvalidURIError,
"Cannot have a relative path with an authority set: '#{self.to_s}'"
end
if self.path != nil && !self.path.empty? &&
self.path[0..1] == SLASH + SLASH && self.authority == nil
raise InvalidURIError,
"Cannot have a path with two leading slashes " +
"without an authority set: '#{self.to_s}'"
end
unreserved = CharacterClasses::UNRESERVED
sub_delims = CharacterClasses::SUB_DELIMS
if !self.host.nil? && (self.host =~ /[<>{}\/\\\?\#\@"[[:space:]]]/ ||
(self.host[/^\[(.*)\]$/, 1] != nil && self.host[/^\[(.*)\]$/, 1] !~
Regexp.new("^[#{unreserved}#{sub_delims}:]*$")))
raise InvalidURIError, "Invalid character in host: '#{self.host.to_s}'"
end
return nil
end
##
# Replaces the internal state of self with the specified URI's state.
# Used in destructive operations to avoid massive code repetition.
#
# @param [Addressable::URI] uri The URI to replace <code>self</code> with.
#
# @return [Addressable::URI] <code>self</code>.
def replace_self(uri)
# Reset dependent values
instance_variables.each do |var|
if instance_variable_defined?(var) && var != :@validation_deferred
remove_instance_variable(var)
end
end
@scheme = uri.scheme
@user = uri.user
@password = uri.password
@host = uri.host
@port = uri.port
@path = uri.path
@query = uri.query
@fragment = uri.fragment
return self
end
##
# Splits path string with "/" (slash).
# It is considered that there is empty string after last slash when
# path ends with slash.
#
# @param [String] path The path to split.
#
# @return [Array<String>] An array of parts of path.
def split_path(path)
splitted = path.split(SLASH)
splitted << EMPTY_STR if path.end_with? SLASH
splitted
end
##
# Resets composite values for the entire URI
#
# @api private
def remove_composite_values
remove_instance_variable(:@uri_string) if defined?(@uri_string)
remove_instance_variable(:@hash) if defined?(@hash)
end
end
end
|
# frozen_string_literal: true
# encoding:utf-8
#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
require "addressable/version"
require "addressable/idna"
require "public_suffix"
##
# Addressable is a library for processing links and URIs.
module Addressable
##
# This is an implementation of a URI parser based on
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>,
# <a href="http://www.ietf.org/rfc/rfc3987.txt">RFC 3987</a>.
class URI
##
# Raised if something other than a uri is supplied.
class InvalidURIError < StandardError
end
##
# Container for the character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
module CharacterClasses
ALPHA = "a-zA-Z"
DIGIT = "0-9"
GEN_DELIMS = "\\:\\/\\?\\#\\[\\]\\@"
SUB_DELIMS = "\\!\\$\\&\\'\\(\\)\\*\\+\\,\\;\\="
RESERVED = GEN_DELIMS + SUB_DELIMS
UNRESERVED = ALPHA + DIGIT + "\\-\\.\\_\\~"
PCHAR = UNRESERVED + SUB_DELIMS + "\\:\\@"
SCHEME = ALPHA + DIGIT + "\\-\\+\\."
HOST = UNRESERVED + SUB_DELIMS + "\\[\\:\\]"
AUTHORITY = PCHAR + "\\[\\:\\]"
PATH = PCHAR + "\\/"
QUERY = PCHAR + "\\/\\?"
FRAGMENT = PCHAR + "\\/\\?"
end
module NormalizeCharacterClasses
HOST = /[^#{CharacterClasses::HOST}]/
UNRESERVED = /[^#{CharacterClasses::UNRESERVED}]/
PCHAR = /[^#{CharacterClasses::PCHAR}]/
SCHEME = /[^#{CharacterClasses::SCHEME}]/
FRAGMENT = /[^#{CharacterClasses::FRAGMENT}]/
QUERY = %r{[^a-zA-Z0-9\-\.\_\~\!\$\'\(\)\*\+\,\=\:\@\/\?%]|%(?!2B|2b)}
end
SLASH = '/'
EMPTY_STR = ''
URIREGEX = /^(([^:\/?#]+):)?(\/\/([^\/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?$/
PORT_MAPPING = {
"http" => 80,
"https" => 443,
"ftp" => 21,
"tftp" => 69,
"sftp" => 22,
"ssh" => 22,
"svn+ssh" => 22,
"telnet" => 23,
"nntp" => 119,
"gopher" => 70,
"wais" => 210,
"ldap" => 389,
"prospero" => 1525
}.freeze
##
# Returns a URI object based on the parsed string.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI string to parse.
# No parsing is performed if the object is already an
# <code>Addressable::URI</code>.
#
# @return [Addressable::URI] The parsed URI.
def self.parse(uri)
# If we were given nil, return nil.
return nil unless uri
# If a URI object is passed, just return itself.
return uri.dup if uri.kind_of?(self)
# If a URI object of the Ruby standard library variety is passed,
# convert it to a string, then parse the string.
# We do the check this way because we don't want to accidentally
# cause a missing constant exception to be thrown.
if uri.class.name =~ /^URI\b/
uri = uri.to_s
end
# Otherwise, convert to a String
begin
uri = uri.to_str
rescue TypeError, NoMethodError
raise TypeError, "Can't convert #{uri.class} into String."
end if not uri.is_a? String
# This Regexp supplied as an example in RFC 3986, and it works great.
scan = uri.scan(URIREGEX)
fragments = scan[0]
scheme = fragments[1]
authority = fragments[3]
path = fragments[4]
query = fragments[6]
fragment = fragments[8]
user = nil
password = nil
host = nil
port = nil
if authority != nil
# The Regexp above doesn't split apart the authority.
userinfo = authority[/^([^\[\]]*)@/, 1]
if userinfo != nil
user = userinfo.strip[/^([^:]*):?/, 1]
password = userinfo.strip[/:(.*)$/, 1]
end
host = authority.sub(
/^([^\[\]]*)@/, EMPTY_STR
).sub(
/:([^:@\[\]]*?)$/, EMPTY_STR
)
port = authority[/:([^:@\[\]]*?)$/, 1]
end
if port == EMPTY_STR
port = nil
end
return new(
:scheme => scheme,
:user => user,
:password => password,
:host => host,
:port => port,
:path => path,
:query => query,
:fragment => fragment
)
end
##
# Converts an input to a URI. The input does not have to be a valid
# URI — the method will use heuristics to guess what URI was intended.
# This is not standards-compliant, merely user-friendly.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI string to parse.
# No parsing is performed if the object is already an
# <code>Addressable::URI</code>.
# @param [Hash] hints
# A <code>Hash</code> of hints to the heuristic parser.
# Defaults to <code>{:scheme => "http"}</code>.
#
# @return [Addressable::URI] The parsed URI.
def self.heuristic_parse(uri, hints={})
# If we were given nil, return nil.
return nil unless uri
# If a URI object is passed, just return itself.
return uri.dup if uri.kind_of?(self)
# If a URI object of the Ruby standard library variety is passed,
# convert it to a string, then parse the string.
# We do the check this way because we don't want to accidentally
# cause a missing constant exception to be thrown.
if uri.class.name =~ /^URI\b/
uri = uri.to_s
end
if !uri.respond_to?(:to_str)
raise TypeError, "Can't convert #{uri.class} into String."
end
# Otherwise, convert to a String
uri = uri.to_str.dup.strip
hints = {
:scheme => "http"
}.merge(hints)
case uri
when /^http:\//i
uri.sub!(/^http:\/+/i, "http://")
when /^https:\//i
uri.sub!(/^https:\/+/i, "https://")
when /^feed:\/+http:\//i
uri.sub!(/^feed:\/+http:\/+/i, "feed:http://")
when /^feed:\//i
uri.sub!(/^feed:\/+/i, "feed://")
when %r[^file:/{4}]i
uri.sub!(%r[^file:/+]i, "file:////")
when %r[^file://localhost/]i
uri.sub!(%r[^file://localhost/+]i, "file:///")
when %r[^file:/+]i
uri.sub!(%r[^file:/+]i, "file:///")
when /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/
uri.sub!(/^/, hints[:scheme] + "://")
when /\A\d+\..*:\d+\z/
uri = "#{hints[:scheme]}://#{uri}"
end
match = uri.match(URIREGEX)
fragments = match.captures
authority = fragments[3]
if authority && authority.length > 0
new_authority = authority.tr("\\", "/").gsub(" ", "%20")
# NOTE: We want offset 4, not 3!
offset = match.offset(4)
uri = uri.dup
uri[offset[0]...offset[1]] = new_authority
end
parsed = self.parse(uri)
if parsed.scheme =~ /^[^\/?#\.]+\.[^\/?#]+$/
parsed = self.parse(hints[:scheme] + "://" + uri)
end
if parsed.path.include?(".")
if parsed.path[/\b@\b/]
parsed.scheme = "mailto" unless parsed.scheme
elsif new_host = parsed.path[/^([^\/]+\.[^\/]*)/, 1]
parsed.defer_validation do
new_path = parsed.path.sub(
Regexp.new("^" + Regexp.escape(new_host)), EMPTY_STR)
parsed.host = new_host
parsed.path = new_path
parsed.scheme = hints[:scheme] unless parsed.scheme
end
end
end
return parsed
end
##
# Converts a path to a file scheme URI. If the path supplied is
# relative, it will be returned as a relative URI. If the path supplied
# is actually a non-file URI, it will parse the URI as if it had been
# parsed with <code>Addressable::URI.parse</code>. Handles all of the
# various Microsoft-specific formats for specifying paths.
#
# @param [String, Addressable::URI, #to_str] path
# Typically a <code>String</code> path to a file or directory, but
# will return a sensible return value if an absolute URI is supplied
# instead.
#
# @return [Addressable::URI]
# The parsed file scheme URI or the original URI if some other URI
# scheme was provided.
#
# @example
# base = Addressable::URI.convert_path("/absolute/path/")
# uri = Addressable::URI.convert_path("relative/path")
# (base + uri).to_s
# #=> "file:///absolute/path/relative/path"
#
# Addressable::URI.convert_path(
# "c:\\windows\\My Documents 100%20\\foo.txt"
# ).to_s
# #=> "file:///c:/windows/My%20Documents%20100%20/foo.txt"
#
# Addressable::URI.convert_path("http://example.com/").to_s
# #=> "http://example.com/"
def self.convert_path(path)
# If we were given nil, return nil.
return nil unless path
# If a URI object is passed, just return itself.
return path if path.kind_of?(self)
if !path.respond_to?(:to_str)
raise TypeError, "Can't convert #{path.class} into String."
end
# Otherwise, convert to a String
path = path.to_str.strip
path.sub!(/^file:\/?\/?/, EMPTY_STR) if path =~ /^file:\/?\/?/
path = SLASH + path if path =~ /^([a-zA-Z])[\|:]/
uri = self.parse(path)
if uri.scheme == nil
# Adjust windows-style uris
uri.path.sub!(/^\/?([a-zA-Z])[\|:][\\\/]/) do
"/#{$1.downcase}:/"
end
uri.path.tr!("\\", SLASH)
if File.exist?(uri.path) &&
File.stat(uri.path).directory?
uri.path.chomp!(SLASH)
uri.path = uri.path + '/'
end
# If the path is absolute, set the scheme and host.
if uri.path.start_with?(SLASH)
uri.scheme = "file"
uri.host = EMPTY_STR
end
uri.normalize!
end
return uri
end
##
# Joins several URIs together.
#
# @param [String, Addressable::URI, #to_str] *uris
# The URIs to join.
#
# @return [Addressable::URI] The joined URI.
#
# @example
# base = "http://example.com/"
# uri = Addressable::URI.parse("relative/path")
# Addressable::URI.join(base, uri)
# #=> #<Addressable::URI:0xcab390 URI:http://example.com/relative/path>
def self.join(*uris)
uri_objects = uris.collect do |uri|
if !uri.respond_to?(:to_str)
raise TypeError, "Can't convert #{uri.class} into String."
end
uri.kind_of?(self) ? uri : self.parse(uri.to_str)
end
result = uri_objects.shift.dup
for uri in uri_objects
result.join!(uri)
end
return result
end
##
# Tables used to optimize encoding operations in `self.encode_component`
# and `self.normalize_component`
SEQUENCE_ENCODING_TABLE = Hash.new do |hash, sequence|
hash[sequence] = sequence.unpack("C*").map do |c|
format("%02x", c)
end.join
end
SEQUENCE_UPCASED_PERCENT_ENCODING_TABLE = Hash.new do |hash, sequence|
hash[sequence] = sequence.unpack("C*").map do |c|
format("%%%02X", c)
end.join
end
##
# Percent encodes a URI component.
#
# @param [String, #to_str] component The URI component to encode.
#
# @param [String, Regexp] character_class
# The characters which are not percent encoded. If a <code>String</code>
# is passed, the <code>String</code> must be formatted as a regular
# expression character class. (Do not include the surrounding square
# brackets.) For example, <code>"b-zB-Z0-9"</code> would cause
# everything but the letters 'b' through 'z' and the numbers '0' through
# '9' to be percent encoded. If a <code>Regexp</code> is passed, the
# value <code>/[^b-zB-Z0-9]/</code> would have the same effect. A set of
# useful <code>String</code> values may be found in the
# <code>Addressable::URI::CharacterClasses</code> module. The default
# value is the reserved plus unreserved character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# @param [Regexp] upcase_encoded
# A string of characters that may already be percent encoded, and whose
# encodings should be upcased. This allows normalization of percent
# encodings for characters not included in the
# <code>character_class</code>.
#
# @return [String] The encoded component.
#
# @example
# Addressable::URI.encode_component("simple/example", "b-zB-Z0-9")
# => "simple%2Fex%61mple"
# Addressable::URI.encode_component("simple/example", /[^b-zB-Z0-9]/)
# => "simple%2Fex%61mple"
# Addressable::URI.encode_component(
# "simple/example", Addressable::URI::CharacterClasses::UNRESERVED
# )
# => "simple%2Fexample"
def self.encode_component(component, character_class=
CharacterClasses::RESERVED + CharacterClasses::UNRESERVED,
upcase_encoded='')
return nil if component.nil?
begin
if component.kind_of?(Symbol) ||
component.kind_of?(Numeric) ||
component.kind_of?(TrueClass) ||
component.kind_of?(FalseClass)
component = component.to_s
else
component = component.to_str
end
rescue TypeError, NoMethodError
raise TypeError, "Can't convert #{component.class} into String."
end if !component.is_a? String
if ![String, Regexp].include?(character_class.class)
raise TypeError,
"Expected String or Regexp, got #{character_class.inspect}"
end
if character_class.kind_of?(String)
character_class = /[^#{character_class}]/
end
# We can't perform regexps on invalid UTF sequences, but
# here we need to, so switch to ASCII.
component = component.dup
component.force_encoding(Encoding::ASCII_8BIT)
# Avoiding gsub! because there are edge cases with frozen strings
component = component.gsub(character_class) do |sequence|
SEQUENCE_UPCASED_PERCENT_ENCODING_TABLE[sequence]
end
if upcase_encoded.length > 0
upcase_encoded_chars = upcase_encoded.chars.map do |char|
SEQUENCE_ENCODING_TABLE[char]
end
component = component.gsub(/%(#{upcase_encoded_chars.join('|')})/,
&:upcase)
end
return component
end
class << self
alias_method :escape_component, :encode_component
end
##
# Unencodes any percent encoded characters within a URI component.
# This method may be used for unencoding either components or full URIs,
# however, it is recommended to use the <code>unencode_component</code>
# alias when unencoding components.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI or component to unencode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @param [String] leave_encoded
# A string of characters to leave encoded. If a percent encoded character
# in this list is encountered then it will remain percent encoded.
#
# @return [String, Addressable::URI]
# The unencoded component or URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.unencode(uri, return_type=String, leave_encoded='')
return nil if uri.nil?
begin
uri = uri.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{uri.class} into String."
end if !uri.is_a? String
if ![String, ::Addressable::URI].include?(return_type)
raise TypeError,
"Expected Class (String or Addressable::URI), " +
"got #{return_type.inspect}"
end
uri = uri.dup
# Seriously, only use UTF-8. I'm really not kidding!
uri.force_encoding("utf-8")
unless leave_encoded.empty?
leave_encoded = leave_encoded.dup.force_encoding("utf-8")
end
result = uri.gsub(/%[0-9a-f]{2}/iu) do |sequence|
c = sequence[1..3].to_i(16).chr
c.force_encoding("utf-8")
leave_encoded.include?(c) ? sequence : c
end
result.force_encoding("utf-8")
if return_type == String
return result
elsif return_type == ::Addressable::URI
return ::Addressable::URI.parse(result)
end
end
class << self
alias_method :unescape, :unencode
alias_method :unencode_component, :unencode
alias_method :unescape_component, :unencode
end
##
# Normalizes the encoding of a URI component.
#
# @param [String, #to_str] component The URI component to encode.
#
# @param [String, Regexp] character_class
# The characters which are not percent encoded. If a <code>String</code>
# is passed, the <code>String</code> must be formatted as a regular
# expression character class. (Do not include the surrounding square
# brackets.) For example, <code>"b-zB-Z0-9"</code> would cause
# everything but the letters 'b' through 'z' and the numbers '0'
# through '9' to be percent encoded. If a <code>Regexp</code> is passed,
# the value <code>/[^b-zB-Z0-9]/</code> would have the same effect. A
# set of useful <code>String</code> values may be found in the
# <code>Addressable::URI::CharacterClasses</code> module. The default
# value is the reserved plus unreserved character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# @param [String] leave_encoded
# When <code>character_class</code> is a <code>String</code> then
# <code>leave_encoded</code> is a string of characters that should remain
# percent encoded while normalizing the component; if they appear percent
# encoded in the original component, then they will be upcased ("%2f"
# normalized to "%2F") but otherwise left alone.
#
# @return [String] The normalized component.
#
# @example
# Addressable::URI.normalize_component("simpl%65/%65xampl%65", "b-zB-Z")
# => "simple%2Fex%61mple"
# Addressable::URI.normalize_component(
# "simpl%65/%65xampl%65", /[^b-zB-Z]/
# )
# => "simple%2Fex%61mple"
# Addressable::URI.normalize_component(
# "simpl%65/%65xampl%65",
# Addressable::URI::CharacterClasses::UNRESERVED
# )
# => "simple%2Fexample"
# Addressable::URI.normalize_component(
# "one%20two%2fthree%26four",
# "0-9a-zA-Z &/",
# "/"
# )
# => "one two%2Fthree&four"
def self.normalize_component(component, character_class=
CharacterClasses::RESERVED + CharacterClasses::UNRESERVED,
leave_encoded='')
return nil if component.nil?
begin
component = component.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{component.class} into String."
end if !component.is_a? String
if ![String, Regexp].include?(character_class.class)
raise TypeError,
"Expected String or Regexp, got #{character_class.inspect}"
end
if character_class.kind_of?(String)
leave_re = if leave_encoded.length > 0
character_class = "#{character_class}%" unless character_class.include?('%')
"|%(?!#{leave_encoded.chars.flat_map do |char|
seq = SEQUENCE_ENCODING_TABLE[char]
[seq.upcase, seq.downcase]
end.join('|')})"
end
character_class = if leave_re
/[^#{character_class}]#{leave_re}/
else
/[^#{character_class}]/
end
end
# We can't perform regexps on invalid UTF sequences, but
# here we need to, so switch to ASCII.
component = component.dup
component.force_encoding(Encoding::ASCII_8BIT)
unencoded = self.unencode_component(component, String, leave_encoded)
begin
encoded = self.encode_component(
Addressable::IDNA.unicode_normalize_kc(unencoded),
character_class,
leave_encoded
)
rescue ArgumentError
encoded = self.encode_component(unencoded)
end
encoded.force_encoding(Encoding::UTF_8)
return encoded
end
##
# Percent encodes any special characters in the URI.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI to encode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @return [String, Addressable::URI]
# The encoded URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.encode(uri, return_type=String)
return nil if uri.nil?
begin
uri = uri.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{uri.class} into String."
end if !uri.is_a? String
if ![String, ::Addressable::URI].include?(return_type)
raise TypeError,
"Expected Class (String or Addressable::URI), " +
"got #{return_type.inspect}"
end
uri_object = uri.kind_of?(self) ? uri : self.parse(uri)
encoded_uri = Addressable::URI.new(
:scheme => self.encode_component(uri_object.scheme,
Addressable::URI::CharacterClasses::SCHEME),
:authority => self.encode_component(uri_object.authority,
Addressable::URI::CharacterClasses::AUTHORITY),
:path => self.encode_component(uri_object.path,
Addressable::URI::CharacterClasses::PATH),
:query => self.encode_component(uri_object.query,
Addressable::URI::CharacterClasses::QUERY),
:fragment => self.encode_component(uri_object.fragment,
Addressable::URI::CharacterClasses::FRAGMENT)
)
if return_type == String
return encoded_uri.to_s
elsif return_type == ::Addressable::URI
return encoded_uri
end
end
class << self
alias_method :escape, :encode
end
##
# Normalizes the encoding of a URI. Characters within a hostname are
# not percent encoded to allow for internationalized domain names.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI to encode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @return [String, Addressable::URI]
# The encoded URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.normalized_encode(uri, return_type=String)
begin
uri = uri.to_str
rescue NoMethodError, TypeError
raise TypeError, "Can't convert #{uri.class} into String."
end if !uri.is_a? String
if ![String, ::Addressable::URI].include?(return_type)
raise TypeError,
"Expected Class (String or Addressable::URI), " +
"got #{return_type.inspect}"
end
uri_object = uri.kind_of?(self) ? uri : self.parse(uri)
components = {
:scheme => self.unencode_component(uri_object.scheme),
:user => self.unencode_component(uri_object.user),
:password => self.unencode_component(uri_object.password),
:host => self.unencode_component(uri_object.host),
:port => (uri_object.port.nil? ? nil : uri_object.port.to_s),
:path => self.unencode_component(uri_object.path),
:query => self.unencode_component(uri_object.query),
:fragment => self.unencode_component(uri_object.fragment)
}
components.each do |key, value|
if value != nil
begin
components[key] =
Addressable::IDNA.unicode_normalize_kc(value.to_str)
rescue ArgumentError
# Likely a malformed UTF-8 character, skip unicode normalization
components[key] = value.to_str
end
end
end
encoded_uri = Addressable::URI.new(
:scheme => self.encode_component(components[:scheme],
Addressable::URI::CharacterClasses::SCHEME),
:user => self.encode_component(components[:user],
Addressable::URI::CharacterClasses::UNRESERVED),
:password => self.encode_component(components[:password],
Addressable::URI::CharacterClasses::UNRESERVED),
:host => components[:host],
:port => components[:port],
:path => self.encode_component(components[:path],
Addressable::URI::CharacterClasses::PATH),
:query => self.encode_component(components[:query],
Addressable::URI::CharacterClasses::QUERY),
:fragment => self.encode_component(components[:fragment],
Addressable::URI::CharacterClasses::FRAGMENT)
)
if return_type == String
return encoded_uri.to_s
elsif return_type == ::Addressable::URI
return encoded_uri
end
end
##
# Encodes a set of key/value pairs according to the rules for the
# <code>application/x-www-form-urlencoded</code> MIME type.
#
# @param [#to_hash, #to_ary] form_values
# The form values to encode.
#
# @param [TrueClass, FalseClass] sort
# Sort the key/value pairs prior to encoding.
# Defaults to <code>false</code>.
#
# @return [String]
# The encoded value.
def self.form_encode(form_values, sort=false)
if form_values.respond_to?(:to_hash)
form_values = form_values.to_hash.to_a
elsif form_values.respond_to?(:to_ary)
form_values = form_values.to_ary
else
raise TypeError, "Can't convert #{form_values.class} into Array."
end
form_values = form_values.inject([]) do |accu, (key, value)|
if value.kind_of?(Array)
value.each do |v|
accu << [key.to_s, v.to_s]
end
else
accu << [key.to_s, value.to_s]
end
accu
end
if sort
# Useful for OAuth and optimizing caching systems
form_values = form_values.sort
end
escaped_form_values = form_values.map do |(key, value)|
# Line breaks are CRLF pairs
[
self.encode_component(
key.gsub(/(\r\n|\n|\r)/, "\r\n"),
CharacterClasses::UNRESERVED
).gsub("%20", "+"),
self.encode_component(
value.gsub(/(\r\n|\n|\r)/, "\r\n"),
CharacterClasses::UNRESERVED
).gsub("%20", "+")
]
end
return escaped_form_values.map do |(key, value)|
"#{key}=#{value}"
end.join("&")
end
##
# Decodes a <code>String</code> according to the rules for the
# <code>application/x-www-form-urlencoded</code> MIME type.
#
# @param [String, #to_str] encoded_value
# The form values to decode.
#
# @return [Array]
# The decoded values.
# This is not a <code>Hash</code> because of the possibility for
# duplicate keys.
def self.form_unencode(encoded_value)
if !encoded_value.respond_to?(:to_str)
raise TypeError, "Can't convert #{encoded_value.class} into String."
end
encoded_value = encoded_value.to_str
split_values = encoded_value.split("&").map do |pair|
pair.split("=", 2)
end
return split_values.map do |(key, value)|
[
key ? self.unencode_component(
key.gsub("+", "%20")).gsub(/(\r\n|\n|\r)/, "\n") : nil,
value ? (self.unencode_component(
value.gsub("+", "%20")).gsub(/(\r\n|\n|\r)/, "\n")) : nil
]
end
end
##
# Creates a new uri object from component parts.
#
# @option [String, #to_str] scheme The scheme component.
# @option [String, #to_str] user The user component.
# @option [String, #to_str] password The password component.
# @option [String, #to_str] userinfo
# The userinfo component. If this is supplied, the user and password
# components must be omitted.
# @option [String, #to_str] host The host component.
# @option [String, #to_str] port The port component.
# @option [String, #to_str] authority
# The authority component. If this is supplied, the user, password,
# userinfo, host, and port components must be omitted.
# @option [String, #to_str] path The path component.
# @option [String, #to_str] query The query component.
# @option [String, #to_str] fragment The fragment component.
#
# @return [Addressable::URI] The constructed URI object.
def initialize(options={})
if options.has_key?(:authority)
if (options.keys & [:userinfo, :user, :password, :host, :port]).any?
raise ArgumentError,
"Cannot specify both an authority and any of the components " +
"within the authority."
end
end
if options.has_key?(:userinfo)
if (options.keys & [:user, :password]).any?
raise ArgumentError,
"Cannot specify both a userinfo and either the user or password."
end
end
self.defer_validation do
# Bunch of crazy logic required because of the composite components
# like userinfo and authority.
self.scheme = options[:scheme] if options[:scheme]
self.user = options[:user] if options[:user]
self.password = options[:password] if options[:password]
self.userinfo = options[:userinfo] if options[:userinfo]
self.host = options[:host] if options[:host]
self.port = options[:port] if options[:port]
self.authority = options[:authority] if options[:authority]
self.path = options[:path] if options[:path]
self.query = options[:query] if options[:query]
self.query_values = options[:query_values] if options[:query_values]
self.fragment = options[:fragment] if options[:fragment]
end
self.to_s
end
##
# Freeze URI, initializing instance variables.
#
# @return [Addressable::URI] The frozen URI object.
def freeze
self.normalized_scheme
self.normalized_user
self.normalized_password
self.normalized_userinfo
self.normalized_host
self.normalized_port
self.normalized_authority
self.normalized_site
self.normalized_path
self.normalized_query
self.normalized_fragment
self.hash
super
end
##
# The scheme component for this URI.
#
# @return [String] The scheme component.
def scheme
return defined?(@scheme) ? @scheme : nil
end
##
# The scheme component for this URI, normalized.
#
# @return [String] The scheme component, normalized.
def normalized_scheme
return nil unless self.scheme
@normalized_scheme ||= begin
if self.scheme =~ /^\s*ssh\+svn\s*$/i
"svn+ssh".dup
else
Addressable::URI.normalize_component(
self.scheme.strip.downcase,
Addressable::URI::NormalizeCharacterClasses::SCHEME
)
end
end
# All normalized values should be UTF-8
@normalized_scheme.force_encoding(Encoding::UTF_8) if @normalized_scheme
@normalized_scheme
end
##
# Sets the scheme component for this URI.
#
# @param [String, #to_str] new_scheme The new scheme component.
def scheme=(new_scheme)
if new_scheme && !new_scheme.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_scheme.class} into String."
elsif new_scheme
new_scheme = new_scheme.to_str
end
if new_scheme && new_scheme !~ /\A[a-z][a-z0-9\.\+\-]*\z/i
raise InvalidURIError, "Invalid scheme format: '#{new_scheme}'"
end
@scheme = new_scheme
@scheme = nil if @scheme.to_s.strip.empty?
# Reset dependent values
remove_instance_variable(:@normalized_scheme) if defined?(@normalized_scheme)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The user component for this URI.
#
# @return [String] The user component.
def user
return defined?(@user) ? @user : nil
end
##
# The user component for this URI, normalized.
#
# @return [String] The user component, normalized.
def normalized_user
return nil unless self.user
return @normalized_user if defined?(@normalized_user)
@normalized_user ||= begin
if normalized_scheme =~ /https?/ && self.user.strip.empty? &&
(!self.password || self.password.strip.empty?)
nil
else
Addressable::URI.normalize_component(
self.user.strip,
Addressable::URI::NormalizeCharacterClasses::UNRESERVED
)
end
end
# All normalized values should be UTF-8
@normalized_user.force_encoding(Encoding::UTF_8) if @normalized_user
@normalized_user
end
##
# Sets the user component for this URI.
#
# @param [String, #to_str] new_user The new user component.
def user=(new_user)
if new_user && !new_user.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_user.class} into String."
end
@user = new_user ? new_user.to_str : nil
# You can't have a nil user with a non-nil password
if password != nil
@user = EMPTY_STR if @user.nil?
end
# Reset dependent values
remove_instance_variable(:@userinfo) if defined?(@userinfo)
remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
remove_instance_variable(:@authority) if defined?(@authority)
remove_instance_variable(:@normalized_user) if defined?(@normalized_user)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The password component for this URI.
#
# @return [String] The password component.
def password
return defined?(@password) ? @password : nil
end
##
# The password component for this URI, normalized.
#
# @return [String] The password component, normalized.
def normalized_password
return nil unless self.password
return @normalized_password if defined?(@normalized_password)
@normalized_password ||= begin
if self.normalized_scheme =~ /https?/ && self.password.strip.empty? &&
(!self.user || self.user.strip.empty?)
nil
else
Addressable::URI.normalize_component(
self.password.strip,
Addressable::URI::NormalizeCharacterClasses::UNRESERVED
)
end
end
# All normalized values should be UTF-8
if @normalized_password
@normalized_password.force_encoding(Encoding::UTF_8)
end
@normalized_password
end
##
# Sets the password component for this URI.
#
# @param [String, #to_str] new_password The new password component.
def password=(new_password)
if new_password && !new_password.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_password.class} into String."
end
@password = new_password ? new_password.to_str : nil
# You can't have a nil user with a non-nil password
@password ||= nil
@user ||= nil
if @password != nil
@user = EMPTY_STR if @user.nil?
end
# Reset dependent values
remove_instance_variable(:@userinfo) if defined?(@userinfo)
remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
remove_instance_variable(:@authority) if defined?(@authority)
remove_instance_variable(:@normalized_password) if defined?(@normalized_password)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The userinfo component for this URI.
# Combines the user and password components.
#
# @return [String] The userinfo component.
def userinfo
current_user = self.user
current_password = self.password
(current_user || current_password) && @userinfo ||= begin
if current_user && current_password
"#{current_user}:#{current_password}"
elsif current_user && !current_password
"#{current_user}"
end
end
end
##
# The userinfo component for this URI, normalized.
#
# @return [String] The userinfo component, normalized.
def normalized_userinfo
return nil unless self.userinfo
return @normalized_userinfo if defined?(@normalized_userinfo)
@normalized_userinfo ||= begin
current_user = self.normalized_user
current_password = self.normalized_password
if !current_user && !current_password
nil
elsif current_user && current_password
"#{current_user}:#{current_password}".dup
elsif current_user && !current_password
"#{current_user}".dup
end
end
# All normalized values should be UTF-8
if @normalized_userinfo
@normalized_userinfo.force_encoding(Encoding::UTF_8)
end
@normalized_userinfo
end
##
# Sets the userinfo component for this URI.
#
# @param [String, #to_str] new_userinfo The new userinfo component.
def userinfo=(new_userinfo)
if new_userinfo && !new_userinfo.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_userinfo.class} into String."
end
new_user, new_password = if new_userinfo
[
new_userinfo.to_str.strip[/^(.*):/, 1],
new_userinfo.to_str.strip[/:(.*)$/, 1]
]
else
[nil, nil]
end
# Password assigned first to ensure validity in case of nil
self.password = new_password
self.user = new_user
# Reset dependent values
remove_instance_variable(:@authority) if defined?(@authority)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The host component for this URI.
#
# @return [String] The host component.
def host
return defined?(@host) ? @host : nil
end
##
# The host component for this URI, normalized.
#
# @return [String] The host component, normalized.
def normalized_host
return nil unless self.host
@normalized_host ||= begin
if !self.host.strip.empty?
result = ::Addressable::IDNA.to_ascii(
URI.unencode_component(self.host.strip.downcase)
)
if result =~ /[^\.]\.$/
# Single trailing dots are unnecessary.
result = result[0...-1]
end
result = Addressable::URI.normalize_component(
result,
NormalizeCharacterClasses::HOST
)
result
else
EMPTY_STR.dup
end
end
# All normalized values should be UTF-8
if @normalized_host && !@normalized_host.empty?
@normalized_host.force_encoding(Encoding::UTF_8)
end
@normalized_host
end
##
# Sets the host component for this URI.
#
# @param [String, #to_str] new_host The new host component.
def host=(new_host)
if new_host && !new_host.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_host.class} into String."
end
@host = new_host ? new_host.to_str : nil
# Reset dependent values
remove_instance_variable(:@authority) if defined?(@authority)
remove_instance_variable(:@normalized_host) if defined?(@normalized_host)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# This method is same as URI::Generic#host except
# brackets for IPv6 (and 'IPvFuture') addresses are removed.
#
# @see Addressable::URI#host
#
# @return [String] The hostname for this URI.
def hostname
v = self.host
/\A\[(.*)\]\z/ =~ v ? $1 : v
end
##
# This method is same as URI::Generic#host= except
# the argument can be a bare IPv6 address (or 'IPvFuture').
#
# @see Addressable::URI#host=
#
# @param [String, #to_str] new_hostname The new hostname for this URI.
def hostname=(new_hostname)
if new_hostname &&
(new_hostname.respond_to?(:ipv4?) || new_hostname.respond_to?(:ipv6?))
new_hostname = new_hostname.to_s
elsif new_hostname && !new_hostname.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_hostname.class} into String."
end
v = new_hostname ? new_hostname.to_str : nil
v = "[#{v}]" if /\A\[.*\]\z/ !~ v && /:/ =~ v
self.host = v
end
##
# Returns the top-level domain for this host.
#
# @example
# Addressable::URI.parse("http://www.example.co.uk").tld # => "co.uk"
def tld
PublicSuffix.parse(self.host, ignore_private: true).tld
end
##
# Sets the top-level domain for this URI.
#
# @param [String, #to_str] new_tld The new top-level domain.
def tld=(new_tld)
replaced_tld = host.sub(/#{tld}\z/, new_tld)
self.host = PublicSuffix::Domain.new(replaced_tld).to_s
end
##
# Returns the public suffix domain for this host.
#
# @example
# Addressable::URI.parse("http://www.example.co.uk").domain # => "example.co.uk"
def domain
PublicSuffix.domain(self.host, ignore_private: true)
end
##
# The authority component for this URI.
# Combines the user, password, host, and port components.
#
# @return [String] The authority component.
def authority
self.host && @authority ||= begin
authority = String.new
if self.userinfo != nil
authority << "#{self.userinfo}@"
end
authority << self.host
if self.port != nil
authority << ":#{self.port}"
end
authority
end
end
##
# The authority component for this URI, normalized.
#
# @return [String] The authority component, normalized.
def normalized_authority
return nil unless self.authority
@normalized_authority ||= begin
authority = String.new
if self.normalized_userinfo != nil
authority << "#{self.normalized_userinfo}@"
end
authority << self.normalized_host
if self.normalized_port != nil
authority << ":#{self.normalized_port}"
end
authority
end
# All normalized values should be UTF-8
if @normalized_authority
@normalized_authority.force_encoding(Encoding::UTF_8)
end
@normalized_authority
end
##
# Sets the authority component for this URI.
#
# @param [String, #to_str] new_authority The new authority component.
def authority=(new_authority)
if new_authority
if !new_authority.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_authority.class} into String."
end
new_authority = new_authority.to_str
new_userinfo = new_authority[/^([^\[\]]*)@/, 1]
if new_userinfo
new_user = new_userinfo.strip[/^([^:]*):?/, 1]
new_password = new_userinfo.strip[/:(.*)$/, 1]
end
new_host = new_authority.sub(
/^([^\[\]]*)@/, EMPTY_STR
).sub(
/:([^:@\[\]]*?)$/, EMPTY_STR
)
new_port =
new_authority[/:([^:@\[\]]*?)$/, 1]
end
# Password assigned first to ensure validity in case of nil
self.password = defined?(new_password) ? new_password : nil
self.user = defined?(new_user) ? new_user : nil
self.host = defined?(new_host) ? new_host : nil
self.port = defined?(new_port) ? new_port : nil
# Reset dependent values
remove_instance_variable(:@userinfo) if defined?(@userinfo)
remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The origin for this URI, serialized to ASCII, as per
# RFC 6454, section 6.2.
#
# @return [String] The serialized origin.
def origin
if self.scheme && self.authority
if self.normalized_port
"#{self.normalized_scheme}://#{self.normalized_host}" +
":#{self.normalized_port}"
else
"#{self.normalized_scheme}://#{self.normalized_host}"
end
else
"null"
end
end
##
# Sets the origin for this URI, serialized to ASCII, as per
# RFC 6454, section 6.2. This assignment will reset the `userinfo`
# component.
#
# @param [String, #to_str] new_origin The new origin component.
def origin=(new_origin)
if new_origin
if !new_origin.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_origin.class} into String."
end
new_origin = new_origin.to_str
new_scheme = new_origin[/^([^:\/?#]+):\/\//, 1]
unless new_scheme
raise InvalidURIError, 'An origin cannot omit the scheme.'
end
new_host = new_origin[/:\/\/([^\/?#:]+)/, 1]
unless new_host
raise InvalidURIError, 'An origin cannot omit the host.'
end
new_port = new_origin[/:([^:@\[\]\/]*?)$/, 1]
end
self.scheme = defined?(new_scheme) ? new_scheme : nil
self.host = defined?(new_host) ? new_host : nil
self.port = defined?(new_port) ? new_port : nil
self.userinfo = nil
# Reset dependent values
remove_instance_variable(:@userinfo) if defined?(@userinfo)
remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
remove_instance_variable(:@authority) if defined?(@authority)
remove_instance_variable(:@normalized_authority) if defined?(@normalized_authority)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
# Returns an array of known ip-based schemes. These schemes typically
# use a similar URI form:
# <code>//<user>:<password>@<host>:<port>/<url-path></code>
def self.ip_based_schemes
return self.port_mapping.keys
end
# Returns a hash of common IP-based schemes and their default port
# numbers. Adding new schemes to this hash, as necessary, will allow
# for better URI normalization.
def self.port_mapping
PORT_MAPPING
end
##
# The port component for this URI.
# This is the port number actually given in the URI. This does not
# infer port numbers from default values.
#
# @return [Integer] The port component.
def port
return defined?(@port) ? @port : nil
end
##
# The port component for this URI, normalized.
#
# @return [Integer] The port component, normalized.
def normalized_port
return nil unless self.port
return @normalized_port if defined?(@normalized_port)
@normalized_port ||= begin
if URI.port_mapping[self.normalized_scheme] == self.port
nil
else
self.port
end
end
end
##
# Sets the port component for this URI.
#
# @param [String, Integer, #to_s] new_port The new port component.
def port=(new_port)
if new_port != nil && new_port.respond_to?(:to_str)
new_port = Addressable::URI.unencode_component(new_port.to_str)
end
if new_port.respond_to?(:valid_encoding?) && !new_port.valid_encoding?
raise InvalidURIError, "Invalid encoding in port"
end
if new_port != nil && !(new_port.to_s =~ /^\d+$/)
raise InvalidURIError,
"Invalid port number: #{new_port.inspect}"
end
@port = new_port.to_s.to_i
@port = nil if @port == 0
# Reset dependent values
remove_instance_variable(:@authority) if defined?(@authority)
remove_instance_variable(:@normalized_port) if defined?(@normalized_port)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The inferred port component for this URI.
# This method will normalize to the default port for the URI's scheme if
# the port isn't explicitly specified in the URI.
#
# @return [Integer] The inferred port component.
def inferred_port
if self.port.to_i == 0
self.default_port
else
self.port.to_i
end
end
##
# The default port for this URI's scheme.
# This method will always returns the default port for the URI's scheme
# regardless of the presence of an explicit port in the URI.
#
# @return [Integer] The default port.
def default_port
URI.port_mapping[self.scheme.strip.downcase] if self.scheme
end
##
# The combination of components that represent a site.
# Combines the scheme, user, password, host, and port components.
# Primarily useful for HTTP and HTTPS.
#
# For example, <code>"http://example.com/path?query"</code> would have a
# <code>site</code> value of <code>"http://example.com"</code>.
#
# @return [String] The components that identify a site.
def site
(self.scheme || self.authority) && @site ||= begin
site_string = "".dup
site_string << "#{self.scheme}:" if self.scheme != nil
site_string << "//#{self.authority}" if self.authority != nil
site_string
end
end
##
# The normalized combination of components that represent a site.
# Combines the scheme, user, password, host, and port components.
# Primarily useful for HTTP and HTTPS.
#
# For example, <code>"http://example.com/path?query"</code> would have a
# <code>site</code> value of <code>"http://example.com"</code>.
#
# @return [String] The normalized components that identify a site.
def normalized_site
return nil unless self.site
@normalized_site ||= begin
site_string = "".dup
if self.normalized_scheme != nil
site_string << "#{self.normalized_scheme}:"
end
if self.normalized_authority != nil
site_string << "//#{self.normalized_authority}"
end
site_string
end
# All normalized values should be UTF-8
@normalized_site.force_encoding(Encoding::UTF_8) if @normalized_site
@normalized_site
end
##
# Sets the site value for this URI.
#
# @param [String, #to_str] new_site The new site value.
def site=(new_site)
if new_site
if !new_site.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_site.class} into String."
end
new_site = new_site.to_str
# These two regular expressions derived from the primary parsing
# expression
self.scheme = new_site[/^(?:([^:\/?#]+):)?(?:\/\/(?:[^\/?#]*))?$/, 1]
self.authority = new_site[
/^(?:(?:[^:\/?#]+):)?(?:\/\/([^\/?#]*))?$/, 1
]
else
self.scheme = nil
self.authority = nil
end
end
##
# The path component for this URI.
#
# @return [String] The path component.
def path
return defined?(@path) ? @path : EMPTY_STR
end
NORMPATH = /^(?!\/)[^\/:]*:.*$/
##
# The path component for this URI, normalized.
#
# @return [String] The path component, normalized.
def normalized_path
@normalized_path ||= begin
path = self.path.to_s
if self.scheme == nil && path =~ NORMPATH
# Relative paths with colons in the first segment are ambiguous.
path = path.sub(":", "%2F")
end
# String#split(delimeter, -1) uses the more strict splitting behavior
# found by default in Python.
result = path.strip.split(SLASH, -1).map do |segment|
Addressable::URI.normalize_component(
segment,
Addressable::URI::NormalizeCharacterClasses::PCHAR
)
end.join(SLASH)
result = URI.normalize_path(result)
if result.empty? &&
["http", "https", "ftp", "tftp"].include?(self.normalized_scheme)
result = SLASH.dup
end
result
end
# All normalized values should be UTF-8
@normalized_path.force_encoding(Encoding::UTF_8) if @normalized_path
@normalized_path
end
##
# Sets the path component for this URI.
#
# @param [String, #to_str] new_path The new path component.
def path=(new_path)
if new_path && !new_path.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_path.class} into String."
end
@path = (new_path || EMPTY_STR).to_str
if !@path.empty? && @path[0..0] != SLASH && host != nil
@path = "/#{@path}"
end
# Reset dependent values
remove_instance_variable(:@normalized_path) if defined?(@normalized_path)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# The basename, if any, of the file in the path component.
#
# @return [String] The path's basename.
def basename
# Path cannot be nil
return File.basename(self.path).sub(/;[^\/]*$/, EMPTY_STR)
end
##
# The extname, if any, of the file in the path component.
# Empty string if there is no extension.
#
# @return [String] The path's extname.
def extname
return nil unless self.path
return File.extname(self.basename)
end
##
# The query component for this URI.
#
# @return [String] The query component.
def query
return defined?(@query) ? @query : nil
end
##
# The query component for this URI, normalized.
#
# @return [String] The query component, normalized.
def normalized_query(*flags)
return nil unless self.query
return @normalized_query if defined?(@normalized_query)
@normalized_query ||= begin
modified_query_class = Addressable::URI::CharacterClasses::QUERY.dup
# Make sure possible key-value pair delimiters are escaped.
modified_query_class.sub!("\\&", "").sub!("\\;", "")
pairs = (query || "").split("&", -1)
pairs.delete_if(&:empty?).uniq! if flags.include?(:compacted)
pairs.sort! if flags.include?(:sorted)
component = pairs.map do |pair|
Addressable::URI.normalize_component(
pair,
Addressable::URI::NormalizeCharacterClasses::QUERY,
"+"
)
end.join("&")
component == "" ? nil : component
end
# All normalized values should be UTF-8
@normalized_query.force_encoding(Encoding::UTF_8) if @normalized_query
@normalized_query
end
##
# Sets the query component for this URI.
#
# @param [String, #to_str] new_query The new query component.
def query=(new_query)
if new_query && !new_query.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_query.class} into String."
end
@query = new_query ? new_query.to_str : nil
# Reset dependent values
remove_instance_variable(:@normalized_query) if defined?(@normalized_query)
remove_composite_values
end
##
# Converts the query component to a Hash value.
#
# @param [Class] return_type The return type desired. Value must be either
# `Hash` or `Array`.
#
# @return [Hash, Array, nil] The query string parsed as a Hash or Array
# or nil if the query string is blank.
#
# @example
# Addressable::URI.parse("?one=1&two=2&three=3").query_values
# #=> {"one" => "1", "two" => "2", "three" => "3"}
# Addressable::URI.parse("?one=two&one=three").query_values(Array)
# #=> [["one", "two"], ["one", "three"]]
# Addressable::URI.parse("?one=two&one=three").query_values(Hash)
# #=> {"one" => "three"}
# Addressable::URI.parse("?").query_values
# #=> {}
# Addressable::URI.parse("").query_values
# #=> nil
def query_values(return_type=Hash)
empty_accumulator = Array == return_type ? [] : {}
if return_type != Hash && return_type != Array
raise ArgumentError, "Invalid return type. Must be Hash or Array."
end
return nil if self.query == nil
split_query = self.query.split("&").map do |pair|
pair.split("=", 2) if pair && !pair.empty?
end.compact
return split_query.inject(empty_accumulator.dup) do |accu, pair|
# I'd rather use key/value identifiers instead of array lookups,
# but in this case I really want to maintain the exact pair structure,
# so it's best to make all changes in-place.
pair[0] = URI.unencode_component(pair[0])
if pair[1].respond_to?(:to_str)
value = pair[1].to_str
# I loathe the fact that I have to do this. Stupid HTML 4.01.
# Treating '+' as a space was just an unbelievably bad idea.
# There was nothing wrong with '%20'!
# If it ain't broke, don't fix it!
value = value.tr("+", " ") if ["http", "https", nil].include?(scheme)
pair[1] = URI.unencode_component(value)
end
if return_type == Hash
accu[pair[0]] = pair[1]
else
accu << pair
end
accu
end
end
##
# Sets the query component for this URI from a Hash object.
# An empty Hash or Array will result in an empty query string.
#
# @param [Hash, #to_hash, Array] new_query_values The new query values.
#
# @example
# uri.query_values = {:a => "a", :b => ["c", "d", "e"]}
# uri.query
# # => "a=a&b=c&b=d&b=e"
# uri.query_values = [['a', 'a'], ['b', 'c'], ['b', 'd'], ['b', 'e']]
# uri.query
# # => "a=a&b=c&b=d&b=e"
# uri.query_values = [['a', 'a'], ['b', ['c', 'd', 'e']]]
# uri.query
# # => "a=a&b=c&b=d&b=e"
# uri.query_values = [['flag'], ['key', 'value']]
# uri.query
# # => "flag&key=value"
def query_values=(new_query_values)
if new_query_values == nil
self.query = nil
return nil
end
if !new_query_values.is_a?(Array)
if !new_query_values.respond_to?(:to_hash)
raise TypeError,
"Can't convert #{new_query_values.class} into Hash."
end
new_query_values = new_query_values.to_hash
new_query_values = new_query_values.map do |key, value|
key = key.to_s if key.kind_of?(Symbol)
[key, value]
end
# Useful default for OAuth and caching.
# Only to be used for non-Array inputs. Arrays should preserve order.
new_query_values.sort!
end
# new_query_values have form [['key1', 'value1'], ['key2', 'value2']]
buffer = "".dup
new_query_values.each do |key, value|
encoded_key = URI.encode_component(
key, CharacterClasses::UNRESERVED
)
if value == nil
buffer << "#{encoded_key}&"
elsif value.kind_of?(Array)
value.each do |sub_value|
encoded_value = URI.encode_component(
sub_value, CharacterClasses::UNRESERVED
)
buffer << "#{encoded_key}=#{encoded_value}&"
end
else
encoded_value = URI.encode_component(
value, CharacterClasses::UNRESERVED
)
buffer << "#{encoded_key}=#{encoded_value}&"
end
end
self.query = buffer.chop
end
##
# The HTTP request URI for this URI. This is the path and the
# query string.
#
# @return [String] The request URI required for an HTTP request.
def request_uri
return nil if self.absolute? && self.scheme !~ /^https?$/i
return (
(!self.path.empty? ? self.path : SLASH) +
(self.query ? "?#{self.query}" : EMPTY_STR)
)
end
##
# Sets the HTTP request URI for this URI.
#
# @param [String, #to_str] new_request_uri The new HTTP request URI.
def request_uri=(new_request_uri)
if !new_request_uri.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_request_uri.class} into String."
end
if self.absolute? && self.scheme !~ /^https?$/i
raise InvalidURIError,
"Cannot set an HTTP request URI for a non-HTTP URI."
end
new_request_uri = new_request_uri.to_str
path_component = new_request_uri[/^([^\?]*)\??(?:.*)$/, 1]
query_component = new_request_uri[/^(?:[^\?]*)\?(.*)$/, 1]
path_component = path_component.to_s
path_component = (!path_component.empty? ? path_component : SLASH)
self.path = path_component
self.query = query_component
# Reset dependent values
remove_composite_values
end
##
# The fragment component for this URI.
#
# @return [String] The fragment component.
def fragment
return defined?(@fragment) ? @fragment : nil
end
##
# The fragment component for this URI, normalized.
#
# @return [String] The fragment component, normalized.
def normalized_fragment
return nil unless self.fragment
return @normalized_fragment if defined?(@normalized_fragment)
@normalized_fragment ||= begin
component = Addressable::URI.normalize_component(
self.fragment,
Addressable::URI::NormalizeCharacterClasses::FRAGMENT
)
component == "" ? nil : component
end
# All normalized values should be UTF-8
if @normalized_fragment
@normalized_fragment.force_encoding(Encoding::UTF_8)
end
@normalized_fragment
end
##
# Sets the fragment component for this URI.
#
# @param [String, #to_str] new_fragment The new fragment component.
def fragment=(new_fragment)
if new_fragment && !new_fragment.respond_to?(:to_str)
raise TypeError, "Can't convert #{new_fragment.class} into String."
end
@fragment = new_fragment ? new_fragment.to_str : nil
# Reset dependent values
remove_instance_variable(:@normalized_fragment) if defined?(@normalized_fragment)
remove_composite_values
# Ensure we haven't created an invalid URI
validate()
end
##
# Determines if the scheme indicates an IP-based protocol.
#
# @return [TrueClass, FalseClass]
# <code>true</code> if the scheme indicates an IP-based protocol.
# <code>false</code> otherwise.
def ip_based?
if self.scheme
return URI.ip_based_schemes.include?(
self.scheme.strip.downcase)
end
return false
end
##
# Determines if the URI is relative.
#
# @return [TrueClass, FalseClass]
# <code>true</code> if the URI is relative. <code>false</code>
# otherwise.
def relative?
return self.scheme.nil?
end
##
# Determines if the URI is absolute.
#
# @return [TrueClass, FalseClass]
# <code>true</code> if the URI is absolute. <code>false</code>
# otherwise.
def absolute?
return !relative?
end
##
# Joins two URIs together.
#
# @param [String, Addressable::URI, #to_str] The URI to join with.
#
# @return [Addressable::URI] The joined URI.
def join(uri)
if !uri.respond_to?(:to_str)
raise TypeError, "Can't convert #{uri.class} into String."
end
if !uri.kind_of?(URI)
# Otherwise, convert to a String, then parse.
uri = URI.parse(uri.to_str)
end
if uri.to_s.empty?
return self.dup
end
joined_scheme = nil
joined_user = nil
joined_password = nil
joined_host = nil
joined_port = nil
joined_path = nil
joined_query = nil
joined_fragment = nil
# Section 5.2.2 of RFC 3986
if uri.scheme != nil
joined_scheme = uri.scheme
joined_user = uri.user
joined_password = uri.password
joined_host = uri.host
joined_port = uri.port
joined_path = URI.normalize_path(uri.path)
joined_query = uri.query
else
if uri.authority != nil
joined_user = uri.user
joined_password = uri.password
joined_host = uri.host
joined_port = uri.port
joined_path = URI.normalize_path(uri.path)
joined_query = uri.query
else
if uri.path == nil || uri.path.empty?
joined_path = self.path
if uri.query != nil
joined_query = uri.query
else
joined_query = self.query
end
else
if uri.path[0..0] == SLASH
joined_path = URI.normalize_path(uri.path)
else
base_path = self.path.dup
base_path = EMPTY_STR if base_path == nil
base_path = URI.normalize_path(base_path)
# Section 5.2.3 of RFC 3986
#
# Removes the right-most path segment from the base path.
if base_path.include?(SLASH)
base_path.sub!(/\/[^\/]+$/, SLASH)
else
base_path = EMPTY_STR
end
# If the base path is empty and an authority segment has been
# defined, use a base path of SLASH
if base_path.empty? && self.authority != nil
base_path = SLASH
end
joined_path = URI.normalize_path(base_path + uri.path)
end
joined_query = uri.query
end
joined_user = self.user
joined_password = self.password
joined_host = self.host
joined_port = self.port
end
joined_scheme = self.scheme
end
joined_fragment = uri.fragment
return self.class.new(
:scheme => joined_scheme,
:user => joined_user,
:password => joined_password,
:host => joined_host,
:port => joined_port,
:path => joined_path,
:query => joined_query,
:fragment => joined_fragment
)
end
alias_method :+, :join
##
# Destructive form of <code>join</code>.
#
# @param [String, Addressable::URI, #to_str] The URI to join with.
#
# @return [Addressable::URI] The joined URI.
#
# @see Addressable::URI#join
def join!(uri)
replace_self(self.join(uri))
end
##
# Merges a URI with a <code>Hash</code> of components.
# This method has different behavior from <code>join</code>. Any
# components present in the <code>hash</code> parameter will override the
# original components. The path component is not treated specially.
#
# @param [Hash, Addressable::URI, #to_hash] The components to merge with.
#
# @return [Addressable::URI] The merged URI.
#
# @raise [TypeError] If the argument cannot be converted to a Hash.
# @raise [ArgumentError] If a composite component (authority, userinfo)
#   is supplied together with one of its sub-components.
#
# @see Hash#merge
def merge(hash)
  unless hash.respond_to?(:to_hash)
    raise TypeError, "Can't convert #{hash.class} into Hash."
  end
  hash = hash.to_hash

  # Composite components may not be combined with their own
  # sub-components; the result would be ambiguous.
  if hash.has_key?(:authority) &&
      (hash.keys & [:userinfo, :user, :password, :host, :port]).any?
    raise ArgumentError,
      "Cannot specify both an authority and any of the components " +
      "within the authority."
  end
  if hash.has_key?(:userinfo) && (hash.keys & [:user, :password]).any?
    raise ArgumentError,
      "Cannot specify both a userinfo and either the user or password."
  end

  uri = self.class.new
  uri.defer_validation do
    # Assignment order matters here because userinfo and authority are
    # composites of the user/password/host/port components.
    uri.scheme = hash.has_key?(:scheme) ? hash[:scheme] : self.scheme
    # Within these guarded branches the key is known to be present, so
    # the value can be assigned directly.
    uri.authority = hash[:authority] if hash.has_key?(:authority)
    uri.userinfo = hash[:userinfo] if hash.has_key?(:userinfo)
    if !hash.has_key?(:userinfo) && !hash.has_key?(:authority)
      uri.user = hash.has_key?(:user) ? hash[:user] : self.user
      uri.password = hash.has_key?(:password) ? hash[:password] : self.password
    end
    if !hash.has_key?(:authority)
      uri.host = hash.has_key?(:host) ? hash[:host] : self.host
      uri.port = hash.has_key?(:port) ? hash[:port] : self.port
    end
    uri.path = hash.has_key?(:path) ? hash[:path] : self.path
    uri.query = hash.has_key?(:query) ? hash[:query] : self.query
    uri.fragment = hash.has_key?(:fragment) ? hash[:fragment] : self.fragment
  end
  return uri
end
##
# Destructive form of <code>merge</code>.
#
# @param [Hash, Addressable::URI, #to_hash] The components to merge with.
#
# @return [Addressable::URI] The merged URI.
#
# @see Addressable::URI#merge
def merge!(uri)
  # Merge into a new object, then copy its state into self.
  replace_self(merge(uri))
end
##
# Returns the shortest normalized relative form of this URI that uses the
# supplied URI as a base for resolution. Returns an absolute URI if
# necessary. This is effectively the opposite of <code>route_to</code>.
#
# @param [String, Addressable::URI, #to_str] uri The URI to route from.
#
# @return [Addressable::URI]
#   The normalized relative URI that is equivalent to the original URI.
#
# @raise [ArgumentError] If either URI is relative; routing is only
#   defined between absolute URIs.
def route_from(uri)
  uri = URI.parse(uri).normalize
  normalized_self = self.normalize
  if normalized_self.relative?
    raise ArgumentError, "Expected absolute URI, got: #{self.to_s}"
  end
  if uri.relative?
    raise ArgumentError, "Expected absolute URI, got: #{uri.to_s}"
  end
  # Identical URIs route to a bare fragment reference.
  if normalized_self == uri
    return Addressable::URI.parse("##{normalized_self.fragment}")
  end
  components = normalized_self.to_hash
  # Null out each component that matches the base, outermost first;
  # each deeper component may only be dropped if all enclosing ones
  # matched (scheme -> authority -> path -> query).
  if normalized_self.scheme == uri.scheme
    components[:scheme] = nil
    if normalized_self.authority == uri.authority
      components[:user] = nil
      components[:password] = nil
      components[:host] = nil
      components[:port] = nil
      if normalized_self.path == uri.path
        components[:path] = nil
        if normalized_self.query == uri.query
          components[:query] = nil
        end
      else
        # Paths differ: build a relative path using ".." segments for
        # the part of the base path that does not match.
        if uri.path != SLASH and components[:path]
          self_splitted_path = split_path(components[:path])
          uri_splitted_path = split_path(uri.path)
          self_dir = self_splitted_path.shift
          uri_dir = uri_splitted_path.shift
          # Consume the common leading directory segments.
          while !self_splitted_path.empty? && !uri_splitted_path.empty? and self_dir == uri_dir
            self_dir = self_splitted_path.shift
            uri_dir = uri_splitted_path.shift
          end
          components[:path] = (uri_splitted_path.fill('..') + [self_dir] + self_splitted_path).join(SLASH)
        end
      end
    end
  end
  # Avoid network-path references.
  if components[:host] != nil
    components[:scheme] = normalized_self.scheme
  end
  return Addressable::URI.new(
    :scheme => components[:scheme],
    :user => components[:user],
    :password => components[:password],
    :host => components[:host],
    :port => components[:port],
    :path => components[:path],
    :query => components[:query],
    :fragment => components[:fragment]
  )
end
##
# Returns the shortest normalized relative form of the supplied URI that
# uses this URI as a base for resolution. Returns an absolute URI if
# necessary. This is effectively the opposite of <code>route_from</code>.
#
# @param [String, Addressable::URI, #to_str] uri The URI to route to.
#
# @return [Addressable::URI]
#   The normalized relative URI that is equivalent to the supplied URI.
def route_to(uri)
  # Delegates to route_from with the roles of base and target swapped.
  URI.parse(uri).route_from(self)
end
##
# Returns a normalized URI object.
#
# NOTE: This method does not attempt to fully conform to specifications.
# It exists largely to correct other people's failures to read the
# specifications, and also to deal with caching issues since several
# different URIs may represent the same resource and should not be
# cached multiple times.
#
# @return [Addressable::URI] The normalized URI.
def normalize
  # This is a special exception for the frequently misused feed
  # URI scheme. "feed:http://..." (with any number of slashes) is
  # unwrapped to the inner http URI and normalized recursively.
  if normalized_scheme == "feed"
    if self.to_s =~ /^feed:\/*http:\/*/
      return URI.parse(
        self.to_s[/^feed:\/*(http:\/*.*)/, 1]
      ).normalize
    end
  end
  # Rebuild from the per-component normalized accessors; authority is
  # used rather than user/password/host/port so userinfo is normalized
  # as a unit.
  return self.class.new(
    :scheme => normalized_scheme,
    :authority => normalized_authority,
    :path => normalized_path,
    :query => normalized_query,
    :fragment => normalized_fragment
  )
end
##
# Destructively normalizes this URI object.
#
# @return [Addressable::URI] The normalized URI.
#
# @see Addressable::URI#normalize
def normalize!
  # Normalize into a new object, then copy its state into self.
  replace_self(normalize)
end
##
# Creates a URI suitable for display to users. If semantic attacks are
# likely, the application should try to detect these and warn the user.
# See <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>,
# section 7.6 for more information.
#
# @return [Addressable::URI] A URI suitable for display purposes.
def display_uri
  # Normalize first, then convert any punycode hostname back to its
  # Unicode representation for human consumption.
  shown = normalize
  shown.host = ::Addressable::IDNA.to_unicode(shown.host)
  shown
end
##
# Returns <code>true</code> if the URI objects are equal. This method
# normalizes both URIs before doing the comparison, and allows comparison
# against <code>Strings</code>.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URIs are equivalent, <code>false</code>
#   otherwise.
def ===(uri)
  # Obtain a normalized string form of the other object, parsing it
  # first when it is not already URI-like.
  other_string =
    if uri.respond_to?(:normalize)
      uri.normalize.to_s
    else
      begin
        ::Addressable::URI.parse(uri).normalize.to_s
      rescue InvalidURIError, TypeError
        # Unparseable values simply compare unequal.
        return false
      end
    end
  self.normalize.to_s == other_string
end
##
# Returns <code>true</code> if the URI objects are equal. This method
# normalizes both URIs before doing the comparison.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URIs are equivalent, <code>false</code>
#   otherwise.
def ==(uri)
  # Only URI instances can compare equal; normalized string forms must
  # match exactly.
  uri.kind_of?(URI) && self.normalize.to_s == uri.normalize.to_s
end
##
# Returns <code>true</code> if the URI objects are equal. This method
# does NOT normalize either URI before doing the comparison.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URIs are equivalent, <code>false</code>
#   otherwise.
def eql?(uri)
  # Strict comparison: exact string forms, no normalization.
  uri.kind_of?(URI) && self.to_s == uri.to_s
end
##
# A hash value that will make a URI equivalent to its normalized
# form.
#
# @return [Integer] A hash of the URI.
def hash
  # Memoized; negated so the value differs from a plain String hash.
  @hash ||= -self.to_s.hash
end
##
# Clones the URI object.
#
# @return [Addressable::URI] The cloned URI.
def dup
  # Each string component is duplicated (when present) so mutating the
  # copy cannot affect the original; the port is an Integer or nil and
  # needs no duplication.
  self.class.new(
    :scheme => self.scheme&.dup,
    :user => self.user&.dup,
    :password => self.password&.dup,
    :host => self.host&.dup,
    :port => self.port,
    :path => self.path&.dup,
    :query => self.query&.dup,
    :fragment => self.fragment&.dup
  )
end
##
# Omits components from a URI.
#
# @param [Symbol] *components The components to be omitted.
#
# @return [Addressable::URI] The URI with components omitted.
#
# @raise [ArgumentError] If an unknown component name is supplied.
#
# @example
#   uri = Addressable::URI.parse("http://example.com/path?query")
#   #=> #<Addressable::URI:0xcc5e7a URI:http://example.com/path?query>
#   uri.omit(:scheme, :authority)
#   #=> #<Addressable::URI:0xcc4d86 URI:/path?query>
def omit(*components)
  # Reject anything that is not a recognized component name.
  invalid_components = components - [
    :scheme, :user, :password, :userinfo, :host, :port, :authority,
    :path, :query, :fragment
  ]
  unless invalid_components.empty?
    raise ArgumentError,
      "Invalid component names: #{invalid_components.inspect}."
  end
  duplicated_uri = self.dup
  # Validation is deferred because nulling components one at a time can
  # pass through transiently invalid states.
  duplicated_uri.defer_validation do
    components.each do |component|
      # Dispatch to the matching setter, e.g. :host -> host=(nil).
      duplicated_uri.send((component.to_s + "=").to_sym, nil)
    end
    # NOTE(review): re-assigning user to its normalized form appears to
    # force recomposition of the userinfo/authority composites after the
    # omissions — confirm against the user=/normalized_user definitions,
    # which are outside this chunk.
    duplicated_uri.user = duplicated_uri.normalized_user
  end
  duplicated_uri
end
##
# Destructive form of omit.
#
# @param [Symbol] *components The components to be omitted.
#
# @return [Addressable::URI] The URI with components omitted.
#
# @see Addressable::URI#omit
def omit!(*components)
  # Build the reduced URI non-destructively, then adopt its state.
  replace_self(omit(*components))
end
##
# Determines if the URI is an empty string.
#
# @return [TrueClass, FalseClass]
#   Returns <code>true</code> if empty, <code>false</code> otherwise.
def empty?
  to_s.empty?
end
##
# Converts the URI to a <code>String</code>.
#
# @return [String] The URI's <code>String</code> representation.
#
# @raise [InvalidURIError] If the URI has no scheme and its path matches
#   NORMPATH (defined elsewhere in this class), making the string form
#   ambiguous to re-parse.
def to_s
  if self.scheme == nil && self.path != nil && !self.path.empty? &&
      self.path =~ NORMPATH
    raise InvalidURIError,
      "Cannot assemble URI string with ambiguous path: '#{self.path}'"
  end
  # Memoized; #remove_composite_values clears @uri_string when a
  # component changes.
  @uri_string ||= begin
    uri_string = String.new
    uri_string << "#{self.scheme}:" if self.scheme != nil
    uri_string << "//#{self.authority}" if self.authority != nil
    uri_string << self.path.to_s
    uri_string << "?#{self.query}" if self.query != nil
    uri_string << "##{self.fragment}" if self.fragment != nil
    uri_string.force_encoding(Encoding::UTF_8)
    uri_string
  end
end
##
# URI's are glorified <code>Strings</code>. Allow implicit conversion.
alias_method :to_str, :to_s
##
# Returns a Hash of the URI components.
#
# @return [Hash] The URI as a <code>Hash</code> of components.
def to_hash
  # Assemble the component map explicitly, preserving the conventional
  # scheme-to-fragment ordering.
  components = {}
  components[:scheme] = self.scheme
  components[:user] = self.user
  components[:password] = self.password
  components[:host] = self.host
  components[:port] = self.port
  components[:path] = self.path
  components[:query] = self.query
  components[:fragment] = self.fragment
  components
end
##
# Returns a <code>String</code> representation of the URI object's state.
#
# @return [String] The URI object's state, as a <code>String</code>.
def inspect
  # format is an alias of sprintf; shows class name, object id, and the
  # assembled URI string.
  format("#<%s:%#0x URI:%s>", URI.to_s, object_id, to_s)
end
##
# This method allows you to make several changes to a URI simultaneously,
# which separately would cause validation errors, but in conjunction,
# are valid. The URI will be revalidated as soon as the entire block has
# been executed.
#
# If the block raises, the exception propagates and validation is NOT
# performed, but the deferred flag is always cleared so the instance
# cannot be left permanently unvalidated.
#
# @param [Proc] block
#   A set of operations to perform on a given URI.
#
# @raise [LocalJumpError] If no block is given.
#
# @return [NilClass]
def defer_validation
  raise LocalJumpError, "No block given." unless block_given?
  @validation_deferred = true
  yield
  @validation_deferred = false
  validate
  return nil
ensure
  # Bug fix: previously an exception inside the block left
  # @validation_deferred set to true, silently disabling all future
  # validation on this instance.
  @validation_deferred = false
end
protected

# Literal path segments used by .normalize_path.
SELF_REF = '.'
PARENT = '..'

# Patterns implementing the "remove_dot_segments" algorithm of
# RFC 3986 section 5.2.4.
RULE_2A = /\/\.\/|\/\.$/
RULE_2B_2C = /\/([^\/]*)\/\.\.\/|\/([^\/]*)\/\.\.$/
RULE_2D = /^\.\.?\/?/
# Non-standard: leading "/." or "/.." sequences (see .normalize_path).
RULE_PREFIXED_PARENT = /^\/\.\.?\/|^(\/\.\.?)+\/?$/
##
# Resolves paths to their simplest form.
#
# Implements the "remove_dot_segments" routine from RFC 3986
# section 5.2.4, applied repeatedly until the path stops changing.
#
# @param [String] path The path to normalize.
#
# @return [String, NilClass] The normalized path, or nil if the input
#   path was nil.
def self.normalize_path(path)
  # Section 5.2.4 of RFC 3986
  return if path.nil?
  normalized_path = path.dup
  loop do
    # `mod` starts nil each iteration; each gsub! returns nil when it
    # made no change, so `mod ||=` records whether ANY rule fired.
    mod ||= normalized_path.gsub!(RULE_2A, SLASH)
    pair = normalized_path.match(RULE_2B_2C)
    if pair
      parent = pair[1]
      current = pair[2]
    else
      parent = nil
      current = nil
    end
    # Rebuild the 2B/2C pattern around the exact matched segment so that
    # "a/../" collapses but "../../" does not.
    regexp = "/#{Regexp.escape(parent.to_s)}/\\.\\./|"
    regexp += "(/#{Regexp.escape(current.to_s)}/\\.\\.$)"
    if pair && ((parent != SELF_REF && parent != PARENT) ||
        (current != SELF_REF && current != PARENT))
      mod ||= normalized_path.gsub!(Regexp.new(regexp), SLASH)
    end
    mod ||= normalized_path.gsub!(RULE_2D, EMPTY_STR)
    # Non-standard, removes prefixed dotted segments from path.
    mod ||= normalized_path.gsub!(RULE_PREFIXED_PARENT, SLASH)
    # Fixed point reached: no rule changed the path this pass.
    break if mod.nil?
  end
  normalized_path
end
##
# Ensures that the URI is valid.
#
# No-op while validation is deferred (see #defer_validation).
#
# @raise [InvalidURIError] If any structural constraint is violated.
#
# @return [NilClass]
def validate
  return if !!@validation_deferred
  # An ip-based scheme needs either a host or a path to be meaningful.
  if self.scheme != nil && self.ip_based? &&
      (self.host == nil || self.host.empty?) &&
      (self.path == nil || self.path.empty?)
    raise InvalidURIError,
      "Absolute URI missing hierarchical segment: '#{self.to_s}'"
  end
  # Userinfo or port without a host cannot be expressed in a URI string.
  if self.host == nil
    if self.port != nil ||
        self.user != nil ||
        self.password != nil
      raise InvalidURIError, "Hostname not supplied: '#{self.to_s}'"
    end
  end
  # RFC 3986: with an authority present the path must be absolute...
  if self.path != nil && !self.path.empty? && self.path[0..0] != SLASH &&
      self.authority != nil
    raise InvalidURIError,
      "Cannot have a relative path with an authority set: '#{self.to_s}'"
  end
  # ...and without one, a leading "//" would be parsed back as an
  # authority, so it is forbidden.
  if self.path != nil && !self.path.empty? &&
      self.path[0..1] == SLASH + SLASH && self.authority == nil
    raise InvalidURIError,
      "Cannot have a path with two leading slashes " +
      "without an authority set: '#{self.to_s}'"
  end
  unreserved = CharacterClasses::UNRESERVED
  sub_delims = CharacterClasses::SUB_DELIMS
  # Reject hosts with characters that can never appear in a reg-name,
  # and validate the contents of IP-literal ("[...]") hosts.
  if !self.host.nil? && (self.host =~ /[<>{}\/\\\?\#\@"[[:space:]]]/ ||
      (self.host[/^\[(.*)\]$/, 1] != nil && self.host[/^\[(.*)\]$/, 1] !~
      Regexp.new("^[#{unreserved}#{sub_delims}:]*$")))
    raise InvalidURIError, "Invalid character in host: '#{self.host.to_s}'"
  end
  return nil
end
##
# Replaces the internal state of self with the specified URI's state.
# Used in destructive operations to avoid massive code repetition.
#
# @param [Addressable::URI] uri The URI to replace <code>self</code> with.
#
# @return [Addressable::URI] <code>self</code>.
def replace_self(uri)
  # Reset dependent values
  # Drop every instance variable except the deferred-validation flag so
  # that stale memoized values (e.g. @uri_string, @hash, normalized
  # component caches) cannot survive the state swap.
  instance_variables.each do |var|
    if instance_variable_defined?(var) && var != :@validation_deferred
      remove_instance_variable(var)
    end
  end
  # Copy over the eight primitive components directly.
  @scheme = uri.scheme
  @user = uri.user
  @password = uri.password
  @host = uri.host
  @port = uri.port
  @path = uri.path
  @query = uri.query
  @fragment = uri.fragment
  return self
end
##
# Splits path string with "/" (slash).
# It is considered that there is empty string after last slash when
# path ends with slash.
#
# @param [String] path The path to split.
#
# @return [Array<String>] An array of parts of path.
def split_path(path)
  # String#split drops a trailing empty field, so restore it explicitly
  # when the path ends with a slash.
  parts = path.split(SLASH)
  parts.push(EMPTY_STR) if path.end_with?(SLASH)
  parts
end
##
# Resets composite values for the entire URI
#
# Clears the memoized string form and hash so they are recomputed after
# a component changes.
#
# @api private
def remove_composite_values
  remove_instance_variable(:@uri_string) if instance_variable_defined?(:@uri_string)
  remove_instance_variable(:@hash) if instance_variable_defined?(:@hash)
end
end
end
Remove redundant colon in `CharacterClasses::AUTHORITY` (#438)
Co-authored-by: Patrik Ragnarsson <0bb9e58cdc407b04c2147ccd2df0fd50d2c9bbe4@starkast.net>
# frozen_string_literal: true
# encoding:utf-8
#--
# Copyright (C) Bob Aman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
require "addressable/version"
require "addressable/idna"
require "public_suffix"
##
# Addressable is a library for processing links and URIs.
module Addressable
##
# This is an implementation of a URI parser based on
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>,
# <a href="http://www.ietf.org/rfc/rfc3987.txt">RFC 3987</a>.
class URI
##
# Raised if something other than a uri is supplied.
# Subclasses StandardError so it is caught by a bare rescue.
class InvalidURIError < StandardError
end
##
# Container for the character classes specified in
# <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# Each constant is a pre-escaped fragment meant to be interpolated into
# a Regexp character class, e.g. /[#{CharacterClasses::PCHAR}]/.
module CharacterClasses
  ALPHA = "a-zA-Z"
  DIGIT = "0-9"
  GEN_DELIMS = "\\:\\/\\?\\#\\[\\]\\@"
  SUB_DELIMS = "\\!\\$\\&\\'\\(\\)\\*\\+\\,\\;\\="
  RESERVED = GEN_DELIMS + SUB_DELIMS
  UNRESERVED = ALPHA + DIGIT + "\\-\\.\\_\\~"
  # pchar from RFC 3986 section 3.3.
  PCHAR = UNRESERVED + SUB_DELIMS + "\\:\\@"
  SCHEME = ALPHA + DIGIT + "\\-\\+\\."
  # Includes "[", ":" and "]" so IP-literal hosts are accepted.
  HOST = UNRESERVED + SUB_DELIMS + "\\[\\:\\]"
  AUTHORITY = PCHAR + "\\[\\]"
  PATH = PCHAR + "\\/"
  QUERY = PCHAR + "\\/\\?"
  FRAGMENT = PCHAR + "\\/\\?"
end
# Negated-match patterns built from CharacterClasses: each Regexp
# matches any single character that must be percent-encoded for the
# corresponding URI component.
module NormalizeCharacterClasses
  HOST = /[^#{CharacterClasses::HOST}]/
  UNRESERVED = /[^#{CharacterClasses::UNRESERVED}]/
  PCHAR = /[^#{CharacterClasses::PCHAR}]/
  SCHEME = /[^#{CharacterClasses::SCHEME}]/
  FRAGMENT = /[^#{CharacterClasses::FRAGMENT}]/
  # Query normalization also re-encodes "%" unless it begins the
  # sequence "%2B"/"%2b" (an encoded plus sign, preserved as-is).
  QUERY = %r{[^a-zA-Z0-9\-\.\_\~\!\$\'\(\)\*\+\,\=\:\@\/\?%]|%(?!2B|2b)}
end
# Frequently used literals, defined once.
SLASH = '/'
EMPTY_STR = ''

# The regular expression from Appendix B of RFC 3986; its capture
# groups split a URI reference into scheme (2), authority (4),
# path (5), query (7) and fragment (9).
URIREGEX = /^(([^:\/?#]+):)?(\/\/([^\/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?$/

# Well-known default ports per scheme.
# NOTE(review): presumably consumed by port/origin normalization so an
# explicit default port compares equal to none — the consuming code is
# outside this chunk; confirm against #normalized_port.
PORT_MAPPING = {
  "http" => 80,
  "https" => 443,
  "ftp" => 21,
  "tftp" => 69,
  "sftp" => 22,
  "ssh" => 22,
  "svn+ssh" => 22,
  "telnet" => 23,
  "nntp" => 119,
  "gopher" => 70,
  "wais" => 210,
  "ldap" => 389,
  "prospero" => 1525
}.freeze
##
# Returns a URI object based on the parsed string.
#
# @param [String, Addressable::URI, #to_str] uri
#   The URI string to parse.
#   No parsing is performed if the object is already an
#   <code>Addressable::URI</code>.
#
# @return [Addressable::URI, NilClass] The parsed URI, or nil when nil
#   was supplied.
#
# @raise [TypeError] If the argument cannot be converted to a String.
def self.parse(uri)
  # If we were given nil, return nil.
  return nil unless uri
  # If a URI object is passed, just return itself.
  return uri.dup if uri.kind_of?(self)

  # If a URI object of the Ruby standard library variety is passed,
  # convert it to a string, then parse the string.
  # We do the check this way because we don't want to accidentally
  # cause a missing constant exception to be thrown.
  if uri.class.name =~ /^URI\b/
    uri = uri.to_s
  end

  # Otherwise, convert to a String
  begin
    uri = uri.to_str
  rescue TypeError, NoMethodError
    raise TypeError, "Can't convert #{uri.class} into String."
  end if not uri.is_a? String

  # This Regexp supplied as an example in RFC 3986, and it works great.
  # Every group is optional, so the pattern matches any string.
  scan = uri.scan(URIREGEX)
  fragments = scan[0]
  scheme = fragments[1]
  authority = fragments[3]
  path = fragments[4]
  query = fragments[6]
  fragment = fragments[8]
  user = nil
  password = nil
  host = nil
  port = nil
  if authority != nil
    # The Regexp above doesn't split apart the authority.
    # userinfo is everything before an "@" that contains no brackets.
    userinfo = authority[/^([^\[\]]*)@/, 1]
    if userinfo != nil
      user = userinfo.strip[/^([^:]*):?/, 1]
      password = userinfo.strip[/:(.*)$/, 1]
    end
    # Host is the authority minus userinfo and any trailing ":port"
    # (bracketed IPv6 literals are left intact by both patterns).
    host = authority.sub(
      /^([^\[\]]*)@/, EMPTY_STR
    ).sub(
      /:([^:@\[\]]*?)$/, EMPTY_STR
    )
    port = authority[/:([^:@\[\]]*?)$/, 1]
  end
  # "http://example.com:" yields an empty port string; treat as absent.
  if port == EMPTY_STR
    port = nil
  end

  return new(
    :scheme => scheme,
    :user => user,
    :password => password,
    :host => host,
    :port => port,
    :path => path,
    :query => query,
    :fragment => fragment
  )
end
##
# Converts an input to a URI. The input does not have to be a valid
# URI — the method will use heuristics to guess what URI was intended.
# This is not standards-compliant, merely user-friendly.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI string to parse.
# No parsing is performed if the object is already an
# <code>Addressable::URI</code>.
# @param [Hash] hints
# A <code>Hash</code> of hints to the heuristic parser.
# Defaults to <code>{:scheme => "http"}</code>.
#
# @return [Addressable::URI] The parsed URI.
def self.heuristic_parse(uri, hints={})
# If we were given nil, return nil.
return nil unless uri
# If a URI object is passed, just return itself.
return uri.dup if uri.kind_of?(self)
# If a URI object of the Ruby standard library variety is passed,
# convert it to a string, then parse the string.
# We do the check this way because we don't want to accidentally
# cause a missing constant exception to be thrown.
if uri.class.name =~ /^URI\b/
uri = uri.to_s
end
if !uri.respond_to?(:to_str)
raise TypeError, "Can't convert #{uri.class} into String."
end
# Otherwise, convert to a String
uri = uri.to_str.dup.strip
hints = {
:scheme => "http"
}.merge(hints)
case uri
when /^http:\//i
uri.sub!(/^http:\/+/i, "http://")
when /^https:\//i
uri.sub!(/^https:\/+/i, "https://")
when /^feed:\/+http:\//i
uri.sub!(/^feed:\/+http:\/+/i, "feed:http://")
when /^feed:\//i
uri.sub!(/^feed:\/+/i, "feed://")
when %r[^file:/{4}]i
uri.sub!(%r[^file:/+]i, "file:////")
when %r[^file://localhost/]i
uri.sub!(%r[^file://localhost/+]i, "file:///")
when %r[^file:/+]i
uri.sub!(%r[^file:/+]i, "file:///")
when /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/
uri.sub!(/^/, hints[:scheme] + "://")
when /\A\d+\..*:\d+\z/
uri = "#{hints[:scheme]}://#{uri}"
end
match = uri.match(URIREGEX)
fragments = match.captures
authority = fragments[3]
if authority && authority.length > 0
new_authority = authority.tr("\\", "/").gsub(" ", "%20")
# NOTE: We want offset 4, not 3!
offset = match.offset(4)
uri = uri.dup
uri[offset[0]...offset[1]] = new_authority
end
parsed = self.parse(uri)
if parsed.scheme =~ /^[^\/?#\.]+\.[^\/?#]+$/
parsed = self.parse(hints[:scheme] + "://" + uri)
end
if parsed.path.include?(".")
if parsed.path[/\b@\b/]
parsed.scheme = "mailto" unless parsed.scheme
elsif new_host = parsed.path[/^([^\/]+\.[^\/]*)/, 1]
parsed.defer_validation do
new_path = parsed.path.sub(
Regexp.new("^" + Regexp.escape(new_host)), EMPTY_STR)
parsed.host = new_host
parsed.path = new_path
parsed.scheme = hints[:scheme] unless parsed.scheme
end
end
end
return parsed
end
##
# Converts a path to a file scheme URI. If the path supplied is
# relative, it will be returned as a relative URI. If the path supplied
# is actually a non-file URI, it will parse the URI as if it had been
# parsed with <code>Addressable::URI.parse</code>. Handles all of the
# various Microsoft-specific formats for specifying paths.
#
# @param [String, Addressable::URI, #to_str] path
# Typically a <code>String</code> path to a file or directory, but
# will return a sensible return value if an absolute URI is supplied
# instead.
#
# @return [Addressable::URI]
# The parsed file scheme URI or the original URI if some other URI
# scheme was provided.
#
# @example
# base = Addressable::URI.convert_path("/absolute/path/")
# uri = Addressable::URI.convert_path("relative/path")
# (base + uri).to_s
# #=> "file:///absolute/path/relative/path"
#
# Addressable::URI.convert_path(
# "c:\\windows\\My Documents 100%20\\foo.txt"
# ).to_s
# #=> "file:///c:/windows/My%20Documents%20100%20/foo.txt"
#
# Addressable::URI.convert_path("http://example.com/").to_s
# #=> "http://example.com/"
def self.convert_path(path)
# If we were given nil, return nil.
return nil unless path
# If a URI object is passed, just return itself.
return path if path.kind_of?(self)
if !path.respond_to?(:to_str)
raise TypeError, "Can't convert #{path.class} into String."
end
# Otherwise, convert to a String
path = path.to_str.strip
path.sub!(/^file:\/?\/?/, EMPTY_STR) if path =~ /^file:\/?\/?/
path = SLASH + path if path =~ /^([a-zA-Z])[\|:]/
uri = self.parse(path)
if uri.scheme == nil
# Adjust windows-style uris
uri.path.sub!(/^\/?([a-zA-Z])[\|:][\\\/]/) do
"/#{$1.downcase}:/"
end
uri.path.tr!("\\", SLASH)
if File.exist?(uri.path) &&
File.stat(uri.path).directory?
uri.path.chomp!(SLASH)
uri.path = uri.path + '/'
end
# If the path is absolute, set the scheme and host.
if uri.path.start_with?(SLASH)
uri.scheme = "file"
uri.host = EMPTY_STR
end
uri.normalize!
end
return uri
end
##
# Joins several URIs together.
#
# @param [String, Addressable::URI, #to_str] *uris
#   The URIs to join.
#
# @return [Addressable::URI] The joined URI.
#
# @raise [TypeError] If any argument cannot be converted to a String.
#
# @example
#   base = "http://example.com/"
#   uri = Addressable::URI.parse("relative/path")
#   Addressable::URI.join(base, uri)
#   #=> #<Addressable::URI:0xcab390 URI:http://example.com/relative/path>
def self.join(*uris)
  uri_objects = uris.collect do |uri|
    unless uri.respond_to?(:to_str)
      raise TypeError, "Can't convert #{uri.class} into String."
    end
    uri.kind_of?(self) ? uri : self.parse(uri.to_str)
  end
  # Fold the remaining URIs into a copy of the first one. `each` is
  # used instead of the previous `for` loop so the loop variable does
  # not leak into the enclosing scope.
  result = uri_objects.shift.dup
  uri_objects.each do |uri|
    result.join!(uri)
  end
  return result
end
##
# Tables used to optimize encoding operations in `self.encode_component`
# and `self.normalize_component`
#
# Memoizing Hash: maps a byte sequence to its lowercase hex digits
# (without percent signs), computed on first access.
SEQUENCE_ENCODING_TABLE = Hash.new do |hash, sequence|
  hash[sequence] = sequence.unpack("C*").map do |c|
    format("%02x", c)
  end.join
end

# Memoizing Hash: maps a byte sequence to its uppercase percent-encoded
# form ("%XX" per byte), computed on first access.
SEQUENCE_UPCASED_PERCENT_ENCODING_TABLE = Hash.new do |hash, sequence|
  hash[sequence] = sequence.unpack("C*").map do |c|
    format("%%%02X", c)
  end.join
end
##
# Percent encodes a URI component.
#
# @param [String, #to_str] component The URI component to encode.
#
# @param [String, Regexp] character_class
#   The characters which are not percent encoded. If a <code>String</code>
#   is passed, the <code>String</code> must be formatted as a regular
#   expression character class. (Do not include the surrounding square
#   brackets.) For example, <code>"b-zB-Z0-9"</code> would cause
#   everything but the letters 'b' through 'z' and the numbers '0' through
#   '9' to be percent encoded. If a <code>Regexp</code> is passed, the
#   value <code>/[^b-zB-Z0-9]/</code> would have the same effect. A set of
#   useful <code>String</code> values may be found in the
#   <code>Addressable::URI::CharacterClasses</code> module. The default
#   value is the reserved plus unreserved character classes specified in
#   <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# @param [Regexp] upcase_encoded
#   A string of characters that may already be percent encoded, and whose
#   encodings should be upcased. This allows normalization of percent
#   encodings for characters not included in the
#   <code>character_class</code>.
#
# @return [String, NilClass] The encoded component, or nil for nil input.
#
# @raise [TypeError] If the component cannot be converted to a String,
#   or character_class is neither a String nor a Regexp.
#
# @example
#   Addressable::URI.encode_component("simple/example", "b-zB-Z0-9")
#   => "simple%2Fex%61mple"
#   Addressable::URI.encode_component("simple/example", /[^b-zB-Z0-9]/)
#   => "simple%2Fex%61mple"
#   Addressable::URI.encode_component(
#     "simple/example", Addressable::URI::CharacterClasses::UNRESERVED
#   )
#   => "simple%2Fexample"
def self.encode_component(component, character_class=
    CharacterClasses::RESERVED + CharacterClasses::UNRESERVED,
    upcase_encoded='')
  return nil if component.nil?

  begin
    # Scalar-like values stringify via to_s; everything else must be
    # string-convertible via to_str.
    if component.kind_of?(Symbol) ||
        component.kind_of?(Numeric) ||
        component.kind_of?(TrueClass) ||
        component.kind_of?(FalseClass)
      component = component.to_s
    else
      component = component.to_str
    end
  rescue TypeError, NoMethodError
    raise TypeError, "Can't convert #{component.class} into String."
  end if !component.is_a? String

  if ![String, Regexp].include?(character_class.class)
    raise TypeError,
      "Expected String or Regexp, got #{character_class.inspect}"
  end
  if character_class.kind_of?(String)
    character_class = /[^#{character_class}]/
  end
  # We can't perform regexps on invalid UTF sequences, but
  # here we need to, so switch to ASCII.
  component = component.dup
  component.force_encoding(Encoding::ASCII_8BIT)
  # Avoiding gsub! because there are edge cases with frozen strings
  component = component.gsub(character_class) do |sequence|
    SEQUENCE_UPCASED_PERCENT_ENCODING_TABLE[sequence]
  end
  if upcase_encoded.length > 0
    # Upcase any pre-existing percent encodings of the listed chars.
    upcase_encoded_chars = upcase_encoded.chars.map do |char|
      SEQUENCE_ENCODING_TABLE[char]
    end
    component = component.gsub(/%(#{upcase_encoded_chars.join('|')})/,
                               &:upcase)
  end
  return component
end

class << self
  alias_method :escape_component, :encode_component
end
##
# Unencodes any percent encoded characters within a URI component.
# This method may be used for unencoding either components or full URIs,
# however, it is recommended to use the <code>unencode_component</code>
# alias when unencoding components.
#
# @param [String, Addressable::URI, #to_str] uri
#   The URI or component to unencode.
#
# @param [Class] return_type
#   The type of object to return.
#   This value may only be set to <code>String</code> or
#   <code>Addressable::URI</code>. All other values are invalid. Defaults
#   to <code>String</code>.
#
# @param [String] leave_encoded
#   A string of characters to leave encoded. If a percent encoded character
#   in this list is encountered then it will remain percent encoded.
#
# @return [String, Addressable::URI]
#   The unencoded component or URI.
#   The return type is determined by the <code>return_type</code>
#   parameter.
#
# @raise [TypeError] If the input cannot be converted to a String, or
#   return_type is not one of the two permitted classes.
def self.unencode(uri, return_type=String, leave_encoded='')
  return nil if uri.nil?

  begin
    uri = uri.to_str
  rescue NoMethodError, TypeError
    raise TypeError, "Can't convert #{uri.class} into String."
  end if !uri.is_a? String
  if ![String, ::Addressable::URI].include?(return_type)
    raise TypeError,
      "Expected Class (String or Addressable::URI), " +
      "got #{return_type.inspect}"
  end

  uri = uri.dup
  # Seriously, only use UTF-8. I'm really not kidding!
  uri.force_encoding("utf-8")

  unless leave_encoded.empty?
    leave_encoded = leave_encoded.dup.force_encoding("utf-8")
  end

  # Decode each %XX triplet unless its character is in leave_encoded.
  result = uri.gsub(/%[0-9a-f]{2}/iu) do |sequence|
    c = sequence[1..3].to_i(16).chr
    c.force_encoding("utf-8")
    leave_encoded.include?(c) ? sequence : c
  end

  result.force_encoding("utf-8")
  if return_type == String
    return result
  elsif return_type == ::Addressable::URI
    return ::Addressable::URI.parse(result)
  end
end

class << self
  alias_method :unescape, :unencode
  alias_method :unencode_component, :unencode
  alias_method :unescape_component, :unencode
end
##
# Normalizes the encoding of a URI component.
#
# @param [String, #to_str] component The URI component to encode.
#
# @param [String, Regexp] character_class
#   The characters which are not percent encoded. If a <code>String</code>
#   is passed, the <code>String</code> must be formatted as a regular
#   expression character class. (Do not include the surrounding square
#   brackets.) For example, <code>"b-zB-Z0-9"</code> would cause
#   everything but the letters 'b' through 'z' and the numbers '0'
#   through '9' to be percent encoded. If a <code>Regexp</code> is passed,
#   the value <code>/[^b-zB-Z0-9]/</code> would have the same effect. A
#   set of useful <code>String</code> values may be found in the
#   <code>Addressable::URI::CharacterClasses</code> module. The default
#   value is the reserved plus unreserved character classes specified in
#   <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>.
#
# @param [String] leave_encoded
#   When <code>character_class</code> is a <code>String</code> then
#   <code>leave_encoded</code> is a string of characters that should remain
#   percent encoded while normalizing the component; if they appear percent
#   encoded in the original component, then they will be upcased ("%2f"
#   normalized to "%2F") but otherwise left alone.
#
# @return [String, NilClass] The normalized component, or nil for nil
#   input.
#
# @raise [TypeError] If the component cannot be converted to a String,
#   or character_class is neither a String nor a Regexp.
#
# @example
#   Addressable::URI.normalize_component("simpl%65/%65xampl%65", "b-zB-Z")
#   => "simple%2Fex%61mple"
#   Addressable::URI.normalize_component(
#     "simpl%65/%65xampl%65", /[^b-zB-Z]/
#   )
#   => "simple%2Fex%61mple"
#   Addressable::URI.normalize_component(
#     "simpl%65/%65xampl%65",
#     Addressable::URI::CharacterClasses::UNRESERVED
#   )
#   => "simple%2Fexample"
#   Addressable::URI.normalize_component(
#     "one%20two%2fthree%26four",
#     "0-9a-zA-Z &/",
#     "/"
#   )
#   => "one two%2Fthree&four"
def self.normalize_component(component, character_class=
    CharacterClasses::RESERVED + CharacterClasses::UNRESERVED,
    leave_encoded='')
  return nil if component.nil?

  begin
    component = component.to_str
  rescue NoMethodError, TypeError
    raise TypeError, "Can't convert #{component.class} into String."
  end if !component.is_a? String

  if ![String, Regexp].include?(character_class.class)
    raise TypeError,
      "Expected String or Regexp, got #{character_class.inspect}"
  end
  if character_class.kind_of?(String)
    # When characters must stay encoded, extend the pattern with a
    # negative lookahead so "%" is re-encoded unless it begins one of
    # their %XX sequences (either case).
    leave_re = if leave_encoded.length > 0
      character_class = "#{character_class}%" unless character_class.include?('%')

      "|%(?!#{leave_encoded.chars.flat_map do |char|
        seq = SEQUENCE_ENCODING_TABLE[char]
        [seq.upcase, seq.downcase]
      end.join('|')})"
    end

    character_class = if leave_re
      /[^#{character_class}]#{leave_re}/
    else
      /[^#{character_class}]/
    end
  end
  # We can't perform regexps on invalid UTF sequences, but
  # here we need to, so switch to ASCII.
  component = component.dup
  component.force_encoding(Encoding::ASCII_8BIT)
  # Decode, Unicode-normalize (NFKC), then re-encode with the requested
  # character class. If normalization rejects the bytes, fall back to
  # re-encoding with the default class.
  unencoded = self.unencode_component(component, String, leave_encoded)
  begin
    encoded = self.encode_component(
      Addressable::IDNA.unicode_normalize_kc(unencoded),
      character_class,
      leave_encoded
    )
  rescue ArgumentError
    encoded = self.encode_component(unencoded)
  end
  encoded.force_encoding(Encoding::UTF_8)
  return encoded
end
##
# Percent encodes any special characters in the URI.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI to encode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @return [String, Addressable::URI]
# The encoded URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.encode(uri, return_type=String)
  return nil if uri.nil?
  # Coerce to String via #to_str; anything else raises TypeError.
  unless uri.is_a?(String)
    begin
      uri = uri.to_str
    rescue NoMethodError, TypeError
      raise TypeError, "Can't convert #{uri.class} into String."
    end
  end
  unless [String, ::Addressable::URI].include?(return_type)
    raise TypeError,
      "Expected Class (String or Addressable::URI), " +
      "got #{return_type.inspect}"
  end
  uri_object = uri.kind_of?(self) ? uri : self.parse(uri)
  # Encode each component against its own allowed character class.
  component_classes = {
    :scheme    => Addressable::URI::CharacterClasses::SCHEME,
    :authority => Addressable::URI::CharacterClasses::AUTHORITY,
    :path      => Addressable::URI::CharacterClasses::PATH,
    :query     => Addressable::URI::CharacterClasses::QUERY,
    :fragment  => Addressable::URI::CharacterClasses::FRAGMENT
  }
  encoded_components = component_classes.each_with_object({}) do |(name, char_class), acc|
    acc[name] = self.encode_component(uri_object.send(name), char_class)
  end
  encoded_uri = Addressable::URI.new(encoded_components)
  # return_type was validated above, so it is one of these two.
  return_type == String ? encoded_uri.to_s : encoded_uri
end
class << self
  # `escape` is an alias of `encode`.
  alias_method :escape, :encode
end
##
# Normalizes the encoding of a URI. Characters within a hostname are
# not percent encoded to allow for internationalized domain names.
#
# @param [String, Addressable::URI, #to_str] uri
# The URI to encode.
#
# @param [Class] return_type
# The type of object to return.
# This value may only be set to <code>String</code> or
# <code>Addressable::URI</code>. All other values are invalid. Defaults
# to <code>String</code>.
#
# @return [String, Addressable::URI]
# The encoded URI.
# The return type is determined by the <code>return_type</code>
# parameter.
def self.normalized_encode(uri, return_type=String)
  # Coerce to String via #to_str; anything else raises TypeError.
  begin
    uri = uri.to_str
  rescue NoMethodError, TypeError
    raise TypeError, "Can't convert #{uri.class} into String."
  end if !uri.is_a? String
  if ![String, ::Addressable::URI].include?(return_type)
    raise TypeError,
      "Expected Class (String or Addressable::URI), " +
      "got #{return_type.inspect}"
  end
  uri_object = uri.kind_of?(self) ? uri : self.parse(uri)
  # Fully decode each component first so re-encoding below starts from
  # an unencoded form.
  components = {
    :scheme => self.unencode_component(uri_object.scheme),
    :user => self.unencode_component(uri_object.user),
    :password => self.unencode_component(uri_object.password),
    :host => self.unencode_component(uri_object.host),
    :port => (uri_object.port.nil? ? nil : uri_object.port.to_s),
    :path => self.unencode_component(uri_object.path),
    :query => self.unencode_component(uri_object.query),
    :fragment => self.unencode_component(uri_object.fragment)
  }
  # Apply Unicode NFKC normalization to every present component.
  components.each do |key, value|
    if value != nil
      begin
        components[key] =
          Addressable::IDNA.unicode_normalize_kc(value.to_str)
      rescue ArgumentError
        # Likely a malformed UTF-8 character, skip unicode normalization
        components[key] = value.to_str
      end
    end
  end
  # Re-encode each component with its character class; host and port are
  # passed through unencoded so internationalized domain names survive.
  encoded_uri = Addressable::URI.new(
    :scheme => self.encode_component(components[:scheme],
      Addressable::URI::CharacterClasses::SCHEME),
    :user => self.encode_component(components[:user],
      Addressable::URI::CharacterClasses::UNRESERVED),
    :password => self.encode_component(components[:password],
      Addressable::URI::CharacterClasses::UNRESERVED),
    :host => components[:host],
    :port => components[:port],
    :path => self.encode_component(components[:path],
      Addressable::URI::CharacterClasses::PATH),
    :query => self.encode_component(components[:query],
      Addressable::URI::CharacterClasses::QUERY),
    :fragment => self.encode_component(components[:fragment],
      Addressable::URI::CharacterClasses::FRAGMENT)
  )
  if return_type == String
    return encoded_uri.to_s
  elsif return_type == ::Addressable::URI
    return encoded_uri
  end
end
##
# Encodes a set of key/value pairs according to the rules for the
# <code>application/x-www-form-urlencoded</code> MIME type.
#
# @param [#to_hash, #to_ary] form_values
# The form values to encode.
#
# @param [TrueClass, FalseClass] sort
# Sort the key/value pairs prior to encoding.
# Defaults to <code>false</code>.
#
# @return [String]
# The encoded value.
def self.form_encode(form_values, sort=false)
  # Accept anything hash-like or array-like; reject everything else.
  if form_values.respond_to?(:to_hash)
    form_values = form_values.to_hash.to_a
  elsif form_values.respond_to?(:to_ary)
    form_values = form_values.to_ary
  else
    raise TypeError, "Can't convert #{form_values.class} into Array."
  end
  # Expand Array values into one [key, value] pair per element,
  # stringifying both sides.
  expanded_pairs = form_values.flat_map do |key, value|
    if value.kind_of?(Array)
      value.map { |v| [key.to_s, v.to_s] }
    else
      [[key.to_s, value.to_s]]
    end
  end
  # Useful for OAuth and optimizing caching systems
  expanded_pairs = expanded_pairs.sort if sort
  expanded_pairs.map do |key, value|
    # Line breaks are CRLF pairs; spaces become "+", per
    # application/x-www-form-urlencoded rules.
    encoded_key = self.encode_component(
      key.gsub(/(\r\n|\n|\r)/, "\r\n"),
      CharacterClasses::UNRESERVED
    ).gsub("%20", "+")
    encoded_value = self.encode_component(
      value.gsub(/(\r\n|\n|\r)/, "\r\n"),
      CharacterClasses::UNRESERVED
    ).gsub("%20", "+")
    "#{encoded_key}=#{encoded_value}"
  end.join("&")
end
##
# Decodes a <code>String</code> according to the rules for the
# <code>application/x-www-form-urlencoded</code> MIME type.
#
# @param [String, #to_str] encoded_value
# The form values to decode.
#
# @return [Array]
# The decoded values.
# This is not a <code>Hash</code> because of the possibility for
# duplicate keys.
def self.form_unencode(encoded_value)
  unless encoded_value.respond_to?(:to_str)
    raise TypeError, "Can't convert #{encoded_value.class} into String."
  end
  # Split into pairs, then decode each side: "+" becomes a space and
  # line breaks are normalized to "\n". Keys/values absent from a pair
  # decode to nil.
  encoded_value.to_str.split("&").map do |pair|
    raw_key, raw_value = pair.split("=", 2)
    decoded_key = raw_key && self.unencode_component(
      raw_key.gsub("+", "%20")).gsub(/(\r\n|\n|\r)/, "\n")
    decoded_value = raw_value && self.unencode_component(
      raw_value.gsub("+", "%20")).gsub(/(\r\n|\n|\r)/, "\n")
    [decoded_key, decoded_value]
  end
end
##
# Creates a new uri object from component parts.
#
# @option [String, #to_str] scheme The scheme component.
# @option [String, #to_str] user The user component.
# @option [String, #to_str] password The password component.
# @option [String, #to_str] userinfo
# The userinfo component. If this is supplied, the user and password
# components must be omitted.
# @option [String, #to_str] host The host component.
# @option [String, #to_str] port The port component.
# @option [String, #to_str] authority
# The authority component. If this is supplied, the user, password,
# userinfo, host, and port components must be omitted.
# @option [String, #to_str] path The path component.
# @option [String, #to_str] query The query component.
# @option [String, #to_str] fragment The fragment component.
#
# @return [Addressable::URI] The constructed URI object.
def initialize(options={})
  # Composite components are mutually exclusive with their parts.
  if options.has_key?(:authority)
    if (options.keys & [:userinfo, :user, :password, :host, :port]).any?
      raise ArgumentError,
        "Cannot specify both an authority and any of the components " +
        "within the authority."
    end
  end
  if options.has_key?(:userinfo)
    if (options.keys & [:user, :password]).any?
      raise ArgumentError,
        "Cannot specify both a userinfo and either the user or password."
    end
  end
  self.defer_validation do
    # Bunch of crazy logic required because of the composite components
    # like userinfo and authority.
    self.scheme = options[:scheme] if options[:scheme]
    self.user = options[:user] if options[:user]
    self.password = options[:password] if options[:password]
    self.userinfo = options[:userinfo] if options[:userinfo]
    self.host = options[:host] if options[:host]
    self.port = options[:port] if options[:port]
    self.authority = options[:authority] if options[:authority]
    self.path = options[:path] if options[:path]
    self.query = options[:query] if options[:query]
    self.query_values = options[:query_values] if options[:query_values]
    self.fragment = options[:fragment] if options[:fragment]
  end
  # Eagerly serialize the full URI. NOTE(review): appears intended to
  # populate composite caches up front — confirm before removing.
  self.to_s
end
##
# Freeze URI, initializing instance variables.
#
# @return [Addressable::URI] The frozen URI object.
def freeze
  # Compute every lazily-memoized normalized form now; after freezing,
  # the ||= caches in these readers could no longer assign their
  # instance variables.
  self.normalized_scheme
  self.normalized_user
  self.normalized_password
  self.normalized_userinfo
  self.normalized_host
  self.normalized_port
  self.normalized_authority
  self.normalized_site
  self.normalized_path
  self.normalized_query
  self.normalized_fragment
  self.hash
  super
end
##
# The scheme component for this URI.
#
# @return [String] The scheme component.
def scheme
  # Raw (unnormalized) scheme; nil when never assigned.
  @scheme if defined?(@scheme)
end
##
# The scheme component for this URI, normalized.
#
# @return [String] The scheme component, normalized.
def normalized_scheme
  return nil unless self.scheme
  @normalized_scheme ||= begin
    if self.scheme =~ /^\s*ssh\+svn\s*$/i
      # Special case: canonicalize "ssh+svn" to the conventional order.
      "svn+ssh".dup
    else
      Addressable::URI.normalize_component(
        self.scheme.strip.downcase,
        Addressable::URI::NormalizeCharacterClasses::SCHEME
      )
    end
  end
  # All normalized values should be UTF-8
  @normalized_scheme.force_encoding(Encoding::UTF_8) if @normalized_scheme
  @normalized_scheme
end
##
# Sets the scheme component for this URI.
#
# @param [String, #to_str] new_scheme The new scheme component.
def scheme=(new_scheme)
  if new_scheme && !new_scheme.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_scheme.class} into String."
  elsif new_scheme
    new_scheme = new_scheme.to_str
  end
  # RFC 3986: scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
  if new_scheme && new_scheme !~ /\A[a-z][a-z0-9\.\+\-]*\z/i
    raise InvalidURIError, "Invalid scheme format: '#{new_scheme}'"
  end
  @scheme = new_scheme
  # A blank scheme is treated as no scheme at all.
  @scheme = nil if @scheme.to_s.strip.empty?
  # Reset dependent values
  remove_instance_variable(:@normalized_scheme) if defined?(@normalized_scheme)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# The user component for this URI.
#
# @return [String] The user component.
def user
  # Raw (unnormalized) user component; nil when never assigned.
  @user if defined?(@user)
end
##
# The user component for this URI, normalized.
#
# @return [String] The user component, normalized.
def normalized_user
  return nil unless self.user
  return @normalized_user if defined?(@normalized_user)
  @normalized_user ||= begin
    # Empty credentials on http(s) URIs normalize away entirely.
    if normalized_scheme =~ /https?/ && self.user.strip.empty? &&
        (!self.password || self.password.strip.empty?)
      nil
    else
      Addressable::URI.normalize_component(
        self.user.strip,
        Addressable::URI::NormalizeCharacterClasses::UNRESERVED
      )
    end
  end
  # All normalized values should be UTF-8
  @normalized_user.force_encoding(Encoding::UTF_8) if @normalized_user
  @normalized_user
end
##
# Sets the user component for this URI.
#
# @param [String, #to_str] new_user The new user component.
def user=(new_user)
  if new_user && !new_user.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_user.class} into String."
  end
  @user = new_user ? new_user.to_str : nil
  # You can't have a nil user with a non-nil password
  if password != nil
    @user = EMPTY_STR if @user.nil?
  end
  # Reset dependent values (userinfo/authority are composites of user).
  remove_instance_variable(:@userinfo) if defined?(@userinfo)
  remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
  remove_instance_variable(:@authority) if defined?(@authority)
  remove_instance_variable(:@normalized_user) if defined?(@normalized_user)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# The password component for this URI.
#
# @return [String] The password component.
def password
  # Raw (unnormalized) password component; nil when never assigned.
  @password if defined?(@password)
end
##
# The password component for this URI, normalized.
#
# @return [String] The password component, normalized.
def normalized_password
  return nil unless self.password
  return @normalized_password if defined?(@normalized_password)
  @normalized_password ||= begin
    # Empty credentials on http(s) URIs normalize away entirely.
    if self.normalized_scheme =~ /https?/ && self.password.strip.empty? &&
        (!self.user || self.user.strip.empty?)
      nil
    else
      Addressable::URI.normalize_component(
        self.password.strip,
        Addressable::URI::NormalizeCharacterClasses::UNRESERVED
      )
    end
  end
  # All normalized values should be UTF-8
  if @normalized_password
    @normalized_password.force_encoding(Encoding::UTF_8)
  end
  @normalized_password
end
##
# Sets the password component for this URI.
#
# @param [String, #to_str] new_password The new password component.
def password=(new_password)
  if new_password && !new_password.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_password.class} into String."
  end
  @password = new_password ? new_password.to_str : nil
  # You can't have a nil user with a non-nil password
  @password ||= nil
  @user ||= nil
  if @password != nil
    @user = EMPTY_STR if @user.nil?
  end
  # Reset dependent values (userinfo/authority are composites of password).
  remove_instance_variable(:@userinfo) if defined?(@userinfo)
  remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
  remove_instance_variable(:@authority) if defined?(@authority)
  remove_instance_variable(:@normalized_password) if defined?(@normalized_password)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# The userinfo component for this URI.
# Combines the user and password components.
#
# @return [String] The userinfo component.
def userinfo
  current_user = self.user
  current_password = self.password
  # The leading boolean guard makes this return nil (without caching)
  # when both user and password are absent; otherwise the composite
  # "user:password" (or just "user") string is memoized.
  (current_user || current_password) && @userinfo ||= begin
    if current_user && current_password
      "#{current_user}:#{current_password}"
    elsif current_user && !current_password
      "#{current_user}"
    end
  end
end
##
# The userinfo component for this URI, normalized.
#
# @return [String] The userinfo component, normalized.
def normalized_userinfo
  return nil unless self.userinfo
  return @normalized_userinfo if defined?(@normalized_userinfo)
  @normalized_userinfo ||= begin
    # Rebuild the composite from the individually-normalized parts.
    current_user = self.normalized_user
    current_password = self.normalized_password
    if !current_user && !current_password
      nil
    elsif current_user && current_password
      "#{current_user}:#{current_password}".dup
    elsif current_user && !current_password
      "#{current_user}".dup
    end
  end
  # All normalized values should be UTF-8
  if @normalized_userinfo
    @normalized_userinfo.force_encoding(Encoding::UTF_8)
  end
  @normalized_userinfo
end
##
# Sets the userinfo component for this URI.
#
# @param [String, #to_str] new_userinfo The new userinfo component.
def userinfo=(new_userinfo)
  if new_userinfo && !new_userinfo.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_userinfo.class} into String."
  end
  # Split userinfo into user/password around the ':' delimiter.
  new_user, new_password = if new_userinfo
    [
      new_userinfo.to_str.strip[/^(.*):/, 1],
      new_userinfo.to_str.strip[/:(.*)$/, 1]
    ]
  else
    [nil, nil]
  end
  # Password assigned first to ensure validity in case of nil
  self.password = new_password
  self.user = new_user
  # Reset dependent values
  remove_instance_variable(:@authority) if defined?(@authority)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# The host component for this URI.
#
# @return [String] The host component.
def host
  # Raw (unnormalized) host component; nil when never assigned.
  @host if defined?(@host)
end
##
# The host component for this URI, normalized.
#
# @return [String] The host component, normalized.
def normalized_host
  return nil unless self.host
  @normalized_host ||= begin
    if !self.host.strip.empty?
      # Decode, lowercase, then convert internationalized domain names
      # to their ASCII form.
      result = ::Addressable::IDNA.to_ascii(
        URI.unencode_component(self.host.strip.downcase)
      )
      if result =~ /[^\.]\.$/
        # Single trailing dots are unnecessary.
        result = result[0...-1]
      end
      result = Addressable::URI.normalize_component(
        result,
        NormalizeCharacterClasses::HOST
      )
      result
    else
      EMPTY_STR.dup
    end
  end
  # All normalized values should be UTF-8
  if @normalized_host && !@normalized_host.empty?
    @normalized_host.force_encoding(Encoding::UTF_8)
  end
  @normalized_host
end
##
# Sets the host component for this URI.
#
# @param [String, #to_str] new_host The new host component.
def host=(new_host)
  if new_host && !new_host.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_host.class} into String."
  end
  @host = new_host ? new_host.to_str : nil
  # Reset dependent values (authority is a composite of host).
  remove_instance_variable(:@authority) if defined?(@authority)
  remove_instance_variable(:@normalized_host) if defined?(@normalized_host)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# This method is same as URI::Generic#host except
# brackets for IPv6 (and 'IPvFuture') addresses are removed.
#
# @see Addressable::URI#host
#
# @return [String] The hostname for this URI.
def hostname
  # Strip surrounding brackets from IPv6/IPvFuture literals; any other
  # host (including nil) is returned unchanged.
  value = self.host
  bracketed = value && value[/\A\[(.*)\]\z/, 1]
  bracketed || value
end
##
# This method is same as URI::Generic#host= except
# the argument can be a bare IPv6 address (or 'IPvFuture').
#
# @see Addressable::URI#host=
#
# @param [String, #to_str] new_hostname The new hostname for this URI.
def hostname=(new_hostname)
  # Accept IPAddr-like objects (those responding to ipv4?/ipv6?) by
  # stringifying them first.
  if new_hostname &&
      (new_hostname.respond_to?(:ipv4?) || new_hostname.respond_to?(:ipv6?))
    new_hostname = new_hostname.to_s
  elsif new_hostname && !new_hostname.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_hostname.class} into String."
  end
  v = new_hostname ? new_hostname.to_str : nil
  # Wrap bare IPv6/IPvFuture literals (containing ':') in brackets.
  v = "[#{v}]" if /\A\[.*\]\z/ !~ v && /:/ =~ v
  self.host = v
end
##
# Returns the top-level domain for this host.
#
# @example
# Addressable::URI.parse("http://www.example.co.uk").tld # => "co.uk"
def tld
  # Delegates to the public_suffix gem; private-registry suffixes ignored.
  PublicSuffix.parse(self.host, ignore_private: true).tld
end
##
# Sets the top-level domain for this URI.
#
# @param [String, #to_str] new_tld The new top-level domain.
def tld=(new_tld)
  # Swap the current TLD suffix for the new one, then reassign the host.
  replaced_tld = host.sub(/#{tld}\z/, new_tld)
  self.host = PublicSuffix::Domain.new(replaced_tld).to_s
end
##
# Returns the public suffix domain for this host.
#
# @example
# Addressable::URI.parse("http://www.example.co.uk").domain # => "example.co.uk"
def domain
  # Delegates to the public_suffix gem; private-registry suffixes ignored.
  PublicSuffix.domain(self.host, ignore_private: true)
end
##
# The authority component for this URI.
# Combines the user, password, host, and port components.
#
# @return [String] The authority component.
def authority
  # nil when there is no host; otherwise "userinfo@host:port" (with the
  # optional pieces omitted) is built once and memoized.
  self.host && @authority ||= begin
    authority = String.new
    if self.userinfo != nil
      authority << "#{self.userinfo}@"
    end
    authority << self.host
    if self.port != nil
      authority << ":#{self.port}"
    end
    authority
  end
end
##
# The authority component for this URI, normalized.
#
# @return [String] The authority component, normalized.
def normalized_authority
  return nil unless self.authority
  @normalized_authority ||= begin
    # Rebuild the composite from the individually-normalized parts.
    authority = String.new
    if self.normalized_userinfo != nil
      authority << "#{self.normalized_userinfo}@"
    end
    authority << self.normalized_host
    if self.normalized_port != nil
      authority << ":#{self.normalized_port}"
    end
    authority
  end
  # All normalized values should be UTF-8
  if @normalized_authority
    @normalized_authority.force_encoding(Encoding::UTF_8)
  end
  @normalized_authority
end
##
# Sets the authority component for this URI.
#
# @param [String, #to_str] new_authority The new authority component.
def authority=(new_authority)
  if new_authority
    if !new_authority.respond_to?(:to_str)
      raise TypeError, "Can't convert #{new_authority.class} into String."
    end
    new_authority = new_authority.to_str
    # Userinfo is everything before an "@" that contains no brackets.
    new_userinfo = new_authority[/^([^\[\]]*)@/, 1]
    if new_userinfo
      new_user = new_userinfo.strip[/^([^:]*):?/, 1]
      new_password = new_userinfo.strip[/:(.*)$/, 1]
    end
    # Host is what remains after stripping userinfo and a trailing ":port".
    new_host = new_authority.sub(
      /^([^\[\]]*)@/, EMPTY_STR
    ).sub(
      /:([^:@\[\]]*?)$/, EMPTY_STR
    )
    new_port =
      new_authority[/:([^:@\[\]]*?)$/, 1]
  end
  # Password assigned first to ensure validity in case of nil
  self.password = defined?(new_password) ? new_password : nil
  self.user = defined?(new_user) ? new_user : nil
  self.host = defined?(new_host) ? new_host : nil
  self.port = defined?(new_port) ? new_port : nil
  # Reset dependent values
  remove_instance_variable(:@userinfo) if defined?(@userinfo)
  remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# The origin for this URI, serialized to ASCII, as per
# RFC 6454, section 6.2.
#
# @return [String] The serialized origin.
def origin
  # Per RFC 6454 a URI without both a scheme and an authority has the
  # serialized origin "null".
  return "null" unless self.scheme && self.authority
  serialized = "#{self.normalized_scheme}://#{self.normalized_host}"
  serialized << ":#{self.normalized_port}" if self.normalized_port
  serialized
end
##
# Sets the origin for this URI, serialized to ASCII, as per
# RFC 6454, section 6.2. This assignment will reset the `userinfo`
# component.
#
# @param [String, #to_str] new_origin The new origin component.
def origin=(new_origin)
  if new_origin
    if !new_origin.respond_to?(:to_str)
      raise TypeError, "Can't convert #{new_origin.class} into String."
    end
    new_origin = new_origin.to_str
    # An origin is "scheme://host[:port]"; scheme and host are mandatory.
    new_scheme = new_origin[/^([^:\/?#]+):\/\//, 1]
    unless new_scheme
      raise InvalidURIError, 'An origin cannot omit the scheme.'
    end
    new_host = new_origin[/:\/\/([^\/?#:]+)/, 1]
    unless new_host
      raise InvalidURIError, 'An origin cannot omit the host.'
    end
    new_port = new_origin[/:([^:@\[\]\/]*?)$/, 1]
  end
  self.scheme = defined?(new_scheme) ? new_scheme : nil
  self.host = defined?(new_host) ? new_host : nil
  self.port = defined?(new_port) ? new_port : nil
  # Assigning an origin always clears any credentials.
  self.userinfo = nil
  # Reset dependent values
  remove_instance_variable(:@userinfo) if defined?(@userinfo)
  remove_instance_variable(:@normalized_userinfo) if defined?(@normalized_userinfo)
  remove_instance_variable(:@authority) if defined?(@authority)
  remove_instance_variable(:@normalized_authority) if defined?(@normalized_authority)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
# Returns an array of known ip-based schemes. These schemes typically
# use a similar URI form:
# <code>//<user>:<password>@<host>:<port>/<url-path></code>
def self.ip_based_schemes
  # A scheme is considered IP-based iff it has a default port mapping.
  self.port_mapping.keys
end
# Returns a hash of common IP-based schemes and their default port
# numbers. Adding new schemes to this hash, as necessary, will allow
# for better URI normalization.
def self.port_mapping
  # Backed by the PORT_MAPPING constant defined on this class.
  PORT_MAPPING
end
##
# The port component for this URI.
# This is the port number actually given in the URI. This does not
# infer port numbers from default values.
#
# @return [Integer] The port component.
def port
  # Explicit port from the URI (Integer); nil when never assigned.
  @port if defined?(@port)
end
##
# The port component for this URI, normalized.
#
# @return [Integer] The port component, normalized.
def normalized_port
  return nil unless self.port
  return @normalized_port if defined?(@normalized_port)
  @normalized_port ||= begin
    if URI.port_mapping[self.normalized_scheme] == self.port
      # The scheme's default port normalizes away.
      nil
    else
      self.port
    end
  end
end
##
# Sets the port component for this URI.
#
# @param [String, Integer, #to_s] new_port The new port component.
def port=(new_port)
  if new_port != nil && new_port.respond_to?(:to_str)
    # String ports may contain percent-encoded digits; decode first.
    new_port = Addressable::URI.unencode_component(new_port.to_str)
  end
  if new_port.respond_to?(:valid_encoding?) && !new_port.valid_encoding?
    raise InvalidURIError, "Invalid encoding in port"
  end
  # Only strings of decimal digits (or Integers) are acceptable.
  if new_port != nil && !(new_port.to_s =~ /^\d+$/)
    raise InvalidURIError,
      "Invalid port number: #{new_port.inspect}"
  end
  @port = new_port.to_s.to_i
  # A port of 0 (including nil/empty input) is treated as no port.
  @port = nil if @port == 0
  # Reset dependent values
  remove_instance_variable(:@authority) if defined?(@authority)
  remove_instance_variable(:@normalized_port) if defined?(@normalized_port)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# The inferred port component for this URI.
# This method will normalize to the default port for the URI's scheme if
# the port isn't explicitly specified in the URI.
#
# @return [Integer] The inferred port component.
def inferred_port
  # Fall back to the scheme's default port when no explicit port is set.
  explicit = self.port.to_i
  explicit.zero? ? self.default_port : explicit
end
##
# The default port for this URI's scheme.
# This method will always returns the default port for the URI's scheme
# regardless of the presence of an explicit port in the URI.
#
# @return [Integer] The default port.
def default_port
  # Looked up from the scheme→port table; nil when there is no scheme.
  URI.port_mapping[self.scheme.strip.downcase] if self.scheme
end
##
# The combination of components that represent a site.
# Combines the scheme, user, password, host, and port components.
# Primarily useful for HTTP and HTTPS.
#
# For example, <code>"http://example.com/path?query"</code> would have a
# <code>site</code> value of <code>"http://example.com"</code>.
#
# @return [String] The components that identify a site.
def site
  # nil when there is neither scheme nor authority; otherwise the
  # "scheme://authority" composite is built once and memoized.
  (self.scheme || self.authority) && @site ||= begin
    site_string = "".dup
    site_string << "#{self.scheme}:" if self.scheme != nil
    site_string << "//#{self.authority}" if self.authority != nil
    site_string
  end
end
##
# The normalized combination of components that represent a site.
# Combines the scheme, user, password, host, and port components.
# Primarily useful for HTTP and HTTPS.
#
# For example, <code>"http://example.com/path?query"</code> would have a
# <code>site</code> value of <code>"http://example.com"</code>.
#
# @return [String] The normalized components that identify a site.
def normalized_site
  return nil unless self.site
  @normalized_site ||= begin
    # Rebuild the composite from the individually-normalized parts.
    site_string = "".dup
    if self.normalized_scheme != nil
      site_string << "#{self.normalized_scheme}:"
    end
    if self.normalized_authority != nil
      site_string << "//#{self.normalized_authority}"
    end
    site_string
  end
  # All normalized values should be UTF-8
  @normalized_site.force_encoding(Encoding::UTF_8) if @normalized_site
  @normalized_site
end
##
# Sets the site value for this URI.
#
# @param [String, #to_str] new_site The new site value.
def site=(new_site)
  if new_site
    if !new_site.respond_to?(:to_str)
      raise TypeError, "Can't convert #{new_site.class} into String."
    end
    new_site = new_site.to_str
    # These two regular expressions derived from the primary parsing
    # expression
    self.scheme = new_site[/^(?:([^:\/?#]+):)?(?:\/\/(?:[^\/?#]*))?$/, 1]
    self.authority = new_site[
      /^(?:(?:[^:\/?#]+):)?(?:\/\/([^\/?#]*))?$/, 1
    ]
  else
    # Clearing the site clears both of its constituent components.
    self.scheme = nil
    self.authority = nil
  end
end
##
# The path component for this URI.
#
# @return [String] The path component.
def path
  # Raw path component; defaults to the empty string, never nil.
  return @path if defined?(@path)
  EMPTY_STR
end
# Matches a relative path whose first segment contains a colon (e.g.
# "this:that"), which would otherwise be ambiguous with a scheme.
NORMPATH = /^(?!\/)[^\/:]*:.*$/
##
# The path component for this URI, normalized.
#
# @return [String] The path component, normalized.
def normalized_path
  @normalized_path ||= begin
    path = self.path.to_s
    if self.scheme == nil && path =~ NORMPATH
      # Relative paths with colons in the first segment are ambiguous.
      path = path.sub(":", "%2F")
    end
    # String#split(delimeter, -1) uses the more strict splitting behavior
    # found by default in Python.
    result = path.strip.split(SLASH, -1).map do |segment|
      Addressable::URI.normalize_component(
        segment,
        Addressable::URI::NormalizeCharacterClasses::PCHAR
      )
    end.join(SLASH)
    result = URI.normalize_path(result)
    # An empty path on these schemes normalizes to "/".
    if result.empty? &&
        ["http", "https", "ftp", "tftp"].include?(self.normalized_scheme)
      result = SLASH.dup
    end
    result
  end
  # All normalized values should be UTF-8
  @normalized_path.force_encoding(Encoding::UTF_8) if @normalized_path
  @normalized_path
end
##
# Sets the path component for this URI.
#
# @param [String, #to_str] new_path The new path component.
def path=(new_path)
  if new_path && !new_path.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_path.class} into String."
  end
  @path = (new_path || EMPTY_STR).to_str
  # A non-empty relative path must become absolute when a host is present.
  if !@path.empty? && @path[0..0] != SLASH && host != nil
    @path = "/#{@path}"
  end
  # Reset dependent values
  remove_instance_variable(:@normalized_path) if defined?(@normalized_path)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# The basename, if any, of the file in the path component.
#
# @return [String] The path's basename.
def basename
  # The path component is never nil, so File.basename is always safe;
  # strip any trailing ";params" segment.
  name = File.basename(self.path)
  name.sub(/;[^\/]*$/, EMPTY_STR)
end
##
# The extname, if any, of the file in the path component.
# Empty string if there is no extension.
#
# @return [String] The path's extname.
def extname
  return nil unless self.path
  # Empty string when the basename has no extension.
  return File.extname(self.basename)
end
##
# The query component for this URI.
#
# @return [String] The query component.
def query
  # Raw (unnormalized) query component; nil when never assigned.
  @query if defined?(@query)
end
##
# The query component for this URI, normalized.
#
# @return [String] The query component, normalized.
def normalized_query(*flags)
  return nil unless self.query
  return @normalized_query if defined?(@normalized_query)
  @normalized_query ||= begin
    # NOTE: the former `modified_query_class` computation was dead code —
    # it was built and mutated but never read, since normalization below
    # uses NormalizeCharacterClasses::QUERY directly. It has been removed.
    #
    # Split on "&" keeping trailing empty fields so each key=value pair
    # is normalized independently; "+" is left encoded to preserve its
    # form-urlencoded space meaning.
    pairs = (query || "").split("&", -1)
    pairs.delete_if(&:empty?).uniq! if flags.include?(:compacted)
    pairs.sort! if flags.include?(:sorted)
    component = pairs.map do |pair|
      Addressable::URI.normalize_component(
        pair,
        Addressable::URI::NormalizeCharacterClasses::QUERY,
        "+"
      )
    end.join("&")
    # An all-empty query normalizes to nil.
    component == "" ? nil : component
  end
  # All normalized values should be UTF-8
  @normalized_query.force_encoding(Encoding::UTF_8) if @normalized_query
  @normalized_query
end
##
# Sets the query component for this URI.
#
# @param [String, #to_str] new_query The new query component.
def query=(new_query)
  if new_query && !new_query.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_query.class} into String."
  end
  @query = new_query ? new_query.to_str : nil
  # Reset dependent values
  remove_instance_variable(:@normalized_query) if defined?(@normalized_query)
  remove_composite_values
end
##
# Converts the query component to a Hash value.
#
# @param [Class] return_type The return type desired. Value must be either
# `Hash` or `Array`.
#
# @return [Hash, Array, nil] The query string parsed as a Hash or Array
# or nil if the query string is blank.
#
# @example
# Addressable::URI.parse("?one=1&two=2&three=3").query_values
# #=> {"one" => "1", "two" => "2", "three" => "3"}
# Addressable::URI.parse("?one=two&one=three").query_values(Array)
# #=> [["one", "two"], ["one", "three"]]
# Addressable::URI.parse("?one=two&one=three").query_values(Hash)
# #=> {"one" => "three"}
# Addressable::URI.parse("?").query_values
# #=> {}
# Addressable::URI.parse("").query_values
# #=> nil
def query_values(return_type=Hash)
  empty_accumulator = Array == return_type ? [] : {}
  if return_type != Hash && return_type != Array
    raise ArgumentError, "Invalid return type. Must be Hash or Array."
  end
  return nil if self.query == nil
  # Drop empty pairs ("a=1&&b=2"), keep valueless keys ("flag").
  split_query = self.query.split("&").map do |pair|
    pair.split("=", 2) if pair && !pair.empty?
  end.compact
  return split_query.inject(empty_accumulator.dup) do |accu, pair|
    # I'd rather use key/value identifiers instead of array lookups,
    # but in this case I really want to maintain the exact pair structure,
    # so it's best to make all changes in-place.
    pair[0] = URI.unencode_component(pair[0])
    if pair[1].respond_to?(:to_str)
      value = pair[1].to_str
      # I loathe the fact that I have to do this. Stupid HTML 4.01.
      # Treating '+' as a space was just an unbelievably bad idea.
      # There was nothing wrong with '%20'!
      # If it ain't broke, don't fix it!
      value = value.tr("+", " ") if ["http", "https", nil].include?(scheme)
      pair[1] = URI.unencode_component(value)
    end
    # Hash form keeps only the last value for a duplicated key.
    if return_type == Hash
      accu[pair[0]] = pair[1]
    else
      accu << pair
    end
    accu
  end
end
##
# Sets the query component for this URI from a Hash object.
# An empty Hash or Array will result in an empty query string.
#
# @param [Hash, #to_hash, Array] new_query_values The new query values.
#
# @example
# uri.query_values = {:a => "a", :b => ["c", "d", "e"]}
# uri.query
# # => "a=a&b=c&b=d&b=e"
# uri.query_values = [['a', 'a'], ['b', 'c'], ['b', 'd'], ['b', 'e']]
# uri.query
# # => "a=a&b=c&b=d&b=e"
# uri.query_values = [['a', 'a'], ['b', ['c', 'd', 'e']]]
# uri.query
# # => "a=a&b=c&b=d&b=e"
# uri.query_values = [['flag'], ['key', 'value']]
# uri.query
# # => "flag&key=value"
def query_values=(new_query_values)
  if new_query_values == nil
    self.query = nil
    return nil
  end
  if !new_query_values.is_a?(Array)
    if !new_query_values.respond_to?(:to_hash)
      raise TypeError,
        "Can't convert #{new_query_values.class} into Hash."
    end
    # Normalize a Hash into an array of [key, value] pairs with String keys.
    new_query_values = new_query_values.to_hash
    new_query_values = new_query_values.map do |key, value|
      key = key.to_s if key.kind_of?(Symbol)
      [key, value]
    end
    # Useful default for OAuth and caching.
    # Only to be used for non-Array inputs. Arrays should preserve order.
    new_query_values.sort!
  end
  # new_query_values have form [['key1', 'value1'], ['key2', 'value2']]
  buffer = "".dup
  new_query_values.each do |key, value|
    encoded_key = URI.encode_component(
      key, CharacterClasses::UNRESERVED
    )
    if value == nil
      # A nil value produces a valueless key ("flag&...").
      buffer << "#{encoded_key}&"
    elsif value.kind_of?(Array)
      # Array values expand to one key=value pair per element.
      value.each do |sub_value|
        encoded_value = URI.encode_component(
          sub_value, CharacterClasses::UNRESERVED
        )
        buffer << "#{encoded_key}=#{encoded_value}&"
      end
    else
      encoded_value = URI.encode_component(
        value, CharacterClasses::UNRESERVED
      )
      buffer << "#{encoded_key}=#{encoded_value}&"
    end
  end
  # chop removes the trailing "&" appended by the loop above.
  self.query = buffer.chop
end
##
# The HTTP request URI for this URI. This is the path plus the query
# string, as it would appear on an HTTP request line.
#
# @return [String, NilClass] The request URI, or nil when this is an
#   absolute URI with a non-HTTP(S) scheme.
def request_uri
  return nil if self.absolute? && self.scheme !~ /^https?$/i
  buffer = (self.path.empty? ? SLASH : self.path).dup
  buffer << "?#{self.query}" if self.query
  buffer
end
##
# Sets the HTTP request URI (path plus optional "?query") for this URI.
#
# @param [String, #to_str] new_request_uri The new HTTP request URI.
#
# @raise [TypeError] if the argument is not String-convertible.
# @raise [InvalidURIError] if this URI is absolute but not HTTP(S).
def request_uri=(new_request_uri)
  if !new_request_uri.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_request_uri.class} into String."
  end
  if self.absolute? && self.scheme !~ /^https?$/i
    raise InvalidURIError,
      "Cannot set an HTTP request URI for a non-HTTP URI."
  end
  new_request_uri = new_request_uri.to_str
  # Split on the first '?': everything before is path, after is query.
  path_component = new_request_uri[/^([^\?]*)\??(?:.*)$/, 1]
  query_component = new_request_uri[/^(?:[^\?]*)\?(.*)$/, 1]
  path_component = path_component.to_s
  # An empty path normalizes to "/".
  path_component = (!path_component.empty? ? path_component : SLASH)
  self.path = path_component
  self.query = query_component
  # Reset dependent values
  remove_composite_values
end
##
# The fragment component for this URI.
#
# @return [String, NilClass] The fragment component, or nil when it has
#   never been assigned.
def fragment
  @fragment if defined?(@fragment)
end
##
# The fragment component for this URI, normalized.
#
# @return [String, NilClass] The fragment component, normalized;
#   nil when the fragment is absent or normalizes to "".
def normalized_fragment
  return nil unless self.fragment
  # Memoized: the defined? guard (not just ||=) ensures a computed nil
  # ("" normalizes to nil) is also cached and not recomputed.
  return @normalized_fragment if defined?(@normalized_fragment)
  @normalized_fragment ||= begin
    component = Addressable::URI.normalize_component(
      self.fragment,
      Addressable::URI::NormalizeCharacterClasses::FRAGMENT
    )
    component == "" ? nil : component
  end
  # All normalized values should be UTF-8
  if @normalized_fragment
    @normalized_fragment.force_encoding(Encoding::UTF_8)
  end
  @normalized_fragment
end
##
# Sets the fragment component for this URI.
#
# @param [String, #to_str, NilClass] new_fragment The new fragment
#   component; nil removes the fragment.
#
# @raise [TypeError] if the argument is neither nil nor String-convertible.
def fragment=(new_fragment)
  if new_fragment && !new_fragment.respond_to?(:to_str)
    raise TypeError, "Can't convert #{new_fragment.class} into String."
  end
  @fragment = new_fragment ? new_fragment.to_str : nil
  # Reset dependent values
  remove_instance_variable(:@normalized_fragment) if defined?(@normalized_fragment)
  remove_composite_values
  # Ensure we haven't created an invalid URI
  validate()
end
##
# Determines if the scheme indicates an IP-based protocol.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the scheme is registered as IP-based,
#   <code>false</code> otherwise (including schemeless URIs).
def ip_based?
  scheme_value = self.scheme
  return false unless scheme_value
  URI.ip_based_schemes.include?(scheme_value.strip.downcase)
end
##
# Determines if the URI is relative.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> when no scheme is present, <code>false</code>
#   otherwise.
def relative?
  self.scheme.nil?
end
##
# Determines if the URI is absolute.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URI has a scheme, <code>false</code>
#   otherwise. Defined as the negation of #relative?.
def absolute?
  relative? ? false : true
end
##
# Joins two URIs together, implementing the reference-resolution
# algorithm of RFC 3986 section 5.2.2.
#
# @param [String, Addressable::URI, #to_str] uri The URI to join with.
#
# @return [Addressable::URI] The joined URI.
#
# @raise [TypeError] if the argument is not String-convertible.
def join(uri)
  if !uri.respond_to?(:to_str)
    raise TypeError, "Can't convert #{uri.class} into String."
  end
  if !uri.kind_of?(URI)
    # Otherwise, convert to a String, then parse.
    uri = URI.parse(uri.to_str)
  end
  if uri.to_s.empty?
    return self.dup
  end
  joined_scheme = nil
  joined_user = nil
  joined_password = nil
  joined_host = nil
  joined_port = nil
  joined_path = nil
  joined_query = nil
  joined_fragment = nil
  # Section 5.2.2 of RFC 3986
  if uri.scheme != nil
    # Reference has a scheme: take everything from the reference.
    joined_scheme = uri.scheme
    joined_user = uri.user
    joined_password = uri.password
    joined_host = uri.host
    joined_port = uri.port
    joined_path = URI.normalize_path(uri.path)
    joined_query = uri.query
  else
    if uri.authority != nil
      # Reference has an authority: take authority/path/query from it.
      joined_user = uri.user
      joined_password = uri.password
      joined_host = uri.host
      joined_port = uri.port
      joined_path = URI.normalize_path(uri.path)
      joined_query = uri.query
    else
      if uri.path == nil || uri.path.empty?
        # Empty reference path: keep base path; query comes from the
        # reference only if it supplies one.
        joined_path = self.path
        if uri.query != nil
          joined_query = uri.query
        else
          joined_query = self.query
        end
      else
        if uri.path[0..0] == SLASH
          # Absolute reference path replaces the base path outright.
          joined_path = URI.normalize_path(uri.path)
        else
          # Relative reference path: merge with the base path.
          base_path = self.path.dup
          base_path = EMPTY_STR if base_path == nil
          base_path = URI.normalize_path(base_path)
          # Section 5.2.3 of RFC 3986
          #
          # Removes the right-most path segment from the base path.
          if base_path.include?(SLASH)
            base_path.sub!(/\/[^\/]+$/, SLASH)
          else
            base_path = EMPTY_STR
          end
          # If the base path is empty and an authority segment has been
          # defined, use a base path of SLASH
          if base_path.empty? && self.authority != nil
            base_path = SLASH
          end
          joined_path = URI.normalize_path(base_path + uri.path)
        end
        joined_query = uri.query
      end
      # Authority always carried over from the base in this branch.
      joined_user = self.user
      joined_password = self.password
      joined_host = self.host
      joined_port = self.port
    end
    joined_scheme = self.scheme
  end
  # Per the RFC, the fragment always comes from the reference.
  joined_fragment = uri.fragment
  return self.class.new(
    :scheme => joined_scheme,
    :user => joined_user,
    :password => joined_password,
    :host => joined_host,
    :port => joined_port,
    :path => joined_path,
    :query => joined_query,
    :fragment => joined_fragment
  )
end
alias_method :+, :join
##
# Destructive form of <code>join</code>: replaces this URI's state with
# the join result.
#
# @param [String, Addressable::URI, #to_str] uri The URI to join with.
#
# @return [Addressable::URI] <code>self</code>, now holding the joined URI.
#
# @see Addressable::URI#join
def join!(uri)
  replace_self(self.join(uri))
end
##
# Merges a URI with a <code>Hash</code> of components.
# This method has different behavior from <code>join</code>. Any
# components present in the <code>hash</code> parameter will override the
# original components. The path component is not treated specially.
#
# @param [Hash, Addressable::URI, #to_hash] hash The components to merge with.
#
# @return [Addressable::URI] The merged URI (a new object; self is unchanged).
#
# @raise [TypeError] if the argument is not Hash-convertible.
# @raise [ArgumentError] if composite components (:authority, :userinfo)
#   are combined with their sub-components.
#
# @see Hash#merge
def merge(hash)
  if !hash.respond_to?(:to_hash)
    raise TypeError, "Can't convert #{hash.class} into Hash."
  end
  hash = hash.to_hash
  # :authority is composite; it conflicts with any of its parts.
  if hash.has_key?(:authority)
    if (hash.keys & [:userinfo, :user, :password, :host, :port]).any?
      raise ArgumentError,
        "Cannot specify both an authority and any of the components " +
        "within the authority."
    end
  end
  # :userinfo is composite; it conflicts with :user / :password.
  if hash.has_key?(:userinfo)
    if (hash.keys & [:user, :password]).any?
      raise ArgumentError,
        "Cannot specify both a userinfo and either the user or password."
    end
  end
  uri = self.class.new
  # Deferred validation: intermediate states may be individually invalid.
  uri.defer_validation do
    # Bunch of crazy logic required because of the composite components
    # like userinfo and authority.
    uri.scheme =
      hash.has_key?(:scheme) ? hash[:scheme] : self.scheme
    if hash.has_key?(:authority)
      uri.authority =
        hash.has_key?(:authority) ? hash[:authority] : self.authority
    end
    if hash.has_key?(:userinfo)
      uri.userinfo =
        hash.has_key?(:userinfo) ? hash[:userinfo] : self.userinfo
    end
    if !hash.has_key?(:userinfo) && !hash.has_key?(:authority)
      uri.user =
        hash.has_key?(:user) ? hash[:user] : self.user
      uri.password =
        hash.has_key?(:password) ? hash[:password] : self.password
    end
    if !hash.has_key?(:authority)
      uri.host =
        hash.has_key?(:host) ? hash[:host] : self.host
      uri.port =
        hash.has_key?(:port) ? hash[:port] : self.port
    end
    uri.path =
      hash.has_key?(:path) ? hash[:path] : self.path
    uri.query =
      hash.has_key?(:query) ? hash[:query] : self.query
    uri.fragment =
      hash.has_key?(:fragment) ? hash[:fragment] : self.fragment
  end
  return uri
end
##
# Destructive form of <code>merge</code>: replaces this URI's state with
# the merge result.
#
# @param [Hash, Addressable::URI, #to_hash] uri The components to merge with.
#
# @return [Addressable::URI] <code>self</code>, now holding the merged URI.
#
# @see Addressable::URI#merge
def merge!(uri)
  replace_self(self.merge(uri))
end
##
# Returns the shortest normalized relative form of this URI that uses the
# supplied URI as a base for resolution. Returns an absolute URI if
# necessary. This is effectively the opposite of <code>route_to</code>.
#
# @param [String, Addressable::URI, #to_str] uri The URI to route from.
#
# @return [Addressable::URI]
#   The normalized relative URI that is equivalent to the original URI.
#
# @raise [ArgumentError] if either URI is relative.
def route_from(uri)
  uri = URI.parse(uri).normalize
  normalized_self = self.normalize
  if normalized_self.relative?
    raise ArgumentError, "Expected absolute URI, got: #{self.to_s}"
  end
  if uri.relative?
    raise ArgumentError, "Expected absolute URI, got: #{uri.to_s}"
  end
  if normalized_self == uri
    # Identical URIs route to a bare fragment reference.
    return Addressable::URI.parse("##{normalized_self.fragment}")
  end
  components = normalized_self.to_hash
  # Strip each component that matches the base, outermost first:
  # scheme, then authority, then path/query.
  if normalized_self.scheme == uri.scheme
    components[:scheme] = nil
    if normalized_self.authority == uri.authority
      components[:user] = nil
      components[:password] = nil
      components[:host] = nil
      components[:port] = nil
      if normalized_self.path == uri.path
        components[:path] = nil
        if normalized_self.query == uri.query
          components[:query] = nil
        end
      else
        if uri.path != SLASH and components[:path]
          # Walk past the shared leading segments, then climb out of the
          # remaining base segments with "..".
          self_splitted_path = split_path(components[:path])
          uri_splitted_path = split_path(uri.path)
          self_dir = self_splitted_path.shift
          uri_dir = uri_splitted_path.shift
          while !self_splitted_path.empty? && !uri_splitted_path.empty? and self_dir == uri_dir
            self_dir = self_splitted_path.shift
            uri_dir = uri_splitted_path.shift
          end
          components[:path] = (uri_splitted_path.fill('..') + [self_dir] + self_splitted_path).join(SLASH)
        end
      end
    end
  end
  # Avoid network-path references.
  if components[:host] != nil
    components[:scheme] = normalized_self.scheme
  end
  return Addressable::URI.new(
    :scheme => components[:scheme],
    :user => components[:user],
    :password => components[:password],
    :host => components[:host],
    :port => components[:port],
    :path => components[:path],
    :query => components[:query],
    :fragment => components[:fragment]
  )
end
##
# Returns the shortest normalized relative form of the supplied URI that
# uses this URI as a base for resolution. Returns an absolute URI if
# necessary. This is effectively the opposite of <code>route_from</code>.
#
# @param [String, Addressable::URI, #to_str] uri The URI to route to.
#
# @return [Addressable::URI]
#   The normalized relative URI that is equivalent to the supplied URI.
def route_to(uri)
  return URI.parse(uri).route_from(self)
end
##
# Returns a normalized URI object.
#
# NOTE: This method does not attempt to fully conform to specifications.
# It exists largely to correct other people's failures to read the
# specifications, and also to deal with caching issues since several
# different URIs may represent the same resource and should not be
# cached multiple times.
#
# @return [Addressable::URI] The normalized URI (a new object).
def normalize
  # This is a special exception for the frequently misused feed
  # URI scheme: "feed:http://..." is unwrapped to the plain http URI.
  if normalized_scheme == "feed"
    if self.to_s =~ /^feed:\/*http:\/*/
      return URI.parse(
        self.to_s[/^feed:\/*(http:\/*.*)/, 1]
      ).normalize
    end
  end
  # Rebuild from the normalized form of each component.
  return self.class.new(
    :scheme => normalized_scheme,
    :authority => normalized_authority,
    :path => normalized_path,
    :query => normalized_query,
    :fragment => normalized_fragment
  )
end
##
# Destructively normalizes this URI object in place.
#
# @return [Addressable::URI] <code>self</code>, normalized.
#
# @see Addressable::URI#normalize
def normalize!
  replace_self(self.normalize)
end
##
# Creates a URI suitable for display to users. If semantic attacks are
# likely, the application should try to detect these and warn the user.
# See <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>,
# section 7.6 for more information.
#
# @return [Addressable::URI] A normalized URI whose host has been
#   converted from punycode back to Unicode for display purposes.
def display_uri
  display_uri = self.normalize
  display_uri.host = ::Addressable::IDNA.to_unicode(display_uri.host)
  return display_uri
end
##
# Returns <code>true</code> if the URI objects are equal. This method
# normalizes both URIs before doing the comparison, and allows comparison
# against <code>Strings</code>.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URIs are equivalent, <code>false</code>
#   otherwise (including when the argument cannot be parsed as a URI).
def ===(uri)
  if uri.respond_to?(:normalize)
    uri_string = uri.normalize.to_s
  else
    begin
      uri_string = ::Addressable::URI.parse(uri).normalize.to_s
    rescue InvalidURIError, TypeError
      # Unparseable input is simply "not equal", never an error.
      return false
    end
  end
  return self.normalize.to_s == uri_string
end
##
# Returns <code>true</code> if the URI objects are equal. This method
# normalizes both URIs before doing the comparison. Unlike #===, it only
# accepts URI objects, never Strings.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the URIs are equivalent, <code>false</code>
#   otherwise.
def ==(uri)
  return false unless uri.kind_of?(URI)
  return self.normalize.to_s == uri.normalize.to_s
end
##
# Returns <code>true</code> if the URI objects are exactly equal. This
# method does NOT normalize either URI before doing the comparison.
#
# @param [Object] uri The URI to compare.
#
# @return [TrueClass, FalseClass]
#   <code>true</code> if the string forms are identical,
#   <code>false</code> otherwise.
def eql?(uri)
  return false unless uri.kind_of?(URI)
  return self.to_s == uri.to_s
end
##
# A hash value for this URI, memoized and cleared by
# remove_composite_values when components change.
#
# NOTE(review): derived from the exact (un-normalized) string form, so it
# is consistent with #eql?, not with #==.
#
# @return [Integer] A hash of the URI.
def hash
  @hash ||= self.to_s.hash * -1
end
##
# Clones the URI object, duplicating each mutable component so the copy
# shares no String state with the original.
#
# @return [Addressable::URI] The cloned URI.
def dup
  duplicated_uri = self.class.new(
    :scheme => self.scheme ? self.scheme.dup : nil,
    :user => self.user ? self.user.dup : nil,
    :password => self.password ? self.password.dup : nil,
    :host => self.host ? self.host.dup : nil,
    :port => self.port,
    :path => self.path ? self.path.dup : nil,
    :query => self.query ? self.query.dup : nil,
    :fragment => self.fragment ? self.fragment.dup : nil
  )
  return duplicated_uri
end
##
# Omits components from a URI, returning a new URI without them.
#
# @param [Symbol] *components The components to be omitted.
#
# @return [Addressable::URI] The URI with components omitted.
#
# @raise [ArgumentError] if an unknown component name is supplied.
#
# @example
#   uri = Addressable::URI.parse("http://example.com/path?query")
#   #=> #<Addressable::URI:0xcc5e7a URI:http://example.com/path?query>
#   uri.omit(:scheme, :authority)
#   #=> #<Addressable::URI:0xcc4d86 URI:/path?query>
def omit(*components)
  invalid_components = components - [
    :scheme, :user, :password, :userinfo, :host, :port, :authority,
    :path, :query, :fragment
  ]
  unless invalid_components.empty?
    raise ArgumentError,
      "Invalid component names: #{invalid_components.inspect}."
  end
  duplicated_uri = self.dup
  # Deferred validation: intermediate states may be individually invalid.
  duplicated_uri.defer_validation do
    components.each do |component|
      duplicated_uri.send((component.to_s + "=").to_sym, nil)
    end
    duplicated_uri.user = duplicated_uri.normalized_user
  end
  duplicated_uri
end
##
# Destructive form of omit: removes the components from this URI in place.
#
# @param [Symbol] *components The components to be omitted.
#
# @return [Addressable::URI] <code>self</code> with components omitted.
#
# @see Addressable::URI#omit
def omit!(*components)
  replace_self(self.omit(*components))
end
##
# Determines if the URI is an empty string.
#
# @return [TrueClass, FalseClass]
#   Returns <code>true</code> if empty, <code>false</code> otherwise.
def empty?
  to_s.empty?
end
##
# Converts the URI to a <code>String</code>. The result is memoized in
# @uri_string and invalidated by remove_composite_values.
#
# @return [String] The URI's <code>String</code> representation (UTF-8).
#
# @raise [InvalidURIError] if a schemeless path could be mistaken for a
#   scheme (matches NORMPATH), making the string form ambiguous.
def to_s
  if self.scheme == nil && self.path != nil && !self.path.empty? &&
      self.path =~ NORMPATH
    raise InvalidURIError,
      "Cannot assemble URI string with ambiguous path: '#{self.path}'"
  end
  @uri_string ||= begin
    uri_string = String.new
    uri_string << "#{self.scheme}:" if self.scheme != nil
    uri_string << "//#{self.authority}" if self.authority != nil
    uri_string << self.path.to_s
    uri_string << "?#{self.query}" if self.query != nil
    uri_string << "##{self.fragment}" if self.fragment != nil
    uri_string.force_encoding(Encoding::UTF_8)
    uri_string
  end
end
##
# URI's are glorified <code>Strings</code>. Allow implicit conversion.
alias_method :to_str, :to_s
##
# Returns a Hash of the URI components.
#
# @return [Hash] The URI as a <code>Hash</code> of components
#   (:scheme, :user, :password, :host, :port, :path, :query, :fragment).
def to_hash
  return {
    :scheme => self.scheme,
    :user => self.user,
    :password => self.password,
    :host => self.host,
    :port => self.port,
    :path => self.path,
    :query => self.query,
    :fragment => self.fragment
  }
end
##
# Returns a <code>String</code> representation of the URI object's state.
#
# @return [String] The URI object's state, as a <code>String</code>,
#   e.g. "#<Addressable::URI:0x... URI:http://example.com/>".
def inspect
  sprintf("#<%s:%#0x URI:%s>", URI.to_s, self.object_id, self.to_s)
end
##
# This method allows you to make several changes to a URI simultaneously,
# which separately would cause validation errors, but in conjunction,
# are valid. The URI will be revalidated as soon as the entire block has
# been executed.
#
# @param [Proc] block
#   A set of operations to perform on a given URI.
#
# @return [NilClass]
# @raise [LocalJumpError] if no block is given.
def defer_validation
  raise LocalJumpError, "No block given." unless block_given?
  begin
    @validation_deferred = true
    yield
  ensure
    # BUGFIX: previously the flag was only reset after a successful yield,
    # so a block that raised left validation permanently disabled on this
    # instance. Always clear it, then let any exception propagate.
    @validation_deferred = false
  end
  validate
  return nil
end
protected
# Literal segments and the regexp rules from RFC 3986 section 5.2.4
# ("Remove Dot Segments"), used by normalize_path below.
SELF_REF = '.'
PARENT = '..'
# Rule 2A: "/./" or a trailing "/." collapses to "/".
RULE_2A = /\/\.\/|\/\.$/
# Rules 2B/2C: "/segment/../" or trailing "/segment/.." — captures the
# segment so dot-segments themselves are not treated as parents.
RULE_2B_2C = /\/([^\/]*)\/\.\.\/|\/([^\/]*)\/\.\.$/
# Rule 2D: leading "." or ".." (optionally with a slash) is dropped.
RULE_2D = /^\.\.?\/?/
# Non-standard: leading "/.." / "/." runs collapse to "/".
RULE_PREFIXED_PARENT = /^\/\.\.?\/|^(\/\.\.?)+\/?$/
##
# Resolves paths to their simplest form.
#
# @param [String, NilClass] path The path to normalize.
#
# @return [String, NilClass] The normalized path, or nil for nil input.
def self.normalize_path(path)
  # Section 5.2.4 of RFC 3986
  return if path.nil?
  normalized_path = path.dup
  # Repeat until a full pass makes no change. `mod` is re-created nil on
  # each iteration (block-local); gsub! returns nil when nothing matched,
  # so `mod ||= ...` records whether any rule fired this pass.
  loop do
    mod ||= normalized_path.gsub!(RULE_2A, SLASH)
    pair = normalized_path.match(RULE_2B_2C)
    if pair
      parent = pair[1]
      current = pair[2]
    else
      parent = nil
      current = nil
    end
    regexp = "/#{Regexp.escape(parent.to_s)}/\\.\\./|"
    regexp += "(/#{Regexp.escape(current.to_s)}/\\.\\.$)"
    # Only collapse "segment/.." when the segment is a real name, not a
    # dot-segment itself.
    if pair && ((parent != SELF_REF && parent != PARENT) ||
        (current != SELF_REF && current != PARENT))
      mod ||= normalized_path.gsub!(Regexp.new(regexp), SLASH)
    end
    mod ||= normalized_path.gsub!(RULE_2D, EMPTY_STR)
    # Non-standard, removes prefixed dotted segments from path.
    mod ||= normalized_path.gsub!(RULE_PREFIXED_PARENT, SLASH)
    break if mod.nil?
  end
  normalized_path
end
##
# Ensures that the URI is valid. No-op while validation is deferred
# (see #defer_validation).
#
# @return [NilClass]
# @raise [InvalidURIError] if the component combination is invalid.
def validate
  return if !!@validation_deferred
  # IP-based schemes need either a host or a path.
  if self.scheme != nil && self.ip_based? &&
      (self.host == nil || self.host.empty?) &&
      (self.path == nil || self.path.empty?)
    raise InvalidURIError,
      "Absolute URI missing hierarchical segment: '#{self.to_s}'"
  end
  # Userinfo/port make no sense without a host.
  if self.host == nil
    if self.port != nil ||
        self.user != nil ||
        self.password != nil
      raise InvalidURIError, "Hostname not supplied: '#{self.to_s}'"
    end
  end
  # With an authority, the path must be absolute.
  if self.path != nil && !self.path.empty? && self.path[0..0] != SLASH &&
      self.authority != nil
    raise InvalidURIError,
      "Cannot have a relative path with an authority set: '#{self.to_s}'"
  end
  # "//" at the start of a path would be parsed back as an authority.
  if self.path != nil && !self.path.empty? &&
      self.path[0..1] == SLASH + SLASH && self.authority == nil
    raise InvalidURIError,
      "Cannot have a path with two leading slashes " +
      "without an authority set: '#{self.to_s}'"
  end
  unreserved = CharacterClasses::UNRESERVED
  sub_delims = CharacterClasses::SUB_DELIMS
  # Reject forbidden characters in the host; bracketed (IP-literal) hosts
  # are checked against the RFC 3986 IP-literal character set.
  if !self.host.nil? && (self.host =~ /[<>{}\/\\\?\#\@"[[:space:]]]/ ||
      (self.host[/^\[(.*)\]$/, 1] != nil && self.host[/^\[(.*)\]$/, 1] !~
      Regexp.new("^[#{unreserved}#{sub_delims}:]*$")))
    raise InvalidURIError, "Invalid character in host: '#{self.host.to_s}'"
  end
  return nil
end
##
# Replaces the internal state of self with the specified URI's state.
# Used in destructive operations to avoid massive code repetition.
#
# @param [Addressable::URI] uri The URI to replace <code>self</code> with.
#
# @return [Addressable::URI] <code>self</code>.
def replace_self(uri)
  # Reset dependent values: drop every cached/normalized ivar except the
  # deferred-validation flag, which must survive the replacement.
  instance_variables.each do |var|
    if instance_variable_defined?(var) && var != :@validation_deferred
      remove_instance_variable(var)
    end
  end
  @scheme = uri.scheme
  @user = uri.user
  @password = uri.password
  @host = uri.host
  @port = uri.port
  @path = uri.path
  @query = uri.query
  @fragment = uri.fragment
  return self
end
##
# Splits a path string on "/" (slash). A trailing slash yields a trailing
# empty-string segment, which String#split alone would drop.
#
# @param [String] path The path to split.
#
# @return [Array<String>] The path segments.
def split_path(path)
  segments = path.split(SLASH)
  segments.push(EMPTY_STR) if path.end_with?(SLASH)
  segments
end
##
# Resets composite cached values (the assembled string form and the hash)
# for the entire URI. Called whenever a component changes.
#
# @api private
def remove_composite_values
  remove_instance_variable(:@uri_string) if defined?(@uri_string)
  remove_instance_variable(:@hash) if defined?(@hash)
end
end
end
|
require 'thread'
module Adhearsion
##
# Encapsulates call-related data and behavior.
#
class Call
  include HasGuardedHandlers

  attr_accessor :offer, :originating_voip_platform, :context, :connection, :end_reason, :commands

  # @param [Punchblock::Event::Offer, NilClass] offer the offer event that
  #   created this call; its connection is captured for writing commands.
  def initialize(offer = nil)
    if offer
      @offer = offer
      @connection = offer.connection
    end
    @tag_mutex = Mutex.new
    @tags = []
    @context = :adhearsion
    @end_reason_mutex = Mutex.new
    # BUGFIX: was `end_reason = nil`, which only assigned a throwaway
    # local variable. Initialize the instance variable explicitly.
    @end_reason = nil
    @commands = CommandRegistry.new
    set_originating_voip_platform!
    register_initial_handlers
  end

  # @return [String] the underlying Punchblock call ID
  def id
    @offer.call_id
  end

  # @return [Array] a thread-safe snapshot (copy) of this call's tags
  def tags
    @tag_mutex.synchronize { @tags.clone }
  end

  # This may still be a symbol, but no longer requires the tag to be a symbol although beware
  # that using a symbol would create a memory leak if used improperly
  # @param [String, Symbol] label String or Symbol with which to tag this call
  def tag(label)
    raise ArgumentError, "Tag must be a String or Symbol" unless [String, Symbol].include?(label.class)
    @tag_mutex.synchronize { @tags << label }
  end

  # Removes all occurrences of the given tag.
  def remove_tag(symbol)
    @tag_mutex.synchronize do
      @tags.reject! { |tag| tag == symbol }
    end
  end

  def tagged_with?(symbol)
    @tag_mutex.synchronize { @tags.include? symbol }
  end

  def register_event_handler(*guards, &block)
    register_handler :event, *guards, &block
  end

  def deliver_message(message)
    trigger_handler :event, message
  end
  alias << deliver_message

  # Installs the handler that tears the call down on a Punchblock End
  # event: deactivate, record the reason, fail outstanding commands.
  def register_initial_handlers
    on_end do |event|
      hangup
      @end_reason_mutex.synchronize { @end_reason = event.reason }
      commands.terminate
    end
  end

  def on_end(&block)
    register_event_handler :class => Punchblock::Event::End do |event|
      block.call event
      # :pass lets other registered handlers also see the event.
      throw :pass
    end
  end

  # @return [TrueClass, FalseClass] true until an end reason is recorded
  def active?
    @end_reason_mutex.synchronize { !end_reason }
  end

  def accept(headers = nil)
    write_and_await_response Punchblock::Command::Accept.new(:headers => headers)
  end

  def answer(headers = nil)
    write_and_await_response Punchblock::Command::Answer.new(:headers => headers)
  end

  def reject(reason = :busy, headers = nil)
    write_and_await_response Punchblock::Command::Reject.new(:reason => reason, :headers => headers)
  end

  # Hangs up the far end. No-op when the call is already inactive.
  def hangup!(headers = nil)
    return unless active?
    @end_reason_mutex.synchronize { @end_reason = true }
    write_and_await_response Punchblock::Command::Hangup.new(:headers => headers)
  end

  # Local bookkeeping only: removes this call from the active set.
  def hangup
    Adhearsion.remove_inactive_call self
  end

  def join(other_call_id)
    write_and_await_response Punchblock::Command::Join.new :other_call_id => other_call_id
  end

  # Lock the socket for a command. Can be used to allow the console to take
  # control of the thread in between AGI commands coming from the dialplan.
  def with_command_lock
    @command_monitor ||= Monitor.new
    @command_monitor.synchronize { yield }
  end

  # Writes a command, blocks for its response (up to +timeout+ seconds)
  # and raises if the response is an Exception. Returns the command.
  def write_and_await_response(command, timeout = 60)
    commands << command
    write_command command
    response = command.response timeout
    raise response if response.is_a? Exception
    command
  end

  def write_command(command)
    # Only a Hangup command may be written once the call is inactive.
    raise Hangup unless active? || command.is_a?(Punchblock::Command::Hangup)
    connection.async_write id, command
  end

  # @return [Hash] the headers delivered with the offer
  def variables
    offer.headers_hash
  end

  def define_variable_accessors(recipient = self)
    variables.each do |key, value|
      define_singleton_accessor_with_pair key, value, recipient
    end
  end

  private

  # Defines a public singleton accessor pair for +key+ and assigns +value+.
  def define_singleton_accessor_with_pair(key, value, recipient = self)
    recipient.metaclass.send :attr_accessor, key unless recipient.class.respond_to?("#{key}=")
    recipient.metaclass.send :public, key, "#{key}=".to_sym
    recipient.send "#{key}=", value
  end

  def set_originating_voip_platform!
    # TODO: Determine this from the headers somehow
    self.originating_voip_platform = :punchblock
  end

  # Registry of in-flight commands for this call.
  # NOTE(review): the generated delegates call #synchronize on self, so a
  # synchronization mixin (e.g. MonitorMixin) must be provided elsewhere —
  # confirm.
  class CommandRegistry
    include Enumerable

    def initialize
      @commands = []
    end

    # Generates methods that delegate to @commands under #synchronize.
    def self.synchronized_delegate(*args)
      args.each do |method_name|
        class_eval <<-EOS
          def #{method_name}(*args, &block)
            synchronize { @commands.__send__ #{method_name.inspect}, *args, &block }
          end
        EOS
      end
    end

    synchronized_delegate :empty?, :<<, :delete, :each

    # Fails every still-pending command with a Hangup response.
    def terminate
      hangup = Hangup.new
      each { |command| command.response = hangup if command.requested? }
    end
  end
end
end
MINOR: added call_id to call logs.
require 'thread'
module Adhearsion
##
# Encapsulates call-related data and behavior.
#
class Call
  include HasGuardedHandlers

  attr_accessor :offer, :originating_voip_platform, :context, :connection, :end_reason, :commands

  # @param [Punchblock::Event::Offer, NilClass] offer the offer event that
  #   created this call; its connection is captured for writing commands.
  def initialize(offer = nil)
    if offer
      @offer = offer
      @connection = offer.connection
    end
    @tag_mutex = Mutex.new
    @tags = []
    @context = :adhearsion
    @end_reason_mutex = Mutex.new
    # BUGFIX: was `end_reason = nil`, which only assigned a throwaway
    # local variable. Initialize the instance variable explicitly.
    @end_reason = nil
    @commands = CommandRegistry.new
    set_originating_voip_platform!
    register_initial_handlers
  end

  # @return [String] the underlying Punchblock call ID
  def id
    @offer.call_id
  end

  # @return [Array] a thread-safe snapshot (copy) of this call's tags
  def tags
    @tag_mutex.synchronize { @tags.clone }
  end

  # This may still be a symbol, but no longer requires the tag to be a symbol although beware
  # that using a symbol would create a memory leak if used improperly
  # @param [String, Symbol] label String or Symbol with which to tag this call
  def tag(label)
    raise ArgumentError, "Tag must be a String or Symbol" unless [String, Symbol].include?(label.class)
    @tag_mutex.synchronize { @tags << label }
  end

  # Removes all occurrences of the given tag.
  def remove_tag(symbol)
    @tag_mutex.synchronize do
      @tags.reject! { |tag| tag == symbol }
    end
  end

  def tagged_with?(symbol)
    @tag_mutex.synchronize { @tags.include? symbol }
  end

  def register_event_handler(*guards, &block)
    register_handler :event, *guards, &block
  end

  def deliver_message(message)
    trigger_handler :event, message
  end
  alias << deliver_message

  # Installs the handler that tears the call down on a Punchblock End
  # event: deactivate, record the reason, fail outstanding commands.
  def register_initial_handlers
    on_end do |event|
      hangup
      @end_reason_mutex.synchronize { @end_reason = event.reason }
      commands.terminate
    end
  end

  def on_end(&block)
    register_event_handler :class => Punchblock::Event::End do |event|
      block.call event
      # :pass lets other registered handlers also see the event.
      throw :pass
    end
  end

  # @return [TrueClass, FalseClass] true until an end reason is recorded
  def active?
    @end_reason_mutex.synchronize { !end_reason }
  end

  def accept(headers = nil)
    write_and_await_response Punchblock::Command::Accept.new(:headers => headers)
  end

  def answer(headers = nil)
    write_and_await_response Punchblock::Command::Answer.new(:headers => headers)
  end

  def reject(reason = :busy, headers = nil)
    write_and_await_response Punchblock::Command::Reject.new(:reason => reason, :headers => headers)
  end

  # Hangs up the far end. No-op when the call is already inactive.
  def hangup!(headers = nil)
    return unless active?
    @end_reason_mutex.synchronize { @end_reason = true }
    write_and_await_response Punchblock::Command::Hangup.new(:headers => headers)
  end

  # Local bookkeeping only: removes this call from the active set.
  def hangup
    Adhearsion.remove_inactive_call self
  end

  def join(other_call_id)
    write_and_await_response Punchblock::Command::Join.new :other_call_id => other_call_id
  end

  # Lock the socket for a command. Can be used to allow the console to take
  # control of the thread in between AGI commands coming from the dialplan.
  def with_command_lock
    @command_monitor ||= Monitor.new
    @command_monitor.synchronize { yield }
  end

  # Writes a command, blocks for its response (up to +timeout+ seconds)
  # and raises if the response is an Exception. Returns the command.
  def write_and_await_response(command, timeout = 60)
    commands << command
    write_command command
    response = command.response timeout
    raise response if response.is_a? Exception
    command
  end

  def write_command(command)
    # Only a Hangup command may be written once the call is inactive.
    raise Hangup unless active? || command.is_a?(Punchblock::Command::Hangup)
    connection.async_write id, command
  end

  # Logger per instance so every log line carries this call's ID.
  def logger
    @logger ||= Adhearsion::Logging::get_logger(self.class.to_s.concat(" ").concat(logger_id))
  end

  # Sanitize the offer id for use as a logger name component.
  def logger_id
    Adhearsion::Logging.sanitized_logger_name(id)
  end

  # @return [Hash] the headers delivered with the offer
  def variables
    offer.headers_hash
  end

  def define_variable_accessors(recipient = self)
    variables.each do |key, value|
      define_singleton_accessor_with_pair key, value, recipient
    end
  end

  private

  # Defines a public singleton accessor pair for +key+ and assigns +value+.
  def define_singleton_accessor_with_pair(key, value, recipient = self)
    recipient.metaclass.send :attr_accessor, key unless recipient.class.respond_to?("#{key}=")
    recipient.metaclass.send :public, key, "#{key}=".to_sym
    recipient.send "#{key}=", value
  end

  def set_originating_voip_platform!
    # TODO: Determine this from the headers somehow
    self.originating_voip_platform = :punchblock
  end

  # Registry of in-flight commands for this call.
  # NOTE(review): the generated delegates call #synchronize on self, so a
  # synchronization mixin (e.g. MonitorMixin) must be provided elsewhere —
  # confirm.
  class CommandRegistry
    include Enumerable

    def initialize
      @commands = []
    end

    # Generates methods that delegate to @commands under #synchronize.
    def self.synchronized_delegate(*args)
      args.each do |method_name|
        class_eval <<-EOS
          def #{method_name}(*args, &block)
            synchronize { @commands.__send__ #{method_name.inspect}, *args, &block }
          end
        EOS
      end
    end

    synchronized_delegate :empty?, :<<, :delete, :each

    # Fails every still-pending command with a Hangup response.
    def terminate
      hangup = Hangup.new
      each { |command| command.response = hangup if command.requested? }
    end
  end
end
end
|
# encoding: utf-8
require 'thread'
module Adhearsion
##
# Encapsulates call-related data and behavior.
#
class Call
# Raised when a dead (expired) call actor is accessed after termination.
ExpiredError = Class.new Celluloid::DeadActorError
# Raised when a command does not receive a response within its timeout.
CommandTimeout = Class.new StandardError

include Celluloid
include HasGuardedHandlers
# Wraps each newly created Celluloid proxy so that any method call made
# after the actor has died raises Call::ExpiredError instead of the raw
# Celluloid::DeadActorError.
def self.new(*args, &block)
  super.tap do |proxy|
    def proxy.method_missing(*args)
      super
    rescue Celluloid::DeadActorError
      raise ExpiredError, "This call is expired and is no longer accessible"
    end
  end
end
attr_accessor :offer, :client, :end_reason, :commands, :variables, :controllers

# Hash-style access on the call reads/writes its variables hash.
delegate :[], :[]=, :to => :variables
# :to / :from come straight from the offer; nil before an offer arrives.
delegate :to, :from, :to => :offer, :allow_nil => true

# @param [Punchblock::Event::Offer, NilClass] offer if given, it is
#   delivered to self so the Offer handler captures it.
def initialize(offer = nil)
  register_initial_handlers
  @tags = []
  @commands = CommandRegistry.new
  @variables = {}
  @controllers = []
  @end_reason = nil
  self << offer if offer
end
# The Punchblock call ID, or nil before an offer has been received.
def id
  offer ? offer.call_id : nil
end
# @return [Array] a defensive copy of this call's tags
def tags
  snapshot = @tags.clone
  snapshot
end
# This may still be a symbol, but no longer requires the tag to be a symbol although beware
# that using a symbol would create a memory leak if used improperly
# @param [String, Symbol] label String or Symbol with which to tag this call
def tag(label)
  # abort (not raise): crashes the caller, not this actor.
  abort ArgumentError.new "Tag must be a String or Symbol" unless [String, Symbol].include?(label.class)
  @tags << label
end
# Removes all occurrences of the given tag. Like Array#reject!, returns
# nil when nothing was removed.
def remove_tag(symbol)
  @tags.reject! { |existing| existing == symbol }
end
# @return [TrueClass, FalseClass] whether the call carries the given tag
def tagged_with?(symbol)
  @tags.member? symbol
end
# Registers a guarded handler for incoming call events.
def register_event_handler(*guards, &block)
  register_handler :event, *guards, &block
end
# Feeds an incoming event/message through the registered event handlers.
def deliver_message(message)
  logger.debug "Receiving message: #{message.inspect}"
  trigger_handler :event, message
end
alias << deliver_message
# Installs the built-in event handlers: capture the offer, merge headers
# into variables, signal joined/unjoined, and tear down on End. Each
# throws :pass so later handlers also see the event.
def register_initial_handlers # :nodoc:
  register_event_handler Punchblock::Event::Offer do |offer|
    @offer = offer
    @client = offer.client
    throw :pass
  end
  register_event_handler Punchblock::HasHeaders do |event|
    variables.merge! event.headers_hash
    throw :pass
  end
  register_event_handler Punchblock::Event::Joined do |event|
    target = event.other_call_id || event.mixer_name
    signal :joined, target
    throw :pass
  end
  register_event_handler Punchblock::Event::Unjoined do |event|
    target = event.other_call_id || event.mixer_name
    signal :unjoined, target
    throw :pass
  end
  on_end do |event|
    logger.info "Call ended"
    clear_from_active_calls
    @end_reason = event.reason
    commands.terminate
    # Keep the actor alive briefly so late queries still succeed.
    after(after_end_hold_time) { current_actor.terminate! }
  end
end
# Seconds to keep the actor alive after the call ends before terminating.
def after_end_hold_time # :nodoc:
  30
end
# Registers a handler invoked when the call receives a Punchblock End
# event. The :pass lets other handlers also see the event.
def on_end(&block)
  register_event_handler Punchblock::Event::End do |event|
    block.call event
    throw :pass
  end
end
# @return [TrueClass, FalseClass] true while no end reason is recorded
def active?
  end_reason ? false : true
end
# Accepts the call. Memoized so repeated calls send only one Accept.
def accept(headers = nil)
  @accept_command ||= write_and_await_response Punchblock::Command::Accept.new(:headers => headers)
end
# Answers the call.
def answer(headers = nil)
  write_and_await_response Punchblock::Command::Answer.new(:headers => headers)
end
# Rejects the call with the given reason (defaults to :busy).
def reject(reason = :busy, headers = nil)
  write_and_await_response Punchblock::Command::Reject.new(:reason => reason, :headers => headers)
end
# Hangs up the call. Returns false if the call is already inactive.
def hangup(headers = nil)
  return false unless active?
  logger.info "Hanging up"
  # Mark ended eagerly so no further commands are written meanwhile.
  @end_reason = true
  write_and_await_response Punchblock::Command::Hangup.new(:headers => headers)
end
# Removes this call (its actor proxy) from the global active-calls set.
def clear_from_active_calls # :nodoc:
  Adhearsion.active_calls.remove_inactive_call current_actor
end
##
# Joins this call to another call or a mixer
#
# @param [Call, String, Hash] target the target to join to. May be a Call object, a call ID (String, Hash) or a mixer name (Hash)
# @option target [String] call_id The call ID to join to
# @option target [String] mixer_name The mixer to join to
# @param [Hash, Optional] options further options to be joined with
#
def join(target, options = {})
  command = Punchblock::Command::Join.new join_options_with_target(target, options)
  write_and_await_response command
end
##
# Unjoins this call from another call or a mixer
#
# @param [Call, String, Hash] target the target to unjoin from. May be a Call object, a call ID (String, Hash) or a mixer name (Hash)
# @option target [String] call_id The call ID to unjoin from
# @option target [String] mixer_name The mixer to unjoin from
#
def unjoin(target)
  command = Punchblock::Command::Unjoin.new join_options_with_target(target)
  write_and_await_response command
end
# Build the options hash for a Punchblock Join/Unjoin command from a join target.
#
# @param [Call, String, Hash] target a Call object, a call ID (String) or a
#   Hash containing either :call_id or :mixer_name
# @param [Hash] options further options merged into the result
#
# @return [Hash] options suitable for Punchblock::Command::Join.new
def join_options_with_target(target, options = {})
  target_options = case target
  when Call
    { :other_call_id => target.id }
  when String
    { :other_call_id => target }
  when Hash
    abort ArgumentError.new "You cannot specify both a call ID and mixer name" if target.has_key?(:call_id) && target.has_key?(:mixer_name)
    target.tap do |t|
      # Only rewrite :call_id when it is actually present; previously a
      # mixer-name hash gained a spurious :other_call_id => nil entry.
      t[:other_call_id] = t.delete(:call_id) if t.has_key?(:call_id)
    end
  else
    abort ArgumentError.new "Don't know how to join to #{target.inspect}"
  end
  options.merge(target_options)
end
# Block the caller until a :joined signal arrives carrying the expected target.
# Returns immediately without waiting if the expected target is nil.
def wait_for_joined(expected_target)
  joined_target = nil
  joined_target = wait :joined until joined_target == expected_target
end
# Block the caller until an :unjoined signal arrives carrying the expected target.
# Returns immediately without waiting if the expected target is nil.
def wait_for_unjoined(expected_target)
  unjoined_target = nil
  unjoined_target = wait :unjoined until unjoined_target == expected_target
end
# Send a Mute command for this call.
def mute
write_and_await_response ::Punchblock::Command::Mute.new
end
# Send an Unmute command for this call.
def unmute
write_and_await_response ::Punchblock::Command::Unmute.new
end
# Write a command to the server and block until a response arrives.
#
# A ProtocolError response of :item_not_found means the call no longer
# exists on the server, so it is surfaced as a Hangup; any other error
# response aborts the actor with that error.
#
# @param [Punchblock::Command] command the command to execute
# @param [Integer] timeout seconds to wait for a response before aborting
#   with CommandTimeout
#
# @return [Punchblock::Command] the command, once its response has arrived
def write_and_await_response(command, timeout = 60)
  commands << command
  write_command command
  # Method-level rescue replaces the inner begin/rescue (which also bound
  # an unused exception variable); only #response can raise Timeout::Error.
  case (response = command.response timeout)
  when Punchblock::ProtocolError
    if response.name == :item_not_found
      abort Hangup.new(@end_reason)
    else
      abort response
    end
  when Exception
    abort response
  end
  command
rescue Timeout::Error
  abort CommandTimeout.new(command.to_s)
end
# Deliver a command to the server without waiting for a response.
#
# Aborts with Hangup when the call is no longer active, unless the
# command itself is a Hangup. Headers carried by the command are merged
# into the call's variables before dispatch (:async => true).
def write_command(command)
abort Hangup.new(@end_reason) unless active? || command.is_a?(Punchblock::Command::Hangup)
variables.merge! command.headers_hash if command.respond_to? :headers_hash
logger.debug "Executing command #{command.inspect}"
client.execute_command command, :call_id => id, :async => true
end
# Identifier used to tag this call's log output with its class and call ID.
def logger_id # :nodoc:
"#{self.class}: #{id}"
end
# NOTE(review): this override merely delegates to super; presumably it exists
# so the method is defined directly on Call (e.g. for the actor proxy's
# dispatch) — confirm before removing.
def logger # :nodoc:
super
end
# Implicit array conversion: a Call splats/destructures to a one-element
# array containing its actor proxy.
def to_ary
[current_actor]
end
# Human-readable dump of the call's principal attributes.
def inspect
  described_attributes = [:offer, :end_reason, :commands, :variables, :controllers, :to, :from].collect do |name|
    "#{name}=#{send(name).inspect}"
  end
  "#<#{self.class}:#{id} #{described_attributes.join(', ')}>"
end
# Execute a call controller against this call in a new thread.
#
# The call is hung up when the controller finishes, even if it raised
# (the ensure clause), after which the optional latch is counted down.
# The worker thread is registered in Adhearsion::Process.important_threads.
#
# @param [CallController] controller the controller instance to execute
# @param [#countdown!, nil] latch signalled once execution has completed
def execute_controller(controller, latch = nil)
Thread.new do
catching_standard_errors do
begin
CallController.exec controller
ensure
hangup
end
latch.countdown! if latch
end
end.tap { |t| Adhearsion::Process.important_threads << t }
end
# Record a controller as executing on this call.
def register_controller(controller)
@controllers << controller
end
# Pause all controllers executing on this call.
def pause_controllers
controllers.each(&:pause!)
end
# Resume all paused controllers executing on this call.
def resume_controllers
controllers.each(&:resume!)
end
# Tracks the call's in-flight commands. # :nodoc:
class CommandRegistry < ThreadSafeArray # :nodoc:
  # Resolve every command still awaiting a response with a Hangup error,
  # releasing anything blocked on Command#response.
  def terminate
    hangup_response = Hangup.new
    each do |command|
      command.response = hangup_response if command.requested?
    end
  end
end
end#Call
end#Adhearsion
[CS] Clean up Call#write_and_await_response a little
# encoding: utf-8
require 'thread'
module Adhearsion
##
# Encapsulates call-related data and behavior.
#
class Call
ExpiredError = Class.new Celluloid::DeadActorError
CommandTimeout = Class.new StandardError
include Celluloid
include HasGuardedHandlers
def self.new(*args, &block)
super.tap do |proxy|
def proxy.method_missing(*args)
super
rescue Celluloid::DeadActorError
raise ExpiredError, "This call is expired and is no longer accessible"
end
end
end
attr_accessor :offer, :client, :end_reason, :commands, :variables, :controllers
delegate :[], :[]=, :to => :variables
delegate :to, :from, :to => :offer, :allow_nil => true
def initialize(offer = nil)
register_initial_handlers
@tags = []
@commands = CommandRegistry.new
@variables = {}
@controllers = []
@end_reason = nil
self << offer if offer
end
def id
offer.call_id if offer
end
def tags
@tags.clone
end
# This may still be a symbol, but no longer requires the tag to be a symbol although beware
# that using a symbol would create a memory leak if used improperly
# @param [String, Symbol] label String or Symbol with which to tag this call
def tag(label)
abort ArgumentError.new "Tag must be a String or Symbol" unless [String, Symbol].include?(label.class)
@tags << label
end
def remove_tag(symbol)
@tags.reject! { |tag| tag == symbol }
end
def tagged_with?(symbol)
@tags.include? symbol
end
def register_event_handler(*guards, &block)
register_handler :event, *guards, &block
end
def deliver_message(message)
logger.debug "Receiving message: #{message.inspect}"
trigger_handler :event, message
end
alias << deliver_message
def register_initial_handlers # :nodoc:
register_event_handler Punchblock::Event::Offer do |offer|
@offer = offer
@client = offer.client
throw :pass
end
register_event_handler Punchblock::HasHeaders do |event|
variables.merge! event.headers_hash
throw :pass
end
register_event_handler Punchblock::Event::Joined do |event|
target = event.other_call_id || event.mixer_name
signal :joined, target
throw :pass
end
register_event_handler Punchblock::Event::Unjoined do |event|
target = event.other_call_id || event.mixer_name
signal :unjoined, target
throw :pass
end
on_end do |event|
logger.info "Call ended"
clear_from_active_calls
@end_reason = event.reason
commands.terminate
after(after_end_hold_time) { current_actor.terminate! }
end
end
def after_end_hold_time # :nodoc:
30
end
def on_end(&block)
register_event_handler Punchblock::Event::End do |event|
block.call event
throw :pass
end
end
def active?
!end_reason
end
def accept(headers = nil)
@accept_command ||= write_and_await_response Punchblock::Command::Accept.new(:headers => headers)
end
def answer(headers = nil)
write_and_await_response Punchblock::Command::Answer.new(:headers => headers)
end
def reject(reason = :busy, headers = nil)
write_and_await_response Punchblock::Command::Reject.new(:reason => reason, :headers => headers)
end
def hangup(headers = nil)
return false unless active?
logger.info "Hanging up"
@end_reason = true
write_and_await_response Punchblock::Command::Hangup.new(:headers => headers)
end
def clear_from_active_calls # :nodoc:
Adhearsion.active_calls.remove_inactive_call current_actor
end
##
# Joins this call to another call or a mixer
#
# @param [Call, String, Hash] target the target to join to. May be a Call object, a call ID (String, Hash) or a mixer name (Hash)
# @option target [String] call_id The call ID to join to
# @option target [String] mixer_name The mixer to join to
# @param [Hash, Optional] options further options to be joined with
#
def join(target, options = {})
command = Punchblock::Command::Join.new join_options_with_target(target, options)
write_and_await_response command
end
##
# Unjoins this call from another call or a mixer
#
# @param [Call, String, Hash] target the target to unjoin from. May be a Call object, a call ID (String, Hash) or a mixer name (Hash)
# @option target [String] call_id The call ID to unjoin from
# @option target [String] mixer_name The mixer to unjoin from
#
def unjoin(target)
command = Punchblock::Command::Unjoin.new join_options_with_target(target)
write_and_await_response command
end
# Build the options hash for a Punchblock Join/Unjoin command from a join target.
#
# @param [Call, String, Hash] target a Call object, a call ID (String) or a
#   Hash containing either :call_id or :mixer_name
# @param [Hash] options further options merged into the result
#
# @return [Hash] options suitable for Punchblock::Command::Join.new
def join_options_with_target(target, options = {})
  target_options = case target
  when Call
    { :other_call_id => target.id }
  when String
    { :other_call_id => target }
  when Hash
    abort ArgumentError.new "You cannot specify both a call ID and mixer name" if target.has_key?(:call_id) && target.has_key?(:mixer_name)
    target.tap do |t|
      # Only rewrite :call_id when it is actually present; previously a
      # mixer-name hash gained a spurious :other_call_id => nil entry.
      t[:other_call_id] = t.delete(:call_id) if t.has_key?(:call_id)
    end
  else
    abort ArgumentError.new "Don't know how to join to #{target.inspect}"
  end
  options.merge(target_options)
end
def wait_for_joined(expected_target)
target = nil
until target == expected_target do
target = wait :joined
end
end
def wait_for_unjoined(expected_target)
target = nil
until target == expected_target do
target = wait :unjoined
end
end
def mute
write_and_await_response ::Punchblock::Command::Mute.new
end
def unmute
write_and_await_response ::Punchblock::Command::Unmute.new
end
# Write a command to the server and block until a response arrives.
#
# A ProtocolError response of :item_not_found means the call no longer
# exists on the server, so it is surfaced as a Hangup; any other error
# response aborts the actor with that error.
#
# @param [Punchblock::Command] command the command to execute
# @param [Integer] timeout seconds to wait for a response before aborting
#   with CommandTimeout
#
# @return [Punchblock::Command] the command, once its response has arrived
def write_and_await_response(command, timeout = 60)
  commands << command
  write_command command
  case (response = command.response timeout)
  when Punchblock::ProtocolError
    if response.name == :item_not_found
      abort Hangup.new(@end_reason)
    else
      abort response
    end
  when Exception
    abort response
  end
  command
rescue Timeout::Error
  # Dropped the unused `=> e` binding; the exception object was never read.
  abort CommandTimeout.new(command.to_s)
end
def write_command(command)
abort Hangup.new(@end_reason) unless active? || command.is_a?(Punchblock::Command::Hangup)
variables.merge! command.headers_hash if command.respond_to? :headers_hash
logger.debug "Executing command #{command.inspect}"
client.execute_command command, :call_id => id, :async => true
end
def logger_id # :nodoc:
"#{self.class}: #{id}"
end
def logger # :nodoc:
super
end
def to_ary
[current_actor]
end
def inspect
attrs = [:offer, :end_reason, :commands, :variables, :controllers, :to, :from].map do |attr|
"#{attr}=#{send(attr).inspect}"
end
"#<#{self.class}:#{id} #{attrs.join ', '}>"
end
def execute_controller(controller, latch = nil)
Thread.new do
catching_standard_errors do
begin
CallController.exec controller
ensure
hangup
end
latch.countdown! if latch
end
end.tap { |t| Adhearsion::Process.important_threads << t }
end
def register_controller(controller)
@controllers << controller
end
def pause_controllers
controllers.each(&:pause!)
end
def resume_controllers
controllers.each(&:resume!)
end
class CommandRegistry < ThreadSafeArray # :nodoc:
def terminate
hangup = Hangup.new
each { |command| command.response = hangup if command.requested? }
end
end
end#Call
end#Adhearsion
|
# encoding: utf-8
require 'has_guarded_handlers'
require 'thread'
require 'active_support/hash_with_indifferent_access'
require 'active_support/core_ext/hash/indifferent_access'
require 'adhearsion'
module Adhearsion
##
# Encapsulates call-related data and behavior.
#
class Call
Hangup = Class.new Adhearsion::Error
CommandTimeout = Class.new Adhearsion::Error
ExpiredError = Class.new Celluloid::DeadActorError
include Celluloid
include HasGuardedHandlers
execute_block_on_receiver :register_handler, :register_tmp_handler, :register_handler_with_priority, :register_handler_with_options, :register_event_handler, :on_joined, :on_unjoined, :on_end, :execute_controller, *execute_block_on_receiver
finalizer :finalize
def self.new(*args, &block)
super.tap do |proxy|
def proxy.method_missing(*args)
super
rescue Celluloid::DeadActorError
raise ExpiredError, "This call is expired and is no longer accessible. See http://adhearsion.com/docs/calls for further details."
end
end
end
# @return [Symbol] the reason for the call ending
attr_reader :end_reason
# @return [String] the reason code for the call ending
attr_reader :end_code
# @return [Array<Adhearsion::CallController>] the set of call controllers executing on the call
attr_reader :controllers
# @return [Hash<String => String>] a collection of SIP headers set during the call
attr_reader :variables
# @return [Time] the time at which the call began. For inbound calls this is the time at which the call was offered to Adhearsion. For outbound calls it is the time at which the remote party answered.
attr_reader :start_time
# @return [Time] the time at which the call began. For inbound calls this is the time at which the call was offered to Adhearsion. For outbound calls it is the time at which the remote party answered.
attr_reader :end_time
# @return [true, false] whether or not the call should be automatically hung up after executing its controller
attr_accessor :auto_hangup
# @return [Integer] the number of seconds after the call is hung up that the controller will remain active
attr_accessor :after_hangup_lifetime
delegate :[], :[]=, :to => :variables
# @return [String] the value of the To header from the signaling protocol
delegate :to, to: :offer, allow_nil: true
# @return [String] the value of the From header from the signaling protocol
delegate :from, to: :offer, allow_nil: true
# Construct a call URI of the form "transport:id@domain", omitting any
# component that is nil.
#
# @param [String, nil] transport the transport scheme prefix
# @param [String, nil] id the call ID
# @param [String, nil] domain the domain suffix
#
# @return [String, nil] the assembled URI, or nil when no id is given
def self.uri(transport, id, domain)
  return nil unless id
  prefix = transport ? transport + ":" : ""
  suffix = domain ? "@" + domain : ""
  prefix + id + suffix
end
def initialize(offer = nil)
register_initial_handlers
@offer = nil
@tags = []
@commands = CommandRegistry.new
@variables = HashWithIndifferentAccess.new
@controllers = []
@end_reason = nil
@end_code = nil
@end_blocker = Celluloid::Condition.new
@peers = {}
@duration = nil
@auto_hangup = true
@after_hangup_lifetime = nil
self << offer if offer
end
#
# @return [String, nil] The globally unique ID for the call
#
def id
offer.target_call_id if offer
end
alias :to_s :id
#
# @return [String, nil] The domain on which the call resides
#
def domain
offer.domain if offer
end
#
# @return [String, nil] The uri at which the call resides
#
def uri
self.class.uri(transport, id, domain)
end
#
# @return [Array] The set of labels with which this call has been tagged.
#
def tags
@tags.clone
end
#
# Tag a call with an arbitrary label
#
# @param [String, Symbol] label String or Symbol with which to tag this call
#
def tag(label)
abort ArgumentError.new "Tag must be a String or Symbol" unless [String, Symbol].include?(label.class)
@tags << label
end
#
# Remove a label
#
# @param [String, Symbol] label
#
def remove_tag(label)
@tags.reject! { |tag| tag == label }
end
#
# Establish if the call is tagged with the provided label
#
# @param [String, Symbol] label
#
def tagged_with?(label)
@tags.include? label
end
#
# Hash of joined peers
# @return [Hash<String => Adhearsion::Call>]
#
def peers
@peers.clone
end
#
# Wait for the call to end. Returns immediately if the call has already ended, else blocks until it does so.
# @return [Symbol] the reason for the call ending
#
def wait_for_end
if end_reason
# Already ended: return the recorded reason without blocking
end_reason
else
# Block on the end condition; the End handler broadcasts the reason
@end_blocker.wait
end
end
#
# Register a handler for events on this call. Note that Adhearsion::Call implements the has-guarded-handlers API, and all of its methods are available. Specifically, all Adhearsion events are available on the `:event` channel.
#
# @param [guards] guards take a look at the guards documentation
#
# @yield [Object] trigger_object the incoming event
#
# @return [String] handler ID for later manipulation
#
# @see http://adhearsion.github.io/has-guarded-handlers for more details
#
def register_event_handler(*guards, &block)
register_handler :event, *guards, &block
end
def deliver_message(message)
logger.debug "Receiving message: #{message.inspect}"
catching_standard_errors do
trigger_handler :event, message, broadcast: true, exception_callback: ->(e) { Adhearsion::Events.trigger :exception, [e, logger] }
end
end
alias << deliver_message
def commands
@commands.clone
end
# @private
def register_initial_handlers
register_event_handler Punchblock::Event::Offer do |offer|
@offer = offer
@client = offer.client
@start_time = offer.timestamp.to_time
end
register_event_handler Punchblock::HasHeaders do |event|
merge_headers event.headers
end
on_joined do |event|
if event.call_uri
target = event.call_uri
type = :call
else
target = event.mixer_name
type = :mixer
end
logger.info "Joined to #{type} #{target}"
call = Adhearsion.active_calls.with_uri(target)
@peers[target] = call
signal :joined, target
end
on_unjoined do |event|
if event.call_uri
target = event.call_uri
type = :call
else
target = event.mixer_name
type = :mixer
end
logger.info "Unjoined from #{type} #{target}"
@peers.delete target
signal :unjoined, target
end
on_end do |event|
logger.info "Call #{from} -> #{to} ended due to #{event.reason}#{" (code #{event.platform_code})" if event.platform_code}"
@end_time = event.timestamp.to_time
@duration = @end_time - @start_time if @start_time
clear_from_active_calls
@end_reason = event.reason
@end_code = event.platform_code
@end_blocker.broadcast event.reason
@commands.terminate
after(@after_hangup_lifetime || Adhearsion.config.platform.after_hangup_lifetime) { terminate }
end
end
# @return [Float] The call duration until the current time, or until the call was disconnected, whichever is earlier
# @return [Float] the fixed duration once the call has ended, the running
#   time since @start_time while the call is live, or 0.0 before it starts
def duration
  return @duration if @duration
  @start_time ? Time.now - @start_time : 0.0
end
##
# Registers a callback for when this call is joined to another call or a mixer
#
# @param [Call, String, Hash, nil] target the target to guard on. May be a Call object, a call ID (String, Hash) or a mixer name (Hash)
# @option target [String] call_uri The call ID to guard on
# @option target [String] mixer_name The mixer name to guard on
#
def on_joined(target = nil, &block)
register_event_handler Punchblock::Event::Joined, *guards_for_target(target) do |event|
block.call event
end
end
##
# Registers a callback for when this call is unjoined from another call or a mixer
#
# @param [Call, String, Hash, nil] target the target to guard on. May be a Call object, a call ID (String, Hash) or a mixer name (Hash)
# @option target [String] call_uri The call ID to guard on
# @option target [String] mixer_name The mixer name to guard on
#
def on_unjoined(target = nil, &block)
register_event_handler Punchblock::Event::Unjoined, *guards_for_target(target), &block
end
# @private
# Convert a join target into handler guard criteria. No target means no
# guards (the handler fires for every Joined/Unjoined event).
def guards_for_target(target)
  return [] unless target
  [target_from_join_options(join_options_with_target(target))]
end
def on_end(&block)
register_event_handler Punchblock::Event::End, &block
end
#
# @return [Boolean] if the call is currently active or not (disconnected)
#
def active?
!end_reason
end
def accept(headers = nil)
@accept_command ||= write_and_await_response Punchblock::Command::Accept.new(:headers => headers)
rescue Punchblock::ProtocolError => e
abort e
end
def answer(headers = nil)
write_and_await_response Punchblock::Command::Answer.new(:headers => headers)
rescue Punchblock::ProtocolError => e
abort e
end
def reject(reason = :busy, headers = nil)
write_and_await_response Punchblock::Command::Reject.new(:reason => reason, :headers => headers)
Adhearsion::Events.trigger_immediately :call_rejected, call: current_actor, reason: reason
rescue Punchblock::ProtocolError => e
abort e
end
#
# Redirect the call to some other target system.
#
# If the redirect is successful, the call will be released from the
# telephony engine and Adhearsion will lose control of the call.
#
# Note that for the common case, this will result in a SIP 302 or
# SIP REFER, which provides the caller with a new URI to dial. As such,
# the redirect target cannot be any telephony-engine specific address
# (such as sofia/gateway, agent/101, or SIP/mypeer); instead it should be a
# fully-qualified external SIP URI that the caller can independently reach.
#
# @param [String] to the target to redirect to, eg a SIP URI
# @param [Hash, optional] headers a set of headers to send along with the redirect instruction
def redirect(to, headers = nil)
write_and_await_response Punchblock::Command::Redirect.new(to: to, headers: headers)
rescue Punchblock::ProtocolError => e
abort e
end
def hangup(headers = nil)
return false unless active?
logger.info "Hanging up"
@end_reason = true
write_and_await_response Punchblock::Command::Hangup.new(:headers => headers)
rescue Punchblock::ProtocolError => e
abort e
end
# @private
def clear_from_active_calls
Adhearsion.active_calls.remove_inactive_call current_actor
end
##
# Joins this call to another call or a mixer
#
# @param [Call, String, Hash] target the target to join to. May be a Call object, a call ID (String, Hash) or a mixer name (Hash)
# @option target [String] call_uri The call ID to join to
# @option target [String] mixer_name The mixer to join to
# @param [Hash, Optional] options further options to be joined with
#
# @return [Hash] where :command is the issued command, :joined_waiter is a #wait responder which is triggered when the join is complete, and :unjoined_waiter is a #wait responder which is triggered when the entities are unjoined
#
def join(target, options = {})
logger.debug "Joining to #{target}"
joined_condition = CountDownLatch.new(1)
on_joined target do
joined_condition.countdown!
end
unjoined_condition = CountDownLatch.new(1)
on_unjoined target do
unjoined_condition.countdown!
end
on_end do
joined_condition.countdown!
unjoined_condition.countdown!
end
command = Punchblock::Command::Join.new options.merge(join_options_with_target(target))
write_and_await_response command
{command: command, joined_condition: joined_condition, unjoined_condition: unjoined_condition}
rescue Punchblock::ProtocolError => e
abort e
end
##
# Unjoins this call from another call or a mixer
#
# @param [Call, String, Hash, nil] target the target to unjoin from. May be a Call object, a call ID (String, Hash), a mixer name (Hash) or missing to unjoin from every existing join (nil)
# @option target [String] call_uri The call ID to unjoin from
# @option target [String] mixer_name The mixer to unjoin from
#
def unjoin(target = nil)
logger.info "Unjoining from #{target}"
command = Punchblock::Command::Unjoin.new join_options_with_target(target)
write_and_await_response command
rescue Punchblock::ProtocolError => e
abort e
end
# @private
# Build the options hash for a Punchblock Join/Unjoin command from a join
# target.
#
# @param [Call, String, Hash, nil] target a Call object, a call ID (String),
#   a Hash with :call_uri or :mixer_name, or nil (empty options)
#
# @return [Hash] options suitable for the Join/Unjoin command constructors
def join_options_with_target(target)
case target
when nil
{}
when Call
{ :call_uri => target.uri }
when String
# A bare call ID is scoped to this call's own transport and domain
{ :call_uri => self.class.uri(transport, target, domain) }
when Hash
abort ArgumentError.new "You cannot specify both a call URI and mixer name" if target.has_key?(:call_uri) && target.has_key?(:mixer_name)
target
else
abort ArgumentError.new "Don't know how to join to #{target.inspect}"
end
end
# @private
def target_from_join_options(options)
call_uri = options[:call_uri]
return {call_uri: call_uri} if call_uri
{mixer_name: options[:mixer_name]}
end
def wait_for_joined(expected_target)
target = nil
until target == expected_target do
target = wait :joined
end
end
def wait_for_unjoined(expected_target)
target = nil
until target == expected_target do
target = wait :unjoined
end
end
def mute
write_and_await_response Punchblock::Command::Mute.new
rescue Punchblock::ProtocolError => e
abort e
end
def unmute
write_and_await_response Punchblock::Command::Unmute.new
rescue Punchblock::ProtocolError => e
abort e
end
# @private
def write_and_await_response(command, timeout = 60, fatal = false)
@commands << command
write_command command
error_handler = fatal ? ->(error) { raise error } : ->(error) { abort error }
response = defer { command.response timeout }
case response
when Punchblock::ProtocolError
if response.name == :item_not_found
error_handler[Hangup.new(@end_reason)]
else
error_handler[response]
end
when Exception
error_handler[response]
end
command
rescue Timeout::Error
error_handler[CommandTimeout.new(command.to_s)]
ensure
@commands.delete command
end
# @private
def write_command(command)
abort Hangup.new(@end_reason) unless active? || command.is_a?(Punchblock::Command::Hangup)
merge_headers command.headers if command.respond_to? :headers
logger.debug "Executing command #{command.inspect}"
unless command.is_a?(Punchblock::Command::Dial)
command.target_call_id = id
command.domain = domain
end
client.execute_command command
end
##
# Sends a message to the caller
#
# @param [String] body The message text.
# @param [Hash, Optional] options The message options.
# @option options [String] subject The message subject.
#
def send_message(body, options = {})
logger.debug "Sending message: #{body}"
client.send_message id, domain, body, options
end
# @private
def logger_id
"#{self.class}: #{id}@#{domain}"
end
# @private
def inspect
return "..." if Celluloid.detect_recursion
attrs = [:offer, :end_reason, :commands, :variables, :controllers, :to, :from].map do |attr|
"#{attr}=#{send(attr).inspect}"
end
"#<#{self.class}:#{id}@#{domain} #{attrs.join ', '}>"
end
#
# Execute a call controller asynchronously against this call.
#
# @param [Adhearsion::CallController] controller an instance of a controller initialized for this call
# @param [Proc] a callback to be executed when the controller finishes execution
#
# @yield execute the current block as the body of a controller by specifying no controller instance
#
def execute_controller(controller = nil, completion_callback = nil, &block)
raise ArgumentError, "Cannot supply a controller and a block at the same time" if controller && block_given?
controller ||= CallController.new current_actor, &block
logger.info "Executing controller #{controller.inspect}"
controller.bg_exec completion_callback
end
# @private
def register_controller(controller)
@controllers << controller
end
# @private
def pause_controllers
controllers.each(&:pause!)
end
# @private
def resume_controllers
controllers.each(&:resume!)
end
private
def offer
@offer
end
def client
@client
end
def transport
offer.transport if offer
end
# Fold protocol headers into the call's variables, normalising each key:
# downcased, with hyphens replaced by underscores (e.g. "X-Foo" => "x_foo").
def merge_headers(headers)
  headers.each do |name, value|
    normalized_key = name.to_s.downcase.tr('-', '_')
    variables[normalized_key] = value
  end
end
# Celluloid finalizer: remove this call's logger entry from the logging
# repository so it is not retained after the actor terminates.
def finalize
::Logging::Repository.instance.delete logger_id
end
# @private
# Tracks the call's in-flight commands.
class CommandRegistry < Array
  # Resolve every command still awaiting a response with a Hangup error,
  # releasing anything blocked on Command#response.
  def terminate
    hangup_response = Hangup.new
    select(&:requested?).each { |command| command.response = hangup_response }
  end
end
end
end
API doc enhancement for Call#execute_controller
# encoding: utf-8
require 'has_guarded_handlers'
require 'thread'
require 'active_support/hash_with_indifferent_access'
require 'active_support/core_ext/hash/indifferent_access'
require 'adhearsion'
module Adhearsion
##
# Encapsulates call-related data and behavior.
#
class Call
Hangup = Class.new Adhearsion::Error
CommandTimeout = Class.new Adhearsion::Error
ExpiredError = Class.new Celluloid::DeadActorError
include Celluloid
include HasGuardedHandlers
execute_block_on_receiver :register_handler, :register_tmp_handler, :register_handler_with_priority, :register_handler_with_options, :register_event_handler, :on_joined, :on_unjoined, :on_end, :execute_controller, *execute_block_on_receiver
finalizer :finalize
def self.new(*args, &block)
super.tap do |proxy|
def proxy.method_missing(*args)
super
rescue Celluloid::DeadActorError
raise ExpiredError, "This call is expired and is no longer accessible. See http://adhearsion.com/docs/calls for further details."
end
end
end
# @return [Symbol] the reason for the call ending
attr_reader :end_reason
# @return [String] the reason code for the call ending
attr_reader :end_code
# @return [Array<Adhearsion::CallController>] the set of call controllers executing on the call
attr_reader :controllers
# @return [Hash<String => String>] a collection of SIP headers set during the call
attr_reader :variables
# @return [Time] the time at which the call began. For inbound calls this is the time at which the call was offered to Adhearsion. For outbound calls it is the time at which the remote party answered.
attr_reader :start_time
# @return [Time] the time at which the call began. For inbound calls this is the time at which the call was offered to Adhearsion. For outbound calls it is the time at which the remote party answered.
attr_reader :end_time
# @return [true, false] whether or not the call should be automatically hung up after executing its controller
attr_accessor :auto_hangup
# @return [Integer] the number of seconds after the call is hung up that the controller will remain active
attr_accessor :after_hangup_lifetime
delegate :[], :[]=, :to => :variables
# @return [String] the value of the To header from the signaling protocol
delegate :to, to: :offer, allow_nil: true
# @return [String] the value of the From header from the signaling protocol
delegate :from, to: :offer, allow_nil: true
# Construct a call URI of the form "transport:id@domain", omitting any
# component that is nil.
#
# @param [String, nil] transport the transport scheme prefix
# @param [String, nil] id the call ID
# @param [String, nil] domain the domain suffix
#
# @return [String, nil] the assembled URI, or nil when no id is given
def self.uri(transport, id, domain)
  return nil unless id
  prefix = transport ? transport + ":" : ""
  suffix = domain ? "@" + domain : ""
  prefix + id + suffix
end
def initialize(offer = nil)
register_initial_handlers
@offer = nil
@tags = []
@commands = CommandRegistry.new
@variables = HashWithIndifferentAccess.new
@controllers = []
@end_reason = nil
@end_code = nil
@end_blocker = Celluloid::Condition.new
@peers = {}
@duration = nil
@auto_hangup = true
@after_hangup_lifetime = nil
self << offer if offer
end
#
# @return [String, nil] The globally unique ID for the call
#
def id
offer.target_call_id if offer
end
alias :to_s :id
#
# @return [String, nil] The domain on which the call resides
#
def domain
offer.domain if offer
end
#
# @return [String, nil] The uri at which the call resides
#
def uri
self.class.uri(transport, id, domain)
end
#
# @return [Array] The set of labels with which this call has been tagged.
#
def tags
@tags.clone
end
#
# Tag a call with an arbitrary label
#
# @param [String, Symbol] label String or Symbol with which to tag this call
#
def tag(label)
abort ArgumentError.new "Tag must be a String or Symbol" unless [String, Symbol].include?(label.class)
@tags << label
end
#
# Remove a label
#
# @param [String, Symbol] label
#
def remove_tag(label)
@tags.reject! { |tag| tag == label }
end
#
# Establish if the call is tagged with the provided label
#
# @param [String, Symbol] label
#
def tagged_with?(label)
@tags.include? label
end
#
# Hash of joined peers
# @return [Hash<String => Adhearsion::Call>]
#
def peers
@peers.clone
end
#
# Wait for the call to end. Returns immediately if the call has already ended, else blocks until it does so.
# @return [Symbol] the reason for the call ending
#
def wait_for_end
if end_reason
end_reason
else
@end_blocker.wait
end
end
#
# Register a handler for events on this call. Note that Adhearsion::Call implements the has-guarded-handlers API, and all of its methods are available. Specifically, all Adhearsion events are available on the `:event` channel.
#
# @param [guards] guards take a look at the guards documentation
#
# @yield [Object] trigger_object the incoming event
#
# @return [String] handler ID for later manipulation
#
# @see http://adhearsion.github.io/has-guarded-handlers for more details
#
def register_event_handler(*guards, &block)
register_handler :event, *guards, &block
end
def deliver_message(message)
logger.debug "Receiving message: #{message.inspect}"
catching_standard_errors do
trigger_handler :event, message, broadcast: true, exception_callback: ->(e) { Adhearsion::Events.trigger :exception, [e, logger] }
end
end
alias << deliver_message
def commands
@commands.clone
end
# @private
def register_initial_handlers
register_event_handler Punchblock::Event::Offer do |offer|
@offer = offer
@client = offer.client
@start_time = offer.timestamp.to_time
end
register_event_handler Punchblock::HasHeaders do |event|
merge_headers event.headers
end
on_joined do |event|
if event.call_uri
target = event.call_uri
type = :call
else
target = event.mixer_name
type = :mixer
end
logger.info "Joined to #{type} #{target}"
call = Adhearsion.active_calls.with_uri(target)
@peers[target] = call
signal :joined, target
end
on_unjoined do |event|
if event.call_uri
target = event.call_uri
type = :call
else
target = event.mixer_name
type = :mixer
end
logger.info "Unjoined from #{type} #{target}"
@peers.delete target
signal :unjoined, target
end
on_end do |event|
logger.info "Call #{from} -> #{to} ended due to #{event.reason}#{" (code #{event.platform_code})" if event.platform_code}"
@end_time = event.timestamp.to_time
@duration = @end_time - @start_time if @start_time
clear_from_active_calls
@end_reason = event.reason
@end_code = event.platform_code
@end_blocker.broadcast event.reason
@commands.terminate
after(@after_hangup_lifetime || Adhearsion.config.platform.after_hangup_lifetime) { terminate }
end
end
# @return [Float] The call duration until the current time, or until the call was disconnected, whichever is earlier
# @return [Float] the fixed duration once the call has ended, the running
#   time since @start_time while the call is live, or 0.0 before it starts
def duration
  return @duration if @duration
  @start_time ? Time.now - @start_time : 0.0
end
##
# Registers a callback for when this call is joined to another call or a mixer
#
# @param [Call, String, Hash, nil] target the target to guard on. May be a Call object, a call ID (String, Hash) or a mixer name (Hash)
# @option target [String] call_uri The call ID to guard on
# @option target [String] mixer_name The mixer name to guard on
#
def on_joined(target = nil, &block)
register_event_handler Punchblock::Event::Joined, *guards_for_target(target) do |event|
block.call event
end
end
##
# Registers a callback for when this call is unjoined from another call or a mixer
#
# @param [Call, String, Hash, nil] target the target to guard on. May be a Call object, a call ID (String, Hash) or a mixer name (Hash)
# @option target [String] call_uri The call ID to guard on
# @option target [String] mixer_name The mixer name to guard on
#
def on_unjoined(target = nil, &block)
register_event_handler Punchblock::Event::Unjoined, *guards_for_target(target), &block
end
# @private
def guards_for_target(target)
target ? [target_from_join_options(join_options_with_target(target))] : []
end
# Registers a callback for the End event, which signals call teardown.
def on_end(&block)
  register_event_handler Punchblock::Event::End, &block
end
#
# @return [Boolean] if the call is currently active or not (disconnected)
#
def active?
  end_reason ? false : true
end
# Accepts the incoming call. Memoized so repeated calls only send the
# Accept command once.
#
# @param [Hash, nil] headers protocol headers to send with the command
def accept(headers = nil)
  @accept_command ||= write_and_await_response Punchblock::Command::Accept.new(:headers => headers)
rescue Punchblock::ProtocolError => e
  abort e
end

# Answers the incoming call.
#
# @param [Hash, nil] headers protocol headers to send with the command
def answer(headers = nil)
  write_and_await_response Punchblock::Command::Answer.new(:headers => headers)
rescue Punchblock::ProtocolError => e
  abort e
end

# Rejects the incoming call and triggers the :call_rejected event.
#
# @param [Symbol] reason why the call is being rejected (defaults to :busy)
# @param [Hash, nil] headers protocol headers to send with the command
def reject(reason = :busy, headers = nil)
  write_and_await_response Punchblock::Command::Reject.new(:reason => reason, :headers => headers)
  Adhearsion::Events.trigger_immediately :call_rejected, call: current_actor, reason: reason
rescue Punchblock::ProtocolError => e
  abort e
end
#
# Redirect the call to some other target system.
#
# If the redirect is successful, the call will be released from the
# telephony engine and Adhearsion will lose control of the call.
#
# Note that for the common case, this will result in a SIP 302 or
# SIP REFER, which provides the caller with a new URI to dial. As such,
# the redirect target cannot be any telephony-engine specific address
# (such as sofia/gateway, agent/101, or SIP/mypeer); instead it should be a
# fully-qualified external SIP URI that the caller can independently reach.
#
# @param [String] to the target to redirect to, eg a SIP URI
# @param [Hash, optional] headers a set of headers to send along with the redirect instruction
def redirect(to, headers = nil)
  write_and_await_response Punchblock::Command::Redirect.new(to: to, headers: headers)
rescue Punchblock::ProtocolError => e
  abort e
end
# Hangs up the call. Does nothing (returns false) if the call is already
# inactive.
#
# @param [Hash, nil] headers protocol headers to send with the command
def hangup(headers = nil)
  return false unless active?
  logger.info "Hanging up"
  # Flag the call as ended locally right away; the End event replaces this
  # placeholder with the real reason later.
  @end_reason = true
  write_and_await_response Punchblock::Command::Hangup.new(:headers => headers)
rescue Punchblock::ProtocolError => e
  abort e
end
# @private
# Removes this call from the global registry of active calls.
def clear_from_active_calls
  Adhearsion.active_calls.remove_inactive_call current_actor
end
##
# Joins this call to another call or a mixer
#
# @param [Call, String, Hash] target the target to join to. May be a Call object, a call ID (String, Hash) or a mixer name (Hash)
# @option target [String] call_uri The call ID to join to
# @option target [String] mixer_name The mixer to join to
# @param [Hash, Optional] options further options to be joined with
#
# @return [Hash] where :command is the issued command, :joined_condition is a latch released when the join is complete, and :unjoined_condition is a latch released when the entities are unjoined
#
def join(target, options = {})
  logger.debug "Joining to #{target}"
  # The latches are released by the matching Joined/Unjoined events, and
  # unconditionally on call end so waiters can never hang forever.
  joined_condition = CountDownLatch.new(1)
  on_joined target do
    joined_condition.countdown!
  end
  unjoined_condition = CountDownLatch.new(1)
  on_unjoined target do
    unjoined_condition.countdown!
  end
  on_end do
    joined_condition.countdown!
    unjoined_condition.countdown!
  end
  command = Punchblock::Command::Join.new options.merge(join_options_with_target(target))
  write_and_await_response command
  {command: command, joined_condition: joined_condition, unjoined_condition: unjoined_condition}
rescue Punchblock::ProtocolError => e
  abort e
end
##
# Unjoins this call from another call or a mixer
#
# @param [Call, String, Hash, nil] target the target to unjoin from. May be a Call object, a call ID (String, Hash), a mixer name (Hash) or missing to unjoin from every existing join (nil)
# @option target [String] call_uri The call ID to unjoin from
# @option target [String] mixer_name The mixer to unjoin from
#
def unjoin(target = nil)
  logger.info "Unjoining from #{target}"
  # A nil target produces an empty options hash, which unjoins everything.
  command = Punchblock::Command::Unjoin.new join_options_with_target(target)
  write_and_await_response command
rescue Punchblock::ProtocolError => e
  abort e
end
# @private
# Normalizes a join target (Call, call-ID String, options Hash or nil)
# into the options hash understood by Punchblock join/unjoin commands.
def join_options_with_target(target)
  if target.nil?
    {}
  elsif target.is_a?(Call)
    { :call_uri => target.uri }
  elsif target.is_a?(String)
    { :call_uri => self.class.uri(transport, target, domain) }
  elsif target.is_a?(Hash)
    abort ArgumentError.new "You cannot specify both a call URI and mixer name" if target.has_key?(:call_uri) && target.has_key?(:mixer_name)
    target
  else
    abort ArgumentError.new "Don't know how to join to #{target.inspect}"
  end
end
# @private
# Picks the single guard key out of a join options hash: a call URI wins
# over a mixer name.
def target_from_join_options(options)
  if (uri = options[:call_uri])
    { call_uri: uri }
  else
    { mixer_name: options[:mixer_name] }
  end
end
# Blocks until a :joined signal arrives for the expected target, discarding
# signals for any other target.
def wait_for_joined(expected_target)
  target = nil
  target = wait :joined until target == expected_target
end
# Blocks until an :unjoined signal arrives for the expected target,
# discarding signals for any other target.
def wait_for_unjoined(expected_target)
  target = nil
  target = wait :unjoined until target == expected_target
end
# Mutes the call's audio.
def mute
  write_and_await_response Punchblock::Command::Mute.new
rescue Punchblock::ProtocolError => e
  abort e
end

# Unmutes the call's audio.
def unmute
  write_and_await_response Punchblock::Command::Unmute.new
rescue Punchblock::ProtocolError => e
  abort e
end
# @private
# Sends a command to the engine and blocks (via defer) until a response
# arrives, translating error responses into abort or raise.
#
# @param [Punchblock::Command] command the command to execute
# @param [Integer] timeout seconds to wait for a response
# @param [Boolean] fatal when true, errors raise (crashing the actor) instead of aborting the current task
#
# @return [Punchblock::Command] the command itself, with its response populated
def write_and_await_response(command, timeout = 60, fatal = false)
  @commands << command
  write_command command
  error_handler = fatal ? ->(error) { raise error } : ->(error) { abort error }
  response = defer { command.response timeout }
  case response
  when Punchblock::ProtocolError
    if response.name == :item_not_found
      # The engine no longer knows this call, so surface it as a Hangup
      error_handler[Hangup.new(@end_reason)]
    else
      error_handler[response]
    end
  when Exception
    error_handler[response]
  end
  command
rescue Timeout::Error
  error_handler[CommandTimeout.new(command.to_s)]
ensure
  # Commands are only tracked while awaiting their response
  @commands.delete command
end
# @private
# Low-level write of a command to the connection. Aborts with Hangup when
# the call is no longer active (Hangup commands are still allowed through).
def write_command(command)
  abort Hangup.new(@end_reason) unless active? || command.is_a?(Punchblock::Command::Hangup)
  merge_headers command.headers if command.respond_to? :headers
  logger.debug "Executing command #{command.inspect}"
  # Dial is addressed to the server itself; every other command targets
  # this specific call.
  unless command.is_a?(Punchblock::Command::Dial)
    command.target_call_id = id
    command.domain = domain
  end
  client.execute_command command
end
##
# Sends a message to the caller
#
# @param [String] body The message text.
# @param [Hash, Optional] options The message options.
# @option options [String] subject The message subject.
#
def send_message(body, options = {})
  logger.debug "Sending message: #{body}"
  client.send_message id, domain, body, options
end
# @private
# Unique per-call identifier used as the logger name.
def logger_id
  format('%s: %s@%s', self.class, id, domain)
end
# @private
def inspect
  # Celluloid recursion guard: actors that reference each other would
  # otherwise inspect forever.
  return "..." if Celluloid.detect_recursion
  attrs = [:offer, :end_reason, :commands, :variables, :controllers, :to, :from].map do |attr|
    "#{attr}=#{send(attr).inspect}"
  end
  "#<#{self.class}:#{id}@#{domain} #{attrs.join ', '}>"
end
#
# Execute a call controller asynchronously against this call.
#
# To block and wait until the controller completes, call `#join` on the result of this method.
#
# @param [Adhearsion::CallController] controller an instance of a controller initialized for this call
# @param [Proc] completion_callback a callback to be executed when the controller finishes execution
#
# @yield execute the current block as the body of a controller by specifying no controller instance
#
# @return [Celluloid::ThreadHandle]
#
def execute_controller(controller = nil, completion_callback = nil, &block)
  raise ArgumentError, "Cannot supply a controller and a block at the same time" if controller && block_given?
  controller ||= CallController.new current_actor, &block
  logger.info "Executing controller #{controller.inspect}"
  controller.bg_exec completion_callback
end
# @private
# Records a controller as executing against this call.
def register_controller(controller)
  @controllers << controller
end

# @private
# Pauses every controller currently executing against this call.
def pause_controllers
  controllers.each(&:pause!)
end

# @private
# Resumes every paused controller.
def resume_controllers
  controllers.each(&:resume!)
end
private

# The Offer event that created this call, if any.
def offer
  @offer
end

# The client connection used to execute commands.
def client
  @client
end

# Transport taken from the offer; nil when no offer has been received.
def transport
  offer.transport if offer
end
# Folds protocol headers into the call's variables hash, normalizing each
# header name to a lowercase, underscored key.
def merge_headers(headers)
  headers.each_pair do |name, value|
    key = name.to_s.downcase.tr('-', '_')
    variables[key] = value
  end
end
# Drops this call's logger from the logging repository so repository
# entries do not accumulate per call.
# NOTE(review): presumably registered as a Celluloid finalizer elsewhere —
# confirm against the class header.
def finalize
  ::Logging::Repository.instance.delete logger_id
end
# @private
# Tracks commands in flight on the call. #terminate resolves any command
# still awaiting a response with a Hangup so its waiters unblock.
class CommandRegistry < Array
  def terminate
    hangup = Hangup.new
    each do |command|
      command.response = hangup if command.requested?
    end
  end
end
end
end
|
# frozen_string_literal: true

module Alchemy
  VERSION = "4.0.0.rc2"

  class << self
    # @return [String] the current Alchemy version
    def version
      VERSION
    end
  end
end
Raise version to 4.1.0.beta
# frozen_string_literal: true

module Alchemy
  VERSION = "4.1.0.beta"

  class << self
    # @return [String] the current Alchemy version
    def version
      VERSION
    end
  end
end
|
module Allens
  # Allen's interval algebra over half-open intervals [starts, ends).
  # Do not use this class directly: subclass it and define the class
  # methods 'chronon', 'clocktick' and 'forever'.
  class Interval
    # Regarding the clock ticks mentioned below, e.g. timestamps could profitably use:
    #   chronon   = 0.000001 (find out what Ruby's granularity is for timestamp)
    #   clocktick = 0.000001 (choose your preference, multiple of chronon)
    #   forever   = "999/12/31 23:59:59.999999"
    # BEWARE: that .999999 stuff should be exactly the last possible clock-tick at the
    # granularity you are choosing to use. (and to understand this, you need to know
    # about the concepts of 'atomic clock tick' ('chronon'), 'clock tick' and the finer
    # points about their interaction).
    Whinge = "Do not use Allens::Interval directly. Subclass it, and define class methods 'chronon', 'clocktick' and 'forever' returning non-nil values"

    def self.chronon; raise Allens::Interval::Whinge; end
    def self.clocktick; raise Allens::Interval::Whinge; end
    def self.forever; raise Allens::Interval::Whinge; end

    # Builds [starts, ends). Passing no 'ends' (or nil) manufactures a
    # forever Interval; programmers may also pass their subclass's forever
    # value explicitly.
    def initialize(starts, ends = nil)
      ends ||= self.class.forever
      # BUGFIX: this previously read 'Allens::Inteval::Winge' — a doubly
      # misspelled constant that does not exist, so the guard itself would
      # raise NameError instead of the intended message.
      ends.nil? and raise Allens::Interval::Whinge
      starts > ends and raise ArgumentError, "Expected starts <= ends. Got starts=#{starts}, ends=#{ends}"
      ends > self.class.forever and raise ArgumentError, "Expected ends <= 'FOREVER' (#{self.class.forever}). Got starts=#{starts}, ends=#{ends}"
      @starts, @ends = starts, ends
    end

    def hash
      return starts.hash ^ ends.hash
    end

    def eql?(other)
      return starts == other.starts && ends == other.ends
    end

    ##################################################################
    # Utility functions
    def to_s(*args)
      return "[" + starts.to_s(*args) + "," + (forever? ? "-" : ends.to_s(*args)) + ")"
    end

    def foreverValue # convenience
      return self.class.forever
    end

    def forever?
      return ends == self.class.forever
    end

    def limited?
      return ends != self.class.forever
    end

    def starts
      return @starts
    end

    def ends
      return @ends
    end

    ##################################################################
    # TODO: temporal use has strong opinions about how points relate to
    # periods. Check chapter 3, build some tests and go for gold (or green...)
    # hint: see how metBy? has a theoretically useless "starts > y.starts"
    # clause? That may be what's needed to fix things; or it might need to be removed!
    # Consider the granularity effect of the clocktick, and hope that it won't
    # need subtracting from one of the values with changes from (eg) < to <= or whatever...
    def before?(y); return ends < y.starts; end
    def meets?(y); return ends == y.starts; end
    def overlaps?(y); return starts < y.starts && ends > y.starts && ends < y.ends; end
    def starts?(y); return starts == y.starts && ends < y.ends; end
    def during?(y); return starts > y.starts && ends < y.ends; end
    def finishes?(y); return starts > y.starts && ends == y.ends; end
    def equals?(y); return starts == y.starts && ends == y.ends; end
    def finishedBy?(y); return starts < y.starts && ends == y.ends; end
    def includes?(y); return starts < y.starts && ends > y.ends; end
    def startedBy?(y); return starts == y.starts && ends > y.ends; end
    def overlappedBy?(y); return starts > y.starts && starts < y.ends && ends > y.ends; end
    def metBy?(y); return starts == y.ends; end
    def after?(y); return starts > y.ends; end

    ##################################################################
    # Combinatoral operators - See chapter 3's taxonomy.
    # TODO: Unit tests!
    # TODO: expand the nested calls, and simplify the expressions,
    # but ONLY after the unit tests are solid!!!
    #
    def aligns?(y); return starts?(y) || finishes?(y) || finishedBy?(y) || startedBy?(y); end
    def occupies?(y); return during?(y) || includes?(y) || aligns?(y); end
    def fills?(y); return equals?(y) || occupies?(y); end
    def intersects?(y); return overlaps?(y) || overlappedBy?(y) || fills?(y); end
    def excludes?(y); return before?(y) || meets?(y) || metBy?(y) || after?(y); end

    ##################################################################
    # Lazily defines compound predicates such as BM?(y) (before? || meets?)
    # or BeforeMeets?(y) from abbreviated or full relation names, then
    # dispatches to the freshly defined method.
    def method_missing(key, *args)
      text = key.to_s
      if args.length == 1 and (text =~ /^(B)?(M)?(O)?(S)?(D)?(F)?(E)?(Fby)?(I)?(Sby)?(Oby)?(Mby)?(A)?\?$/ or
          text =~ /^(Before)?(Meets)?(Overlaps)?(Starts)?(During)?(Finishes)?(Equals)?(FinishedBy)?(Includes)?(StartedBy)?(OverlappedBy)?(MetBy)?(After)?\?$/
         )
        names = Regexp.last_match
        # BUGFIX: every group is optional, so a name matching with no
        # captured relations would class_eval an empty (invalid) method
        # body; require at least one recognised relation.
        unless names.captures.compact.empty?
          sep = 'return'
          code = "def #{text}(y);"
          %w(before? meets? overlaps? starts? during? finishes? equals? finishedBy? includes? startedBy? overlappedBy? metBy? after?).each_with_index do |name, i|
            if ! names[i + 1].nil?
              code += " #{sep} #{name}(y)"
              sep = '||'
            end
          end
          code += "; end"
          Interval.class_eval code
          return send(key, *args)
        end
      end
      # TODO: a real error message shows line numbers...
      raise NoMethodError, "undefined or improperly named method `#{text}(*#{args.count})'"
    end # method_missing
  end
end
Changed the class constraint (in the constructor) to demand start < end.
This is good for granular clock-ticks BUT bad when pretending infinitesimal granularity (e.g. using Reals), because it disallows constructing point intervals [x, x).
module Allens
  # Allen's interval algebra over half-open intervals [starts, ends).
  # Do not use this class directly: subclass it and define the class
  # methods 'chronon', 'clocktick' and 'forever'. This revision demands
  # strictly starts < ends, so point intervals [x, x) are rejected.
  class Interval
    # Regarding the clock ticks mentioned below, e.g. timestamps could profitably use:
    #   chronon   = 0.000001 (find out what Ruby's granularity is for timestamp)
    #   clocktick = 0.000001 (choose your preference, multiple of chronon)
    #   forever   = "999/12/31 23:59:59.999999"
    # BEWARE: that .999999 stuff should be exactly the last possible clock-tick at the
    # granularity you are choosing to use. (and to understand this, you need to know
    # about the concepts of 'atomic clock tick' ('chronon'), 'clock tick' and the finer
    # points about their interaction).
    Whinge = "Do not use Allens::Interval directly. Subclass it, and define class methods 'chronon', 'clocktick' and 'forever' returning non-nil values"

    def self.chronon; raise Allens::Interval::Whinge; end
    def self.clocktick; raise Allens::Interval::Whinge; end
    def self.forever; raise Allens::Interval::Whinge; end

    # Builds [starts, ends). Passing no 'ends' (or nil) manufactures a
    # forever Interval; programmers may also pass their subclass's forever
    # value explicitly.
    def initialize(starts, ends = nil)
      ends ||= self.class.forever
      # BUGFIX: this previously read 'Allens::Inteval::Winge' — a doubly
      # misspelled constant that does not exist, so the guard itself would
      # raise NameError instead of the intended message.
      ends.nil? and raise Allens::Interval::Whinge
      starts < ends or raise ArgumentError, "Expected starts < ends. Got starts=#{starts}, ends=#{ends}"
      ends <= self.class.forever or raise ArgumentError, "Expected ends <= 'FOREVER' (#{self.class.forever}). Got starts=#{starts}, ends=#{ends}"
      @starts, @ends = starts, ends
    end

    def hash
      return starts.hash ^ ends.hash
    end

    def eql?(other)
      return starts == other.starts && ends == other.ends
    end

    ##################################################################
    # Utility functions
    def to_s(*args)
      return "[" + starts.to_s(*args) + "," + (forever? ? "-" : ends.to_s(*args)) + ")"
    end

    def foreverValue # convenience
      return self.class.forever
    end

    def forever?
      return ends == self.class.forever
    end

    def limited?
      return ends != self.class.forever
    end

    def starts
      return @starts
    end

    def ends
      return @ends
    end

    ##################################################################
    # TODO: temporal use has strong opinions about how points relate to
    # periods. Check chapter 3, build some tests and go for gold (or green...)
    # hint: see how metBy? has a theoretically useless "starts > y.starts"
    # clause? That may be what's needed to fix things; or it might need to be removed!
    # Consider the granularity effect of the clocktick, and hope that it won't
    # need subtracting from one of the values with changes from (eg) < to <= or whatever...
    def before?(y); return ends < y.starts; end
    def meets?(y); return ends == y.starts; end
    def overlaps?(y); return starts < y.starts && ends > y.starts && ends < y.ends; end
    def starts?(y); return starts == y.starts && ends < y.ends; end
    def during?(y); return starts > y.starts && ends < y.ends; end
    def finishes?(y); return starts > y.starts && ends == y.ends; end
    def equals?(y); return starts == y.starts && ends == y.ends; end
    def finishedBy?(y); return starts < y.starts && ends == y.ends; end
    def includes?(y); return starts < y.starts && ends > y.ends; end
    def startedBy?(y); return starts == y.starts && ends > y.ends; end
    def overlappedBy?(y); return starts > y.starts && starts < y.ends && ends > y.ends; end
    def metBy?(y); return starts == y.ends; end
    def after?(y); return starts > y.ends; end

    ##################################################################
    # Combinatoral operators - See chapter 3's taxonomy.
    # TODO: Unit tests!
    # TODO: expand the nested calls, and simplify the expressions,
    # but ONLY after the unit tests are solid!!!
    #
    def aligns?(y); return starts?(y) || finishes?(y) || finishedBy?(y) || startedBy?(y); end
    def occupies?(y); return during?(y) || includes?(y) || aligns?(y); end
    def fills?(y); return equals?(y) || occupies?(y); end
    def intersects?(y); return overlaps?(y) || overlappedBy?(y) || fills?(y); end
    def excludes?(y); return before?(y) || meets?(y) || metBy?(y) || after?(y); end

    ##################################################################
    # Lazily defines compound predicates such as BM?(y) (before? || meets?)
    # or BeforeMeets?(y) from abbreviated or full relation names, then
    # dispatches to the freshly defined method.
    def method_missing(key, *args)
      text = key.to_s
      if args.length == 1 and (text =~ /^(B)?(M)?(O)?(S)?(D)?(F)?(E)?(Fby)?(I)?(Sby)?(Oby)?(Mby)?(A)?\?$/ or
          text =~ /^(Before)?(Meets)?(Overlaps)?(Starts)?(During)?(Finishes)?(Equals)?(FinishedBy)?(Includes)?(StartedBy)?(OverlappedBy)?(MetBy)?(After)?\?$/
         )
        names = Regexp.last_match
        # BUGFIX: every group is optional, so a name matching with no
        # captured relations would class_eval an empty (invalid) method
        # body; require at least one recognised relation.
        unless names.captures.compact.empty?
          sep = 'return'
          code = "def #{text}(y);"
          %w(before? meets? overlaps? starts? during? finishes? equals? finishedBy? includes? startedBy? overlappedBy? metBy? after?).each_with_index do |name, i|
            if ! names[i + 1].nil?
              code += " #{sep} #{name}(y)"
              sep = '||'
            end
          end
          code += "; end"
          Interval.class_eval code
          return send(key, *args)
        end
      end
      # TODO: a real error message shows line numbers...
      raise NoMethodError, "undefined or improperly named method `#{text}(*#{args.count})'"
    end # method_missing
  end
end
|
#!/usr/bin/env ruby -KU
require "rubygems"
require "hpricot"
require "ftools"
# Mirrors a CMS site with wget, rewrites its PHP-generated stylesheet links
# into static .css files, and commits the snapshot to git.
class Crawler
  # @param site_uri [String] host to mirror
  # @param wget_path [String] absolute path to wget ('' to rely on $PATH)
  # @param git_path [String] absolute path to git ('' to rely on $PATH)
  def initialize(site_uri='cms.alphasights-001.vm.brightbox.net',
                 wget_path='/usr/local/bin/wget',
                 git_path='/usr/local/git/bin/git')
    @site_uri = site_uri
    @wget_path = wget_path
    @git_path = git_path
    sanity_check
  end

  # Raises unless each binary is either configured explicitly or findable
  # via `which` on $PATH.
  def sanity_check
    if @wget_path == '' && %x(which wget).length == 0
      raise "wget not in $PATH. Please specify path to wget manually.\n\nCrawler.new('site_uri', '/path/to/wget')"
    end
    if @git_path == '' && %x(which git).length == 0
      raise "git not in $PATH. Please specify path to git manually.\n\nCrawler.new('site_uri', '/path/to/git')"
    end
  end

  def wget_sites(site_uris=[])
    site_uris.each { |s| wget(s) }
  end

  # Mirrors the site (-r recursive, -k convert links; -q quiet unless
  # $VERBOSE) and then repairs the stylesheet links.
  def wget(s=@site_uri)
    command = "#{@wget_path} -rk#{'q' unless $VERBOSE} '#{s}'"
    $stdout.puts(command)
    system command
    fix_php_css
  end

  # Rewrites <link rel=stylesheet href="...?cssid=X&mediatype=Y"> tags into
  # static .css files and copies each stylesheet alongside the page.
  def fix_php_css
    html_files = Array.new
    Dir.glob("#{@site_uri}/**/*.*").each do |f|
      # BUGFIX: the dot was unescaped (/.html/), so any character before
      # "html" matched; anchor on a literal ".html" extension instead.
      html_files += [File.expand_path(f)] if f =~ /\.html/
    end
    html_files.each do |f|
      $stdout.puts "Parsing #{f}"
      doc = Hpricot(File.read(f))
      (doc/'/html/head/link[@rel=stylesheet]').map do |link|
        pattern = '(.*)(stylesheet)(.*)(cssid)=(.*)&(mediatype)=(.*)'
        new_link = link['href'].match(pattern)
        return true if new_link.nil? # Files do not match our pattern. Give up.
        @new_href = "#{new_link[1]}#{new_link[4]}-#{new_link[5]}_#{new_link[6]}-#{new_link[7]}.css"
        # NOTE(review): File.copy comes from the obsolete 'ftools' library
        # required at the top of this script; FileUtils.cp is the modern
        # replacement if this is ever run on Ruby >= 1.9.
        File.copy(File.join(File.dirname(f), link['href']), File.join(File.dirname(f), @new_href))
        link.set_attribute("href", @new_href)
      end
      # BUGFIX: the block variable shadowed the enclosing |f| file name.
      File.open(f, 'w') { |out| out.write(doc) }
    end
  end

  # Stages the mirrored tree, commits with a timestamped message and pushes.
  def git_push(s=@site_uri)
    command = "#{@git_path} add #{s} && #{@git_path} commit -m 'Crawl as of #{Time.now.to_s}' && #{@git_path} push"
    $stdout.puts(command)
    system command
  end
end
# Entry point: mirror the default site and push the snapshot to git.
c = Crawler.new()
c.wget
c.git_push
Updated crawl.rb
- Still need to tweak Crawler
#!/usr/bin/env ruby -KU
require "rubygems"
require "hpricot"
require "ftools"
require "optparse"
# Mirrors one or more CMS sites with wget, rewrites PHP-generated
# stylesheet links into static .css files and pushes each snapshot to git.
class Crawler
  # @param sites [Array<String>, nil] sites to crawl; non-option CLI
  #   arguments are appended (with any http(s):// prefix stripped)
  # @param options [Hash] unused placeholder kept for interface compatibility
  def initialize(sites=['cms.alphasights-001.vm.brightbox.net'], options={})
    parse(ARGV)
    sites ||= []
    ARGV.each do |arg|
      sites += [arg.sub(/^(http:|https:)\/\//, '')]
    end
    @sites = sites
  end

  # Validates the configured binaries, then crawls and pushes every site.
  def run
    unless sanity_check
      print_usage
      exit
    end
    wget_sites_to_github(@sites)
  end

  private

  # Checks that the configured wget/git binaries exist, optionally
  # searching $PATH interactively for replacements.
  #
  # @return [Boolean] true when both binaries are usable
  def sanity_check
    # BUGFIX: iterate by option key so a $PATH discovery is stored back
    # under the right key (the original wrote to e.g. @options[:wget]).
    [:wget_path, :git_path].each do |option_key|
      binary_path = @options[option_key]
      binary_name = File.basename(binary_path)
      # BUGFIX: File.exists? was removed in Ruby 3.2; use File.exist?.
      next if File.exist?(binary_path)

      $stderr.puts "\"#{binary_path}\" does not exist."
      print "Would you like to search for \"#{binary_name}\" in $PATH? (y/N): "
      unless prompt_yes?
        $stderr.puts "Path to \"#{binary_name}\" (#{binary_path}) is invalid. Exiting..."
        return false
      end

      # BUGFIX: the accumulator used to be (re)initialized inside the
      # $PATH loop and then referenced outside it, raising NameError.
      all_results = ENV['PATH'].split(':')
                               .map { |search_path| File.join(search_path, binary_name) }
                               .select { |candidate| File.exist?(candidate) }
      if all_results.empty?
        $stderr.puts "Could not find \"#{binary_name}\" in $PATH. Exiting..."
        return false
      end

      # BUGFIX: 'found' was never defined; take the first $PATH hit.
      found = all_results.first
      print "Found #{all_results.length} candidate(s). Use \"#{found}\"? (y/N): "
      return false unless prompt_yes?
      @options[option_key] = found
      puts "Using \"#{found}\" for \"#{binary_name}\""
    end
    true
  end

  # Reads one line from stdin and reports whether the user answered 'y'.
  # EOF-safe: a closed stdin counts as "no".
  def prompt_yes?
    ($stdin.gets || '').strip.downcase == 'y'
  end

  def wget_sites_to_github(site_uris=[])
    site_uris.each { |s| @options[:site_uri] = s; wget(s) && git_push(s) }
  end

  # Mirrors one site (-r recursive, -k convert links) then repairs the
  # stylesheet links it downloaded.
  def wget(s)
    command = "#{@options[:wget_path]} -rk#{'q' unless $VERBOSE} '#{s}'"
    puts command
    system command
    fix_php_css
  end

  # Stages the mirrored tree, commits with a timestamped message and pushes.
  def git_push(s=@options[:site_uri])
    command = "#{@options[:git_path]} add #{s} && #{@options[:git_path]} commit -m 'Crawl as of #{Time.now.to_s}' && #{@options[:git_path]} push"
    puts(command)
    system command
  end

  # Rewrites <link rel=stylesheet href="...?cssid=X&mediatype=Y"> tags into
  # static .css files under a css/ directory next to each page.
  def fix_php_css
    html_files = Array.new
    Dir.glob("#{@options[:site_uri]}/**/*.*").each do |f|
      # BUGFIX: escape the dot so only real ".html" names match.
      html_files += [File.expand_path(f)] if f =~ /\.html/
    end
    html_files.each do |f|
      puts "Parsing #{f}"
      doc = Hpricot(File.read(f))
      (doc/'/html/head/link[@rel=stylesheet]').map do |link|
        pattern = '(.*)(stylesheet)(.*)(cssid)=(.*)&(mediatype)=(.*)'
        new_link = link['href'].match(pattern)
        return true if new_link.nil? # Files do not match our pattern. Give up.
        @full_path_to_css = File.join(File.dirname(f), new_link[1],
                                      'css',
                                      "#{new_link[4]}-#{new_link[5]}_#{new_link[6]}-#{new_link[7]}.css")
        File.copy(File.join(File.dirname(f), link['href']), @full_path_to_css)
        link.set_attribute("href", @full_path_to_css)
      end
      # BUGFIX: the block variable shadowed the enclosing |f| file name.
      File.open(f, 'w') { |out| out.write(doc) }
    end
  end

  # Parses command-line options into @options and stores the parser in
  # @usage for help output.
  def parse(argv)
    options = {:wget_path => '/usr/local/bin/wget',
               :git_path => '/usr/local/bin/git'}
    argv.options do |opts|
      opts.banner = "Usage: #{File.basename($PROGRAM_NAME)} [OPTIONS] SITE_URI(s)"
      opts.separator ""
      opts.separator "Specific Options:"
      # BUGFIX: the switches need an argument placeholder to capture a
      # value, and the handlers must store the yielded value (the original
      # stored the OptionParser object itself).
      opts.on( "-w", "--wget PATH", String,
               "Full path to the wget binary (Must be at least version 1.12)" ) do |opt|
        options[:wget_path] = opt
      end
      opts.on( "-g", "--git PATH", String,
               "Full path to the git binary" ) do |opt|
        options[:git_path] = opt
      end
      opts.separator "Common Options:"
      opts.on( "-h", "--help",
               "Show this message." ) do
        puts opts
        exit
      end
      begin
        opts.parse!
        @options = options
        @usage = opts
      rescue
        puts opts
        exit
      end
    end
  end

  def print_usage
    $stderr.puts "", @usage
    exit
  end
end
# Entry point: build the crawler from CLI arguments. The crawl/push steps
# below are currently disabled; invoke them (or c.run) to execute.
c = Crawler.new(ARGV)
# c.wget
# c.git_push
# c.sanity_check
# Specification data for the SpaceX Falcon 9 rocket, held in a global hash.
# BUGFIX: payload_weights previously nested bare hashes directly inside a
# hash literal ({ {...}, {...} }), which is a Ruby syntax error; the
# capacities are now a list of hashes.
$falcon9 = {
  id: 'falcon9',
  name: 'Falcon 9',
  active: true,
  stages: 2,
  cost_per_launch: 62000000,
  success_rate_pct: 94,
  first_flight: '2010-06-04',
  country: 'United States',
  company: 'SpaceX',
  height: { meters: 70, feet: 229.6 },
  diameter: { meters: 3.7, feet: 12 },
  mass: { kg: 549054, lb: 1207920 },
  # Payload capacity per destination orbit
  payload_weights: [
    { id: 'leo', name: 'low earth orbit', kg: 22800, lb: 50265 },
    { id: 'gto', name: 'geosynchronous transfer orbit', kg: 8300, lb: 18300 },
    { id: 'mars', name: 'mars orbit', kg: 4020, lb: 8860 }
  ],
  first_stage: {
    reusable: true,
    engines: 9,
    fuel_amount_tons: 385,
    burn_time_sec: 180,
    thrust_sea_level: { kN: 7607, lbf: 1710000 },
    thrust_vacuum: { kN: 8227, lbf: 1849500 }
  },
  second_stage: {
    engines: 1,
    fuel_amount_tons: 90,
    burn_time_sec: 397,
    thrust: { kN: 934, lbf: 210000 },
    payloads: {
      option_1: 'dragon',
      option_2: 'composite fairing',
      composite_fairing: {
        height: { meters: 13.1, feet: 43 },
        diameter: { meters: 5.2, feet: 17.1 }
      }
    }
  },
  engines: {
    number: 9,
    type: 'merlin',
    version: '1D+',
    layout: 'octaweb',
    engine_loss_max: 2,
    propellant_1: 'liquid oxygen',
    propellant_2: 'RP-1 kerosene',
    thrust_sea_level: { kN: 845, lbf: 190000 },
    thrust_vacuum: { kN: 914, lbf: 205500 },
    thrust_to_weight: 180.1
  },
  landing_legs: { number: 4, material: 'carbon fiber' },
  description: 'Falcon 9 is a two-stage rocket designed and manufactured by SpaceX for the reliable and safe transport of satellites and the Dragon spacecraft into orbit.'
}
Revised the falcon9 hash: the keyless nested hashes under payload_weights were invalid Ruby and raised errors.
# Specification data for the SpaceX Falcon 9 rocket, held in a global hash.
# Payload capacities are keyed by destination orbit (:leo, :gto, :mars).
$falcon9 = {
  id: 'falcon9',
  name: 'Falcon 9',
  active: true,
  stages: 2,
  cost_per_launch: 62000000,
  success_rate_pct: 94,
  first_flight: '2010-06-04',
  country: 'United States',
  company: 'SpaceX',
  height: { meters: 70, feet: 229.6 },
  diameter: { meters: 3.7, feet: 12 },
  mass: { kg: 549054, lb: 1207920 },
  payload_weights: {
    leo: { id: 'leo', name: 'low earth orbit', kg: 22800, lb: 50265 },
    gto: { id: 'gto', name: 'geosynchronous transfer orbit', kg: 8300, lb: 18300 },
    mars: { id: 'mars', name: 'mars orbit', kg: 4020, lb: 8860 }
  },
  first_stage: {
    reusable: true,
    engines: 9,
    fuel_amount_tons: 385,
    burn_time_sec: 180,
    thrust_sea_level: { kN: 7607, lbf: 1710000 },
    thrust_vacuum: { kN: 8227, lbf: 1849500 }
  },
  second_stage: {
    engines: 1,
    fuel_amount_tons: 90,
    burn_time_sec: 397,
    thrust: { kN: 934, lbf: 210000 },
    payloads: {
      option_1: 'dragon',
      option_2: 'composite fairing',
      composite_fairing: {
        height: { meters: 13.1, feet: 43 },
        diameter: { meters: 5.2, feet: 17.1 }
      }
    }
  },
  engines: {
    number: 9,
    type: 'merlin',
    version: '1D+',
    layout: 'octaweb',
    engine_loss_max: 2,
    propellant_1: 'liquid oxygen',
    propellant_2: 'RP-1 kerosene',
    thrust_sea_level: { kN: 845, lbf: 190000 },
    thrust_vacuum: { kN: 914, lbf: 205500 },
    thrust_to_weight: 180.1
  },
  landing_legs: { number: 4, material: 'carbon fiber' },
  description: 'Falcon 9 is a two-stage rocket designed and manufactured by SpaceX for the reliable and safe transport of satellites and the Dragon spacecraft into orbit.'
}
|
module Audited
# Specify this act if you want changes to your model to be saved in an
# audit table. This assumes there is an audits table ready.
#
# class User < ActiveRecord::Base
# audited
# end
#
# To store an audit comment set model.audit_comment to your comment before
# a create, update or destroy operation.
#
# See <tt>Audited::Auditor::ClassMethods#audited</tt>
# for configuration options
module Auditor #:nodoc:
extend ActiveSupport::Concern
CALLBACKS = [:audit_create, :audit_update, :audit_destroy]
module ClassMethods
  # == Configuration options
  #
  #
  # * +only+ - Only audit the given attributes
  # * +except+ - Excludes fields from being saved in the audit log.
  #   By default, Audited will audit all but these fields:
  #
  #     [self.primary_key, inheritance_column, 'lock_version', 'created_at', 'updated_at']
  #   You can add to those by passing one or an array of fields to skip.
  #
  #     class User < ActiveRecord::Base
  #       audited except: :password
  #     end
  #
  # * +require_comment+ - Ensures that audit_comment is supplied before
  #   any create, update or destroy operation.
  #
  def audited(options = {})
    # don't allow multiple calls
    return if included_modules.include?(Audited::Auditor::AuditedInstanceMethods)
    class_attribute :non_audited_column_init, instance_accessor: false
    class_attribute :audit_associated_with, instance_writer: false
    # Stored as a lambda so column_names is only consulted lazily —
    # presumably to avoid touching the database at class-load time; confirm.
    self.non_audited_column_init = -> do
      if options[:only]
        except = column_names - Array(options[:only]).flatten.map(&:to_s)
      else
        except = default_ignored_attributes + Audited.ignored_attributes
        except |= Array(options[:except]).collect(&:to_s) if options[:except]
      end
      except
    end
    self.audit_associated_with = options[:associated_with]
    if options[:comment_required]
      validates_presence_of :audit_comment, if: :auditing_enabled
      before_destroy :require_comment
    end
    attr_accessor :audit_comment
    has_many :audits, -> { order(version: :asc) }, as: :auditable, class_name: Audited.audit_class.name
    Audited.audit_class.audited_class_names << to_s
    # Without an :on option every action is audited; otherwise only the
    # listed ones are.
    after_create :audit_create if !options[:on] || (options[:on] && options[:on].include?(:create))
    before_update :audit_update if !options[:on] || (options[:on] && options[:on].include?(:update))
    before_destroy :audit_destroy if !options[:on] || (options[:on] && options[:on].include?(:destroy))
    # Define and set after_audit and around_audit callbacks. This might be useful if you want
    # to notify a party after the audit has been created or if you want to access the newly-created
    # audit.
    define_callbacks :audit
    set_callback :audit, :after, :after_audit, if: lambda { self.respond_to?(:after_audit) }
    set_callback :audit, :around, :around_audit, if: lambda { self.respond_to?(:around_audit) }
    attr_accessor :version
    extend Audited::Auditor::AuditedClassMethods
    include Audited::Auditor::AuditedInstanceMethods
    self.auditing_enabled = true
  end

  # Adds the reverse side of polymorphic :associated_with audits.
  def has_associated_audits
    has_many :associated_audits, as: :associated, class_name: Audited.audit_class.name
  end

  # Attributes never audited regardless of configuration.
  def default_ignored_attributes
    [primary_key, inheritance_column]
  end
end
module AuditedInstanceMethods
# Temporarily turns off auditing while saving.
def save_without_auditing
without_auditing { save }
end
# Executes the block with the auditing callbacks disabled.
#
# @foo.without_auditing do
# @foo.save
# end
#
def without_auditing(&block)
self.class.without_auditing(&block)
end
# Gets an array of the revisions available
#
# user.revisions.each do |revision|
# user.name
# user.version
# end
#
def revisions(from_version = 1)
audits = self.audits.from_version(from_version)
return [] if audits.empty?
revisions = []
audits.each do |audit|
revisions << audit.revision
end
revisions
end
# Get a specific revision specified by the version number, or +:previous+
def revision(version)
revision_with Audited.audit_class.reconstruct_attributes(audits_to(version))
end
# Find the oldest revision recorded prior to the date/time provided.
def revision_at(date_or_time)
audits = self.audits.up_until(date_or_time)
revision_with Audited.audit_class.reconstruct_attributes(audits) unless audits.empty?
end
# List of attributes that are audited.
def audited_attributes
attributes.except(*non_audited_columns)
end
def non_audited_columns
self.class.non_audited_columns
end
protected
def revision_with(attributes)
dup.tap do |revision|
revision.id = id
revision.send :instance_variable_set, '@attributes', self.attributes if rails_below?('4.2.0')
revision.send :instance_variable_set, '@new_record', self.destroyed?
revision.send :instance_variable_set, '@persisted', !self.destroyed?
revision.send :instance_variable_set, '@readonly', false
revision.send :instance_variable_set, '@destroyed', false
revision.send :instance_variable_set, '@_destroyed', false
revision.send :instance_variable_set, '@marked_for_destruction', false
Audited.audit_class.assign_revision_attributes(revision, attributes)
# Remove any association proxies so that they will be recreated
# and reference the correct object for this revision. The only way
# to determine if an instance variable is a proxy object is to
# see if it responds to certain methods, as it forwards almost
# everything to its target.
for ivar in revision.instance_variables
proxy = revision.instance_variable_get ivar
if !proxy.nil? && proxy.respond_to?(:proxy_respond_to?)
revision.instance_variable_set ivar, nil
end
end
end
end
def rails_below?(rails_version)
Gem::Version.new(Rails::VERSION::STRING) < Gem::Version.new(rails_version)
end
private
# Hash of audited changes, keyed by attribute: attr => [old_value, new_value].
def audited_changes
changed_attributes.except(*non_audited_columns).inject({}) do |changes, (attr, old_value)|
changes[attr] = [old_value, self[attr]]
changes
end
end
# Audits up to the given +version+. :previous resolves to the version before
# the current one, falling back to the latest recorded audit's version.
def audits_to(version = nil)
if version == :previous
version = if self.version
self.version - 1
else
previous = audits.descending.offset(1).first
previous ? previous.version : 1
end
end
audits.to_version(version)
end
# after_create callback: record a 'create' audit with all audited attributes.
def audit_create
write_audit(action: 'create', audited_changes: audited_attributes,
comment: audit_comment)
end
# before_update callback: record an 'update' audit unless nothing changed
# and no comment was supplied.
def audit_update
unless (changes = audited_changes).empty? && audit_comment.blank?
write_audit(action: 'update', audited_changes: changes,
comment: audit_comment)
end
end
# before_destroy callback: record a 'destroy' audit for persisted records only.
def audit_destroy
write_audit(action: 'destroy', audited_changes: audited_attributes,
comment: audit_comment) unless self.new_record?
end
# Create the audit row (inside the :audit callback chain) and reset the
# one-shot audit_comment so it doesn't leak into the next operation.
def write_audit(attrs)
attrs[:associated] = self.send(audit_associated_with) unless audit_associated_with.nil?
self.audit_comment = nil
run_callbacks(:audit) { self.audits.create(attrs) } if auditing_enabled
end
# Destroy guard used when :comment_required is configured.
def require_comment
if auditing_enabled && audit_comment.blank?
errors.add(:audit_comment, "Comment required before destruction")
# Rails 4 halts callback chains on false; Rails 5+ requires throw :abort.
return false if Rails.version.start_with?('4.')
throw :abort
end
end
# Alias each audit callback (e.g. audit_create_callback) so it can be
# referenced independently of the original method.
CALLBACKS.each do |attr_name|
alias_method "#{attr_name}_callback".to_sym, attr_name
end
def empty_callback #:nodoc:
end
# Instance-level auditing switch; delegates to the class-level flag.
def auditing_enabled
self.class.auditing_enabled
end
def auditing_enabled= val
self.class.auditing_enabled = val
end
end # InstanceMethods
# Class-level auditing API mixed into audited models.
module AuditedClassMethods
  # Stash the thread-local key name for this class's auditing flag when the
  # module is extended into a model class.
  def self.extended(base)
    base.const_set('AUDIT_VAR_NAME', "#{base.name.tableize}_auditing_enabled")
  end

  # Returns an array of columns that are audited. See non_audited_columns
  def audited_columns
    # reject reads better than select with a negated predicate
    columns.reject { |c| non_audited_columns.include?(c.name) }
  end

  # Column names excluded from auditing, computed once per class via the
  # non_audited_column_init lambda installed by +audited+.
  def non_audited_columns
    @non_audited_columns ||= non_audited_column_init.call
  end

  # Executes the block with auditing disabled.
  #
  #   Foo.without_auditing do
  #     @foo.save
  #   end
  #
  def without_auditing
    auditing_was_enabled = auditing_enabled
    disable_auditing
    yield
  ensure
    # only re-enable when it was enabled before, so nested calls behave
    enable_auditing if auditing_was_enabled
  end

  def disable_auditing
    self.auditing_enabled = false
  end

  def enable_auditing
    self.auditing_enabled = true
  end

  # All audit operations during the block are recorded as being
  # made by +user+. This is not model specific, the method is a
  # convenience wrapper around
  # @see Audit#as_user.
  def audit_as(user, &block)
    Audited.audit_class.as_user(user, &block)
  end

  # Thread-local auditing flag; nil (never set) is treated as enabled.
  def auditing_enabled
    val = Thread.current[const_get('AUDIT_VAR_NAME')]
    val.nil? ? true : val
  end

  def auditing_enabled= val
    Thread.current[const_get('AUDIT_VAR_NAME')] = val
  end
end
end
end
Centralize per-class auditing flags in `Audited.store`, a thread-safe variable store
module Audited
# Specify this act if you want changes to your model to be saved in an
# audit table. This assumes there is an audits table ready.
#
# class User < ActiveRecord::Base
# audited
# end
#
# To store an audit comment set model.audit_comment to your comment before
# a create, update or destroy operation.
#
# See <tt>Audited::Auditor::ClassMethods#audited</tt>
# for configuration options
module Auditor #:nodoc:
extend ActiveSupport::Concern
# Model callbacks that receive *_callback aliases in AuditedInstanceMethods.
CALLBACKS = [:audit_create, :audit_update, :audit_destroy]
module ClassMethods
# == Configuration options
#
#
# * +only+ - Only audit the given attributes
# * +except+ - Excludes fields from being saved in the audit log.
# By default, Audited will audit all but these fields:
#
# [self.primary_key, inheritance_column, 'lock_version', 'created_at', 'updated_at']
# You can add to those by passing one or an array of fields to skip.
#
# class User < ActiveRecord::Base
# audited except: :password
# end
#
# * +require_comment+ - Ensures that audit_comment is supplied before
# any create, update or destroy operation.
#
# * +associated_with+ - Name of an association recorded on each audit row.
# * +on+ - Restrict auditing to a subset of [:create, :update, :destroy].
def audited(options = {})
# don't allow multiple calls
return if included_modules.include?(Audited::Auditor::AuditedInstanceMethods)
class_attribute :non_audited_column_init, instance_accessor: false
class_attribute :audit_associated_with, instance_writer: false
# Computed lazily per class: either everything outside :only, or the
# default ignored set plus any :except columns.
self.non_audited_column_init = -> do
if options[:only]
except = column_names - Array(options[:only]).flatten.map(&:to_s)
else
except = default_ignored_attributes + Audited.ignored_attributes
except |= Array(options[:except]).collect(&:to_s) if options[:except]
end
except
end
self.audit_associated_with = options[:associated_with]
if options[:comment_required]
validates_presence_of :audit_comment, if: :auditing_enabled
before_destroy :require_comment
end
attr_accessor :audit_comment
has_many :audits, -> { order(version: :asc) }, as: :auditable, class_name: Audited.audit_class.name
Audited.audit_class.audited_class_names << to_s
# Install only the lifecycle callbacks requested via :on (all by default).
after_create :audit_create if !options[:on] || (options[:on] && options[:on].include?(:create))
before_update :audit_update if !options[:on] || (options[:on] && options[:on].include?(:update))
before_destroy :audit_destroy if !options[:on] || (options[:on] && options[:on].include?(:destroy))
# Define and set after_audit and around_audit callbacks. This might be useful if you want
# to notify a party after the audit has been created or if you want to access the newly-created
# audit.
define_callbacks :audit
set_callback :audit, :after, :after_audit, if: lambda { self.respond_to?(:after_audit) }
set_callback :audit, :around, :around_audit, if: lambda { self.respond_to?(:around_audit) }
attr_accessor :version
extend Audited::Auditor::AuditedClassMethods
include Audited::Auditor::AuditedInstanceMethods
self.auditing_enabled = true
end
# Declares the inverse side: audits where this model is the :associated record.
def has_associated_audits
has_many :associated_audits, as: :associated, class_name: Audited.audit_class.name
end
# Columns never audited regardless of options.
def default_ignored_attributes
[primary_key, inheritance_column]
end
end
# Instance-level auditing behavior included into audited models.
module AuditedInstanceMethods
# Temporarily turns off auditing while saving.
def save_without_auditing
without_auditing { save }
end
# Executes the block with the auditing callbacks disabled.
#
# @foo.without_auditing do
# @foo.save
# end
#
def without_auditing(&block)
self.class.without_auditing(&block)
end
# Gets an array of the revisions available
#
# user.revisions.each do |revision|
# user.name
# user.version
# end
#
def revisions(from_version = 1)
audits = self.audits.from_version(from_version)
return [] if audits.empty?
revisions = []
audits.each do |audit|
revisions << audit.revision
end
revisions
end
# Get a specific revision specified by the version number, or +:previous+
def revision(version)
revision_with Audited.audit_class.reconstruct_attributes(audits_to(version))
end
# Find the oldest revision recorded prior to the date/time provided.
# Returns nil when no audits exist up to that point.
def revision_at(date_or_time)
audits = self.audits.up_until(date_or_time)
revision_with Audited.audit_class.reconstruct_attributes(audits) unless audits.empty?
end
# List of attributes that are audited.
# Returns the model's attributes hash minus the non-audited column names.
def audited_attributes
attributes.except(*non_audited_columns)
end
# Column names excluded from auditing; delegates to the class-level list.
def non_audited_columns
self.class.non_audited_columns
end
protected
# Build a revision object: a duplicate of this record with the given
# +attributes+ applied, flagged as persisted or new depending on whether
# the original record has been destroyed.
def revision_with(attributes)
dup.tap do |revision|
revision.id = id
# Rails < 4.2 needs the raw attributes copied over explicitly.
revision.send :instance_variable_set, '@attributes', self.attributes if rails_below?('4.2.0')
revision.send :instance_variable_set, '@new_record', self.destroyed?
revision.send :instance_variable_set, '@persisted', !self.destroyed?
revision.send :instance_variable_set, '@readonly', false
revision.send :instance_variable_set, '@destroyed', false
revision.send :instance_variable_set, '@_destroyed', false
revision.send :instance_variable_set, '@marked_for_destruction', false
Audited.audit_class.assign_revision_attributes(revision, attributes)
# Remove any association proxies so that they will be recreated
# and reference the correct object for this revision. The only way
# to determine if an instance variable is a proxy object is to
# see if it responds to certain methods, as it forwards almost
# everything to its target.
for ivar in revision.instance_variables
proxy = revision.instance_variable_get ivar
if !proxy.nil? && proxy.respond_to?(:proxy_respond_to?)
revision.instance_variable_set ivar, nil
end
end
end
end
# True when the running Rails version is older than +rails_version+ (a string).
def rails_below?(rails_version)
Gem::Version.new(Rails::VERSION::STRING) < Gem::Version.new(rails_version)
end
private
# Hash of audited changes, keyed by attribute: attr => [old_value, new_value].
def audited_changes
changed_attributes.except(*non_audited_columns).inject({}) do |changes, (attr, old_value)|
changes[attr] = [old_value, self[attr]]
changes
end
end
# Audits up to the given +version+. :previous resolves to the version before
# the current one, falling back to the latest recorded audit's version.
def audits_to(version = nil)
if version == :previous
version = if self.version
self.version - 1
else
previous = audits.descending.offset(1).first
previous ? previous.version : 1
end
end
audits.to_version(version)
end
# after_create callback: record a 'create' audit with all audited attributes.
def audit_create
write_audit(action: 'create', audited_changes: audited_attributes,
comment: audit_comment)
end
# before_update callback: record an 'update' audit unless nothing changed
# and no comment was supplied.
def audit_update
unless (changes = audited_changes).empty? && audit_comment.blank?
write_audit(action: 'update', audited_changes: changes,
comment: audit_comment)
end
end
# before_destroy callback: record a 'destroy' audit for persisted records only.
def audit_destroy
write_audit(action: 'destroy', audited_changes: audited_attributes,
comment: audit_comment) unless self.new_record?
end
# Create the audit row (inside the :audit callback chain) and reset the
# one-shot audit_comment so it doesn't leak into the next operation.
def write_audit(attrs)
attrs[:associated] = self.send(audit_associated_with) unless audit_associated_with.nil?
self.audit_comment = nil
run_callbacks(:audit) { self.audits.create(attrs) } if auditing_enabled
end
# Destroy guard used when :comment_required is configured.
def require_comment
if auditing_enabled && audit_comment.blank?
errors.add(:audit_comment, "Comment required before destruction")
# Rails 4 halts callback chains on false; Rails 5+ requires throw :abort.
return false if Rails.version.start_with?('4.')
throw :abort
end
end
# Alias each audit callback (e.g. audit_create_callback) so it can be
# referenced independently of the original method.
CALLBACKS.each do |attr_name|
alias_method "#{attr_name}_callback".to_sym, attr_name
end
def empty_callback #:nodoc:
end
# Instance-level auditing switch; delegates to the class-level flag.
def auditing_enabled
self.class.auditing_enabled
end
def auditing_enabled= val
self.class.auditing_enabled = val
end
end # InstanceMethods
# Class-level auditing API mixed into audited models. Per-class enabled
# flags live in Audited.store, a thread-safe variable store.
module AuditedClassMethods
  # Returns an array of columns that are audited. See non_audited_columns
  def audited_columns
    # reject reads better than select with a negated predicate
    columns.reject { |c| non_audited_columns.include?(c.name) }
  end

  # Column names excluded from auditing, computed once per class via the
  # non_audited_column_init lambda installed by +audited+.
  def non_audited_columns
    @non_audited_columns ||= non_audited_column_init.call
  end

  # Executes the block with auditing disabled.
  #
  #   Foo.without_auditing do
  #     @foo.save
  #   end
  #
  def without_auditing
    auditing_was_enabled = auditing_enabled
    disable_auditing
    yield
  ensure
    # only re-enable when it was enabled before, so nested calls behave
    enable_auditing if auditing_was_enabled
  end

  def disable_auditing
    self.auditing_enabled = false
  end

  def enable_auditing
    self.auditing_enabled = true
  end

  # All audit operations during the block are recorded as being
  # made by +user+. This is not model specific, the method is a
  # convenience wrapper around
  # @see Audit#as_user.
  def audit_as(user, &block)
    Audited.audit_class.as_user(user, &block)
  end

  # Whether auditing is enabled for this class; defaults to true when the
  # flag was never set.
  def auditing_enabled
    Audited.store.fetch("#{name.tableize}_auditing_enabled", true)
  end

  def auditing_enabled= val
    Audited.store["#{name.tableize}_auditing_enabled"] = val
  end
end
end
end
|
require 'rails'
# Rails integration for AuthCop: wires authorization-scope tracking into
# ActiveRecord, Warden, ActionController, Devise and the console.
class AuthCop::Railtie < Rails::Railtie
railtie_name :authcop
# NOTE(review): initializer names read "authcopy" while the railtie is
# :authcop -- looks like a typo, but renaming could break references.
initializer "authcopy.active_record" do
ActiveRecord::Base.send :extend, AuthCop
end
# When Warden authenticates a user, push an auth scope for it and remember
# in a thread-local flag that we did so.
initializer "authcopy.warden" do
if defined?(Warden)
Warden::Manager.after_set_user do |user, auth, opts|
Thread.current[:auth_scope_warden_pushed] = true
AuthCop.push_scope(user)
end
end
end
# Pop the Warden-pushed scope after each controller action and clear the flag.
initializer "authcopy.controller" do
ActionController::Base.send :include, AuthCop::Controller
ActionController::Base.after_filter do
if Thread.current[:auth_scope_warden_pushed]
AuthCop.pop_scope
Thread.current[:auth_scope_warden_pushed] = nil
end
end
end
# Devise's pre-authentication controllers run without a signed-in user, so
# mark their anonymous actions as deliberately unscoped.
config.to_prepare do
if defined?(Devise)
Devise::ConfirmationsController.around_filter :auth_scope_unsafe, :only => [:new, :create, :show]
Devise::PasswordsController.around_filter :auth_scope_unsafe, :only => [:new, :create]
Devise::RegistrationsController.around_filter :auth_scope_unsafe, :only => [:new, :create]
Devise::SessionsController.around_filter :auth_scope_unsafe, :only => [:new, :destroy]
end
end
# Rails console sessions run unscoped.
console do
AuthCop.unsafe!
end
end
Allow console to start in safe mode when AUTHCOP environment variable is present
require 'rails'
class AuthCop::Railtie < Rails::Railtie
railtie_name :authcop
initializer "authcopy.active_record" do
ActiveRecord::Base.send :extend, AuthCop
end
initializer "authcopy.warden" do
if defined?(Warden)
Warden::Manager.after_set_user do |user, auth, opts|
Thread.current[:auth_scope_warden_pushed] = true
AuthCop.push_scope(user)
end
end
end
initializer "authcopy.controller" do
ActionController::Base.send :include, AuthCop::Controller
ActionController::Base.after_filter do
if Thread.current[:auth_scope_warden_pushed]
AuthCop.pop_scope
Thread.current[:auth_scope_warden_pushed] = nil
end
end
end
config.to_prepare do
if defined?(Devise)
Devise::ConfirmationsController.around_filter :auth_scope_unsafe, :only => [:new, :create, :show]
Devise::PasswordsController.around_filter :auth_scope_unsafe, :only => [:new, :create]
Devise::RegistrationsController.around_filter :auth_scope_unsafe, :only => [:new, :create]
Devise::SessionsController.around_filter :auth_scope_unsafe, :only => [:new, :destroy]
end
end
console do
AuthCop.unsafe! unless ENV["AUTHCOP"]
end
end
|
module Axe
  module API
    # Collects the CSS selectors that scope an aXe accessibility audit:
    # which parts of the document to include and which to exclude.
    class Context
      attr_reader :inclusion, :exclusion

      def initialize
        @inclusion = []
        @exclusion = []
      end

      # Add one or more selectors to the inclusion list.
      def include(*selectors)
        selectors.each { |selector| @inclusion << Array(Selector.new(selector)) }
      end

      # Add one or more selectors to the exclusion list.
      def exclude(*selectors)
        selectors.each { |selector| @exclusion << Array(Selector.new(selector)) }
      end

      # Hash form of the context parameter passed to axe.run.
      def to_hash
        context_param = {}
        # include key must not be included if empty
        # (when undefined, defaults to `document`)
        context_param[:include] = @inclusion unless @inclusion.empty?
        # exclude array allowed to be empty
        # and must exist in case `include` is omitted
        # because context_param cannot be empty object ({})
        context_param[:exclude] = @exclusion
        context_param
      end

      # String form injected into the page. With no selectors at all this is
      # the bare `document` object (valid JavaScript, not strict JSON).
      def to_json
        return to_hash.to_json unless @inclusion.empty?
        return "document" if @exclusion.empty?
        %Q({"include":document,"exclude":#{@exclusion.to_json}})
      end

      alias_method :to_s, :to_json
    end
  end
end
Add missing `require 'axe/api/selector'` so the `Selector` class is loaded before use
require 'axe/api/selector'

module Axe
  module API
    # Collects the CSS selectors that scope an aXe accessibility audit:
    # which parts of the document to include and which to exclude.
    class Context
      attr_reader :inclusion, :exclusion

      def initialize
        @inclusion = []
        @exclusion = []
      end

      # Add one or more selectors to the inclusion list.
      def include(*selectors)
        selectors.each { |selector| @inclusion << Array(Selector.new(selector)) }
      end

      # Add one or more selectors to the exclusion list.
      def exclude(*selectors)
        selectors.each { |selector| @exclusion << Array(Selector.new(selector)) }
      end

      # Hash form of the context parameter passed to axe.run.
      def to_hash
        context_param = {}
        # include key must not be included if empty
        # (when undefined, defaults to `document`)
        context_param[:include] = @inclusion unless @inclusion.empty?
        # exclude array allowed to be empty
        # and must exist in case `include` is omitted
        # because context_param cannot be empty object ({})
        context_param[:exclude] = @exclusion
        context_param
      end

      # String form injected into the page. With no selectors at all this is
      # the bare `document` object (valid JavaScript, not strict JSON).
      def to_json
        return to_hash.to_json unless @inclusion.empty?
        return "document" if @exclusion.empty?
        %Q({"include":document,"exclude":#{@exclusion.to_json}})
      end

      alias_method :to_s, :to_json
    end
  end
end
|
# Capistrano 2 recipes shared across apps: database pull from staging and a
# staging -> production deploy task.
Capistrano::Configuration.instance(:must_exist).load do
role :staging, "www@staging.botandrose.com:22022"
namespace "data" do
namespace "pull" do
desc "pull data"
# Dump the remote database, download it gzipped, then load it locally.
task "default" do
run "cd #{application} && rake db:dump && gzip -9f db/data.sql"
transfer :down, "#{application}/db/data.sql.gz", "db/data.sql.gz"
system "gunzip -f db/data.sql.gz && rake db:load"
end
end
end
desc "push app from staging to production"
task :deploy, :roles => :production do
# Mirror to GitHub first when a "github" remote exists.
system "git push github" if `git remote` =~ /\bgithub\b/
run "cd #{application} && git pull origin master && rake bootstrap:production"
puts "Deploy Succeeded"
end
end
Add syncing of static asset paths to the Capistrano data:pull tasks
require 'uri'
# Capistrano 2 recipes shared across apps: database and asset pull from
# staging, and a staging -> production deploy task.
Capistrano::Configuration.instance(:must_exist).load do
role :staging, "www@staging.botandrose.com:22022"
# Paths (relative to the app root) rsynced down by data:pull:assets;
# apps override this list via `set :asset_paths, [...]`.
set :asset_paths, []
namespace "data" do
namespace "pull" do
desc "pull data"
# Dump the remote database, download it gzipped, then load it locally.
task "default" do
run "cd #{application} && rake db:dump && gzip -9f db/data.sql"
transfer :down, "#{application}/db/data.sql.gz", "db/data.sql.gz"
system "gunzip -f db/data.sql.gz && rake db:load"
end
desc "sync the static assets"
task "assets" do
# NOTE(review): assumes ENV['ROLES'] is set; `nil.to_sym` raises -- confirm
# callers always invoke this with ROLES=<role>.
uri = URI.parse("ssh://#{roles[ENV['ROLES'].to_sym].first.to_s}")
portopt = "-e'ssh -p#{uri.port}'" if uri.port
[asset_paths].flatten.each do |path|
dest_path = path.dup
# Strip the last path segment so rsync recreates it under the parent dir.
dest_path.sub! %r(/[^/]+$), '/'
system "rsync #{portopt} --delete -avz #{uri.user}@#{uri.host}:#{application}/#{path} #{dest_path}"
end
end
end
end
after 'data:pull', 'data:pull:assets'
desc "push app from staging to production"
task :deploy, :roles => :production do
# Mirror to GitHub first when a "github" remote exists.
system "git push github" if `git remote` =~ /\bgithub\b/
run "cd #{application} && git pull origin master && rake bootstrap:production"
puts "Deploy Succeeded"
end
end
|
# Beyonic API client namespace.
module Beyonic
  # Gem release number (semantic versioning).
  VERSION = '0.0.11'
end
Release version 0.0.12
# Beyonic API client namespace.
module Beyonic
  # Gem release number (semantic versioning).
  VERSION = '0.0.12'
end
|
module BioLocus
require 'moneta'
# Reads "chr<TAB>pos" records from STDIN and records each unique
# (chromosome, position) pair as a key in a Moneta LocalMemCache store.
module Store
# Run the store step.
# options[:db]    - path of the LocalMemCache database file
# options[:quiet] - suppress progress/summary output when truthy
def Store.run(options)
store = Moneta.new(:LocalMemCache, file: options[:db])
count = count_new = count_dup = 0
STDIN.each_line do | line |
if line =~ /^[[:alnum:]]+/
# Only the first two tab-separated fields are used; `rest` is always
# nil here because [0..1] yields just two elements.
chr,pos,rest = line.split(/\t/,3)[0..1]
if pos =~ /^\d+$/
key = chr+"\t"+pos
if not store[key]
count_new += 1
store[key] = true
else
# Duplicate position: count it and report on stderr.
count_dup += 1
$stderr.print "Already in store: "
p [chr,pos]
end
count += 1
# Progress dot every million records unless quiet.
$stderr.print '.' if (count % 1_000_000) == 0 if not options[:quiet]
next
end
end
# Reached when the line has no alphanumeric prefix or a non-numeric position.
$stderr.print "Warning: did not store ",line
end
store.close
$stderr.print "Stored #{count_new} positions out of #{count} in #{options[:db]} (#{count_dup} duplicates)\n" if !options[:quiet]
end
end
end
Honour debug switch
module BioLocus
require 'moneta'

# Reads "chr<TAB>pos" records from STDIN and records each unique
# (chromosome, position) pair as a key in a Moneta LocalMemCache store.
module Store
  # Run the store step.
  #
  # options[:db]    - path of the LocalMemCache database file
  # options[:quiet] - suppress progress/summary output when truthy
  # options[:debug] - report duplicate hits and unparseable lines on stderr
  def Store.run(options)
    store = Moneta.new(:LocalMemCache, file: options[:db])
    count = count_new = count_dup = 0
    STDIN.each_line do |line|
      if line =~ /^[[:alnum:]]+/
        # Only the first two tab-separated fields are used.
        chr, pos = line.split(/\t/, 3)
        if pos =~ /^\d+$/
          key = chr + "\t" + pos
          if store[key]
            # Duplicate position: count it, report only in debug mode.
            count_dup += 1
            if options[:debug]
              $stderr.print "Store hit: "
              p [chr, pos]
            end
          else
            count_new += 1
            store[key] = true
          end
          count += 1
          # Progress dot every million records unless quiet.
          $stderr.print '.' if (count % 1_000_000) == 0 && !options[:quiet]
          next
        end
      end
      # Reached when the line has no alphanumeric prefix or a non-numeric position.
      $stderr.print "Warning: did not store ", line if options[:debug]
    end
    store.close
    $stderr.print "Stored #{count_new} positions out of #{count} in #{options[:db]} (#{count_dup} hits)\n" if !options[:quiet]
  end
end
end
|
# encoding: ascii-8bit
module Bitcoin
# Optional DSL to help create blocks and transactions.
#
# see also BlockBuilder, TxBuilder, TxInBuilder, TxOutBuilder, ScriptBuilder
module Builder
# build a Bitcoin::Protocol::Block matching the given +target+.
# see BlockBuilder for details.
def build_block(target = "00".ljust(64, 'f'))
c = BlockBuilder.new
yield c
c.block(target)
end
alias :blk :build_block
# build a Bitcoin::Protocol::Tx.
# see TxBuilder for details.
def build_tx opts = {}
c = TxBuilder.new
yield c
c.tx opts
end
alias :tx :build_tx
# build a Bitcoin::Script.
# see ScriptBuilder for details.
def script
c = ScriptBuilder.new
yield c
c.script
end
# DSL to create a Bitcoin::Protocol::Block used by Builder#create_block.
# block = blk("00".ljust(32, 'f')) do |b|
# b.prev_block "\x00"*32
# b.tx do |t|
# t.input {|i| i.coinbase }
# t.output do |o|
# o.value 5000000000;
# o.to Bitcoin::Key.generate.addr
# end
# end
# end
#
# See Bitcoin::Builder::TxBuilder for details on building transactions.
class BlockBuilder
def initialize
@block = P::Block.new(nil)
end
# specify block version. this is usually not necessary. defaults to 1.
def version v
@version = v
end
# set the hash of the previous block.
def prev_block hash
@prev_block = hash
end
# set the block timestamp (defaults to current time).
def time time
@time = time
end
# add transactions to the block (see TxBuilder).
def tx tx = nil
tx ||= ( c = TxBuilder.new; yield c; c.tx )
@block.tx << tx
tx
end
# create the block according to values specified via DSL.
def block target
@block.ver = @version || 1
# prev_block is given as big-endian hex; stored as little-endian binary.
@block.prev_block = @prev_block.htb.reverse
# NOTE(review): @mrkl_root is never set by this DSL and the assignment is
# overwritten two lines below -- looks like dead code.
@block.mrkl_root = @mrkl_root
@block.time = @time || Time.now.to_i
@block.nonce = 0
@block.mrkl_root = Bitcoin.hash_mrkl_tree(@block.tx.map(&:hash)).last.htb.reverse
find_hash(target)
# Round-trip through the wire format to verify serialization is sane.
block = P::Block.new(@block.to_payload)
raise "Payload Error" unless block.to_payload == @block.to_payload
block
end
private
# increment nonce/time to find a block hash matching the +target+.
def find_hash target
@block.bits = Bitcoin.encode_compact_bits(target)
t = Time.now
@block.recalc_block_hash
until @block.hash.to_i(16) < target.to_i(16)
@block.nonce += 1
@block.recalc_block_hash
# Every 100k nonces: print the hash rate, refresh the timestamp and
# restart the nonce search (the new time changes the header).
if @block.nonce == 100000
if t
tt = 1 / ((Time.now - t) / 100000) / 1000
print "\r%.2f khash/s" % tt
end
t = Time.now
@block.time = Time.now.to_i
@block.nonce = 0
$stdout.flush
end
end
end
end
# DSL to create Bitcoin::Protocol::Tx used by Builder#build_tx.
# tx = tx do |t|
#  t.input do |i|
#    i.prev_out prev_tx, 0
#    i.signature_key key
#  end
#  t.output do |o|
#    o.value 12345 # 0.00012345 BTC
#    o.to key.addr
#  end
# end
#
# Signs every input that has a signature key and where the previous outputs
# pk_script is known. If unable to sign, the resulting txin will include
# the #sig_hash that needs to be signed.
#
# See TxInBuilder and TxOutBuilder for details on how to build in/outputs.
class TxBuilder
  def initialize
    @tx = P::Tx.new(nil)
    @tx.ver, @tx.lock_time = 1, 0
    @ins, @outs = [], []
  end

  # specify tx version. this is usually not necessary. defaults to 1.
  def version n
    @tx.ver = n
  end

  # specify tx lock_time. this is usually not necessary. defaults to 0.
  def lock_time n
    @tx.lock_time = n
  end

  # add an input to the transaction (see TxInBuilder).
  def input
    c = TxInBuilder.new
    yield c
    @ins << c
  end

  # add an output to the transaction (see TxOutBuilder).
  def output
    c = TxOutBuilder.new
    yield c
    @outs << c
  end

  # Create the transaction according to values specified via DSL.
  # Sign each input that has a signature key specified. If there is
  # no key, store the sig_hash in the input, so it can easily be
  # signed later.
  #
  # When :change_address and :input_value options are given, it will
  # automatically create a change output sending the remaining funds
  # to the given address. The :leave_fee option can be used in this
  # case to specify a tx fee that should be left unclaimed by the
  # change output (:extra_fee adds on top of the minimum fee).
  def tx opts = {}
    # memoized: once the hash is computed the tx is final
    return @tx if @tx.hash
    if opts[:change_address] && !opts[:input_value]
      raise "Must give 'input_value' when auto-generating change output!"
    end
    @ins.each {|i| @tx.add_in(i.txin) }
    @outs.each {|o| @tx.add_out(o.txout) }
    if opts[:change_address]
      # FIX: inject(:+) returns nil on an empty output list, which crashed
      # change-only transactions; default to 0 so change can be the sole output.
      output_value = @tx.out.map(&:value).inject(:+) || 0
      change_value = opts[:input_value] - output_value
      if opts[:leave_fee]
        fee = @tx.minimum_block_fee + (opts[:extra_fee] || 0)
        if change_value >= fee
          change_value -= fee
        else
          change_value = 0
        end
      end
      if change_value > 0
        script = Script.to_address_script(opts[:change_address])
        @tx.add_out(P::TxOut.new(change_value, script))
      end
    end
    @ins.each_with_index do |inc, i|
      sign_input(i, inc)
    end
    # run our tx through an encode/decode cycle to make sure that the binary format is sane
    raise "Payload Error" unless P::Tx.new(@tx.to_payload).to_payload == @tx.to_payload
    @tx.instance_eval do
      @payload = to_payload
      @hash = hash_from_payload(@payload)
    end
    @tx
  end

  # coinbase inputs don't need to be signed, they only include the given +coinbase_data+
  def include_coinbase_data i, inc
    script_sig = [inc.coinbase_data].pack("H*")
    @tx.in[i].script_sig_length = script_sig.bytesize
    @tx.in[i].script_sig = script_sig
  end

  # True when we have a sig_hash and enough keys to sign +sig_script+.
  # Raises for unsupported script types.
  def sig_hash_and_all_keys_exist?(inc, sig_script)
    return false unless @sig_hash && inc.has_keys?
    script = Bitcoin::Script.new(sig_script)
    return true if script.is_hash160? || script.is_pubkey?
    if script.is_multisig?
      return inc.has_multiple_keys? && inc.key.size >= script.get_signatures_required
    end
    raise "Script type must be hash160, pubkey or multisig"
  end

  # Leave the input unsigned but attach the data a signing device needs.
  def add_empty_script_sig_to_input(i)
    @tx.in[i].script_sig_length = 0
    @tx.in[i].script_sig = ""
    # add the sig_hash that needs to be signed, so it can be passed on to a signing device
    @tx.in[i].sig_hash = @sig_hash
    # add the address the sig_hash needs to be signed with as a convenience for the signing device
    @tx.in[i].sig_address = Script.new(@prev_script).get_address if @prev_script
  end

  # Build the script_sig for +inc+ from @sig_hash (single-key, bare multisig,
  # or p2sh multisig when a redeem_script is present).
  def get_script_sig(inc)
    if inc.has_multiple_keys?
      # multiple keys given, generate signature for each one
      sigs = inc.sign(@sig_hash)
      if redeem_script = inc.instance_eval { @redeem_script }
        # when a redeem_script was specified, assume we spend a p2sh multisig script
        script_sig = Script.to_p2sh_multisig_script_sig(redeem_script, sigs)
      else
        # when no redeem_script is given, do a regular multisig spend
        script_sig = Script.to_multisig_script_sig(*sigs)
      end
    else
      # only one key given, generate signature and script_sig
      sig = inc.sign(@sig_hash)
      script_sig = Script.to_signature_pubkey_script(sig, [inc.key.pub].pack("H*"))
    end
    return script_sig
  end

  # Sign input number +i+ with data from given +inc+ object (a TxInBuilder).
  def sign_input i, inc
    if @tx.in[i].coinbase?
      include_coinbase_data(i, inc)
    else
      @prev_script = inc.instance_variable_get(:@prev_out_script)
      # get the signature script; use +redeem_script+ if given
      # (indicates spending a p2sh output), otherwise use the prev_script
      sig_script = inc.instance_eval { @redeem_script }
      sig_script ||= @prev_script
      # when a sig_script was found, generate the sig_hash to be signed
      @sig_hash = @tx.signature_hash_for_input(i, sig_script) if sig_script
      # when there is a sig_hash and one or more signature_keys were specified
      if sig_hash_and_all_keys_exist?(inc, sig_script)
        # add the script_sig to the txin
        @tx.in[i].script_sig = get_script_sig(inc)
        # double-check that the script_sig is valid to spend the given prev_script
        raise "Signature error" if @prev_script && !@tx.verify_input_signature(i, @prev_script)
      elsif inc.has_multiple_keys?
        raise "Keys missing for multisig signing"
      else
        # no sig_hash, add an empty script_sig.
        add_empty_script_sig_to_input(i)
      end
    end
  end

  # Randomize the outputs using SecureRandom
  def randomize_outputs
    @outs.sort_by!{ SecureRandom.random_bytes(4).unpack("I")[0] }
  end
end
# Create a Bitcoin::Protocol::TxIn used by TxBuilder#input.
#
# Inputs need the transaction hash and the index of the output they spend.
# You can pass either the transaction, or just its hash (in hex form).
# To sign the input, builder also needs the pk_script of the previous output.
# If you specify a tx hash instead of the whole tx, you need to specify the
# output script separately.
#
# t.input do |i|
# i.prev_out prev_tx # previous transaction
# i.prev_out_index 0 # index of previous output
# i.signature_key key # Bitcoin::Key used to sign the input
# end
#
# t.input {|i| i.prev_out prev_tx, 0 }
#
# If you want to spend a p2sh output, you also need to specify the +redeem_script+.
#
# t.input do |i|
# i.prev_out prev_tx, 0
# i.redeem_script prev_out.redeem_script
# end
#
# If you want to spend a multisig output, just provide an array of keys to #signature_key.
class TxInBuilder
attr_reader :prev_tx, :prev_script, :redeem_script, :key, :coinbase_data
def initialize
@txin = P::TxIn.new
# Defaults describe a null outpoint (all-zero hash, index 0).
@prev_out_hash = "\x00" * 32
@prev_out_index = 0
end
# Previous transaction that contains the output we want to use.
# You can either pass the transaction, or just the tx hash.
# If you pass only the hash, you need to pass the previous outputs
# +script+ separately if you want the txin to be signed.
def prev_out tx, idx = nil, script = nil
if tx.is_a?(Bitcoin::P::Tx)
@prev_tx = tx
@prev_out_hash = tx.binary_hash
@prev_out_script = tx.out[idx].pk_script if idx
else
# Assume a big-endian hex hash; store little-endian binary.
@prev_out_hash = tx.htb.reverse
end
# Explicit +script+/+idx+ arguments override anything derived above.
@prev_out_script = script if script
@prev_out_index = idx if idx
end
# Index of the output in the #prev_out transaction.
def prev_out_index i
@prev_out_index = i
@prev_out_script = @prev_tx.out[i].pk_script if @prev_tx
end
# Previous output's +pk_script+. Needed when only the tx hash is specified as #prev_out.
def prev_out_script script
@prev_out_script = script
end
# Redeem script for P2SH output. To spend from a P2SH output, you need to provide
# the script with a hash matching the P2SH address.
def redeem_script script
@redeem_script = script
end
# Specify sequence. This is usually not needed.
def sequence s
@sequence = s
end
# Bitcoin::Key used to sign the signature_hash for the input.
# see Bitcoin::Script.signature_hash_for_input and Bitcoin::Key.sign.
def signature_key key
@key = key
end
# Specify that this is a coinbase input. Optionally set +data+.
# If this is set, no other options need to be given.
def coinbase data = nil
@coinbase_data = data || OpenSSL::Random.random_bytes(32)
@prev_out_hash = "\x00" * 32
@prev_out_index = 4294967295
end
# Create the txin according to specified values
def txin
@txin.prev_out = @prev_out_hash
@txin.prev_out_index = @prev_out_index
@txin.sequence = @sequence || "\xff\xff\xff\xff"
@txin
end
# True when an array of keys was given (multisig spend).
def has_multiple_keys?
@key.is_a?(Array)
end
# True when every given key has a private part and can actually sign.
def has_keys?
@key && (has_multiple_keys? ? @key.all?(&:priv) : @key.priv)
end
# Sign +sig_hash+ with the configured key(s); returns one signature or an array.
def sign(sig_hash)
if has_multiple_keys?
@key.map {|k| k.sign(sig_hash) }
else
@key.sign(sig_hash)
end
end
end
# Create a Bitcoin::Script used by TxOutBuilder#script.
class ScriptBuilder
  attr_reader :script, :redeem_script

  def initialize
    @type = :address
    @script = nil
  end

  # Set the script type (:pubkey, :address/hash160, :multisig).
  # Defaults to :address.
  def type(type)
    @type = type.to_sym
  end

  # Set the recipient(s) of the script. Depending on #type this is an
  # address, a hash160 pubkey, or an array of multisig pubkeys.
  # Builds the output script (and, for p2sh types, the redeem script)
  # by dispatching to Script.to_<type>_script.
  def recipient(*data)
    built = Script.send("to_#{@type}_script", *data)
    @script, @redeem_script = *built
  end
end
# Create a Bitcoin::Protocol::TxOut used by TxBuilder#output.
#
# t.output {|o| o.value 12345; o.to address }
#
# t.output do |o|
#   o.value 12345
#   o.script {|s| s.recipient address }
# end
#
# t.output {|o| o.to "deadbeef", OP_RETURN }
class TxOutBuilder
  attr_reader :txout

  def initialize
    @txout = P::TxOut.new(0)
  end

  # Set output value (in base units / "satoshis").
  def value(value)
    @txout.value = value
  end

  # Set recipient address and script type (defaults to :address).
  def to(recipient, type = :address)
    built = Bitcoin::Script.send("to_#{type}_script", *recipient)
    @txout.pk_script, @txout.redeem_script = *built
  end

  # Add a script to the output (see ScriptBuilder).
  def script(&block)
    builder = ScriptBuilder.new
    yield builder
    @txout.pk_script = builder.script
    @txout.redeem_script = builder.redeem_script
  end
end
end
end
builder: allow autogenerated change output as the only output
# encoding: ascii-8bit
module Bitcoin
# Optional DSL to help create blocks and transactions.
#
# see also BlockBuilder, TxBuilder, TxInBuilder, TxOutBuilder, ScriptBuilder
module Builder
# build a Bitcoin::Protocol::Block matching the given +target+.
# Yields a BlockBuilder for configuration.
# see BlockBuilder for details.
def build_block(target = "00".ljust(64, 'f'))
c = BlockBuilder.new
yield c
c.block(target)
end
alias :blk :build_block
# build a Bitcoin::Protocol::Tx.
# Yields a TxBuilder for configuration.
# see TxBuilder for details.
def build_tx opts = {}
c = TxBuilder.new
yield c
c.tx opts
end
alias :tx :build_tx
# build a Bitcoin::Script.
# Yields a ScriptBuilder for configuration.
# see ScriptBuilder for details.
def script
c = ScriptBuilder.new
yield c
c.script
end
# DSL to create a Bitcoin::Protocol::Block used by Builder#create_block.
# block = blk("00".ljust(32, 'f')) do |b|
# b.prev_block "\x00"*32
# b.tx do |t|
# t.input {|i| i.coinbase }
# t.output do |o|
# o.value 5000000000;
# o.to Bitcoin::Key.generate.addr
# end
# end
# end
#
# See Bitcoin::Builder::TxBuilder for details on building transactions.
class BlockBuilder
def initialize
@block = P::Block.new(nil)
end
# specify block version. this is usually not necessary. defaults to 1.
def version v
@version = v
end
# set the hash of the previous block.
def prev_block hash
@prev_block = hash
end
# set the block timestamp (defaults to current time).
def time time
@time = time
end
# add transactions to the block (see TxBuilder).
def tx tx = nil
tx ||= ( c = TxBuilder.new; yield c; c.tx )
@block.tx << tx
tx
end
# create the block according to values specified via DSL.
def block target
@block.ver = @version || 1
# prev_block is given as big-endian hex; stored as little-endian binary.
@block.prev_block = @prev_block.htb.reverse
# NOTE(review): @mrkl_root is never set by this DSL and the assignment is
# overwritten two lines below -- looks like dead code.
@block.mrkl_root = @mrkl_root
@block.time = @time || Time.now.to_i
@block.nonce = 0
@block.mrkl_root = Bitcoin.hash_mrkl_tree(@block.tx.map(&:hash)).last.htb.reverse
find_hash(target)
# Round-trip through the wire format to verify serialization is sane.
block = P::Block.new(@block.to_payload)
raise "Payload Error" unless block.to_payload == @block.to_payload
block
end
private
# increment nonce/time to find a block hash matching the +target+.
def find_hash target
@block.bits = Bitcoin.encode_compact_bits(target)
t = Time.now
@block.recalc_block_hash
until @block.hash.to_i(16) < target.to_i(16)
@block.nonce += 1
@block.recalc_block_hash
# Every 100k nonces: print the hash rate, refresh the timestamp and
# restart the nonce search (the new time changes the header).
if @block.nonce == 100000
if t
tt = 1 / ((Time.now - t) / 100000) / 1000
print "\r%.2f khash/s" % tt
end
t = Time.now
@block.time = Time.now.to_i
@block.nonce = 0
$stdout.flush
end
end
end
end
# DSL to create Bitcoin::Protocol::Tx used by Builder#build_tx.
# tx = tx do |t|
# t.input do |i|
# i.prev_out prev_tx, 0
# i.signature_key key
# end
# t.output do |o|
# o.value 12345 # 0.00012345 BTC
# o.to key.addr
# end
# end
#
# Signs every input that has a signature key and where the previous outputs
# pk_script is known. If unable to sign, the resulting txin will include
# the #sig_hash that needs to be signed.
#
# See TxInBuilder and TxOutBuilder for details on how to build in/outputs.
class TxBuilder
def initialize
@tx = P::Tx.new(nil)
@tx.ver, @tx.lock_time = 1, 0
@ins, @outs = [], []
end
# specify tx version. this is usually not necessary. defaults to 1.
def version n
@tx.ver = n
end
# specify tx lock_time. this is usually not necessary. defaults to 0.
def lock_time n
@tx.lock_time = n
end
# add an input to the transaction (see TxInBuilder).
def input
c = TxInBuilder.new
yield c
@ins << c
end
# add an output to the transaction (see TxOutBuilder).
def output
c = TxOutBuilder.new
yield c
@outs << c
end
# Create the transaction according to values specified via DSL.
# Sign each input that has a signature key specified. If there is
# no key, store the sig_hash in the input, so it can easily be
# signed later.
#
# When :change_address and :input_value options are given, it will
# automatically create a change output sending the remaining funds
# to the given address. The :leave_fee option can be used in this
# case to specify a tx fee that should be left unclaimed by the
# change output.
def tx opts = {}
return @tx if @tx.hash
if opts[:change_address] && !opts[:input_value]
raise "Must give 'input_value' when auto-generating change output!"
end
@ins.each {|i| @tx.add_in(i.txin) }
@outs.each {|o| @tx.add_out(o.txout) }
if opts[:change_address]
output_value = @tx.out.map(&:value).inject(:+) || 0
change_value = opts[:input_value] - output_value
if opts[:leave_fee]
fee = @tx.minimum_block_fee + (opts[:extra_fee] || 0)
if change_value >= fee
change_value -= fee
else
change_value = 0
end
end
if change_value > 0
script = Script.to_address_script(opts[:change_address])
@tx.add_out(P::TxOut.new(change_value, script))
end
end
@ins.each_with_index do |inc, i|
sign_input(i, inc)
end
# run our tx through an encode/decode cycle to make sure that the binary format is sane
raise "Payload Error" unless P::Tx.new(@tx.to_payload).to_payload == @tx.to_payload
@tx.instance_eval do
@payload = to_payload
@hash = hash_from_payload(@payload)
end
@tx
end
# coinbase inputs don't need to be signed, they only include the given +coinbase_data+
def include_coinbase_data i, inc
script_sig = [inc.coinbase_data].pack("H*")
@tx.in[i].script_sig_length = script_sig.bytesize
@tx.in[i].script_sig = script_sig
end
def sig_hash_and_all_keys_exist?(inc, sig_script)
return false unless @sig_hash && inc.has_keys?
script = Bitcoin::Script.new(sig_script)
return true if script.is_hash160? || script.is_pubkey?
if script.is_multisig?
return inc.has_multiple_keys? && inc.key.size >= script.get_signatures_required
end
raise "Script type must be hash160, pubkey or multisig"
end
def add_empty_script_sig_to_input(i)
@tx.in[i].script_sig_length = 0
@tx.in[i].script_sig = ""
# add the sig_hash that needs to be signed, so it can be passed on to a signing device
@tx.in[i].sig_hash = @sig_hash
# add the address the sig_hash needs to be signed with as a convenience for the signing device
@tx.in[i].sig_address = Script.new(@prev_script).get_address if @prev_script
end
def get_script_sig(inc)
if inc.has_multiple_keys?
# multiple keys given, generate signature for each one
sigs = inc.sign(@sig_hash)
if redeem_script = inc.instance_eval { @redeem_script }
# when a redeem_script was specified, assume we spend a p2sh multisig script
script_sig = Script.to_p2sh_multisig_script_sig(redeem_script, sigs)
else
# when no redeem_script is given, do a regular multisig spend
script_sig = Script.to_multisig_script_sig(*sigs)
end
else
# only one key given, generate signature and script_sig
sig = inc.sign(@sig_hash)
script_sig = Script.to_signature_pubkey_script(sig, [inc.key.pub].pack("H*"))
end
return script_sig
end
# Sign input number +i+ with data from given +inc+ object (a TxInBuilder).
def sign_input i, inc
if @tx.in[i].coinbase?
include_coinbase_data(i, inc)
else
@prev_script = inc.instance_variable_get(:@prev_out_script)
# get the signature script; use +redeem_script+ if given
# (indicates spending a p2sh output), otherwise use the prev_script
sig_script = inc.instance_eval { @redeem_script }
sig_script ||= @prev_script
# when a sig_script was found, generate the sig_hash to be signed
@sig_hash = @tx.signature_hash_for_input(i, sig_script) if sig_script
# when there is a sig_hash and one or more signature_keys were specified
if sig_hash_and_all_keys_exist?(inc, sig_script)
# add the script_sig to the txin
@tx.in[i].script_sig = get_script_sig(inc)
# double-check that the script_sig is valid to spend the given prev_script
raise "Signature error" if @prev_script && !@tx.verify_input_signature(i, @prev_script)
elsif inc.has_multiple_keys?
raise "Keys missing for multisig signing"
else
# no sig_hash, add an empty script_sig.
add_empty_script_sig_to_input(i)
end
end
end
# Randomize the outputs using SecureRandom
def randomize_outputs
@outs.sort_by!{ SecureRandom.random_bytes(4).unpack("I")[0] }
end
end
# Create a Bitcoin::Protocol::TxIn used by TxBuilder#input.
#
# Inputs need the transaction hash and the index of the output they spend.
# You can pass either the transaction, or just its hash (in hex form).
# To sign the input, builder also needs the pk_script of the previous output.
# If you specify a tx hash instead of the whole tx, you need to specify the
# output script separately.
#
# t.input do |i|
# i.prev_out prev_tx # previous transaction
# i.prev_out_index 0 # index of previous output
# i.signature_key key # Bitcoin::Key used to sign the input
# end
#
# t.input {|i| i.prev_out prev_tx, 0 }
#
# If you want to spend a p2sh output, you also need to specify the +redeem_script+.
#
# t.input do |i|
# i.prev_out prev_tx, 0
# i.redeem_script prev_out.redeem_script
# end
#
# If you want to spend a multisig output, just provide an array of keys to #signature_key.
class TxInBuilder
attr_reader :prev_tx, :prev_script, :redeem_script, :key, :coinbase_data
def initialize
@txin = P::TxIn.new
@prev_out_hash = "\x00" * 32
@prev_out_index = 0
end
# Previous transaction that contains the output we want to use.
# You can either pass the transaction, or just the tx hash.
# If you pass only the hash, you need to pass the previous outputs
# +script+ separately if you want the txin to be signed.
def prev_out tx, idx = nil, script = nil
if tx.is_a?(Bitcoin::P::Tx)
@prev_tx = tx
@prev_out_hash = tx.binary_hash
@prev_out_script = tx.out[idx].pk_script if idx
else
@prev_out_hash = tx.htb.reverse
end
@prev_out_script = script if script
@prev_out_index = idx if idx
end
# Index of the output in the #prev_out transaction.
def prev_out_index i
@prev_out_index = i
@prev_out_script = @prev_tx.out[i].pk_script if @prev_tx
end
# Previous output's +pk_script+. Needed when only the tx hash is specified as #prev_out.
def prev_out_script script
@prev_out_script = script
end
# Redeem script for P2SH output. To spend from a P2SH output, you need to provide
# the script with a hash matching the P2SH address.
def redeem_script script
@redeem_script = script
end
# Specify sequence. This is usually not needed.
def sequence s
@sequence = s
end
# Bitcoin::Key used to sign the signature_hash for the input.
# see Bitcoin::Script.signature_hash_for_input and Bitcoin::Key.sign.
def signature_key key
@key = key
end
# Specify that this is a coinbase input. Optionally set +data+.
# If this is set, no other options need to be given.
def coinbase data = nil
@coinbase_data = data || OpenSSL::Random.random_bytes(32)
@prev_out_hash = "\x00" * 32
@prev_out_index = 4294967295
end
# Create the txin according to specified values
def txin
@txin.prev_out = @prev_out_hash
@txin.prev_out_index = @prev_out_index
@txin.sequence = @sequence || "\xff\xff\xff\xff"
@txin
end
def has_multiple_keys?
@key.is_a?(Array)
end
def has_keys?
@key && (has_multiple_keys? ? @key.all?(&:priv) : @key.priv)
end
def sign(sig_hash)
if has_multiple_keys?
@key.map {|k| k.sign(sig_hash) }
else
@key.sign(sig_hash)
end
end
end
# Create a Bitcoin::Script used by TxOutBuilder#script.
class ScriptBuilder
attr_reader :script, :redeem_script
def initialize
@type = :address
@script = nil
end
# Script type (:pubkey, :address/hash160, :multisig).
# Defaults to :address.
def type type
@type = type.to_sym
end
# Recipient(s) of the script.
# Depending on the #type, this should be an address, a hash160 pubkey,
# or an array of multisig pubkeys.
def recipient *data
@script, @redeem_script = *Script.send("to_#{@type}_script", *data)
end
end
# Create a Bitcoin::Protocol::TxOut used by TxBuilder#output.
#
# t.output {|o| o.value 12345; o.to address }
#
# t.output do |o|
# o.value 12345
# o.script {|s| s.recipient address }
# end
#
# t.output {|o| o.to "deadbeef", OP_RETURN }
class TxOutBuilder
attr_reader :txout
def initialize
@txout = P::TxOut.new(0)
end
# Set output value (in base units / "satoshis")
def value value
@txout.value = value
end
# Set recipient address and script type (defaults to :address).
def to recipient, type = :address
@txout.pk_script, @txout.redeem_script = *Bitcoin::Script.send("to_#{type}_script", *recipient)
end
# Add a script to the output (see ScriptBuilder).
def script &block
c = ScriptBuilder.new
yield c
@txout.pk_script, @txout.redeem_script = c.script, c.redeem_script
end
end
end
end
|
module Bitfinex
  module Orders
    # Fetch the account's active orders as Bitfinex::Order objects.
    def self.active_orders
      Bitfinex.sanity_check!
      body = Bitfinex::Net.post("/v1/orders").to_str
      JSON.parse(body).map {|attrs| Bitfinex::Order.new(attrs) }
    end

    # Submit a new order; the configured symbol is used unless +order_attr+
    # overrides it. Returns the created Bitfinex::Order.
    def self.create(order_attr)
      Bitfinex.sanity_check!
      payload = { symbol: Bitfinex.symbol }.merge(order_attr)
      response = Bitfinex::Net.post("/v1/order/new", payload).to_str
      Bitfinex::Order.new(JSON.parse(response))
    end
  end
end
Add feature: cancel order
module Bitfinex
  module Orders
    # Fetch the account's active orders as Bitfinex::Order objects.
    def self.active_orders
      Bitfinex.sanity_check!
      body = Bitfinex::Net.post("/v1/orders").to_str
      JSON.parse(body).map {|attrs| Bitfinex::Order.new(attrs) }
    end

    # Submit a new order; the configured symbol is used unless +order_attr+
    # overrides it. Returns the created Bitfinex::Order.
    def self.create(order_attr)
      Bitfinex.sanity_check!
      payload = { symbol: Bitfinex.symbol }.merge(order_attr)
      response = Bitfinex::Net.post("/v1/order/new", payload).to_str
      Bitfinex::Order.new(JSON.parse(response))
    end

    # Cancel an existing order; +options+ must identify it (e.g. :order_id).
    def self.cancel(options = {})
      Bitfinex.sanity_check!
      Bitfinex::Net.post("/v1/order/cancel", options)
    end
  end
end
|
module Booties
  # Gem version string. Frozen so the shared constant cannot be mutated.
  VERSION = "0.0.2".freeze
end
Version bump: Booties 0.0.2 -> 0.0.3
module Booties
  # Gem version string. Frozen so the shared constant cannot be mutated.
  VERSION = "0.0.3".freeze
end
|
module Bourbon
  # Gem version string. Frozen so the shared constant cannot be mutated.
  VERSION = "2.1.1".freeze
end
Bourbon version bump to 2.1.2
module Bourbon
  # Gem version string. Frozen so the shared constant cannot be mutated.
  VERSION = "2.1.2".freeze
end
|
require 'msgpack'
module Bud
######## the collection types
# each collection is partitioned into 4:
# - pending holds tuples deferred til the next tick
# - storage holds the "normal" tuples
# - delta holds the delta for rhs's of rules during semi-naive
# - new_delta will hold the lhs tuples currently being produced during s-n
class BudCollection
include Enumerable
attr_accessor :bud_instance
attr_reader :schema, :key_cols, :val_cols, :tabname
attr_reader :storage, :delta, :new_delta
def initialize(name, bud_instance, given_schema=nil, defer_schema=false)
@tabname = name
@bud_instance = bud_instance
init_schema(given_schema) unless given_schema.nil? and defer_schema
init_buffers
end
private
def init_buffers
init_storage
init_pending
init_deltas
end
private
def init_schema(given_schema)
given_schema ||= {[:key]=>[:val]}
@given_schema = given_schema
@schema, @key_cols = parse_schema(given_schema)
@key_colnums = key_cols.map {|k| schema.index(k)}
setup_accessors
end
# The user-specified schema might come in two forms: a hash of Array =>
# Array (key_cols => remaining columns), or simply an Array of columns (if no
# key_cols were specified). Return a pair: [list of columns in entire tuple,
# list of key columns]
private
def parse_schema(given_schema)
if given_schema.respond_to? :keys
raise BudError, "invalid schema for #{tabname}" if given_schema.length != 1
key_cols = given_schema.keys.first
val_cols = given_schema.values.first
else
key_cols = given_schema
val_cols = []
end
schema = key_cols + val_cols
schema.each do |s|
if s.class != Symbol
raise BudError, "Invalid schema element \"#{s}\", type \"#{s.class}\""
end
end
if schema.uniq.length < schema.length
raise BudError, "schema for #{tabname} contains duplicate names"
end
return [schema, key_cols]
end
public
def clone_empty
self.class.new(tabname, bud_instance, @given_schema)
end
public
def val_cols
schema - key_cols
end
# define methods to turn 'table.col' into a [table,col] pair
# e.g. to support something like
# j = join link, path, {link.to => path.from}
private
def setup_accessors
s = @schema
s.each do |colname|
reserved = eval "defined?(#{colname})"
unless (reserved.nil? or
(reserved == "method" and method(colname).arity == -1 and (eval(colname))[0] == self.tabname))
raise BudError, "symbol :#{colname} reserved, cannot be used as column name for #{tabname}"
end
end
# set up schema accessors, which are class methods
m = Module.new do
s.each_with_index do |c, i|
define_method c do
[@tabname, i, c]
end
end
end
self.extend m
# now set up a Module for tuple accessors, which are instance methods
@tupaccess = Module.new do
s.each_with_index do |colname, offset|
define_method colname do
self[offset]
end
end
end
end
# define methods to access tuple attributes by column name
private
def tuple_accessors(tup)
tup.extend @tupaccess
end
public
def null_tuple
tuple_accessors(Array.new(@schema.length))
end
public
def keys
self.map{|t| (0..self.key_cols.length-1).map{|i| t[i]}}
end
public
def values
self.map{|t| (self.key_cols.length..self.schema.length-1).map{|i| t[i]}}
end
public
def inspected
self.map{|t| [t.inspect]}
end
private
def pending_inspected
@pending.map{|t| [t[1].inspect]}
end
public
def pro(&blk)
# to be filled in later for single-node semi-naive iteration
return map(&blk)
end
# By default, all tuples in any rhs are in storage or delta. Tuples in
# new_delta will get transitioned to delta in the next iteration of the
# evaluator (but within the current time tick).
public
def each(&block)
# if @bud_instance.stratum_first_iter
each_from([@storage, @delta], &block)
end
# :nodoc
private
def each_from(bufs, &block)
bufs.each do |b|
b.each_value do |v|
yield v
end
end
end
# :nodoc
public
def each_from_sym(buf_syms, &block)
bufs = buf_syms.map do |s|
case s
when :storage then @storage
when :delta then @delta
when :new_delta then @new_delta
else raise BudError, "bad symbol passed into each_from_sym"
end
end
each_from(bufs, &block)
end
private
def init_storage
@storage = {}
end
private
def init_pending
@pending = {}
end
private
def init_deltas
@delta = {}
@new_delta = {}
end
public
def close
end
public
def has_key?(k)
return false if k.nil? or k.empty? or self[k].nil?
return true
end
# return item with that key
# ---
# assumes that key is in storage or delta, but not both
# is this enforced in do_insert?
public
def [](key)
return @storage[key].nil? ? @delta[key] : @storage[key]
end
public
def include?(tuple)
return true if key_cols.nil? or (key_cols.empty? and length > 0)
return false if tuple.nil? or tuple.empty?
key = key_cols.map{|k| tuple[schema.index(k)]}
return (tuple == self[key])
end
public
def exists?(&block)
if length == 0
return false
elsif not block_given?
return true
else
retval = ((detect{|t| yield t}).nil?) ? false : true
return retval
end
end
private
def raise_pk_error(new, old)
keycols = key_cols.map{|k| old[schema.index(k)]}
raise KeyConstraintError, "Key conflict inserting #{old.inspect} into \"#{tabname}\": existing tuple #{new.inspect}, key_cols = #{keycols.inspect}"
end
private
def prep_tuple(o)
unless o.respond_to?(:length) and o.respond_to?(:[])
raise BudTypeError, "non-indexable type inserted into BudCollection #{self.tabname}: #{o.inspect}"
end
if o.length < schema.length then
# if this tuple has too few fields, pad with nil's
old = o.clone
(o.length..schema.length-1).each{|i| o << nil}
# puts "in #{@tabname}, converted #{old.inspect} to #{o.inspect}"
elsif o.length > schema.length then
# if this tuple has more fields than usual, bundle up the
# extras into an array
o = (0..(schema.length - 1)).map{|c| o[c]} << (schema.length..(o.length - 1)).map{|c| o[c]}
end
return o
end
private
def do_insert(o, store)
# return if o.respond_to?(:empty?) and o.empty?
return if o.nil? # silently ignore nils resulting from map predicates failing
o = prep_tuple(o)
keycols = @key_colnums.map{|i| o[i]}
# XXX should this be self[keycols?]
# but what about if we're not calling on store = @storage?
# probably pk should be tested by the caller of this routing
# XXX please check in some key violation tests!!
old = store[keycols]
if old.nil?
store[keycols] = tuple_accessors(o)
else
raise_pk_error(o, old) unless old == o
end
end
public
def insert(o)
# puts "insert: #{o.inspect} into #{tabname}"
do_insert(o, @storage)
end
alias << insert
private
def check_enumerable(o)
unless (o.nil? or o.class < Enumerable) and o.respond_to? 'each'
raise BudTypeError, "Attempt to merge non-enumerable type into BudCollection"
end
end
# Assign self a schema, by hook or by crook. If o is schemaless *and* empty, will
# leave @schema as is.
private
def establish_schema(o)
# use o's schema if available
deduce_schema(o) if @schema.nil?
# else use arity of first tuple of o
fit_schema(o.first.size) if @schema.nil? and not o.first.nil?
return @schema
end
# Copy over the schema from o if available
private
def deduce_schema(o)
if @schema.nil? and o.class <= Bud::BudCollection and not o.schema.nil?
# must have been initialized with defer_schema==true. take schema from rhs
init_schema(o.schema)
end
# returns old state of @schema (nil) if nothing available
return @schema
end
# manufacture schema of the form [:c0, :c1, ...] with width = arity
private
def fit_schema(arity)
# rhs is schemaless. create schema from first tuple merged
init_schema((0..arity-1).map{|indx| ("c"+indx.to_s).to_sym})
return @schema
end
# instantaneously merge items from collection into self
public
def merge(o, buf=@new_delta)
check_enumerable(o)
establish_schema(o) if @schema.nil?
delta = o.map do |i|
next if i.nil? or i == []
i = prep_tuple(i)
key_vals = @key_colnums.map{|k| i[k]}
if (old = self[key_vals])
raise_pk_error(i, old) if old != i
elsif (oldnew = self.new_delta[key_vals])
raise_pk_error(i, oldnew) if oldnew != i
else
buf[key_vals] = tuple_accessors(i)
end
end
return self
end
alias <= merge
public
def pending_merge(o)
check_enumerable(o)
deduce_schema(o)
o.each {|i| do_insert(i, @pending)}
return self
end
# merge items from collection into self at the end of this timestep
public
superator "<+" do |o|
pending_merge o
end
# Called at the end of each time step: prepare the collection for the next
# timestep.
public
def tick
@storage = @pending
@pending = {}
raise BudError, "orphaned tuples in @delta for #{@tabname}" unless @delta.empty?
raise BudError, "orphaned tuples in @new_delta for #{@tabname}" unless @new_delta.empty?
end
# move deltas to storage, and new_deltas to deltas.
public
def tick_deltas
# assertion: intersect(@storage, @delta) == nil
@storage.merge!(@delta)
@delta = @new_delta
@new_delta = {}
end
private
def method_missing(sym, *args, &block)
@storage.send sym, *args, &block
end
######## aggs
# a generalization of argmin/argmax to arbitrary exemplary aggregates.
# for each distinct value in the grouping key columns, return the item in that group
# that has the value of the exemplary aggregate "aggname"
public
def argagg(aggname, gbkey_cols, collection)
agg = bud_instance.send(aggname, nil)[0]
raise BudError, "#{aggname} not declared exemplary" unless agg.class <= Bud::ArgExemplary
keynames = gbkey_cols.map do |k|
if k.class == Symbol
k.to_s
else
k[2]
end
end
if collection.class == Symbol
colnum = self.send(collection.to_s)[1]
else
colnum = collection[1]
end
tups = self.inject({}) do |memo,p|
pkey_cols = keynames.map{|n| p.send(n.to_sym)}
if memo[pkey_cols].nil?
memo[pkey_cols] = {:agg=>agg.send(:init, p[colnum]), :tups => [p]}
else
newval = agg.send(:trans, memo[pkey_cols][:agg], p[colnum])
if memo[pkey_cols][:agg] == newval
if agg.send(:tie, memo[pkey_cols][:agg], p[colnum])
memo[pkey_cols][:tups] << p
end
else
memo[pkey_cols] = {:agg=>newval, :tups=>[p]}
end
end
memo
end
finals = []
outs = tups.each_value do |t|
ties = t[:tups].map do |tie|
finals << tie
end
end
# merge directly into retval.storage, so that the temp tuples get picked up
# by the lhs of the rule
retval = BudScratch.new('argagg_temp', bud_instance, @given_schema)
retval.merge(finals, retval.storage)
end
# for each distinct value in the grouping key columns, return the item in that group
# that has the minimum value of the attribute col
public
def argmin(gbkey_cols, col)
argagg(:min, gbkey_cols, col)
end
# for each distinct value in the grouping key columns, return the item in that group
# that has the maximum value of the attribute col
public
def argmax(gbkey_cols, col)
argagg(:max, gbkey_cols, col)
end
# form a collection containing all pairs of items in self and items in collection
public
def *(collection)
bud_instance.join([self, collection])
end
# currently support two options for column ref syntax -- :colname or table.colname
public
def group(key_cols, *aggpairs)
key_cols = [] if key_cols.nil?
keynames = key_cols.map do |k|
if k.class == Symbol
k
elsif k[2] and k[2].class == Symbol
k[2]
else
raise Bud::CompileError, "Invalid grouping key"
end
end
aggcolsdups = aggpairs.map{|ap| ap[0].class.name.split("::").last}
aggcols = []
aggcolsdups.each_with_index do |n, i|
aggcols << "#{n.downcase}_#{i}".to_sym
end
tups = self.inject({}) do |memo, p|
pkey_cols = keynames.map{|n| p.send(n)}
memo[pkey_cols] = [] if memo[pkey_cols].nil?
aggpairs.each_with_index do |ap, i|
agg = ap[0]
if ap[1].class == Symbol
colnum = ap[1].nil? ? nil : self.send(ap[1].to_s)[1]
else
colnum = ap[1].nil? ? nil : ap[1][1]
end
colval = colnum.nil? ? nil : p[colnum]
if memo[pkey_cols][i].nil?
memo[pkey_cols][i] = agg.send(:init, colval)
else
memo[pkey_cols][i] = agg.send(:trans, memo[pkey_cols][i], colval)
end
end
memo
end
result = tups.inject([]) do |memo, t|
finals = []
aggpairs.each_with_index do |ap, i|
finals << ap[0].send(:final, t[1][i])
end
memo << t[0] + finals
end
if block_given?
result.map{|r| yield r}
else
# merge directly into retval.storage, so that the temp tuples get picked up
# by the lhs of the rule
if aggcols.empty?
schema = keynames
else
schema = { keynames => aggcols }
end
retval = BudScratch.new('temp_group', bud_instance, schema)
retval.merge(result, retval.storage)
end
end
alias reduce inject
# methods that work on nested collections (resulting from joins)
# currently supports two options for equijoin predicates:
# general form: an array of arrays capturing a conjunction of equiv. classes
# [[table1.col1, table2.col2, table3.col3], [table1.col2, table2.col3]]
# common form: a hash capturing equality of a column on left with one on right.
# :col1 => :col2 (same as lefttable.col1 => righttable.col2)
public
def pairs(*preds, &blk)
unless preds.nil?
@localpreds = disambiguate_preds(preds)
canonicalize_localpreds(@rels)
end
blk.nil? ? self : map(&blk)
end
alias combos pairs
public
def matches(&blk)
preds = BudJoin::natural_preds(@bud_instance, @rels)
pairs(*preds, &blk)
end
public
def lefts(*preds)
@localpreds = disambiguate_preds(preds)
map{ |l,r| l }
end
public
def rights(*preds)
@localpreds = disambiguate_preds(preds)
map{ |l,r| r }
end
private
def disambiguate_preds(preds)
if preds.size == 1 and preds[0].class <= Hash
predarray = preds[0].map do |k,v|
if k.class != v.class
raise Bud::CompileError, "inconsistent attribute ref style #{k.inspect} => #{v.inspect}"
elsif k.class <= Array
[k,v]
elsif k.class <= Symbol
if @origrels and @origrels.length == 2
[find_attr_match(k,@origrels[0]), find_attr_match(v,@origrels[1])]
else
[find_attr_match(k), find_attr_match(v)]
end
else
raise Bud::CompileError, "invalid attribute ref in #{k.inspect} => #{v.inspect}"
end
end
return decomp_preds(*predarray)
else
return decomp_preds(*preds)
end
end
# find element in @origrels that contains this aname method
# if 2nd arg is non-nil, only check that collection.
# after found, return the result of invoking aname from chosen collection
private
def find_attr_match(aname, rel=nil)
dorels = (rel.nil? ? @origrels : [rel])
match = nil
dorels.each do |r|
match ||= r if r.respond_to?(aname)
if r.respond_to?(aname) and match != r
raise Bud::CompileError, "ambiguous attribute :#{aname} in both #{match.tabname} and #{r.tabname}"
end
end
if match.nil?
raise Bud::CompileError, "attribute :#{aname} not found in any of #{dorels.map{|t| t.tabname}.inspect}"
end
match.send(aname)
end
private
def decomp_preds(*preds)
# decompose each pred into a binary pred
return nil if preds.nil? or preds.empty? or preds == [nil]
newpreds = []
preds.each do |p|
p.each_with_index do |c, i|
newpreds << [p[i], p[i+1]] unless p[i+1].nil?
end
end
newpreds
end
private
def canonicalize_localpreds(rellist)
return if @localpreds.nil?
@localpreds.each do |p|
if p[1][0] == rellist[0].tabname
@localpreds.delete(p)
@localpreds << [p[1], p[0]]
end
end
end
end
class BudScratch < BudCollection
end
class BudTemp < BudCollection
end
class BudChannel < BudCollection
attr_reader :locspec_idx
def initialize(name, bud_instance, given_schema=nil)
given_schema ||= [:@address, :val]
the_schema, the_key_cols = parse_schema(given_schema)
the_val_cols = the_schema - the_key_cols
@locspec_idx = remove_at_sign!(the_key_cols)
@locspec_idx = remove_at_sign!(the_schema) if @locspec_idx.nil?
# If @locspec_idx is still nil, this is a loopback channel
# We mutate the hash key above, so we need to recreate the hash
# XXX: ugh, hacky
if given_schema.respond_to? :keys
given_schema = {the_key_cols => the_val_cols}
end
super(name, bud_instance, given_schema)
end
private
def remove_at_sign!(cols)
i = cols.find_index {|c| c.to_s[0].chr == '@'}
unless i.nil?
cols[i] = cols[i].to_s.delete('@').to_sym
end
return i
end
private
def split_locspec(l)
lsplit = l.split(':')
lsplit[1] = lsplit[1].to_i
return lsplit
end
# form a copy of this collection with no items in it
private
def clone_empty
retval = super
retval.locspec_idx = @locspec_idx
retval
end
public
def tick
@storage = {}
# Note that we do not clear @pending here: if the user inserted into the
# channel manually (e.g., via <~ from inside a sync_do block), we send the
# message at the end of the current tick.
end
public
def flush
ip = @bud_instance.ip
port = @bud_instance.port
each_from([@pending]) do |t|
if @locspec_idx.nil?
the_locspec = [ip, port]
else
begin
the_locspec = split_locspec(t[@locspec_idx])
raise BudError, "bad locspec" if the_locspec[0].nil? or the_locspec[1].nil? or the_locspec[0] == '' or the_locspec[1] == ''
rescue
puts "bad locspec '#{t[@locspec_idx]}', channel '#{@tabname}', skipping: #{t.inspect}"
next
end
end
@bud_instance.dsock.send_datagram([@tabname, t].to_msgpack, the_locspec[0], the_locspec[1])
end
@pending.clear
end
public
def payloads
if schema.size > 2
# need to bundle up each tuple's non-locspec fields into an array
retval = case @locspec_idx
when 0 then self.map{|t| t[1..(t.size-1)]}
when (t.size - 1) then self.map{|t| t[0..(t.size-2)]}
else self.map{|t| t[0..(@locspec_idx-1)] + t[@locspec_idx+1..(t.size-1)]}
end
else
# just return each tuple's non-locspec field value
retval = self.pro{|t| t[(@locspec_idx == 0) ? 1 : 0]}
end
return retval
end
superator "<~" do |o|
pending_merge o
end
superator "<+" do |o|
raise BudError, "Illegal use of <+ with channel '#{@tabname}' on left"
end
public
def <=(o)
raise BudError, "Illegal use of <= with channel '#{@tabname}' on left"
end
end
class BudTerminal < BudCollection
def initialize(name, given_schema, bud_instance, prompt=false)
super(name, bud_instance, given_schema)
@prompt = prompt
end
public
def start_stdin_reader
# XXX: Ugly hack. Rather than sending terminal data to EM via UDP,
# we should add the terminal file descriptor to the EM event loop.
@reader = Thread.new do
begin
while true
$stdout.print("#{tabname} > ") if @prompt
s = $stdin.gets
break if s.nil? # Hit EOF
s = s.chomp if s
tup = [s]
ip = @bud_instance.ip
port = @bud_instance.port
EventMachine::schedule do
socket = EventMachine::open_datagram_socket("127.0.0.1", 0)
socket.send_datagram([tabname, tup].to_msgpack, ip, port)
end
end
rescue
puts "terminal reader thread failed: #{$!}"
print $!.backtrace.join("\n")
exit
end
end
end
public
def flush
@pending.each do |p|
$stdout.puts p[0]
end
@pending = {}
end
public
def tick
@storage = {}
raise BudError unless @pending.empty?
end
public
def merge(o)
raise BudError, "no synchronous accumulation into terminal; use <~"
end
public
def <=(o)
merge(o)
end
superator "<~" do |o|
pending_merge(o)
end
end
class BudPeriodic < BudCollection
end
class BudTable < BudCollection
def initialize(name, bud_instance, given_schema)
super(name, bud_instance, given_schema)
@to_delete = []
end
public
def tick
@to_delete.each do |tuple|
keycols = @key_colnums.map{|k| tuple[k]}
if @storage[keycols] == tuple
@storage.delete keycols
end
end
@storage.merge! @pending
@to_delete = []
@pending = {}
end
superator "<-" do |o|
o.each do |tuple|
next if tuple.nil?
tuple = prep_tuple(tuple)
@to_delete << tuple
end
end
end
class BudReadOnly < BudScratch
superator "<+" do |o|
raise BudError, "Illegal use of <+ with read-only collection '#{@tabname}' on left"
end
public
def merge
raise BudError, "Illegal use of <= with read-only collection '#{@tabname}' on left"
end
end
class BudFileReader < BudReadOnly
def initialize(name, filename, delimiter, bud_instance)
super(name, bud_instance, {[:lineno] => [:text]})
@filename = filename
@storage = {}
# NEEDS A TRY/RESCUE BLOCK
@fd = File.open(@filename, "r")
@linenum = 0
end
# :nodoc
public
def each(&block)
while (l = @fd.gets)
t = tuple_accessors([@linenum, l.strip])
@linenum += 1
yield t
end
end
end
end
# Monkey-patch: adds #rename to every Enumerable so any collection (or a
# plain Array) can be re-badged as a named BudScratch.
module Enumerable
  public
  # Copy self into a new BudScratch named +new_tabname+, using
  # +new_schema+ if given, else self's own schema when it has one.
  def rename(new_tabname, new_schema=nil)
    budi = (respond_to?(:bud_instance)) ? bud_instance : nil
    if new_schema.nil? and respond_to?(:schema)
      new_schema = schema
    end
    scr = Bud::BudScratch.new(new_tabname.to_s, budi, new_schema)
    scr.merge(self, scr.storage)
    scr
  end
end
Use `pro` rather than `map` in the convenience methods (keys, values, inspected)
require 'msgpack'
module Bud
######## the collection types
# each collection is partitioned into 4:
# - pending holds tuples deferred til the next tick
# - storage holds the "normal" tuples
# - delta holds the delta for rhs's of rules during semi-naive
# - new_delta will hold the lhs tuples currently being produced during s-n
class BudCollection
include Enumerable
attr_accessor :bud_instance
attr_reader :schema, :key_cols, :val_cols, :tabname
attr_reader :storage, :delta, :new_delta
def initialize(name, bud_instance, given_schema=nil, defer_schema=false)
@tabname = name
@bud_instance = bud_instance
init_schema(given_schema) unless given_schema.nil? and defer_schema
init_buffers
end
private
def init_buffers
init_storage
init_pending
init_deltas
end
private
def init_schema(given_schema)
given_schema ||= {[:key]=>[:val]}
@given_schema = given_schema
@schema, @key_cols = parse_schema(given_schema)
@key_colnums = key_cols.map {|k| schema.index(k)}
setup_accessors
end
# The user-specified schema might come in two forms: a hash of Array =>
# Array (key_cols => remaining columns), or simply an Array of columns (if no
# key_cols were specified). Return a pair: [list of columns in entire tuple,
# list of key columns]
private
def parse_schema(given_schema)
if given_schema.respond_to? :keys
raise BudError, "invalid schema for #{tabname}" if given_schema.length != 1
key_cols = given_schema.keys.first
val_cols = given_schema.values.first
else
key_cols = given_schema
val_cols = []
end
schema = key_cols + val_cols
schema.each do |s|
if s.class != Symbol
raise BudError, "Invalid schema element \"#{s}\", type \"#{s.class}\""
end
end
if schema.uniq.length < schema.length
raise BudError, "schema for #{tabname} contains duplicate names"
end
return [schema, key_cols]
end
public
def clone_empty
self.class.new(tabname, bud_instance, @given_schema)
end
public
def val_cols
schema - key_cols
end
# define methods to turn 'table.col' into a [table,col] pair
# e.g. to support something like
# j = join link, path, {link.to => path.from}
private
def setup_accessors
s = @schema
s.each do |colname|
reserved = eval "defined?(#{colname})"
unless (reserved.nil? or
(reserved == "method" and method(colname).arity == -1 and (eval(colname))[0] == self.tabname))
raise BudError, "symbol :#{colname} reserved, cannot be used as column name for #{tabname}"
end
end
# set up schema accessors, which are class methods
m = Module.new do
s.each_with_index do |c, i|
define_method c do
[@tabname, i, c]
end
end
end
self.extend m
# now set up a Module for tuple accessors, which are instance methods
@tupaccess = Module.new do
s.each_with_index do |colname, offset|
define_method colname do
self[offset]
end
end
end
end
# define methods to access tuple attributes by column name
private
def tuple_accessors(tup)
tup.extend @tupaccess
end
public
def null_tuple
tuple_accessors(Array.new(@schema.length))
end
public
def keys
self.pro{|t| (0..self.key_cols.length-1).map{|i| t[i]}}
end
public
def values
self.pro{|t| (self.key_cols.length..self.schema.length-1).map{|i| t[i]}}
end
public
def inspected
self.pro{|t| [t.inspect]}
end
private
def pending_inspected
@pending.map{|t| [t[1].inspect]}
end
public
def pro(&blk)
# to be filled in later for single-node semi-naive iteration
return map(&blk)
end
# By default, all tuples in any rhs are in storage or delta. Tuples in
# new_delta will get transitioned to delta in the next iteration of the
# evaluator (but within the current time tick).
public
def each(&block)
# if @bud_instance.stratum_first_iter
each_from([@storage, @delta], &block)
end
# :nodoc
private
def each_from(bufs, &block)
bufs.each do |b|
b.each_value do |v|
yield v
end
end
end
# :nodoc
public
def each_from_sym(buf_syms, &block)
bufs = buf_syms.map do |s|
case s
when :storage then @storage
when :delta then @delta
when :new_delta then @new_delta
else raise BudError, "bad symbol passed into each_from_sym"
end
end
each_from(bufs, &block)
end
private
def init_storage
@storage = {}
end
private
def init_pending
@pending = {}
end
private
def init_deltas
@delta = {}
@new_delta = {}
end
public
def close
end
public
# True when +k+ is a non-nil, non-empty key currently bound in this
# collection (looked up via self[], i.e. storage or delta).
def has_key?(k)
  not (k.nil? or k.empty? or self[k].nil?)
end
# return item with that key
# ---
# assumes that key is in storage or delta, but not both
# is this enforced in do_insert?
public
def [](key)
return @storage[key].nil? ? @delta[key] : @storage[key]
end
public
def include?(tuple)
return true if key_cols.nil? or (key_cols.empty? and length > 0)
return false if tuple.nil? or tuple.empty?
key = key_cols.map{|k| tuple[schema.index(k)]}
return (tuple == self[key])
end
public
def exists?(&block)
if length == 0
return false
elsif not block_given?
return true
else
retval = ((detect{|t| yield t}).nil?) ? false : true
return retval
end
end
private
def raise_pk_error(new, old)
keycols = key_cols.map{|k| old[schema.index(k)]}
raise KeyConstraintError, "Key conflict inserting #{old.inspect} into \"#{tabname}\": existing tuple #{new.inspect}, key_cols = #{keycols.inspect}"
end
private
# Normalize a tuple-like object to this collection's arity:
# - raises BudTypeError when o is not indexable;
# - pads short tuples with nils (mutates o in place);
# - folds any extra trailing fields into one array appended as the last column.
# Returns the normalized tuple (the same object when padded, a new array
# when extras were bundled).
def prep_tuple(o)
  unless o.respond_to?(:length) and o.respond_to?(:[])
    raise BudTypeError, "non-indexable type inserted into BudCollection #{self.tabname}: #{o.inspect}"
  end
  if o.length < schema.length then
    # if this tuple has too few fields, pad with nil's
    # (old is only referenced by the commented-out debug line below)
    old = o.clone
    (o.length..schema.length-1).each{|i| o << nil}
    # puts "in #{@tabname}, converted #{old.inspect} to #{o.inspect}"
  elsif o.length > schema.length then
    # if this tuple has more fields than usual, bundle up the
    # extras into an array
    o = (0..(schema.length - 1)).map{|c| o[c]} << (schema.length..(o.length - 1)).map{|c| o[c]}
  end
  return o
end
private
def do_insert(o, store)
# return if o.respond_to?(:empty?) and o.empty?
return if o.nil? # silently ignore nils resulting from map predicates failing
o = prep_tuple(o)
keycols = @key_colnums.map{|i| o[i]}
# XXX should this be self[keycols?]
# but what about if we're not calling on store = @storage?
# probably pk should be tested by the caller of this routing
# XXX please check in some key violation tests!!
old = store[keycols]
if old.nil?
store[keycols] = tuple_accessors(o)
else
raise_pk_error(o, old) unless old == o
end
end
public
def insert(o)
# puts "insert: #{o.inspect} into #{tabname}"
do_insert(o, @storage)
end
alias << insert
private
def check_enumerable(o)
unless (o.nil? or o.class < Enumerable) and o.respond_to? 'each'
raise BudTypeError, "Attempt to merge non-enumerable type into BudCollection"
end
end
# Assign self a schema, by hook or by crook. If o is schemaless *and* empty, will
# leave @schema as is.
private
def establish_schema(o)
# use o's schema if available
deduce_schema(o) if @schema.nil?
# else use arity of first tuple of o
fit_schema(o.first.size) if @schema.nil? and not o.first.nil?
return @schema
end
# Copy over the schema from o if available
private
def deduce_schema(o)
if @schema.nil? and o.class <= Bud::BudCollection and not o.schema.nil?
# must have been initialized with defer_schema==true. take schema from rhs
init_schema(o.schema)
end
# returns old state of @schema (nil) if nothing available
return @schema
end
# manufacture schema of the form [:c0, :c1, ...] with width = arity
private
def fit_schema(arity)
# rhs is schemaless. create schema from first tuple merged
init_schema((0..arity-1).map{|indx| ("c"+indx.to_s).to_sym})
return @schema
end
# instantaneously merge items from collection into self
public
def merge(o, buf=@new_delta)
check_enumerable(o)
establish_schema(o) if @schema.nil?
delta = o.map do |i|
next if i.nil? or i == []
i = prep_tuple(i)
key_vals = @key_colnums.map{|k| i[k]}
if (old = self[key_vals])
raise_pk_error(i, old) if old != i
elsif (oldnew = self.new_delta[key_vals])
raise_pk_error(i, oldnew) if oldnew != i
else
buf[key_vals] = tuple_accessors(i)
end
end
return self
end
alias <= merge
public
def pending_merge(o)
check_enumerable(o)
deduce_schema(o)
o.each {|i| do_insert(i, @pending)}
return self
end
# merge items from collection into self at the end of this timestep
public
superator "<+" do |o|
pending_merge o
end
# Called at the end of each time step: prepare the collection for the next
# timestep.
public
def tick
@storage = @pending
@pending = {}
raise BudError, "orphaned tuples in @delta for #{@tabname}" unless @delta.empty?
raise BudError, "orphaned tuples in @new_delta for #{@tabname}" unless @new_delta.empty?
end
# move deltas to storage, and new_deltas to deltas.
public
# One round of semi-naive evaluation: promote deltas into storage and
# new_deltas into deltas for the next iteration (within the same tick).
def tick_deltas
  # assertion: intersect(@storage, @delta) == nil
  @storage.merge!(@delta)
  @delta = @new_delta
  @new_delta = {}
end
private
def method_missing(sym, *args, &block)
@storage.send sym, *args, &block
end
######## aggs
# a generalization of argmin/argmax to arbitrary exemplary aggregates.
# for each distinct value in the grouping key columns, return the item in that group
# that has the value of the exemplary aggregate "aggname"
public
# A generalization of argmin/argmax to arbitrary exemplary aggregates.
# For each distinct value of the grouping key columns, return the item(s)
# in that group that attain the value of the exemplary aggregate +aggname+.
#
# aggname    - Symbol naming an aggregate declared exemplary (e.g. :min, :max)
# gbkey_cols - grouping key columns (Symbols or [table, idx, col] refs)
# collection - the column whose aggregate selects the exemplar
#
# Raises BudError when the named aggregate is not exemplary.
def argagg(aggname, gbkey_cols, collection)
  agg = bud_instance.send(aggname, nil)[0]
  raise BudError, "#{aggname} not declared exemplary" unless agg.class <= Bud::ArgExemplary
  keynames = gbkey_cols.map do |k|
    if k.class == Symbol
      k.to_s
    else
      k[2]
    end
  end
  if collection.class == Symbol
    colnum = self.send(collection.to_s)[1]
  else
    colnum = collection[1]
  end
  tups = self.inject({}) do |memo, p|
    pkey_cols = keynames.map{|n| p.send(n.to_sym)}
    if memo[pkey_cols].nil?
      memo[pkey_cols] = {:agg => agg.send(:init, p[colnum]), :tups => [p]}
    else
      newval = agg.send(:trans, memo[pkey_cols][:agg], p[colnum])
      if memo[pkey_cols][:agg] == newval
        # keep ties only when the aggregate says they tie
        if agg.send(:tie, memo[pkey_cols][:agg], p[colnum])
          memo[pkey_cols][:tups] << p
        end
      else
        memo[pkey_cols] = {:agg => newval, :tups => [p]}
      end
    end
    memo
  end
  # Flatten the per-group winners into one list. (Previously done with an
  # unused `outs`/`ties` map executed purely for side effects.)
  finals = []
  tups.each_value {|t| finals.concat(t[:tups])}
  # merge directly into retval.storage, so that the temp tuples get picked up
  # by the lhs of the rule
  retval = BudScratch.new('argagg_temp', bud_instance, @given_schema)
  retval.merge(finals, retval.storage)
end
# for each distinct value in the grouping key columns, return the item in that group
# that has the minimum value of the attribute col
public
def argmin(gbkey_cols, col)
argagg(:min, gbkey_cols, col)
end
# for each distinct value in the grouping key columns, return the item in that group
# that has the maximum value of the attribute col
public
def argmax(gbkey_cols, col)
argagg(:max, gbkey_cols, col)
end
# form a collection containing all pairs of items in self and items in collection
public
def *(collection)
bud_instance.join([self, collection])
end
# currently support two options for column ref syntax -- :colname or table.colname
public
def group(key_cols, *aggpairs)
key_cols = [] if key_cols.nil?
keynames = key_cols.map do |k|
if k.class == Symbol
k
elsif k[2] and k[2].class == Symbol
k[2]
else
raise Bud::CompileError, "Invalid grouping key"
end
end
aggcolsdups = aggpairs.map{|ap| ap[0].class.name.split("::").last}
aggcols = []
aggcolsdups.each_with_index do |n, i|
aggcols << "#{n.downcase}_#{i}".to_sym
end
tups = self.inject({}) do |memo, p|
pkey_cols = keynames.map{|n| p.send(n)}
memo[pkey_cols] = [] if memo[pkey_cols].nil?
aggpairs.each_with_index do |ap, i|
agg = ap[0]
if ap[1].class == Symbol
colnum = ap[1].nil? ? nil : self.send(ap[1].to_s)[1]
else
colnum = ap[1].nil? ? nil : ap[1][1]
end
colval = colnum.nil? ? nil : p[colnum]
if memo[pkey_cols][i].nil?
memo[pkey_cols][i] = agg.send(:init, colval)
else
memo[pkey_cols][i] = agg.send(:trans, memo[pkey_cols][i], colval)
end
end
memo
end
result = tups.inject([]) do |memo, t|
finals = []
aggpairs.each_with_index do |ap, i|
finals << ap[0].send(:final, t[1][i])
end
memo << t[0] + finals
end
if block_given?
result.map{|r| yield r}
else
# merge directly into retval.storage, so that the temp tuples get picked up
# by the lhs of the rule
if aggcols.empty?
schema = keynames
else
schema = { keynames => aggcols }
end
retval = BudScratch.new('temp_group', bud_instance, schema)
retval.merge(result, retval.storage)
end
end
alias reduce inject
# methods that work on nested collections (resulting from joins)
# currently supports two options for equijoin predicates:
# general form: an array of arrays capturing a conjunction of equiv. classes
# [[table1.col1, table2.col2, table3.col3], [table1.col2, table2.col3]]
# common form: a hash capturing equality of a column on left with one on right.
# :col1 => :col2 (same as lefttable.col1 => righttable.col2)
public
def pairs(*preds, &blk)
unless preds.nil?
@localpreds = disambiguate_preds(preds)
canonicalize_localpreds(@rels)
end
blk.nil? ? self : map(&blk)
end
alias combos pairs
public
def matches(&blk)
preds = BudJoin::natural_preds(@bud_instance, @rels)
pairs(*preds, &blk)
end
public
def lefts(*preds)
@localpreds = disambiguate_preds(preds)
map{ |l,r| l }
end
public
def rights(*preds)
@localpreds = disambiguate_preds(preds)
map{ |l,r| r }
end
private
def disambiguate_preds(preds)
if preds.size == 1 and preds[0].class <= Hash
predarray = preds[0].map do |k,v|
if k.class != v.class
raise Bud::CompileError, "inconsistent attribute ref style #{k.inspect} => #{v.inspect}"
elsif k.class <= Array
[k,v]
elsif k.class <= Symbol
if @origrels and @origrels.length == 2
[find_attr_match(k,@origrels[0]), find_attr_match(v,@origrels[1])]
else
[find_attr_match(k), find_attr_match(v)]
end
else
raise Bud::CompileError, "invalid attribute ref in #{k.inspect} => #{v.inspect}"
end
end
return decomp_preds(*predarray)
else
return decomp_preds(*preds)
end
end
# find element in @origrels that contains this aname method
# if 2nd arg is non-nil, only check that collection.
# after found, return the result of invoking aname from chosen collection
private
def find_attr_match(aname, rel=nil)
dorels = (rel.nil? ? @origrels : [rel])
match = nil
dorels.each do |r|
match ||= r if r.respond_to?(aname)
if r.respond_to?(aname) and match != r
raise Bud::CompileError, "ambiguous attribute :#{aname} in both #{match.tabname} and #{r.tabname}"
end
end
if match.nil?
raise Bud::CompileError, "attribute :#{aname} not found in any of #{dorels.map{|t| t.tabname}.inspect}"
end
match.send(aname)
end
private
# Break each n-ary equality predicate into adjacent binary pairs,
# e.g. [a, b, c] becomes [[a, b], [b, c]].
# Returns nil for no predicates (or a lone nil).
def decomp_preds(*preds)
  return nil if preds.nil? or preds.empty? or preds == [nil]
  preds.each_with_object([]) do |p, out|
    p.each_cons(2) {|l, r| out << [l, r] unless r.nil?}
  end
end
private
def canonicalize_localpreds(rellist)
return if @localpreds.nil?
@localpreds.each do |p|
if p[1][0] == rellist[0].tabname
@localpreds.delete(p)
@localpreds << [p[1], p[0]]
end
end
end
end
class BudScratch < BudCollection
end
class BudTemp < BudCollection
end
class BudChannel < BudCollection
attr_reader :locspec_idx
def initialize(name, bud_instance, given_schema=nil)
given_schema ||= [:@address, :val]
the_schema, the_key_cols = parse_schema(given_schema)
the_val_cols = the_schema - the_key_cols
@locspec_idx = remove_at_sign!(the_key_cols)
@locspec_idx = remove_at_sign!(the_schema) if @locspec_idx.nil?
# If @locspec_idx is still nil, this is a loopback channel
# We mutate the hash key above, so we need to recreate the hash
# XXX: ugh, hacky
if given_schema.respond_to? :keys
given_schema = {the_key_cols => the_val_cols}
end
super(name, bud_instance, given_schema)
end
private
# Strip the leading '@' from the first location-specifier column in +cols+
# (mutating the array in place). Returns the column's index, or nil when no
# column starts with '@'.
def remove_at_sign!(cols)
  idx = cols.index {|c| c.to_s.start_with?('@')}
  cols[idx] = cols[idx].to_s.delete('@').to_sym unless idx.nil?
  idx
end
private
# Parse a "host:port" location specifier into [host, port_as_integer].
def split_locspec(l)
  l.split(':').tap {|parts| parts[1] = parts[1].to_i}
end
# form a copy of this collection with no items in it
private
def clone_empty
retval = super
retval.locspec_idx = @locspec_idx
retval
end
public
def tick
@storage = {}
# Note that we do not clear @pending here: if the user inserted into the
# channel manually (e.g., via <~ from inside a sync_do block), we send the
# message at the end of the current tick.
end
public
# Send all pending tuples over the wire as msgpack datagrams. For a
# loopback channel (no locspec column) messages go to this instance's own
# ip/port; otherwise the destination is parsed from each tuple's locspec
# field. Tuples with malformed locspecs are reported and skipped.
# Clears @pending when done.
def flush
  ip = @bud_instance.ip
  port = @bud_instance.port
  each_from([@pending]) do |t|
    if @locspec_idx.nil?
      the_locspec = [ip, port]
    else
      begin
        the_locspec = split_locspec(t[@locspec_idx])
        raise BudError, "bad locspec" if the_locspec[0].nil? or the_locspec[1].nil? or the_locspec[0] == '' or the_locspec[1] == ''
      rescue
        puts "bad locspec '#{t[@locspec_idx]}', channel '#{@tabname}', skipping: #{t.inspect}"
        next
      end
    end
    @bud_instance.dsock.send_datagram([@tabname, t].to_msgpack, the_locspec[0], the_locspec[1])
  end
  @pending.clear
end
public
# Project away the location-specifier column, returning only the message
# payload fields (bundled into an array when more than one remains).
def payloads
  if schema.size > 2
    # need to bundle up each tuple's non-locspec fields into an array
    retval = case @locspec_idx
             when 0 then self.pro{|t| t[1..(t.size-1)]}
             # BUG FIX: was `when (t.size - 1)`, which referenced the block
             # variable t outside any block and raised NameError whenever the
             # locspec was the last column.
             when schema.size - 1 then self.pro{|t| t[0..(t.size-2)]}
             else self.pro{|t| t[0..(@locspec_idx-1)] + t[@locspec_idx+1..(t.size-1)]}
             end
  else
    # just return each tuple's non-locspec field value
    retval = self.pro{|t| t[(@locspec_idx == 0) ? 1 : 0]}
  end
  return retval
end
superator "<~" do |o|
pending_merge o
end
superator "<+" do |o|
raise BudError, "Illegal use of <+ with channel '#{@tabname}' on left"
end
public
def <=(o)
raise BudError, "Illegal use of <= with channel '#{@tabname}' on left"
end
end
class BudTerminal < BudCollection
def initialize(name, given_schema, bud_instance, prompt=false)
super(name, bud_instance, given_schema)
@prompt = prompt
end
public
def start_stdin_reader
# XXX: Ugly hack. Rather than sending terminal data to EM via UDP,
# we should add the terminal file descriptor to the EM event loop.
@reader = Thread.new do
begin
while true
$stdout.print("#{tabname} > ") if @prompt
s = $stdin.gets
break if s.nil? # Hit EOF
s = s.chomp if s
tup = [s]
ip = @bud_instance.ip
port = @bud_instance.port
EventMachine::schedule do
socket = EventMachine::open_datagram_socket("127.0.0.1", 0)
socket.send_datagram([tabname, tup].to_msgpack, ip, port)
end
end
rescue
puts "terminal reader thread failed: #{$!}"
print $!.backtrace.join("\n")
exit
end
end
end
public
def flush
@pending.each do |p|
$stdout.puts p[0]
end
@pending = {}
end
public
def tick
@storage = {}
raise BudError unless @pending.empty?
end
public
def merge(o)
raise BudError, "no synchronous accumulation into terminal; use <~"
end
public
def <=(o)
merge(o)
end
superator "<~" do |o|
pending_merge(o)
end
end
class BudPeriodic < BudCollection
end
# Persistent table: tuples survive across ticks until explicitly deleted
# via the <- superator.
class BudTable < BudCollection
  def initialize(name, bud_instance, given_schema)
    super(name, bud_instance, given_schema)
    @to_delete = []  # deletions requested this tick, applied at the tick boundary
  end
  public
  # Tick boundary: apply queued deletions (only when the stored tuple still
  # matches the requested tuple exactly), then fold in pending insertions.
  def tick
    @to_delete.each do |tuple|
      keycols = @key_colnums.map{|k| tuple[k]}
      if @storage[keycols] == tuple
        @storage.delete keycols
      end
    end
    @storage.merge! @pending
    @to_delete = []
    @pending = {}
  end
  # Deferred delete: queue tuples for removal at the end of this timestep.
  superator "<-" do |o|
    o.each do |tuple|
      next if tuple.nil?
      tuple = prep_tuple(tuple)
      @to_delete << tuple
    end
  end
end
# Read-only collection: every form of merge into it is illegal.
class BudReadOnly < BudScratch
  superator "<+" do |o|
    raise BudError, "Illegal use of <+ with read-only collection '#{@tabname}' on left"
  end
  public
  # BUG FIX: accept the (ignored) arguments that BudCollection#merge and its
  # inherited <= alias pass, so callers get the intended BudError instead of
  # an ArgumentError from an arity mismatch.
  def merge(o = nil, buf = nil)
    raise BudError, "Illegal use of <= with read-only collection '#{@tabname}' on left"
  end
end
# Read-only collection backed by a text file; each line is exposed as a
# [lineno, text] tuple (lines numbered from 0, whitespace-stripped).
class BudFileReader < BudReadOnly
  # NOTE(review): the delimiter parameter is accepted but never used here —
  # confirm whether callers rely on it.
  def initialize(name, filename, delimiter, bud_instance)
    super(name, bud_instance, {[:lineno] => [:text]})
    @filename = filename
    @storage = {}
    # NEEDS A TRY/RESCUE BLOCK
    @fd = File.open(@filename, "r")
    @linenum = 0
  end
  # :nodoc
  public
  # Stream tuples straight off the open file descriptor.
  def each(&block)
    while (l = @fd.gets)
      t = tuple_accessors([@linenum, l.strip])
      @linenum += 1
      yield t
    end
  end
end
end
# Extend Enumerable with a helper that copies any enumerable into a fresh
# Bud scratch collection under a new name.
module Enumerable
  public
  # Returns a Bud::BudScratch named +new_tabname+ containing self's tuples
  # (merged directly into its storage). When +new_schema+ is nil and self
  # responds to :schema, self's schema is reused.
  def rename(new_tabname, new_schema=nil)
    budi = (respond_to?(:bud_instance)) ? bud_instance : nil
    if new_schema.nil? and respond_to?(:schema)
      new_schema = schema
    end
    scr = Bud::BudScratch.new(new_tabname.to_s, budi, new_schema)
    scr.merge(self, scr.storage)
    scr
  end
end
|
require 'rubygems'
require 'naether'
require "#{File.dirname(__FILE__)}/resolver/java"
module Buildr
module Resolver
class << self
def naether
Buildr::Resolver::Java.instance.naether
end
# Resolve dependencies for an array of dependencies
#
# excludes is an array of dependencies to exclude
# repos is an array of {:url => '', :username => '', :password => '' } of additional remote repos
# with authentication
def resolve( dependencies, excludes=[], repos = [] )
if Buildr.repositories.remote.size > 0
naether.clear_remote_repositories
Buildr.repositories.remote.each do |repo|
naether.add_remote_repository( repo )
end
unless repos.nil?
unless repos.is_a? Array
repos = [repos]
end
repos.each do |repo|
naether.add_remote_repository( repo[:url], repo[:username], repo[:password] )
end
end
end
naether.local_repo_path = Repositories.instance.local
naether.dependencies = dependencies
naether.resolve_dependencies( false )
dependences = naether.dependencies
unless excludes.nil?
unless excludes.is_a? Array
excludes = [excludes]
end
dependences.delete_if do |dep|
excludes.select { |exclude| dep.to_s =~ /^#{exclude}/ }.size > 0
end
end
end
def deploy_artifact( notation, file_path, url, opts = {} )
naether.deploy_artifact( notation, file_path, url, opts )
end
def write_pom( notation, file_path, dependencies, excludes=[], repos = [] )
naether.write_pom( notation, file_path )
end
end
end
end
Remove unused parameters from write_pom.
require 'rubygems'
require 'naether'
require "#{File.dirname(__FILE__)}/resolver/java"
module Buildr
module Resolver
class << self
def naether
Buildr::Resolver::Java.instance.naether
end
# Resolve dependencies for an array of dependencies
#
# excludes is an array of dependencies to exclude
# repos is an array of {:url => '', :username => '', :password => '' } of additional remote repos
# with authentication
# Resolve dependencies for an array of dependencies.
#
# excludes is an array of dependency prefixes to exclude.
# repos is an array of {:url => '', :username => '', :password => ''} of
# additional remote repos with authentication.
#
# Returns the resolved dependency notations.
def resolve( dependencies, excludes=[], repos = [] )
  if Buildr.repositories.remote.size > 0
    naether.clear_remote_repositories
    Buildr.repositories.remote.each do |repo|
      naether.add_remote_repository( repo )
    end
    unless repos.nil?
      repos = [repos] unless repos.is_a? Array
      repos.each do |repo|
        naether.add_remote_repository( repo[:url], repo[:username], repo[:password] )
      end
    end
  end
  naether.local_repo_path = Repositories.instance.local
  naether.dependencies = dependencies
  naether.resolve_dependencies( false )
  resolved = naether.dependencies
  unless excludes.nil?
    excludes = [excludes] unless excludes.is_a? Array
    resolved.delete_if do |dep|
      excludes.any? { |exclude| dep.to_s =~ /^#{exclude}/ }
    end
  end
  # BUG FIX: previously fell off the `unless`, returning nil whenever
  # excludes was nil; always return the resolved dependency list.
  resolved
end
def deploy_artifact( notation, file_path, url, opts = {} )
naether.deploy_artifact( notation, file_path, url, opts )
end
# Write a pom file for +notation+ to +file_path+ via naether.
def write_pom( notation, file_path )
  naether.write_pom( notation, file_path )
end
end
end
end |
module Caboose
  # Gem version string.
  VERSION = '0.8.60'
end
Updated version file to 0.8.61.
module Caboose
  # Gem version string.
  VERSION = '0.8.61'
end
|
module Calabash
  # Value object describing how to reach a device's Calabash test server.
  class Server
    attr_reader :endpoint, :test_server_port

    # endpoint         - URI of the endpoint to reach the test server.
    # test_server_port - Integer port bound to the test server running on
    #                    the device (same as the endpoint port on iOS).
    def initialize(endpoint, test_server_port)
      @endpoint = endpoint
      @test_server_port = test_server_port
    end
  end
end
Server: Default test_server_port to endpoint.port
module Calabash
  # Value object describing how to reach a device's Calabash test server.
  class Server
    attr_reader :endpoint, :test_server_port

    # endpoint         - URI of the endpoint to reach the test server.
    # test_server_port - Integer port bound to the test server on the
    #                    device; defaults to the endpoint's own port (the
    #                    two are identical on iOS).
    def initialize(endpoint, test_server_port = nil)
      port = test_server_port || endpoint.port
      @endpoint = endpoint
      @test_server_port = port
    end
  end
end
|
class Cask::CLI::Doctor
def self.run
default_cask_count = notfound_string
homebrew_origin = notfound_string
begin
default_cask_count = HOMEBREW_REPOSITORY.join(fq_default_tap, 'Casks').children.count(&:file?)
rescue StandardError
default_cask_count = "0 #{error_string %Q{Error reading #{fq_default_tap}}}"
end
begin
HOMEBREW_REPOSITORY.cd do
homebrew_origin = Cask::SystemCommand.run('git',
:args => %w{config --get remote.origin.url},
:stderr => :silence).strip
end
if homebrew_origin !~ %r{\S}
homebrew_origin = "#{none_string} #{error_string}"
elsif homebrew_origin !~ %r{(mxcl|Homebrew)/homebrew(\.git)?\Z}
homebrew_origin.concat " #{error_string 'warning: nonstandard origin'}"
end
rescue StandardError
homebrew_origin = error_string 'Not Found - Error running git'
end
ohai 'OS X Version:', render_with_none_as_error( MACOS_FULL_VERSION )
ohai "Hardware Architecture:", render_with_none_as_error( "#{Hardware::CPU.type}-#{Hardware::CPU.bits}" )
ohai 'Ruby Version:', render_with_none_as_error( "#{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}" )
ohai 'Ruby Path:', render_with_none_as_error( RUBY_PATH )
ohai 'Homebrew Version:', render_with_none_as_error( HOMEBREW_VERSION )
ohai 'Homebrew Executable Path:', render_with_none_as_error( HOMEBREW_BREW_FILE )
ohai 'Homebrew Cellar Path:', render_with_none_as_error( HOMEBREW_CELLAR )
ohai 'Homebrew Repository Path:', render_with_none_as_error( HOMEBREW_REPOSITORY )
ohai 'Homebrew Origin:', render_with_none_as_error( homebrew_origin )
ohai 'Homebrew-cask Version:', render_with_none_as_error( HOMEBREW_CASK_VERSION )
ohai 'Homebrew-cask Default Tap Path:', render_with_none_as_error( fq_default_tap )
ohai 'Homebrew-cask Alternate Cask Taps:', render_with_none( alt_taps )
ohai 'Homebrew-cask Default Tap Cask Count:', render_with_none_as_error( default_cask_count )
ohai 'Contents of $LOAD_PATH:', render_with_none_as_error( $LOAD_PATH )
ohai 'Contents of $RUBYLIB Environment Variable:', render_with_none( ENV['RUBYLIB'] )
ohai 'Contents of $RUBYOPT Environment Variable:', render_with_none( ENV['RUBYOPT'] )
ohai 'Contents of $RUBYPATH Environment Variable:', render_with_none( ENV['RUBYPATH'] )
ohai 'Contents of $RBENV_VERSION Environment Variable:', render_with_none( ENV['RBENV_VERSION'] )
ohai 'Contents of $GEM_HOME Environment Variable:', render_with_none( ENV['GEM_HOME'] )
ohai 'Contents of $GEM_PATH Environment Variable:', render_with_none( ENV['GEM_PATH'] )
ohai 'Contents of $BUNDLE_PATH Environment Variable:', render_with_none( ENV['BUNDLE_PATH'] )
ohai 'Contents of Locale Environment Variables:', render_with_none( locale_variables )
ohai 'Running As Privileged User:', render_with_none_as_error( privileged_uid )
end
def self.fq_default_tap
return @fq_default_tap if @fq_default_tap
@fq_default_tap = notfound_string
begin
@fq_default_tap = HOMEBREW_REPOSITORY.join 'Library', 'Taps', Cask.default_tap
rescue StandardError; end
@fq_default_tap
end
def self.alt_taps
alt_taps = notfound_string
begin
alt_taps = Pathname.glob(HOMEBREW_REPOSITORY.join 'Library', 'Taps', '*', '*', 'Casks').map(&:dirname) -
[fq_default_tap]
alt_taps = nil unless alt_taps.length > 0
rescue StandardError; end
alt_taps
end
def self.locale_variables
ENV.keys.grep(/^(?:LC_\S+|LANG|LANGUAGE)\Z/).collect_concat { |v| %Q{#{v}="#{ENV[v]}"} }.sort
end
def self.privileged_uid
privileged_uid = notfound_string
begin
privileged_uid = Process.euid == 0 ? "Yes #{error_string 'warning: not recommended'}" : 'No'
rescue StandardError; end
privileged_uid
end
def self.none_string
'<NONE>'
end
def self.notfound_string
"#{Tty.red}Not Found - Unknown Error#{Tty.reset}"
end
def self.error_string(string='Error')
"#{Tty.red}(#{string})#{Tty.reset}"
end
def self.render_with_none(string)
(string.nil? or not string.respond_to?(:to_s) or string.to_s.length == 0) ?
none_string :
string
end
def self.render_with_none_as_error(string)
(string.nil? or not string.respond_to?(:to_s) or string.to_s.length == 0) ?
"#{none_string} #{error_string}" :
string
end
def self.help
"checks for configuration issues"
end
end
Move default_cask_count into its own method in doctor.
class Cask::CLI::Doctor
def self.run
homebrew_origin = notfound_string
begin
HOMEBREW_REPOSITORY.cd do
homebrew_origin = Cask::SystemCommand.run('git',
:args => %w{config --get remote.origin.url},
:stderr => :silence).strip
end
if homebrew_origin !~ %r{\S}
homebrew_origin = "#{none_string} #{error_string}"
elsif homebrew_origin !~ %r{(mxcl|Homebrew)/homebrew(\.git)?\Z}
homebrew_origin.concat " #{error_string 'warning: nonstandard origin'}"
end
rescue StandardError
homebrew_origin = error_string 'Not Found - Error running git'
end
ohai 'OS X Version:', render_with_none_as_error( MACOS_FULL_VERSION )
ohai "Hardware Architecture:", render_with_none_as_error( "#{Hardware::CPU.type}-#{Hardware::CPU.bits}" )
ohai 'Ruby Version:', render_with_none_as_error( "#{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}" )
ohai 'Ruby Path:', render_with_none_as_error( RUBY_PATH )
ohai 'Homebrew Version:', render_with_none_as_error( HOMEBREW_VERSION )
ohai 'Homebrew Executable Path:', render_with_none_as_error( HOMEBREW_BREW_FILE )
ohai 'Homebrew Cellar Path:', render_with_none_as_error( HOMEBREW_CELLAR )
ohai 'Homebrew Repository Path:', render_with_none_as_error( HOMEBREW_REPOSITORY )
ohai 'Homebrew Origin:', render_with_none_as_error( homebrew_origin )
ohai 'Homebrew-cask Version:', render_with_none_as_error( HOMEBREW_CASK_VERSION )
ohai 'Homebrew-cask Default Tap Path:', render_with_none_as_error( fq_default_tap )
ohai 'Homebrew-cask Alternate Cask Taps:', render_with_none( alt_taps )
ohai 'Homebrew-cask Default Tap Cask Count:', render_with_none_as_error( default_cask_count )
ohai 'Contents of $LOAD_PATH:', render_with_none_as_error( $LOAD_PATH )
ohai 'Contents of $RUBYLIB Environment Variable:', render_with_none( ENV['RUBYLIB'] )
ohai 'Contents of $RUBYOPT Environment Variable:', render_with_none( ENV['RUBYOPT'] )
ohai 'Contents of $RUBYPATH Environment Variable:', render_with_none( ENV['RUBYPATH'] )
ohai 'Contents of $RBENV_VERSION Environment Variable:', render_with_none( ENV['RBENV_VERSION'] )
ohai 'Contents of $GEM_HOME Environment Variable:', render_with_none( ENV['GEM_HOME'] )
ohai 'Contents of $GEM_PATH Environment Variable:', render_with_none( ENV['GEM_PATH'] )
ohai 'Contents of $BUNDLE_PATH Environment Variable:', render_with_none( ENV['BUNDLE_PATH'] )
ohai 'Contents of Locale Environment Variables:', render_with_none( locale_variables )
ohai 'Running As Privileged User:', render_with_none_as_error( privileged_uid )
end
def self.fq_default_tap
return @fq_default_tap if @fq_default_tap
@fq_default_tap = notfound_string
begin
@fq_default_tap = HOMEBREW_REPOSITORY.join 'Library', 'Taps', Cask.default_tap
rescue StandardError; end
@fq_default_tap
end
def self.alt_taps
alt_taps = notfound_string
begin
alt_taps = Pathname.glob(HOMEBREW_REPOSITORY.join 'Library', 'Taps', '*', '*', 'Casks').map(&:dirname) -
[fq_default_tap]
alt_taps = nil unless alt_taps.length > 0
rescue StandardError; end
alt_taps
end
# Count the Cask files shipped in the default tap; on any error, report
# zero together with an explanatory error string.
def self.default_cask_count
  default_cask_count = notfound_string
  begin
    default_cask_count = HOMEBREW_REPOSITORY.join(fq_default_tap, 'Casks').children.count(&:file?)
  rescue StandardError
    default_cask_count = "0 #{error_string %Q{Error reading #{fq_default_tap}}}"
  end
  default_cask_count
end
# Gather locale-related environment variables (LC_*, LANG, LANGUAGE) as
# alphabetically sorted KEY="value" strings.
def self.locale_variables
  names = ENV.keys.grep(/^(?:LC_\S+|LANG|LANGUAGE)\Z/)
  names.map { |name| %Q{#{name}="#{ENV[name]}"} }.sort
end
def self.privileged_uid
privileged_uid = notfound_string
begin
privileged_uid = Process.euid == 0 ? "Yes #{error_string 'warning: not recommended'}" : 'No'
rescue StandardError; end
privileged_uid
end
def self.none_string
'<NONE>'
end
def self.notfound_string
"#{Tty.red}Not Found - Unknown Error#{Tty.reset}"
end
def self.error_string(string='Error')
"#{Tty.red}(#{string})#{Tty.reset}"
end
# Display helper: substitute the none placeholder when +string+ is nil,
# unprintable, or renders to an empty string; otherwise pass it through.
def self.render_with_none(string)
  blank = string.nil? || !string.respond_to?(:to_s) || string.to_s.length == 0
  blank ? none_string : string
end
def self.render_with_none_as_error(string)
(string.nil? or not string.respond_to?(:to_s) or string.to_s.length == 0) ?
"#{none_string} #{error_string}" :
string
end
def self.help
"checks for configuration issues"
end
end
|
#! /opt/sensu/embedded/bin/ruby
#
# check-ceph-usage
#
# DESCRIPTION:
# Raise alert if ceph cluster %RAW used exceed threshold
#
# OUTPUT:
# plain text, exit code 0: OK, 1:Warning, 2:Critical, Others:Unknown
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# ceph client
#
# USAGE:
# #YELLOW
#
# NOTES:
# Runs 'ceph df' command(s) to report usage of ceph cluster. May
# need read access to ceph keyring and/or root access for
# authentication.
#
# Using -z (--criticality) option to change criticality level.
# if criticality is warning, raise warning alert; or raise critical
# alert.
#
# Using -t (--threshold) option to determine alert level.
# >= threshold, raise alert
#
# LICENSE:
# Copyright 2013 Brian Clark <brian.clark@cloudapt.com>
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'sensu-plugin/check/cli'
require 'timeout'
require 'English'
require 'json'
# Sensu check: alerts when the cluster-wide %RAW used reported by
# `ceph df` meets or exceeds the configured threshold.
class CheckCephClusterUsage < Sensu::Plugin::Check::CLI
  option :keyring,
         description: 'Path to cephx authentication keyring file',
         short: '-k KEY',
         long: '--keyring',
         proc: proc { |k| " -k #{k}" }

  option :monitor,
         description: 'Optional monitor IP',
         short: '-m MON',
         long: '--monitor',
         proc: proc { |m| " -m #{m}" }

  option :cluster,
         description: 'Optional cluster name',
         short: '-c NAME',
         long: '--cluster',
         proc: proc { |c| " --cluster=#{c}" }

  option :timeout,
         description: 'Timeout (default 10)',
         short: '-t SEC',
         long: '--timeout',
         proc: proc(&:to_i),
         default: 10

  # %RAW used at or above this percentage raises an alert.
  option :threshold,
         description: 'Alert threshold for %RAW used (default 75)',
         short: '-w PERCENT',
         long: '--threshold',
         proc: proc(&:to_i),
         default: 75

  option :criticality,
         description: 'Set criticality level, critical is default',
         short: '-z criticality',
         long: '--criticality criticality',
         default: 'critical'

  option :verbose,
         description: 'Show cluster usage (verbose!)',
         short: '-v',
         long: '--verbose',
         boolean: true,
         default: false

  # Runs +cmd+ (with cluster/keyring/monitor flags appended) under the
  # configured timeout and returns its combined stdout/stderr.
  #
  # Alerts (and therefore exits, per the Sensu CLI helpers) on timeout,
  # empty output, or a non-zero exit status; the alert severity follows
  # the configured criticality.
  def run_cmd(cmd)
    pipe, status = nil
    begin
      cmd += config[:cluster] if config[:cluster]
      cmd += config[:keyring] if config[:keyring]
      cmd += config[:monitor] if config[:monitor]
      cmd += ' 2>&1' # capture stderr alongside stdout
      Timeout.timeout(config[:timeout]) do
        pipe = IO.popen(cmd)
        Process.wait(pipe.pid)
        status = $CHILD_STATUS.exitstatus
      end
    rescue Timeout::Error
      begin
        Process.kill(9, pipe.pid)
        Process.wait(pipe.pid)
      rescue Errno::ESRCH, Errno::EPERM
        # The timed-out process is already gone (or not ours to kill);
        # nothing further to clean up.  The ensure block below raises the
        # alert, so no duplicate warning/critical call is needed here.
      ensure
        # Always report the timeout; warning/critical exit the check.
        if config[:criticality] == 'warning'
          warning 'Execution timed out'
        else
          critical 'Execution timed out'
        end
      end
    end
    output = pipe.read
    if config[:criticality] == 'warning'
      warning "Command '#{cmd}' returned no output" if output.to_s == ''
      warning output unless status == 0
    else
      critical "Command '#{cmd}' returned no output" if output.to_s == ''
      critical output unless status == 0
    end
    output
  end

  # Parses `ceph df --format=json` and alerts when total_used_bytes /
  # total_bytes (as a percentage) reaches the threshold.
  def run
    result = run_cmd('ceph df --format=json')
    data = JSON.parse(result)
    used_percentage = data['stats']['total_used_bytes'] * 100.0 / data['stats']['total_bytes']
    output = '%RAW Used: ' + used_percentage.to_s
    output = output + ' ' + result if config[:verbose]
    if used_percentage >= config[:threshold]
      # warning exits when criticality is 'warning'; otherwise fall
      # through to critical.
      warning output if config[:criticality] == 'warning'
      critical output
    end
    ok output
  end
end
Check ceph usage (#104)
* check ceph usage by pool
to support multi-end ceph
* add specific pool option
#! /opt/sensu/embedded/bin/ruby
#
# check-ceph-usage
#
# DESCRIPTION:
# Raise alert if ceph pool %used exceed threshold
#
# OUTPUT:
# plain text, exit code 0: OK, 1:Warning, 2:Critical, Others:Unknown
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# ceph client
#
# USAGE:
# #YELLOW
#
# NOTES:
# Runs 'ceph df' command(s) to report usage of ceph cluster. May
# need read access to ceph keyring and/or root access for
# authentication.
#
# Using -z (--criticality) option to change criticality level.
# if criticality is warning, raise a warning alert; otherwise raise a
# critical alert.
#
# Using -w (--threshold) option to determine alert level.
# >= threshold, raise alert
#
# Using -p (--pool) option to determine specific pools to check.
# if not be provided, then all pools will be checked.
#
# Code adapted from Brian Clark's script in the Sensu Plugins community:
# https://github.com/sensu-plugins/sensu-plugins-ceph/blob/master/bin/check-ceph.rb
# with modification to support pool usage check.
#
# Released under the same terms as Sensu (the MIT license); see MITLICENSE
# for details.
#
# Xiao Hua, Shen <shenxh@cn.ibm.com>
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'sensu-plugin/check/cli'
require 'timeout'
require 'English'
require 'json'
# Sensu check: alerts when the %used of any (or selected) ceph pool
# reported by `ceph df` meets or exceeds the configured threshold.
class CheckCephPoolUsage < Sensu::Plugin::Check::CLI
  option :keyring,
         description: 'Path to cephx authentication keyring file',
         short: '-k KEY',
         long: '--keyring',
         proc: proc { |k| " -k #{k}" }

  option :monitor,
         description: 'Optional monitor IP',
         short: '-m MON',
         long: '--monitor',
         proc: proc { |m| " -m #{m}" }

  option :cluster,
         description: 'Optional cluster name',
         short: '-c NAME',
         long: '--cluster',
         proc: proc { |c| " --cluster=#{c}" }

  option :timeout,
         description: 'Timeout (default 10)',
         short: '-t SEC',
         long: '--timeout',
         proc: proc(&:to_i),
         default: 10

  # Pool %used at or above this percentage raises an alert.
  option :threshold,
         description: 'Alert threshold for pool %used (default 75)',
         short: '-w PERCENT',
         long: '--threshold',
         proc: proc(&:to_i),
         default: 75

  option :criticality,
         description: 'Set criticality level, critical is default',
         short: '-z criticality',
         long: '--criticality criticality',
         default: 'critical'

  option :verbose,
         description: 'Show cluster usage (verbose!)',
         short: '-v',
         long: '--verbose',
         boolean: true,
         default: false

  # Comma-separated pool names; when omitted, every pool is checked.
  option :pool,
         description: 'only check specific pools',
         short: '-p POOL',
         long: '--pools POOL',
         proc: proc { |a| a.split(',') }

  # Runs +cmd+ (with cluster/keyring/monitor flags appended) under the
  # configured timeout and returns its combined stdout/stderr.
  #
  # Alerts (and therefore exits, per the Sensu CLI helpers) on timeout,
  # empty output, or a non-zero exit status; the alert severity follows
  # the configured criticality.
  def run_cmd(cmd)
    pipe, status = nil
    begin
      cmd += config[:cluster] if config[:cluster]
      cmd += config[:keyring] if config[:keyring]
      cmd += config[:monitor] if config[:monitor]
      cmd += ' 2>&1' # capture stderr alongside stdout
      Timeout.timeout(config[:timeout]) do
        pipe = IO.popen(cmd)
        Process.wait(pipe.pid)
        status = $CHILD_STATUS.exitstatus
      end
    rescue Timeout::Error
      begin
        Process.kill(9, pipe.pid)
        Process.wait(pipe.pid)
      rescue Errno::ESRCH, Errno::EPERM
        # The timed-out process is already gone (or not ours to kill);
        # nothing further to clean up.  The ensure block below raises the
        # alert, so no duplicate warning/critical call is needed here.
      ensure
        # Always report the timeout; warning/critical exit the check.
        if config[:criticality] == 'warning'
          warning 'Execution timed out'
        else
          critical 'Execution timed out'
        end
      end
    end
    output = pipe.read
    if config[:criticality] == 'warning'
      warning "Command '#{cmd}' returned no output" if output.to_s == ''
      warning output unless status == 0
    else
      critical "Command '#{cmd}' returned no output" if output.to_s == ''
      critical output unless status == 0
    end
    output
  end

  # Parses `ceph df --format=json` and collects every selected pool whose
  # usage (bytes_used / (bytes_used + max_avail)) reaches the threshold.
  # run_cmd guarantees a non-empty result, so no empty-output guard is
  # needed here.
  def run
    result = run_cmd('ceph df --format=json')
    data = JSON.parse(result)
    over_pools = []
    data['pools'].each do |pool|
      next if config[:pool] && !config[:pool].include?(pool['name'])
      total = pool['stats']['bytes_used'] + pool['stats']['max_avail']
      # A zero-capacity pool would produce NaN (0 * 100.0 / 0); skip it,
      # matching the previous behavior where NaN never tripped the threshold.
      next if total.zero?
      usage = pool['stats']['bytes_used'] * 100.0 / total
      over_pools << "#{pool['name']} #{usage}%" if usage >= config[:threshold]
    end
    ok "All Pools usage under #{config[:threshold]}%" if over_pools.empty?
    output = over_pools.join(', ')
    output = output + "\n" + result if config[:verbose]
    # warning exits when criticality is 'warning'; otherwise fall through.
    warning output if config[:criticality] == 'warning'
    critical output
  end
end
|
# `brew cask doctor`: prints an environment / configuration report to help
# diagnose homebrew-cask problems.  Values that cannot be determined are
# rendered inline as red error strings rather than raised, so the report
# still completes on a broken install.
class Cask::CLI::Doctor < Cask::CLI::Base
  # Entry point: emits one `ohai` heading + rendered value per diagnostic.
  def self.run
    ohai 'OS X Version:', render_with_none_as_error( MACOS_FULL_VERSION )
    ohai "Hardware Architecture:", render_with_none_as_error( "#{Hardware::CPU.type}-#{Hardware::CPU.bits}" )
    ohai 'Ruby Version:', render_with_none_as_error( "#{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}" )
    ohai 'Ruby Path:', render_with_none_as_error( RUBY_PATH )
    ohai 'Homebrew Version:', render_with_none_as_error( HOMEBREW_VERSION )
    ohai 'Homebrew Executable Path:', render_with_none_as_error( HOMEBREW_BREW_FILE )
    ohai 'Homebrew Cellar Path:', render_with_none_as_error( HOMEBREW_CELLAR )
    ohai 'Homebrew Repository Path:', render_with_none_as_error( HOMEBREW_REPOSITORY )
    ohai 'Homebrew Origin:', render_with_none_as_error( homebrew_origin )
    ohai 'Homebrew-cask Version:', render_with_none_as_error( HOMEBREW_CASK_VERSION )
    ohai 'Homebrew-cask Install Location:', render_install_location( HOMEBREW_CASK_VERSION )
    ohai 'Homebrew-cask Cached Downloads:', render_cached_downloads
    ohai 'Homebrew-cask Default Tap Path:', render_tap_paths( fq_default_tap )
    ohai 'Homebrew-cask Alternate Cask Taps:', render_tap_paths( alt_taps )
    ohai 'Homebrew-cask Default Tap Cask Count:', render_with_none_as_error( default_cask_count )
    ohai 'Contents of $LOAD_PATH:', render_load_path( $LOAD_PATH )
    ohai 'Contents of $RUBYLIB Environment Variable:', render_env_var( 'RUBYLIB' )
    ohai 'Contents of $RUBYOPT Environment Variable:', render_env_var( 'RUBYOPT' )
    ohai 'Contents of $RUBYPATH Environment Variable:', render_env_var( 'RUBYPATH' )
    ohai 'Contents of $RBENV_VERSION Environment Variable:', render_env_var( 'RBENV_VERSION' )
    ohai 'Contents of $CHRUBY_VERSION Environment Variable:', render_env_var( 'CHRUBY_VERSION' )
    ohai 'Contents of $GEM_HOME Environment Variable:', render_env_var( 'GEM_HOME' )
    ohai 'Contents of $GEM_PATH Environment Variable:', render_env_var( 'GEM_PATH' )
    ohai 'Contents of $BUNDLE_PATH Environment Variable:', render_env_var( 'BUNDLE_PATH' )
    ohai 'Contents of $PATH Environment Variable:', render_env_var( 'PATH' )
    ohai 'Contents of $SHELL Environment Variable:', render_env_var( 'SHELL' )
    ohai 'Contents of Locale Environment Variables:', render_with_none( locale_variables )
    ohai 'Running As Privileged User:', render_with_none_as_error( privileged_uid )
  end

  # Path of the default Cask tap under the Homebrew repository.
  # Memoized; left as notfound_string if Cask.default_tap raises.
  def self.fq_default_tap
    return @fq_default_tap if @fq_default_tap
    @fq_default_tap = notfound_string
    begin
      @fq_default_tap = HOMEBREW_REPOSITORY.join 'Library', 'Taps', Cask.default_tap
    rescue StandardError; end
    @fq_default_tap
  end

  # Directories of non-default taps that contain a Casks folder,
  # or nil when there are none (render_tap_paths shows <NONE> for nil).
  def self.alt_taps
    alt_taps = notfound_string
    begin
      alt_taps = Pathname.glob(HOMEBREW_REPOSITORY.join 'Library', 'Taps', '*', '*', 'Casks').map(&:dirname) -
                 [fq_default_tap]
      alt_taps = nil unless alt_taps.length > 0
    rescue StandardError; end
    alt_taps
  end

  # Number of Cask files in the default tap; renders an inline error
  # string (count "0") if the directory cannot be read.
  def self.default_cask_count
    default_cask_count = notfound_string
    begin
      default_cask_count = HOMEBREW_REPOSITORY.join(fq_default_tap, 'Casks').children.count(&:file?)
    rescue StandardError
      default_cask_count = "0 #{error_string %Q{Error reading #{fq_default_tap}}}"
    end
    default_cask_count
  end

  # `git remote.origin.url` of the Homebrew repository, annotated with a
  # warning when blank or pointing at a nonstandard origin.
  def self.homebrew_origin
    homebrew_origin = notfound_string
    begin
      HOMEBREW_REPOSITORY.cd do
        homebrew_origin = Cask::SystemCommand.run('git',
                                                  :args => %w{config --get remote.origin.url},
                                                  :print_stderr => false).stdout.strip
      end
      if homebrew_origin !~ %r{\S}
        homebrew_origin = "#{none_string} #{error_string}"
      elsif homebrew_origin !~ %r{(mxcl|Homebrew)/homebrew(\.git)?\Z}
        homebrew_origin.concat " #{error_string 'warning: nonstandard origin'}"
      end
    rescue StandardError
      homebrew_origin = error_string 'Not Found - Error running git'
    end
    homebrew_origin
  end

  # All LC_*/LANG/LANGUAGE environment variables, one VAR="value" per line.
  def self.locale_variables
    ENV.keys.grep(/^(?:LC_\S+|LANG|LANGUAGE)\Z/).collect { |v| %Q{#{v}="#{ENV[v]}"} }.sort.join("\n")
  end

  # 'Yes' (with a warning) when running as root, 'No' otherwise.
  def self.privileged_uid
    privileged_uid = notfound_string
    begin
      privileged_uid = Process.euid == 0 ? "Yes #{error_string 'warning: not recommended'}" : 'No'
    rescue StandardError; end
    privileged_uid
  end

  # Placeholder shown for empty/missing values.
  def self.none_string
    '<NONE>'
  end

  # Matches tap paths from the pre-rename "phinze" era.
  def self.legacy_tap_pattern
    %r{phinze}
  end

  # Red fallback text used before a lookup is attempted.
  def self.notfound_string
    "#{Tty.red}Not Found - Unknown Error#{Tty.reset}"
  end

  # Wraps +string+ in red parentheses for inline error annotations.
  def self.error_string(string='Error')
    "#{Tty.red}(#{string})#{Tty.reset}"
  end

  # Returns +string+, or <NONE> when it is nil/empty/unprintable.
  def self.render_with_none(string)
    (string.nil? or not string.respond_to?(:to_s) or string.to_s.length == 0) ?
      none_string :
      string
  end

  # Like render_with_none, but an empty value is also flagged as an error.
  def self.render_with_none_as_error(string)
    (string.nil? or not string.respond_to?(:to_s) or string.to_s.length == 0) ?
      "#{none_string} #{error_string}" :
      string
  end

  # Renders one or more tap paths, flagging legacy-named paths.
  def self.render_tap_paths(paths)
    paths = [ paths ] unless paths.respond_to?(:each)
    paths.collect do |dir|
      if (dir.nil? or dir.to_s.length == 0) then
        none_string
      elsif dir.to_s.match(legacy_tap_pattern)
        dir.to_s.concat(" #{error_string 'Warning: legacy tap path'}")
      else
        dir.to_s
      end
    end
  end

  # Renders VAR="value" for a set environment variable, else <NONE>.
  def self.render_env_var(var)
    if ENV.key?(var)
      %Q{#{var}="#{ENV[var]}"}
    else
      none_string
    end
  end

  # This could be done by calling into Homebrew, but the situation
  # where "doctor" is needed is precisely the situation where such
  # things are less dependable.
  #
  # Lists every brew-cask directory in the Cellar, appending an error
  # note to versions other than +current_version+.  Returns the full
  # list (the strings are mutated in place via concat).
  def self.render_install_location(current_version)
    locations = Dir.glob(HOMEBREW_CELLAR.join('brew-cask', '*')).reverse
    locations.each do |l|
      basename = File.basename l
      l.concat %Q{ #{error_string %Q{error: old version. Run "brew cleanup".}}} unless basename == current_version
    end
  end

  # Renders $LOAD_PATH, flagging a first entry that is not inside the
  # Homebrew Cellar.  Works on a copy so the real $LOAD_PATH is untouched.
  def self.render_load_path(paths)
    if paths.nil? or paths.size == 0
      return "#{none_string} #{error_string}"
    end
    copy = Array.new(paths)
    unless Cask::Utils.file_is_descendant(copy[0], HOMEBREW_CELLAR)
      copy[0] = "#{copy[0]} #{error_string %Q{error: should be descendant of HOMEBREW_CELLAR}}"
    end
    copy
  end

  # Summarizes the download caches: paths plus file count and size,
  # each annotated with a cleanup hint when the cache is non-empty.
  def self.render_cached_downloads
    files = Cask::CLI::Cleanup.all_cache_files
    count = files.count
    space = Cask::CLI::Cleanup.space_in_megs files
    [
      HOMEBREW_CACHE,
      HOMEBREW_CACHE_CASKS,
      count.to_s.concat(" files").concat(count == 0 ? '' : %Q{ #{error_string %Q{warning: run "brew cask cleanup"}}}),
      space.to_s.concat(" megs").concat(count == 0 ? '' : %Q{ #{error_string %Q{warning: run "brew cask cleanup"}}}),
    ]
  end

  # One-line summary shown in the `brew cask` command help listing.
  def self.help
    "checks for configuration issues"
  end
end
show staging directory (caskroom) in doctor
refs #6471
# `brew cask doctor`: prints an environment / configuration report to help
# diagnose homebrew-cask problems.  This revision also reports the
# staging (Caskroom) location.  Values that cannot be determined are
# rendered inline as red error strings rather than raised, so the report
# still completes on a broken install.
class Cask::CLI::Doctor < Cask::CLI::Base
  # Entry point: emits one `ohai` heading + rendered value per diagnostic.
  def self.run
    ohai 'OS X Version:', render_with_none_as_error( MACOS_FULL_VERSION )
    ohai "Hardware Architecture:", render_with_none_as_error( "#{Hardware::CPU.type}-#{Hardware::CPU.bits}" )
    ohai 'Ruby Version:', render_with_none_as_error( "#{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}" )
    ohai 'Ruby Path:', render_with_none_as_error( RUBY_PATH )
    ohai 'Homebrew Version:', render_with_none_as_error( HOMEBREW_VERSION )
    ohai 'Homebrew Executable Path:', render_with_none_as_error( HOMEBREW_BREW_FILE )
    ohai 'Homebrew Cellar Path:', render_with_none_as_error( HOMEBREW_CELLAR )
    ohai 'Homebrew Repository Path:', render_with_none_as_error( HOMEBREW_REPOSITORY )
    ohai 'Homebrew Origin:', render_with_none_as_error( homebrew_origin )
    ohai 'Homebrew-cask Version:', render_with_none_as_error( HOMEBREW_CASK_VERSION )
    ohai 'Homebrew-cask Install Location:', render_install_location( HOMEBREW_CASK_VERSION )
    ohai 'Homebrew-cask Staging Location:', render_staging_location( Cask.caskroom )
    ohai 'Homebrew-cask Cached Downloads:', render_cached_downloads
    ohai 'Homebrew-cask Default Tap Path:', render_tap_paths( fq_default_tap )
    ohai 'Homebrew-cask Alternate Cask Taps:', render_tap_paths( alt_taps )
    ohai 'Homebrew-cask Default Tap Cask Count:', render_with_none_as_error( default_cask_count )
    ohai 'Contents of $LOAD_PATH:', render_load_path( $LOAD_PATH )
    ohai 'Contents of $RUBYLIB Environment Variable:', render_env_var( 'RUBYLIB' )
    ohai 'Contents of $RUBYOPT Environment Variable:', render_env_var( 'RUBYOPT' )
    ohai 'Contents of $RUBYPATH Environment Variable:', render_env_var( 'RUBYPATH' )
    ohai 'Contents of $RBENV_VERSION Environment Variable:', render_env_var( 'RBENV_VERSION' )
    ohai 'Contents of $CHRUBY_VERSION Environment Variable:', render_env_var( 'CHRUBY_VERSION' )
    ohai 'Contents of $GEM_HOME Environment Variable:', render_env_var( 'GEM_HOME' )
    ohai 'Contents of $GEM_PATH Environment Variable:', render_env_var( 'GEM_PATH' )
    ohai 'Contents of $BUNDLE_PATH Environment Variable:', render_env_var( 'BUNDLE_PATH' )
    ohai 'Contents of $PATH Environment Variable:', render_env_var( 'PATH' )
    ohai 'Contents of $SHELL Environment Variable:', render_env_var( 'SHELL' )
    ohai 'Contents of Locale Environment Variables:', render_with_none( locale_variables )
    ohai 'Running As Privileged User:', render_with_none_as_error( privileged_uid )
  end

  # Path of the default Cask tap under the Homebrew repository.
  # Memoized; left as notfound_string if Cask.default_tap raises.
  def self.fq_default_tap
    return @fq_default_tap if @fq_default_tap
    @fq_default_tap = notfound_string
    begin
      @fq_default_tap = HOMEBREW_REPOSITORY.join 'Library', 'Taps', Cask.default_tap
    rescue StandardError; end
    @fq_default_tap
  end

  # Directories of non-default taps that contain a Casks folder,
  # or nil when there are none (render_tap_paths shows <NONE> for nil).
  def self.alt_taps
    alt_taps = notfound_string
    begin
      alt_taps = Pathname.glob(HOMEBREW_REPOSITORY.join 'Library', 'Taps', '*', '*', 'Casks').map(&:dirname) -
                 [fq_default_tap]
      alt_taps = nil unless alt_taps.length > 0
    rescue StandardError; end
    alt_taps
  end

  # Number of Cask files in the default tap; renders an inline error
  # string (count "0") if the directory cannot be read.
  def self.default_cask_count
    default_cask_count = notfound_string
    begin
      default_cask_count = HOMEBREW_REPOSITORY.join(fq_default_tap, 'Casks').children.count(&:file?)
    rescue StandardError
      default_cask_count = "0 #{error_string %Q{Error reading #{fq_default_tap}}}"
    end
    default_cask_count
  end

  # `git remote.origin.url` of the Homebrew repository, annotated with a
  # warning when blank or pointing at a nonstandard origin.
  def self.homebrew_origin
    homebrew_origin = notfound_string
    begin
      HOMEBREW_REPOSITORY.cd do
        homebrew_origin = Cask::SystemCommand.run('git',
                                                  :args => %w{config --get remote.origin.url},
                                                  :print_stderr => false).stdout.strip
      end
      if homebrew_origin !~ %r{\S}
        homebrew_origin = "#{none_string} #{error_string}"
      elsif homebrew_origin !~ %r{(mxcl|Homebrew)/homebrew(\.git)?\Z}
        homebrew_origin.concat " #{error_string 'warning: nonstandard origin'}"
      end
    rescue StandardError
      homebrew_origin = error_string 'Not Found - Error running git'
    end
    homebrew_origin
  end

  # All LC_*/LANG/LANGUAGE environment variables, one VAR="value" per line.
  def self.locale_variables
    ENV.keys.grep(/^(?:LC_\S+|LANG|LANGUAGE)\Z/).collect { |v| %Q{#{v}="#{ENV[v]}"} }.sort.join("\n")
  end

  # 'Yes' (with a warning) when running as root, 'No' otherwise.
  def self.privileged_uid
    privileged_uid = notfound_string
    begin
      privileged_uid = Process.euid == 0 ? "Yes #{error_string 'warning: not recommended'}" : 'No'
    rescue StandardError; end
    privileged_uid
  end

  # Placeholder shown for empty/missing values.
  def self.none_string
    '<NONE>'
  end

  # Matches tap paths from the pre-rename "phinze" era.
  def self.legacy_tap_pattern
    %r{phinze}
  end

  # Red fallback text used before a lookup is attempted.
  def self.notfound_string
    "#{Tty.red}Not Found - Unknown Error#{Tty.reset}"
  end

  # Wraps +string+ in red parentheses for inline error annotations.
  def self.error_string(string='Error')
    "#{Tty.red}(#{string})#{Tty.reset}"
  end

  # Returns +string+, or <NONE> when it is nil/empty/unprintable.
  def self.render_with_none(string)
    (string.nil? or not string.respond_to?(:to_s) or string.to_s.length == 0) ?
      none_string :
      string
  end

  # Like render_with_none, but an empty value is also flagged as an error.
  def self.render_with_none_as_error(string)
    (string.nil? or not string.respond_to?(:to_s) or string.to_s.length == 0) ?
      "#{none_string} #{error_string}" :
      string
  end

  # Renders one or more tap paths, flagging legacy-named paths.
  def self.render_tap_paths(paths)
    paths = [ paths ] unless paths.respond_to?(:each)
    paths.collect do |dir|
      if (dir.nil? or dir.to_s.length == 0) then
        none_string
      elsif dir.to_s.match(legacy_tap_pattern)
        dir.to_s.concat(" #{error_string 'Warning: legacy tap path'}")
      else
        dir.to_s
      end
    end
  end

  # Renders VAR="value" for a set environment variable, else <NONE>.
  def self.render_env_var(var)
    if ENV.key?(var)
      %Q{#{var}="#{ENV[var]}"}
    else
      none_string
    end
  end

  # This could be done by calling into Homebrew, but the situation
  # where "doctor" is needed is precisely the situation where such
  # things are less dependable.
  #
  # Lists every brew-cask directory in the Cellar, appending an error
  # note to versions other than +current_version+.  Returns the full
  # list (the strings are mutated in place via concat).
  def self.render_install_location(current_version)
    locations = Dir.glob(HOMEBREW_CELLAR.join('brew-cask', '*')).reverse
    locations.each do |l|
      basename = File.basename l
      l.concat %Q{ #{error_string %Q{error: old version. Run "brew cleanup".}}} unless basename == current_version
    end
  end

  # Renders the staging (Caskroom) path, flagging it when it does not
  # exist or is not writable by the current user.
  def self.render_staging_location(path)
    path = Pathname.new(path)
    if !path.exist?
      %Q{#{path} #{error_string %Q{error: path does not exist}}}
    elsif !path.writable?
      %Q{#{path} #{error_string %Q{error: not writable by current user}}}
    else
      path
    end
  end

  # Renders $LOAD_PATH, flagging a first entry that is not inside the
  # Homebrew Cellar.  Works on a copy so the real $LOAD_PATH is untouched.
  def self.render_load_path(paths)
    if paths.nil? or paths.size == 0
      return "#{none_string} #{error_string}"
    end
    copy = Array.new(paths)
    unless Cask::Utils.file_is_descendant(copy[0], HOMEBREW_CELLAR)
      copy[0] = "#{copy[0]} #{error_string %Q{error: should be descendant of HOMEBREW_CELLAR}}"
    end
    copy
  end

  # Summarizes the download caches: paths plus file count and size,
  # each annotated with a cleanup hint when the cache is non-empty.
  def self.render_cached_downloads
    files = Cask::CLI::Cleanup.all_cache_files
    count = files.count
    space = Cask::CLI::Cleanup.space_in_megs files
    [
      HOMEBREW_CACHE,
      HOMEBREW_CACHE_CASKS,
      count.to_s.concat(" files").concat(count == 0 ? '' : %Q{ #{error_string %Q{warning: run "brew cask cleanup"}}}),
      space.to_s.concat(" megs").concat(count == 0 ? '' : %Q{ #{error_string %Q{warning: run "brew cask cleanup"}}}),
    ]
  end

  # One-line summary shown in the `brew cask` command help listing.
  def self.help
    "checks for configuration issues"
  end
end
|
module Cfer::Core
  # Defines the structure of a CloudFormation stack
  class Stack < Cfer::Block
    include Cfer::Core::Functions
    include Cfer::Core::Hooks

    # The parameters strictly as passed via command line
    attr_reader :input_parameters

    # The fully resolved parameters, including defaults and parameters fetched from an existing stack during an update
    attr_reader :parameters

    attr_reader :options

    # Git state captured during initialize (nil when no client/git available)
    attr_reader :git_state

    def converge!(options = {})
      client.converge self, options
    end

    def tail!(options = {}, &block)
      client.tail self, options, &block
    end

    def initialize(options = {})
      self[:AWSTemplateFormatVersion] = '2010-09-09'
      self[:Description] = ''
      @options = options

      self[:Metadata] = {
        :Cfer => {
          :Version => Cfer::SEMANTIC_VERSION.to_h.delete_if { |_k, v| v === nil }
        }
      }

      self[:Parameters] = {}
      self[:Mappings] = {}
      self[:Conditions] = {}
      self[:Resources] = {}
      self[:Outputs] = {}

      if options[:client] && (git = options[:client].git)
        begin
          # NOTE(review): 'HEAD^' resolves the *parent* of HEAD -- confirm intended.
          @git_state = git.object('HEAD^')
          self[:Metadata][:Cfer][:Git] = {
            Rev: git_state.sha,
            Clean: git.status.changed.empty?
          }
        rescue StandardError => e
          # BUG FIX: was `rescue e`, which evaluates the undefined local `e`
          # as an exception class (NameError at rescue time), and passed `e`
          # as a second argument that Logger#warn does not accept.
          Cfer::LOGGER.warn("Unable to add Git information to CloudFormation Metadata. #{e}")
        end
      end

      @parameters = HashWithIndifferentAccess.new
      @input_parameters = HashWithIndifferentAccess.new

      if options[:client]
        begin
          # Carry forward the parameters of an existing stack during an update.
          @parameters.merge! options[:client].fetch_parameters
        rescue Cfer::Util::StackDoesNotExistError
          Cfer::LOGGER.debug "Can't include current stack parameters because the stack doesn't exist yet."
        end
      end

      if options[:parameters]
        options[:parameters].each do |key, val|
          @input_parameters[key] = @parameters[key] = val
        end
      end
    end

    # Sets the description for this CloudFormation stack
    def description(desc)
      self[:Description] = desc
    end

    # Declares a CloudFormation parameter
    #
    # @param name [String] The parameter name
    # @param options [Hash]
    # @option options [String] :type The type for the CloudFormation parameter
    # @option options [String] :default A value of the appropriate type for the template to use if no value is specified when a stack is created. If you define constraints for the parameter, you must specify a value that adheres to those constraints.
    # @option options [String] :no_echo Whether to mask the parameter value whenever anyone makes a call that describes the stack. If you set the value to `true`, the parameter value is masked with asterisks (*****).
    # @option options [String] :allowed_values An array containing the list of values allowed for the parameter.
    # @option options [String] :allowed_pattern A regular expression that represents the patterns you want to allow for String types.
    # @option options [Number] :max_length An integer value that determines the largest number of characters you want to allow for String types.
    # @option options [Number] :min_length An integer value that determines the smallest number of characters you want to allow for String types.
    # @option options [Number] :max_value A numeric value that determines the largest numeric value you want to allow for Number types.
    # @option options [Number] :min_value A numeric value that determines the smallest numeric value you want to allow for Number types.
    # @option options [String] :description A string of up to 4000 characters that describes the parameter.
    # @option options [String] :constraint_description A string that explains the constraint when the constraint is violated. For example, without a constraint description, a parameter that has an allowed pattern of `[A-Za-z0-9]+` displays the following error message when the user specifies an invalid value:
    #
    # ```Malformed input-Parameter MyParameter must match pattern [A-Za-z0-9]+```
    #
    # By adding a constraint description, such as must only contain upper- and lowercase letters, and numbers, you can display a customized error message:
    #
    # ```Malformed input-Parameter MyParameter must only contain upper and lower case letters and numbers```
    def parameter(name, options = {})
      param = {}
      options.each do |key, v|
        next if v === nil
        k = key.to_s.camelize.to_sym
        param[k] =
          case k
          when :AllowedPattern
            if v.class == Regexp
              v.source
            end
          when :Default
            # Record the declared default as the resolved parameter value
            # unless one was already supplied.
            @parameters[name] ||= v
          end
        param[k] ||= v
      end
      param[:Type] ||= 'String'
      self[:Parameters][name] = param
    end

    # Sets the mappings block for this stack. See [The CloudFormation Documentation](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/mappings-section-structure.html) for more details
    def mappings(mappings)
      self[:Mappings] = mappings
    end

    # Adds a condition to the template.
    # @param name [String] The name of the condition.
    # @param expr [Hash] The CloudFormation condition to add. See [The Cloudformation Documentation](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/conditions-section-structure.html) for more details
    def condition(name, expr)
      self[:Conditions][name] = expr
    end

    # Creates a CloudFormation resource
    # @param name [String] The name of the resource (must be alphanumeric)
    # @param type [String] The type of CloudFormation resource to create.
    # @param options [Hash] Additional attributes to add to the resource block (such as the `UpdatePolicy` for an `AWS::AutoScaling::AutoScalingGroup`)
    def resource(name, type, options = {}, &block)
      Preconditions.check_argument(/[[:alnum:]]+/ =~ name, "Resource name must be alphanumeric")
      clazz = Cfer::Core::Resource.resource_class(type)
      rc = clazz.new(name, type, self, options, &block)
      self[:Resources][name] = rc
      rc.handle
    end

    # Adds an output to the CloudFormation stack.
    # @param name [String] The Logical ID of the output parameter
    # @param value [String] Value to return
    # @param options [Hash] Extra options for this output parameter
    # @option options [String] :Description Information about the value
    def output(name, value, options = {})
      self[:Outputs][name] = options.merge('Value' => value)
    end

    # Renders the stack into a CloudFormation template.
    # @return [String] The final template
    def to_cfn
      if @options[:pretty_print]
        JSON.pretty_generate(to_h)
      else
        to_h.to_json
      end
    end

    # Gets the Cfn client, if one exists, or throws an error if one does not.
    # NOTE: an earlier duplicate definition of this method (raising a plain
    # RuntimeError) was removed; this CferError version was already the
    # effective one, since it was defined last.
    def client
      @options[:client] || raise(Cfer::Util::CferError, "Stack has no associated client.")
    end

    # Includes template code from one or more files, and evals it in the context of this stack.
    # Filenames are relative to the file containing the invocation of this method.
    def include_template(*files)
      include_base = options[:include_base] || File.dirname(caller.first.split(/:\d/,2).first)
      files.each do |file|
        path = File.join(include_base, file)
        include_file(path)
      end
    end

    # Looks up a specific output of another CloudFormation stack in the same region.
    # @param stack [String] The name of the stack to fetch an output from
    # @param out [String] The name of the output to fetch from the stack
    def lookup_output(stack, out)
      lookup_outputs(stack).fetch(out)
    end

    # Looks up a hash of all outputs from another CloudFormation stack in the same region.
    # @param stack [String] The name of the stack to fetch outputs from
    def lookup_outputs(stack)
      client = @options[:client] || raise(Cfer::Util::CferError, "Can not fetch stack outputs without a client")
      client.fetch_outputs(stack)
    end

    class << self
      # Allows callers to mix extra DSL methods into every Stack.
      def extend_stack(&block)
        class_eval(&block)
      end
    end
  end
end
Git fixup
module Cfer::Core
  # Defines the structure of a CloudFormation stack
  class Stack < Cfer::Block
    include Cfer::Core::Functions
    include Cfer::Core::Hooks

    # The parameters strictly as passed via command line
    attr_reader :input_parameters

    # The fully resolved parameters, including defaults and parameters fetched from an existing stack during an update
    attr_reader :parameters

    attr_reader :options

    # Git state captured during initialize (nil when no client/git available)
    attr_reader :git_state

    def converge!(options = {})
      client.converge self, options
    end

    def tail!(options = {}, &block)
      client.tail self, options, &block
    end

    def initialize(options = {})
      self[:AWSTemplateFormatVersion] = '2010-09-09'
      self[:Description] = ''
      @options = options

      self[:Metadata] = {
        :Cfer => {
          :Version => Cfer::SEMANTIC_VERSION.to_h.delete_if { |_k, v| v === nil }
        }
      }

      self[:Parameters] = {}
      self[:Mappings] = {}
      self[:Conditions] = {}
      self[:Resources] = {}
      self[:Outputs] = {}

      if options[:client] && (git = options[:client].git)
        begin
          # NOTE(review): 'HEAD^' resolves the *parent* of HEAD -- confirm intended.
          @git_state = git.object('HEAD^')
          self[:Metadata][:Cfer][:Git] = {
            Rev: git_state.sha,
            Clean: git.status.changed.empty?
          }
        rescue StandardError => e
          Cfer::LOGGER.warn("Unable to add Git information to CloudFormation Metadata. #{e}")
        end
      end

      @parameters = HashWithIndifferentAccess.new
      @input_parameters = HashWithIndifferentAccess.new

      if options[:client]
        begin
          # Carry forward the parameters of an existing stack during an update.
          @parameters.merge! options[:client].fetch_parameters
        rescue Cfer::Util::StackDoesNotExistError
          Cfer::LOGGER.debug "Can't include current stack parameters because the stack doesn't exist yet."
        end
      end

      if options[:parameters]
        options[:parameters].each do |key, val|
          @input_parameters[key] = @parameters[key] = val
        end
      end
    end

    # Sets the description for this CloudFormation stack
    def description(desc)
      self[:Description] = desc
    end

    # Declares a CloudFormation parameter
    #
    # @param name [String] The parameter name
    # @param options [Hash]
    # @option options [String] :type The type for the CloudFormation parameter
    # @option options [String] :default A value of the appropriate type for the template to use if no value is specified when a stack is created. If you define constraints for the parameter, you must specify a value that adheres to those constraints.
    # @option options [String] :no_echo Whether to mask the parameter value whenever anyone makes a call that describes the stack. If you set the value to `true`, the parameter value is masked with asterisks (*****).
    # @option options [String] :allowed_values An array containing the list of values allowed for the parameter.
    # @option options [String] :allowed_pattern A regular expression that represents the patterns you want to allow for String types.
    # @option options [Number] :max_length An integer value that determines the largest number of characters you want to allow for String types.
    # @option options [Number] :min_length An integer value that determines the smallest number of characters you want to allow for String types.
    # @option options [Number] :max_value A numeric value that determines the largest numeric value you want to allow for Number types.
    # @option options [Number] :min_value A numeric value that determines the smallest numeric value you want to allow for Number types.
    # @option options [String] :description A string of up to 4000 characters that describes the parameter.
    # @option options [String] :constraint_description A string that explains the constraint when the constraint is violated. For example, without a constraint description, a parameter that has an allowed pattern of `[A-Za-z0-9]+` displays the following error message when the user specifies an invalid value:
    #
    # ```Malformed input-Parameter MyParameter must match pattern [A-Za-z0-9]+```
    #
    # By adding a constraint description, such as must only contain upper- and lowercase letters, and numbers, you can display a customized error message:
    #
    # ```Malformed input-Parameter MyParameter must only contain upper and lower case letters and numbers```
    def parameter(name, options = {})
      param = {}
      options.each do |key, v|
        next if v === nil
        k = key.to_s.camelize.to_sym
        param[k] =
          case k
          when :AllowedPattern
            if v.class == Regexp
              v.source
            end
          when :Default
            # Record the declared default as the resolved parameter value
            # unless one was already supplied.
            @parameters[name] ||= v
          end
        param[k] ||= v
      end
      param[:Type] ||= 'String'
      self[:Parameters][name] = param
    end

    # Sets the mappings block for this stack. See [The CloudFormation Documentation](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/mappings-section-structure.html) for more details
    def mappings(mappings)
      self[:Mappings] = mappings
    end

    # Adds a condition to the template.
    # @param name [String] The name of the condition.
    # @param expr [Hash] The CloudFormation condition to add. See [The Cloudformation Documentation](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/conditions-section-structure.html) for more details
    def condition(name, expr)
      self[:Conditions][name] = expr
    end

    # Creates a CloudFormation resource
    # @param name [String] The name of the resource (must be alphanumeric)
    # @param type [String] The type of CloudFormation resource to create.
    # @param options [Hash] Additional attributes to add to the resource block (such as the `UpdatePolicy` for an `AWS::AutoScaling::AutoScalingGroup`)
    def resource(name, type, options = {}, &block)
      Preconditions.check_argument(/[[:alnum:]]+/ =~ name, "Resource name must be alphanumeric")
      clazz = Cfer::Core::Resource.resource_class(type)
      rc = clazz.new(name, type, self, options, &block)
      self[:Resources][name] = rc
      rc.handle
    end

    # Adds an output to the CloudFormation stack.
    # @param name [String] The Logical ID of the output parameter
    # @param value [String] Value to return
    # @param options [Hash] Extra options for this output parameter
    # @option options [String] :Description Information about the value
    def output(name, value, options = {})
      self[:Outputs][name] = options.merge('Value' => value)
    end

    # Renders the stack into a CloudFormation template.
    # @return [String] The final template
    def to_cfn
      if @options[:pretty_print]
        JSON.pretty_generate(to_h)
      else
        to_h.to_json
      end
    end

    # Gets the Cfn client, if one exists, or throws an error if one does not.
    # NOTE: an earlier duplicate definition of this method (raising a plain
    # RuntimeError) was removed; this CferError version was already the
    # effective one, since it was defined last.
    def client
      @options[:client] || raise(Cfer::Util::CferError, "Stack has no associated client.")
    end

    # Includes template code from one or more files, and evals it in the context of this stack.
    # Filenames are relative to the file containing the invocation of this method.
    def include_template(*files)
      include_base = options[:include_base] || File.dirname(caller.first.split(/:\d/,2).first)
      files.each do |file|
        path = File.join(include_base, file)
        include_file(path)
      end
    end

    # Looks up a specific output of another CloudFormation stack in the same region.
    # @param stack [String] The name of the stack to fetch an output from
    # @param out [String] The name of the output to fetch from the stack
    def lookup_output(stack, out)
      lookup_outputs(stack).fetch(out)
    end

    # Looks up a hash of all outputs from another CloudFormation stack in the same region.
    # @param stack [String] The name of the stack to fetch outputs from
    def lookup_outputs(stack)
      client = @options[:client] || raise(Cfer::Util::CferError, "Can not fetch stack outputs without a client")
      client.fetch_outputs(stack)
    end

    class << self
      # Allows callers to mix extra DSL methods into every Stack.
      def extend_stack(&block)
        class_eval(&block)
      end
    end
  end
end
|
module Chartjs
VERSION = "1.0.0"
end
Promote to v1.0.1.
module Chartjs
VERSION = "1.0.1"
end
|
module Chromaprint
# Ports the chromaprint API functions. To get their detailed documentation
# please see +chromaprint.h+ of the original C/C++ library.
#
# ref: https://bitbucket.org/acoustid/chromaprint/src/master/src/chromaprint.h
module Lib
extend FFI::Library
ffi_lib 'chromaprint'
# Return the version number of Chromaprint.
#
# const char *chromaprint_get_version(void)
attach_function :chromaprint_get_version, [], :string
# Allocate and initialize the Chromaprint context.
#
# Parameters:
# - version: Version of the fingerprint algorithm, use
# CHROMAPRINT_ALGORITHM_DEFAULT for the default
# algorithm
#
# Returns:
# - Chromaprint context pointer
#
# ChromaprintContext *chromaprint_new(int algorithm)
attach_function :chromaprint_new, [:int], :pointer
# Deallocate the Chromaprint context.
#
# Parameters:
# - ctx: Chromaprint context pointer
#
# void chromaprint_free(ChromaprintContext *ctx)
attach_function :chromaprint_free, [:pointer], :void
# Return the fingerprint algorithm this context is configured to use.
#
# int chromaprint_get_algorithm(ChromaprintContext *ctx)
#
# @note
# In Debian Squeeze chromaprint.so (version 6.0.0) doesn't have
# chromaprint_get_algorithm() function. So we comment it out to not
# raise exception on loading.
#
# attach_function :chromaprint_get_algorithm, [:pointer], :int
# Restart the computation of a fingerprint with a new audio stream.
#
# Parameters:
# - ctx: Chromaprint context pointer
# - sample_rate: sample rate of the audio stream (in Hz)
# - num_channels: numbers of channels in the audio stream (1 or 2)
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_start(ChromaprintContext *ctx, int sample_rate, int num_channels)
attach_function :chromaprint_start, [:pointer, :int, :int], :int
# Send audio data to the fingerprint calculator.
#
# Parameters:
# - ctx: Chromaprint context pointer
# - data: raw audio data, should point to an array of 16-bit signed
# integers in native byte-order
# - size: size of the data buffer (in samples)
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_feed(ChromaprintContext *ctx, void *data, int size)
attach_function :chromaprint_feed, [:pointer, :pointer, :int], :int
# Process any remaining buffered audio data and calculate the fingerprint.
#
# Parameters:
# - ctx: Chromaprint context pointer
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_finish(ChromaprintContext *ctx)
attach_function :chromaprint_finish, [:pointer], :int
# Return the calculated fingerprint as a compressed string.
#
# The caller is responsible for freeing the returned pointer using
# chromaprint_dealloc().
#
# Parameters:
# - ctx: Chromaprint context pointer
# - fingerprint: pointer to a pointer, where a pointer to the allocated array
# will be stored
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_get_fingerprint(ChromaprintContext *ctx, char **fingerprint)
attach_function :chromaprint_get_fingerprint, [:pointer, :pointer], :int
# Return the calculated fingerprint as an array of 32-bit integers.
#
# The caller is responsible for freeing the returned pointer using
# chromaprint_dealloc().
#
# Parameters:
# - ctx: Chromaprint context pointer
# - fingerprint: pointer to a pointer, where a pointer to the allocated array
# will be stored
# - size: number of items in the returned raw fingerprint
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_get_raw_fingerprint(ChromaprintContext *ctx, void **fingerprint, int *size)
attach_function :chromaprint_get_raw_fingerprint, [:pointer, :pointer, :pointer], :int
# Compress and optionally base64-encode a raw fingerprint
#
# The caller is responsible for freeing the returned pointer using
# chromaprint_dealloc().
#
# Parameters:
# - fp: pointer to an array of 32-bit integers representing the raw
# fingerprint to be encoded
# - size: number of items in the raw fingerprint
# - algorithm: Chromaprint algorithm version which was used to generate the
# raw fingerprint
# - encoded_fp: pointer to a pointer, where the encoded fingerprint will be
# stored
# - encoded_size: size of the encoded fingerprint in bytes
# - base64: Whether to return binary data or base64-encoded ASCII data. The
# compressed fingerprint will be encoded using base64 with the
# URL-safe scheme if you set this parameter to 1. It will return
# binary data if it's 0.
#
# Returns:
# - 0 on error, 1 on success
# int chromaprint_encode_fingerprint(void *fp, int size, int algorithm, void **encoded_fp, int *encoded_size, int base64)
attach_function :chromaprint_encode_fingerprint, [:pointer, :int, :int, :pointer, :pointer, :int], :int
# Uncompress and optionally base64-decode an encoded fingerprint
#
# The caller is responsible for freeing the returned pointer using
# chromaprint_dealloc().
#
# Parameters:
# - encoded_fp: Pointer to an encoded fingerprint
# - encoded_size: Size of the encoded fingerprint in bytes
# - fp: Pointer to a pointer, where the decoded raw fingerprint (array
# of 32-bit integers) will be stored
# - size: Number of items in the returned raw fingerprint
# - algorithm: Chromaprint algorithm version which was used to generate the
# raw fingerprint
# - base64: Whether the encoded_fp parameter contains binary data or
# base64-encoded ASCII data. If 1, it will base64-decode the data
# before uncompressing the fingerprint.
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_decode_fingerprint(void *encoded_fp, int encoded_size, void **fp, int *size, int *algorithm, int base64)
attach_function :chromaprint_decode_fingerprint, [:pointer, :int, :pointer, :pointer, :pointer, :int], :int
# Free memory allocated by any function from the Chromaprint API.
#
# - ptr: Pointer to be deallocated
#
# void chromaprint_dealloc(void *ptr);
attach_function :chromaprint_dealloc, [:pointer], :void
end
end
s/Squeeze/Wheezy/
Task 12373
module Chromaprint
# Ports the chromaprint API functions. To get their detailed documentation
# please see +chromaprint.h+ of the original C/C++ library.
#
# ref: https://bitbucket.org/acoustid/chromaprint/src/master/src/chromaprint.h
module Lib
extend FFI::Library
ffi_lib 'chromaprint'
# Return the version number of Chromaprint.
#
# const char *chromaprint_get_version(void)
attach_function :chromaprint_get_version, [], :string
# Allocate and initialize the Chromaprint context.
#
# Parameters:
# - version: Version of the fingerprint algorithm, use
# CHROMAPRINT_ALGORITHM_DEFAULT for the default
# algorithm
#
# Returns:
# - Chromaprint context pointer
#
# ChromaprintContext *chromaprint_new(int algorithm)
attach_function :chromaprint_new, [:int], :pointer
# Deallocate the Chromaprint context.
#
# Parameters:
# - ctx: Chromaprint context pointer
#
# void chromaprint_free(ChromaprintContext *ctx)
attach_function :chromaprint_free, [:pointer], :void
# Return the fingerprint algorithm this context is configured to use.
#
# int chromaprint_get_algorithm(ChromaprintContext *ctx)
#
# @note
# In Debian Wheezy chromaprint.so (version 6.0.0) doesn't have
# chromaprint_get_algorithm() function. So we comment it out to not
# raise exception on loading.
#
# attach_function :chromaprint_get_algorithm, [:pointer], :int
# Restart the computation of a fingerprint with a new audio stream.
#
# Parameters:
# - ctx: Chromaprint context pointer
# - sample_rate: sample rate of the audio stream (in Hz)
# - num_channels: numbers of channels in the audio stream (1 or 2)
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_start(ChromaprintContext *ctx, int sample_rate, int num_channels)
attach_function :chromaprint_start, [:pointer, :int, :int], :int
# Send audio data to the fingerprint calculator.
#
# Parameters:
# - ctx: Chromaprint context pointer
# - data: raw audio data, should point to an array of 16-bit signed
# integers in native byte-order
# - size: size of the data buffer (in samples)
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_feed(ChromaprintContext *ctx, void *data, int size)
attach_function :chromaprint_feed, [:pointer, :pointer, :int], :int
# Process any remaining buffered audio data and calculate the fingerprint.
#
# Parameters:
# - ctx: Chromaprint context pointer
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_finish(ChromaprintContext *ctx)
attach_function :chromaprint_finish, [:pointer], :int
# Return the calculated fingerprint as a compressed string.
#
# The caller is responsible for freeing the returned pointer using
# chromaprint_dealloc().
#
# Parameters:
# - ctx: Chromaprint context pointer
# - fingerprint: pointer to a pointer, where a pointer to the allocated array
# will be stored
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_get_fingerprint(ChromaprintContext *ctx, char **fingerprint)
attach_function :chromaprint_get_fingerprint, [:pointer, :pointer], :int
# Return the calculated fingerprint as an array of 32-bit integers.
#
# The caller is responsible for freeing the returned pointer using
# chromaprint_dealloc().
#
# Parameters:
# - ctx: Chromaprint context pointer
# - fingerprint: pointer to a pointer, where a pointer to the allocated array
# will be stored
# - size: number of items in the returned raw fingerprint
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_get_raw_fingerprint(ChromaprintContext *ctx, void **fingerprint, int *size)
attach_function :chromaprint_get_raw_fingerprint, [:pointer, :pointer, :pointer], :int
# Compress and optionally base64-encode a raw fingerprint
#
# The caller is responsible for freeing the returned pointer using
# chromaprint_dealloc().
#
# Parameters:
# - fp: pointer to an array of 32-bit integers representing the raw
# fingerprint to be encoded
# - size: number of items in the raw fingerprint
# - algorithm: Chromaprint algorithm version which was used to generate the
# raw fingerprint
# - encoded_fp: pointer to a pointer, where the encoded fingerprint will be
# stored
# - encoded_size: size of the encoded fingerprint in bytes
# - base64: Whether to return binary data or base64-encoded ASCII data. The
# compressed fingerprint will be encoded using base64 with the
# URL-safe scheme if you set this parameter to 1. It will return
# binary data if it's 0.
#
# Returns:
# - 0 on error, 1 on success
# int chromaprint_encode_fingerprint(void *fp, int size, int algorithm, void **encoded_fp, int *encoded_size, int base64)
attach_function :chromaprint_encode_fingerprint, [:pointer, :int, :int, :pointer, :pointer, :int], :int
# Uncompress and optionally base64-decode an encoded fingerprint
#
# The caller is responsible for freeing the returned pointer using
# chromaprint_dealloc().
#
# Parameters:
# - encoded_fp: Pointer to an encoded fingerprint
# - encoded_size: Size of the encoded fingerprint in bytes
# - fp: Pointer to a pointer, where the decoded raw fingerprint (array
# of 32-bit integers) will be stored
# - size: Number of items in the returned raw fingerprint
# - algorithm: Chromaprint algorithm version which was used to generate the
# raw fingerprint
# - base64: Whether the encoded_fp parameter contains binary data or
# base64-encoded ASCII data. If 1, it will base64-decode the data
# before uncompressing the fingerprint.
#
# Returns:
# - 0 on error, 1 on success
#
# int chromaprint_decode_fingerprint(void *encoded_fp, int encoded_size, void **fp, int *size, int *algorithm, int base64)
attach_function :chromaprint_decode_fingerprint, [:pointer, :int, :pointer, :pointer, :pointer, :int], :int
# Free memory allocated by any function from the Chromaprint API.
#
# - ptr: Pointer to be deallocated
#
# void chromaprint_dealloc(void *ptr);
attach_function :chromaprint_dealloc, [:pointer], :void
end
end
|
module Chronic
DEFAULT_OPTIONS = {
:context => :future,
:now => nil,
:guess => true,
:ambiguous_time_range => 6,
:endian_precedence => [:middle, :little],
:ambiguous_year_future_bias => 50
}
class << self
# Parses a string containing a natural language date or time. If the parser
# can find a date or time, either a Time or Chronic::Span will be returned
# (depending on the value of <tt>:guess</tt>). If no date or time can be found,
# +nil+ will be returned.
#
# Options are:
#
# [<tt>:context</tt>]
# <tt>:past</tt> or <tt>:future</tt> (defaults to <tt>:future</tt>)
#
# If your string represents a birthday, you can set <tt>:context</tt> to <tt>:past</tt>
# and if an ambiguous string is given, it will assume it is in the
# past. Specify <tt>:future</tt> or omit to set a future context.
#
# [<tt>:now</tt>]
# Time (defaults to Time.now)
#
# By setting <tt>:now</tt> to a Time, all computations will be based off
# of that time instead of Time.now. If set to nil, Chronic will use Time.now.
#
# [<tt>:guess</tt>]
# +true+ or +false+ (defaults to +true+)
#
# By default, the parser will guess a single point in time for the
# given date or time. If you'd rather have the entire time span returned,
# set <tt>:guess</tt> to +false+ and a Chronic::Span will be returned.
#
# [<tt>:ambiguous_time_range</tt>]
# Integer or <tt>:none</tt> (defaults to <tt>6</tt> (6am-6pm))
#
# If an Integer is given, ambiguous times (like 5:00) will be
# assumed to be within the range of that time in the AM to that time
# in the PM. For example, if you set it to <tt>7</tt>, then the parser will
# look for the time between 7am and 7pm. In the case of 5:00, it would
# assume that means 5:00pm. If <tt>:none</tt> is given, no assumption
# will be made, and the first matching instance of that time will
# be used.
#
# [<tt>:endian_precedence</tt>]
# Array (defaults to <tt>[:middle, :little]</tt>)
#
# By default, Chronic will parse "03/04/2011" as the fourth day
# of the third month. Alternatively you can tell Chronic to parse
# this as the third day of the fourth month by altering the
# <tt>:endian_precedence</tt> to <tt>[:little, :middle]</tt>.
def parse(text, specified_options = {})
options = DEFAULT_OPTIONS.merge specified_options
# ensure the specified options are valid
(specified_options.keys - DEFAULT_OPTIONS.keys).each do |key|
raise InvalidArgumentException, "#{key} is not a valid option key."
end
unless [:past, :future, :none].include?(options[:context])
raise InvalidArgumentException, "Invalid context, :past/:future only"
end
options[:now] ||= Chronic.time_class.now
@now = options[:now]
options[:text] = text
# tokenize words
tokens = tokenize(text, options)
if Chronic.debug
puts "+---------------------------------------------------"
puts "| " + tokens.to_s
puts "+---------------------------------------------------"
end
span = tokens_to_span(tokens, options)
if options[:guess]
guess span
else
span
end
end
# Clean up the specified input text by stripping unwanted characters,
# converting idioms to their canonical form, converting number words
# to numbers (three => 3), and converting ordinal words to numeric
# ordinals (third => 3rd)
def pre_normalize(text) #:nodoc:
normalized_text = text.to_s.downcase
normalized_text.gsub!(/['"\.,]/, '')
normalized_text.gsub!(/\bsecond (of|day|month|hour|minute|second)\b/, '2nd \1')
normalized_text = numericize_numbers(normalized_text)
normalized_text.gsub!(/ \-(\d{4})\b/, ' tzminus\1')
normalized_text.gsub!(/([\/\-\,\@])/) { ' ' + $1 + ' ' }
normalized_text.gsub!(/\b0(\d+:\d+\s*pm?\b)/, '\1')
normalized_text.gsub!(/\btoday\b/, 'this day')
normalized_text.gsub!(/\btomm?orr?ow\b/, 'next day')
normalized_text.gsub!(/\byesterday\b/, 'last day')
normalized_text.gsub!(/\bnoon\b/, '12:00')
normalized_text.gsub!(/\bmidnight\b/, '24:00')
normalized_text.gsub!(/\bbefore now\b/, 'past')
normalized_text.gsub!(/\bnow\b/, 'this second')
normalized_text.gsub!(/\b(ago|before)\b/, 'past')
normalized_text.gsub!(/\bthis past\b/, 'last')
normalized_text.gsub!(/\bthis last\b/, 'last')
normalized_text.gsub!(/\b(?:in|during) the (morning)\b/, '\1')
normalized_text.gsub!(/\b(?:in the|during the|at) (afternoon|evening|night)\b/, '\1')
normalized_text.gsub!(/\btonight\b/, 'this night')
normalized_text.gsub!(/\b\d+:?\d*[ap]\b/,'\0m')
normalized_text.gsub!(/(\d)([ap]m|oclock)\b/, '\1 \2')
normalized_text.gsub!(/\b(hence|after|from)\b/, 'future')
normalized_text
end
# Convert number words to numbers (three => 3)
def numericize_numbers(text) #:nodoc:
Numerizer.numerize(text)
end
# Guess a specific time within the given span
def guess(span) #:nodoc:
return nil if span.nil?
if span.width > 1
span.begin + (span.width / 2)
else
span.begin
end
end
def definitions(options={}) #:nodoc:
options[:endian_precedence] ||= [:middle, :little]
@definitions ||= {
:time => [
Handler.new([:repeater_time, :repeater_day_portion?], nil)
],
:date => [
Handler.new([:repeater_day_name, :repeater_month_name, :scalar_day, :repeater_time, :separator_slash_or_dash?, :time_zone, :scalar_year], :handle_rdn_rmn_sd_t_tz_sy),
Handler.new([:repeater_month_name, :scalar_day, :scalar_year], :handle_rmn_sd_sy),
Handler.new([:repeater_month_name, :ordinal_day, :scalar_year], :handle_rmn_od_sy),
Handler.new([:repeater_month_name, :scalar_day, :scalar_year, :separator_at?, 'time?'], :handle_rmn_sd_sy),
Handler.new([:repeater_month_name, :ordinal_day, :scalar_year, :separator_at?, 'time?'], :handle_rmn_od_sy),
Handler.new([:repeater_month_name, :scalar_day, :separator_at?, 'time?'], :handle_rmn_sd),
Handler.new([:repeater_time, :repeater_day_portion?, :separator_on?, :repeater_month_name, :scalar_day], :handle_rmn_sd_on),
Handler.new([:repeater_month_name, :ordinal_day, :separator_at?, 'time?'], :handle_rmn_od),
Handler.new([:repeater_time, :repeater_day_portion?, :separator_on?, :repeater_month_name, :ordinal_day], :handle_rmn_od_on),
Handler.new([:repeater_month_name, :scalar_year], :handle_rmn_sy),
Handler.new([:scalar_day, :repeater_month_name, :scalar_year, :separator_at?, 'time?'], :handle_sd_rmn_sy),
Handler.new([:scalar_year, :separator_slash_or_dash, :scalar_month, :separator_slash_or_dash, :scalar_day, :separator_at?, 'time?'], :handle_sy_sm_sd),
Handler.new([:scalar_month, :separator_slash_or_dash, :scalar_year], :handle_sm_sy)
],
# tonight at 7pm
:anchor => [
Handler.new([:grabber?, :repeater, :separator_at?, :repeater?, :repeater?], :handle_r),
Handler.new([:grabber?, :repeater, :repeater, :separator_at?, :repeater?, :repeater?], :handle_r),
Handler.new([:repeater, :grabber, :repeater], :handle_r_g_r)
],
# 3 weeks from now, in 2 months
:arrow => [
Handler.new([:scalar, :repeater, :pointer], :handle_s_r_p),
Handler.new([:pointer, :scalar, :repeater], :handle_p_s_r),
Handler.new([:scalar, :repeater, :pointer, 'anchor'], :handle_s_r_p_a)
],
# 3rd week in march
:narrow => [
Handler.new([:ordinal, :repeater, :separator_in, :repeater], :handle_o_r_s_r),
Handler.new([:ordinal, :repeater, :grabber, :repeater], :handle_o_r_g_r)
]
}
endians = [
Handler.new([:scalar_month, :separator_slash_or_dash, :scalar_day, :separator_slash_or_dash, :scalar_year, :separator_at?, 'time?'], :handle_sm_sd_sy),
Handler.new([:scalar_day, :separator_slash_or_dash, :scalar_month, :separator_slash_or_dash, :scalar_year, :separator_at?, 'time?'], :handle_sd_sm_sy)
]
case endian = Array(options[:endian_precedence]).first
when :little
@definitions[:endian] = endians.reverse
when :middle
@definitions[:endian] = endians
else
raise InvalidArgumentException, "Unknown endian option '#{endian}'"
end
@definitions
end
private
def tokenize(text, options) #:nodoc:
text = pre_normalize(text)
tokens = text.split(' ').map { |word| Token.new(word) }
[Repeater, Grabber, Pointer, Scalar, Ordinal, Separator, TimeZone].each do |tok|
tokens = tok.scan(tokens, options)
end
tokens.select { |token| token.tagged? }
end
def tokens_to_span(tokens, options) #:nodoc:
definitions = definitions(options)
(definitions[:date] + definitions[:endian]).each do |handler|
if handler.match(tokens, definitions)
puts "-date" if Chronic.debug
good_tokens = tokens.select { |o| !o.get_tag Separator }
return Handlers.send(handler.handler_method, good_tokens, options)
end
end
definitions[:anchor].each do |handler|
if handler.match(tokens, definitions)
puts "-anchor" if Chronic.debug
good_tokens = tokens.select { |o| !o.get_tag Separator }
return Handlers.send(handler.handler_method, good_tokens, options)
end
end
definitions[:arrow].each do |handler|
if handler.match(tokens, definitions)
puts "-arrow" if Chronic.debug
tags = [SeparatorAt, SeparatorSlashOrDash, SeparatorComma]
good_tokens = tokens.reject { |o| tags.any? { |t| o.get_tag(t) } }
return Handlers.send(handler.handler_method, good_tokens, options)
end
end
definitions[:narrow].each do |handler|
if handler.match(tokens, definitions)
puts "-narrow" if Chronic.debug
good_tokens = tokens.select { |o| !o.get_tag Separator }
return Handlers.send(handler.handler_method, tokens, options)
end
end
puts "-none" if Chronic.debug
return nil
end
end
# Internal exception
class ChronicPain < Exception #:nodoc:
end
# This exception is raised if an invalid argument is provided to
# any of Chronic's methods
class InvalidArgumentException < Exception
end
end
assign Chronic.now explicitly though writer not through class ivar
module Chronic
DEFAULT_OPTIONS = {
:context => :future,
:now => nil,
:guess => true,
:ambiguous_time_range => 6,
:endian_precedence => [:middle, :little],
:ambiguous_year_future_bias => 50
}
class << self
# Parses a string containing a natural language date or time. If the parser
# can find a date or time, either a Time or Chronic::Span will be returned
# (depending on the value of <tt>:guess</tt>). If no date or time can be found,
# +nil+ will be returned.
#
# Options are:
#
# [<tt>:context</tt>]
# <tt>:past</tt> or <tt>:future</tt> (defaults to <tt>:future</tt>)
#
# If your string represents a birthday, you can set <tt>:context</tt> to <tt>:past</tt>
# and if an ambiguous string is given, it will assume it is in the
# past. Specify <tt>:future</tt> or omit to set a future context.
#
# [<tt>:now</tt>]
# Time (defaults to Time.now)
#
# By setting <tt>:now</tt> to a Time, all computations will be based off
# of that time instead of Time.now. If set to nil, Chronic will use Time.now.
#
# [<tt>:guess</tt>]
# +true+ or +false+ (defaults to +true+)
#
# By default, the parser will guess a single point in time for the
# given date or time. If you'd rather have the entire time span returned,
# set <tt>:guess</tt> to +false+ and a Chronic::Span will be returned.
#
# [<tt>:ambiguous_time_range</tt>]
# Integer or <tt>:none</tt> (defaults to <tt>6</tt> (6am-6pm))
#
# If an Integer is given, ambiguous times (like 5:00) will be
# assumed to be within the range of that time in the AM to that time
# in the PM. For example, if you set it to <tt>7</tt>, then the parser will
# look for the time between 7am and 7pm. In the case of 5:00, it would
# assume that means 5:00pm. If <tt>:none</tt> is given, no assumption
# will be made, and the first matching instance of that time will
# be used.
#
# [<tt>:endian_precedence</tt>]
# Array (defaults to <tt>[:middle, :little]</tt>)
#
# By default, Chronic will parse "03/04/2011" as the fourth day
# of the third month. Alternatively you can tell Chronic to parse
# this as the third day of the fourth month by altering the
# <tt>:endian_precedence</tt> to <tt>[:little, :middle]</tt>.
def parse(text, specified_options = {})
options = DEFAULT_OPTIONS.merge specified_options
# ensure the specified options are valid
(specified_options.keys - DEFAULT_OPTIONS.keys).each do |key|
raise InvalidArgumentException, "#{key} is not a valid option key."
end
unless [:past, :future, :none].include?(options[:context])
raise InvalidArgumentException, "Invalid context, :past/:future only"
end
options[:text] = text
options[:now] ||= Chronic.time_class.now
Chronic.now = options[:now]
# tokenize words
tokens = tokenize(text, options)
if Chronic.debug
puts "+---------------------------------------------------"
puts "| " + tokens.to_s
puts "+---------------------------------------------------"
end
span = tokens_to_span(tokens, options)
if options[:guess]
guess span
else
span
end
end
# Clean up the specified input text by stripping unwanted characters,
# converting idioms to their canonical form, converting number words
# to numbers (three => 3), and converting ordinal words to numeric
# ordinals (third => 3rd)
def pre_normalize(text) #:nodoc:
normalized_text = text.to_s.downcase
normalized_text.gsub!(/['"\.,]/, '')
normalized_text.gsub!(/\bsecond (of|day|month|hour|minute|second)\b/, '2nd \1')
normalized_text = numericize_numbers(normalized_text)
normalized_text.gsub!(/ \-(\d{4})\b/, ' tzminus\1')
normalized_text.gsub!(/([\/\-\,\@])/) { ' ' + $1 + ' ' }
normalized_text.gsub!(/\b0(\d+:\d+\s*pm?\b)/, '\1')
normalized_text.gsub!(/\btoday\b/, 'this day')
normalized_text.gsub!(/\btomm?orr?ow\b/, 'next day')
normalized_text.gsub!(/\byesterday\b/, 'last day')
normalized_text.gsub!(/\bnoon\b/, '12:00')
normalized_text.gsub!(/\bmidnight\b/, '24:00')
normalized_text.gsub!(/\bbefore now\b/, 'past')
normalized_text.gsub!(/\bnow\b/, 'this second')
normalized_text.gsub!(/\b(ago|before)\b/, 'past')
normalized_text.gsub!(/\bthis past\b/, 'last')
normalized_text.gsub!(/\bthis last\b/, 'last')
normalized_text.gsub!(/\b(?:in|during) the (morning)\b/, '\1')
normalized_text.gsub!(/\b(?:in the|during the|at) (afternoon|evening|night)\b/, '\1')
normalized_text.gsub!(/\btonight\b/, 'this night')
normalized_text.gsub!(/\b\d+:?\d*[ap]\b/,'\0m')
normalized_text.gsub!(/(\d)([ap]m|oclock)\b/, '\1 \2')
normalized_text.gsub!(/\b(hence|after|from)\b/, 'future')
normalized_text
end
# Convert number words to numbers (three => 3)
def numericize_numbers(text) #:nodoc:
Numerizer.numerize(text)
end
# Guess a specific time within the given span
def guess(span) #:nodoc:
return nil if span.nil?
if span.width > 1
span.begin + (span.width / 2)
else
span.begin
end
end
def definitions(options={}) #:nodoc:
options[:endian_precedence] ||= [:middle, :little]
@definitions ||= {
:time => [
Handler.new([:repeater_time, :repeater_day_portion?], nil)
],
:date => [
Handler.new([:repeater_day_name, :repeater_month_name, :scalar_day, :repeater_time, :separator_slash_or_dash?, :time_zone, :scalar_year], :handle_rdn_rmn_sd_t_tz_sy),
Handler.new([:repeater_month_name, :scalar_day, :scalar_year], :handle_rmn_sd_sy),
Handler.new([:repeater_month_name, :ordinal_day, :scalar_year], :handle_rmn_od_sy),
Handler.new([:repeater_month_name, :scalar_day, :scalar_year, :separator_at?, 'time?'], :handle_rmn_sd_sy),
Handler.new([:repeater_month_name, :ordinal_day, :scalar_year, :separator_at?, 'time?'], :handle_rmn_od_sy),
Handler.new([:repeater_month_name, :scalar_day, :separator_at?, 'time?'], :handle_rmn_sd),
Handler.new([:repeater_time, :repeater_day_portion?, :separator_on?, :repeater_month_name, :scalar_day], :handle_rmn_sd_on),
Handler.new([:repeater_month_name, :ordinal_day, :separator_at?, 'time?'], :handle_rmn_od),
Handler.new([:repeater_time, :repeater_day_portion?, :separator_on?, :repeater_month_name, :ordinal_day], :handle_rmn_od_on),
Handler.new([:repeater_month_name, :scalar_year], :handle_rmn_sy),
Handler.new([:scalar_day, :repeater_month_name, :scalar_year, :separator_at?, 'time?'], :handle_sd_rmn_sy),
Handler.new([:scalar_year, :separator_slash_or_dash, :scalar_month, :separator_slash_or_dash, :scalar_day, :separator_at?, 'time?'], :handle_sy_sm_sd),
Handler.new([:scalar_month, :separator_slash_or_dash, :scalar_year], :handle_sm_sy)
],
# tonight at 7pm
:anchor => [
Handler.new([:grabber?, :repeater, :separator_at?, :repeater?, :repeater?], :handle_r),
Handler.new([:grabber?, :repeater, :repeater, :separator_at?, :repeater?, :repeater?], :handle_r),
Handler.new([:repeater, :grabber, :repeater], :handle_r_g_r)
],
# 3 weeks from now, in 2 months
:arrow => [
Handler.new([:scalar, :repeater, :pointer], :handle_s_r_p),
Handler.new([:pointer, :scalar, :repeater], :handle_p_s_r),
Handler.new([:scalar, :repeater, :pointer, 'anchor'], :handle_s_r_p_a)
],
# 3rd week in march
:narrow => [
Handler.new([:ordinal, :repeater, :separator_in, :repeater], :handle_o_r_s_r),
Handler.new([:ordinal, :repeater, :grabber, :repeater], :handle_o_r_g_r)
]
}
endians = [
Handler.new([:scalar_month, :separator_slash_or_dash, :scalar_day, :separator_slash_or_dash, :scalar_year, :separator_at?, 'time?'], :handle_sm_sd_sy),
Handler.new([:scalar_day, :separator_slash_or_dash, :scalar_month, :separator_slash_or_dash, :scalar_year, :separator_at?, 'time?'], :handle_sd_sm_sy)
]
case endian = Array(options[:endian_precedence]).first
when :little
@definitions[:endian] = endians.reverse
when :middle
@definitions[:endian] = endians
else
raise InvalidArgumentException, "Unknown endian option '#{endian}'"
end
@definitions
end
private
def tokenize(text, options) #:nodoc:
text = pre_normalize(text)
tokens = text.split(' ').map { |word| Token.new(word) }
[Repeater, Grabber, Pointer, Scalar, Ordinal, Separator, TimeZone].each do |tok|
tokens = tok.scan(tokens, options)
end
tokens.select { |token| token.tagged? }
end
def tokens_to_span(tokens, options) #:nodoc:
definitions = definitions(options)
(definitions[:date] + definitions[:endian]).each do |handler|
if handler.match(tokens, definitions)
puts "-date" if Chronic.debug
good_tokens = tokens.select { |o| !o.get_tag Separator }
return Handlers.send(handler.handler_method, good_tokens, options)
end
end
definitions[:anchor].each do |handler|
if handler.match(tokens, definitions)
puts "-anchor" if Chronic.debug
good_tokens = tokens.select { |o| !o.get_tag Separator }
return Handlers.send(handler.handler_method, good_tokens, options)
end
end
definitions[:arrow].each do |handler|
if handler.match(tokens, definitions)
puts "-arrow" if Chronic.debug
tags = [SeparatorAt, SeparatorSlashOrDash, SeparatorComma]
good_tokens = tokens.reject { |o| tags.any? { |t| o.get_tag(t) } }
return Handlers.send(handler.handler_method, good_tokens, options)
end
end
definitions[:narrow].each do |handler|
if handler.match(tokens, definitions)
puts "-narrow" if Chronic.debug
good_tokens = tokens.select { |o| !o.get_tag Separator }
return Handlers.send(handler.handler_method, tokens, options)
end
end
puts "-none" if Chronic.debug
return nil
end
end
# Internal exception
class ChronicPain < Exception #:nodoc:
end
# This exception is raised if an invalid argument is provided to
# any of Chronic's methods
class InvalidArgumentException < Exception
end
end
|
module Danger
  # Danger plugin that surfaces links to CircleCI build artifacts from a
  # Dangerfile. Any attributes or methods exposed here are available from
  # within your Dangerfile.
  #
  # @example Post links to CircleCI artifacts in the Danger comment
  #
  #          circleci.artifacts_links [{ 'message' => 'Report', 'path' => 'report.html' }]
  #
  # @see  /danger-circleci
  # @tags circleci, build, artifacts
  #
  class DangerCircleci < Plugin
    # Emits a markdown comment containing one link per artifact. Does
    # nothing when the CircleCI environment variables are not all present.
    #
    # @param [Array<Hash>] artifacts
    #        List of maps for the artifacts, using 'message' (link title)
    #        and 'path' (artifact path) keys
    # @return [String]
    def artifacts_links(artifacts)
      return unless should_display_message
      message = ''
      artifacts.each do |artifact|
        title = artifact['message']
        link = artifact_link(artifact)
        # NOTE(review): links are concatenated with no separator in between.
        message << "[#{title}](#{link})"
      end
      markdown message
    end
    # Checks if we can display the links to artifacts, i.e. whether every
    # CircleCI environment variable needed to build an artifact URL is set.
    #
    # @return [Boolean] truthy only when running in a CircleCI build context
    def should_display_message
      cc_username && cc_project_name && cc_build_number && cc_node_index
    end
    private
    # @return [String, nil] CircleCI project owner from the environment
    def cc_username
      ENV['CIRCLE_PROJECT_USERNAME']
    end
    # @return [String, nil] CircleCI project (repo) name from the environment
    def cc_project_name
      ENV['CIRCLE_PROJECT_NAME']
    end
    # @return [String, nil] CircleCI build number from the environment
    def cc_build_number
      ENV['CIRCLE_BUILD_NUM']
    end
    # @return [String, nil] index of the build node that holds the artifacts
    def cc_node_index
      ENV['CIRCLE_NODE_INDEX']
    end
    # @return [String] base URL of the CircleCI artifacts API for this build
    def circleci_url
      "https://circleci.com/api/v1/project/#{cc_username}/#{cc_project_name}" \
        "/#{cc_build_number}/artifacts/#{cc_node_index}"
    end
    # Builds the full URL for one artifact; strips the first '/' from the
    # stored path before appending it to the API base URL.
    def artifact_link(artifact)
      path = artifact['path'].sub('/', '')
      "#{circleci_url}/#{path}"
    end
  end
end
Add documentation for the DangerCircleci plugin class and its public interface
module Danger
  # Interact with CircleCI artifacts from a Dangerfile.
  #
  # @example To show links to artifacts
  #
  #          artifacts = [
  #            { 'message' => 'Test Report', 'path' => 'test/report.html' }
  #          ]
  #          circleci.artifacts_links artifacts
  #
  # @see  /danger-circleci
  # @tags circleci, build, artifacts
  #
  class DangerCircleci < Plugin
    # Show links for build artifacts as a single markdown comment.
    #
    # @param [Array<Hash>] artifacts
    #        List of maps for the artifacts, using 'message' and 'path' keys
    # @return [String]
    def artifacts_links(artifacts)
      return unless should_display_message
      links = artifacts.map do |artifact|
        "[#{artifact['message']}](#{artifact_link(artifact)})"
      end
      markdown links.join
    end
    # Checks if we can display the links to artifacts.
    #
    # @return [Boolean] truthy when all CircleCI env variables are present
    def should_display_message
      cc_username && cc_project_name && cc_build_number && cc_node_index
    end
    private
    # CircleCI environment accessors; each returns nil when not on CI.
    def cc_username
      ENV['CIRCLE_PROJECT_USERNAME']
    end
    def cc_project_name
      ENV['CIRCLE_PROJECT_NAME']
    end
    def cc_build_number
      ENV['CIRCLE_BUILD_NUM']
    end
    def cc_node_index
      ENV['CIRCLE_NODE_INDEX']
    end
    # Base URL of the artifacts API endpoint for the current build.
    def circleci_url
      base = "https://circleci.com/api/v1/project"
      "#{base}/#{cc_username}/#{cc_project_name}/#{cc_build_number}/artifacts/#{cc_node_index}"
    end
    # Full URL for one artifact; drops the first '/' of the stored path.
    def artifact_link(artifact)
      "#{circleci_url}/#{artifact['path'].sub('/', '')}"
    end
  end
end
|
module Closeio
  # Gem version string.
  # Frozen to prevent accidental mutation and for consistency with later
  # releases, which freeze the constant.
  VERSION = '3.0.1'.freeze
end
Bump version to 3.1.0 and freeze the VERSION string
module Closeio
  # Gem version string; frozen to prevent accidental mutation.
  VERSION = '3.1.0'.freeze
end
|
module Seeds
class Core
# @return [String] project folder path
#
attr_reader :root_path
# @return [Boolean] whether display outputs
#
attr_accessor :mute
# @return [String] Seedfile path
#
# @!visibility private
#
attr_reader :seedfile_path
# @return [String] Seedfile.lock path
#
# @!visibility private
#
attr_reader :lockfile_path
# @return [Xcodeproj::Project] Xcode project
#
# @!visibility private
#
attr_accessor :project
# @return [String] content of Seedfile
#
# @!visibility private
#
attr_accessor :seedfile
# @return [String] content of Seedfile.lock
#
# @!visibility private
#
attr_accessor :lockfile
# @return [Hash{Sting => Seeds::Seed}] seeds by seed name
#
# @!visibility private
#
attr_reader :seeds
# @return [Hash{Sting => Seeds::Seed}] locked dependencies by seed name
#
# @!visibility private
#
attr_reader :locks
# @return [Hash{Sting => String}] target name by seed name
#
# @!visibility private
#
attr_reader :targets
# @return [Hash{Sting => Seeds::Seed}] source file paths by seed name
#
# @!visibility private
#
attr_reader :source_files
# @return [Array<Xcodeproj::Project::Object::PBXFileReference>]
# file references that will be added to project
#
# @!visibility private
#
attr_reader :file_references
# @return [Boolean] whether append seed name as a prefix to swift files
#
# @!visibility private
#
attr_accessor :swift_seedname_prefix
# @param [String] root_path
# The path provided will be used for detecting Xcode project and
# Seedfile.
#
# @see #root_path
#
def initialize(root_path)
  # Remember the project root and derive the Seedfile / lockfile locations
  # from it; all bookkeeping collections start out empty.
  @root_path = root_path
  @seedfile_path = File.join(root_path, "Seedfile")
  @lockfile_path = File.join(root_path, "Seeds", "Seedfile.lock")
  @seeds, @locks, @targets, @source_files = {}, {}, {}, {}
  @file_references = []
end
# Read Seedfile and install dependencies. An exception will be raised if
# there is no .xcodeproj file or Seedfile in the {#root_path}.
#
# @see #root_path
#
def install
self.prepare_requirements
self.analyze_dependencies
self.execute_seedfile
self.remove_seeds
self.install_seeds
self.configure_project
self.configure_phase
self.project.save
self.build_lockfile
@seeds = {}
@locks = {}
@targets = {}
@source_files = {}
@file_references = []
@swift_seedname_prefix = false
end
# Read Xcode project, Seedfile and lockfile. An exception will be raised if
# there is no .xcodeproj file or Seedfile in the {#root_path}.
#
# @see #root_path
#
# @!visibility private
#
def prepare_requirements
# .xcodeproj
project_filename = Dir.glob("#{root_path}/*.xcodeproj")[0]
if project_filename
self.project = Xcodeproj::Project.open(project_filename)
end
# Seedfile
begin
self.seedfile = File.read(self.seedfile_path)
rescue Errno::ENOENT
raise Seeds::Exception.new "Couldn't find Seedfile."
end
# Seedfile.lock - optional
begin
self.lockfile = File.read(self.lockfile_path)
rescue Errno::ENOENT
end
end
# Parses Seedfile.lockfile into {#lockfile}.
#
# @see #lockfile
#
# @!visibility private
#
def analyze_dependencies
  # Parses the lockfile (when one was read) into {#locks}, one
  # {Seeds::Seed} per entry, keyed by seed name.
  # NOTE(review): "Anaylizing" is a typo in the output string ("Analyzing").
  say "Anaylizing dependencies"
  # Seedfile.lock
  if self.lockfile
    locks = YAML.load(self.lockfile)
    # Each entry looks like "Name (1.2.3)" for tagged versions or
    # "Name ($abcdef0)" for pinned commits (the "$" marks a commit hash).
    locks["SEEDS"].each do |lock|
      seed = Seeds::Seed.new
      seed.name = lock.split(' (')[0]
      seed.version = lock.split('(')[1].split(')')[0]
      if seed.version.start_with? '$'
        # Commit pin: strip the "$" marker and store it as a commit instead.
        seed.commit = seed.version[1..-1]
        seed.version = nil
      end
      self.locks[seed.name] = seed
    end
  end
end
# Executes {#seedfile} using `eval`
#
# @!visibility private
#
def execute_seedfile
@current_target_name = nil
# Sets `@swift_seedname_prefix` as `true`.
#
# @!scope method
# @!visibility private
#
def swift_seedname_prefix!()
@swift_seedname_prefix = true
end
# Set current Xcode project with given path.
#
# @!scope method
# @!visibility private
#
def xcodeproj(path)
proejct_filename = File.join(self.root_path, path)
self.project = Xcodeproj::Project.open(proejct_filename)
self.validate_project
end
# Sets `@current_target_name` and executes code block.
#
# @param [String] names The name of target.
#
# @!scope method
# @!visibility private
#
def target(*names, &code)
self.validate_project
names.each do |name|
name = name.to_s # use string instead of symbol
target = self.project.target_named(name)
if not target
raise Seeds::Exception.new\
"#{self.project.path.basename} doesn't have a target `#{name}`"
end
@current_target_name = name
code.call()
end
@current_target_name = nil
end
# Creates a new instance of {#Seeds::Seed::GitHub} and adds to {#seeds}.
#
# @see #Seeds::Seed::GitHub
#
# @!scope method
# @!visibility private
#
def github(repo, tag, options={})
self.validate_project
if not @current_target_name # apply to all targets
target *self.project.targets.map(&:name) do
send(__callee__, repo, tag, options)
end
elsif repo.split('/').count != 2
raise Seeds::Exception.new\
"#{repo}: GitHub should have both username and repo name.\n"\
" (e.g. `devxoul/JLToast`)"
else
seed = Seeds::Seed::GitHub.new
seed.url = "https://github.com/#{repo}"
seed.name = repo.split('/')[1]
if tag.is_a?(String)
if options[:commit]
raise Seeds::Exception.new\
"#{repo}: Version and commit are both specified."
end
seed.version = tag
seed.files = options[:files] || '**/*.{h,m,mm,swift}'
seed.exclude_files = options[:exclude_files] || []
elsif tag.is_a?(Hash)
seed.commit = tag[:commit][0..6]
seed.files = tag[:files] || '**/*.{h,m,mm,swift}'
seed.exclude_files = options[:exclude_files] || []
end
if seed.files.kind_of?(String)
seed.files = [seed.files]
end
if seed.exclude_files.kind_of?(String)
seed.exclude_files = [seed.exclude_files]
end
self.seeds[seed.name] = seed
self.targets[seed.name] ||= []
self.targets[seed.name] << @current_target_name.to_s
end
end
# Creates a new instance of {#Seeds::Seed::BitBucket} and adds to
# {#seeds}.
#
# @see #Seeds::Seed::BitBucket
#
# @!scope method
# @!visibility private
#
def bitbucket(repo, tag, options={})
  # Registers a BitBucket-hosted seed for the current target(s).
  #
  # @param repo [String] "username/reponame" slug on bitbucket.org
  # @param tag [String, Hash] version tag, or a Hash with :commit / :files
  # @param options [Hash] optional :commit, :files, :exclude_files patterns
  self.validate_project
  if not @current_target_name # apply to all targets
    target *self.project.targets.map(&:name) do
      send(__callee__, repo, tag, options)
    end
  elsif repo.split('/').count != 2
    raise Seeds::Exception.new\
      "#{repo}: BitBucket should have both username and repo name.\n"\
      " (e.g. `devxoul/JLToast`)"
  else
    seed = Seeds::Seed::BitBucket.new
    seed.url = "https://bitbucket.org/#{repo}"
    seed.name = repo.split('/')[1]
    if tag.is_a?(String)
      if options[:commit]
        raise Seeds::Exception.new\
          "#{repo}: Version and commit are both specified."
      end
      seed.version = tag
      seed.files = options[:files] || '**/*.{h,m,mm,swift}'
      # BUGFIX: default exclude_files here as well (matching #github).
      # Previously it was only assigned inside the `seed.files` String
      # check below, leaving it nil for array values and crashing
      # install_seeds when it iterates `seed.exclude_files`.
      seed.exclude_files = options[:exclude_files] || []
    elsif tag.is_a?(Hash)
      seed.commit = tag[:commit][0..6]
      seed.files = tag[:files] || '**/*.{h,m,mm,swift}'
      seed.exclude_files = options[:exclude_files] || []
    end
    # Normalize single-pattern strings into one-element arrays.
    if seed.files.kind_of?(String)
      seed.files = [seed.files]
    end
    if seed.exclude_files.kind_of?(String)
      seed.exclude_files = [seed.exclude_files]
    end
    self.seeds[seed.name] = seed
    self.targets[seed.name] ||= []
    self.targets[seed.name] << @current_target_name.to_s
  end
end
eval seedfile
end
# Removes disused seeds.
#
# @!visibility private
#
def remove_seeds
removings = self.locks.keys - self.seeds.keys
removings.each do |name|
say "Removing #{name} (#{self.locks[name].version})".red
dirname = File.join(self.root_path, "Seeds", name)
FileUtils.rm_rf(dirname)
end
end
# Installs new seeds or updates existing seeds.
#
# @!visibility private
#
def install_seeds
self.seeds.sort.each do |name, seed|
dirname = File.join(self.root_path, "Seeds", seed.name)
self.install_seed(seed, Shellwords.escape(dirname))
next if not seed.files
# add seed files to `source_files`
self.source_files[name] = []
seed.files.each do |file|
paths = Dir.glob(File.join(dirname, file))
# exclude files
seed.exclude_files.each do |exclude_file|
exclude_paths = Dir.glob(File.join(dirname, exclude_file))
exclude_paths.each do |exclude_path|
paths.delete(exclude_path)
end
end
paths.each do |path|
path = self.path_with_prefix(seed.name, path)
self.source_files[name].push(path)
end
end
end
end
# Installs new seed or updates existing seed in {#dirname}.
#
# @!visibility private
#
def install_seed(seed, dirname)
# if remote url has changed, remove directory and clone again
remote_url = `
cd #{dirname} 2>&1 &&
git remote show origin -n | grep Fetch | awk '{ print $3 }' 2>&1
`.strip
if remote_url != seed.url
FileUtils.rm_rf(dirname)
end
# clone and return if not exists
if not File.exist?(dirname)
say "Installing #{seed.name} (#{seed.version or seed.commit})".green
command = "git clone #{seed.url}"
command += " -b #{seed.version}" if seed.version
command += " #{dirname} 2>&1"
output = `#{command}`
not_found = output.include?("not found")
if not_found and output.include?("repository")
raise Seeds::Exception.new\
"#{seed.name}: Couldn't find the repository."
elsif not_found and output.include?("upstream")
raise Seeds::Exception.new\
"#{seed.name}: Couldn't find the tag `#{seed.version}`."
end
if seed.commit and not seed.version # checkout to commit
output = `cd #{dirname} 2>&1 && git checkout #{seed.commit} 2>&1`
if output.include?("did not match any")
raise Seeds::Exception.new\
"#{seed.name}: Couldn't find the commit `#{seed.commit}`."
end
end
return
end
# discard local changes
`cd #{dirname} 2>&1 &&\
git reset HEAD --hard 2>&1 &&\
git checkout . 2>&1 &&\
git clean -fd 2>&1`
if lock = self.locks[seed.name]
lock_version = lock.version
lock_commit = lock.commit
end
if seed.version == lock_version and seed.commit == lock_commit
say "Using #{seed.name} (#{lock_version or lock_commit})"
return
end
if seed.version
say "Installing #{seed.name} #{seed.version}"\
" (was #{lock_version or lock_commit})".green
output = `cd #{dirname} 2>&1 &&\
git fetch origin #{seed.version} --tags 2>&1 &&\
git checkout #{seed.version} 2>&1`
if output.include?("Couldn't find")
raise Seeds::Exception.new\
"#{seed.name}: Couldn't find the tag or branch `#{seed.version}`."
end
elsif seed.commit
say "Installing #{seed.name} #{seed.commit}"\
" (was #{lock_version or lock_commit})".green
output = `cd #{dirname} 2>&1 &&
git checkout master 2>&1 &&
git pull 2>&1 &&
git checkout #{seed.commit} 2>&1`
if output.include?("did not match any")
raise Seeds::Exception.new\
"#{seed.name}: Couldn't find the commit `#{seed.commit}`.".red
end
end
end
# Append seed name as a prefix to file name and returns the path.
#
# @!visibility private
#
def path_with_prefix(seedname, path)
  # Renames a seed's Swift source file to carry a "SeedName_" prefix and
  # returns the (possibly updated) path. Non-Swift files and already
  # prefixed files are returned untouched. No-op unless the Seedfile
  # enabled `swift_seedname_prefix!`.
  return path unless @swift_seedname_prefix
  segments = path.split("/")
  basename = segments[-1]    # e.g. Alamofire.swift
  prefix = seedname + "_"    # e.g. Alamofire_
  # only swift files can have a prefix in their filename
  if File.extname(basename) == '.swift' and not basename.start_with? prefix
    renamed = segments[0...-1].join('/') + '/' + prefix + basename
    File.rename(path, renamed) # rename the file on disk as well
    path = renamed
  end
  path
end
# Adds source files to the group 'Seeds' and save its reference to
# {#file_references} and removes disused sources files,
#
# @see #file_references
#
# @!visibility private
#
def configure_project
say "Configuring #{self.project.path.basename}"
group = self.project["Seeds"]
if group
group.clear
else
uuid = Xcodeproj::uuid_with_name "Seeds"
group = self.project.new_group_with_uuid("Seeds", uuid)
end
# remove existing group that doesn't have any file references
group.groups.each do |seedgroup|
valid_files = seedgroup.children.select do |child|
File.exist?(child.real_path)
end
if valid_files.length == 0
seedgroup.remove_from_project
end
end
self.source_files.each do |seedname, filepaths|
uuid = Xcodeproj::uuid_with_name "Seeds/#{seedname}"
seedgroup = group[seedname] ||
group.new_group_with_uuid(seedname, uuid)
filepaths.each do |path|
filename = path.split('/')[-1]
relpath = path[self.root_path.length..-1]
uuid = Xcodeproj::uuid_with_name relpath
file_reference = seedgroup[filename] ||
seedgroup.new_reference_with_uuid(path, uuid)
self.file_references << file_reference
end
unusing_files = seedgroup.files - self.file_references
unusing_files.each { |file| file.remove_from_project }
end
end
# Adds file references to the 'Sources Build Phase'.
#
# @!visibility private
#
def configure_phase
self.project.targets.each do |target|
begin
phase = target.sources_build_phase
next unless phase
rescue NoMethodError
next
end
# remove zombie build files
phase.files_references.each do |file|
begin
file.real_path
rescue
phase.files.each do |build_file|
phase.files.delete(build_file) if build_file.file_ref == file
end
end
end
removings = [] # name of seeds going to be removed from the target
addings = [] # name of seeds going to be added to the target
self.targets.keys.sort.each do |seed_name|
target_names = self.targets[seed_name]
if not target_names.include?(target.name)
removings << seed_name if not removings.include?(seed_name)
else
addings << seed_name if not addings.include?(seed_name)
end
end
self.file_references.each do |file|
removings.each do |seed_names|
next if not seed_names.include?(file.parent.name)
phase.files.each do |build_file|
phase.files.delete(build_file) if build_file.file_ref == file
end
end
addings.each do |seed_names|
next if file.name.end_with? ".h"
next if not seed_names.include?(file.parent.name)
uuid = Xcodeproj::uuid_with_name "#{target.name}:#{file.name}"
phase.add_file_reference_with_uuid(file, uuid, true)
end
end
end
end
# Writes Seedfile.lock file.
#
# @!visibility private
#
def build_lockfile
  # Serializes installed seed versions into Seeds/Seedfile.lock as YAML.
  # Entries read "Name (1.2.3)" for tags or "Name ($commit)" for commit pins.
  entries = self.seeds.map do |name, seed|
    "#{name} (#{seed.version or '$' + seed.commit})"
  end
  File.write(self.lockfile_path, YAML.dump({ "SEEDS" => entries }))
end
def validate_project
  # Fails fast when no Xcode project has been loaded yet.
  return unless self.project.nil?
  raise Seeds::Exception.new "Couldn't find .xcodeproj file."
end
# Prints a message if {#mute} is `false`.
#
# @see #mute
#
def say(*strings)
  # Joins the given strings with spaces and prints them, unless output
  # has been muted via {#mute}.
  puts(strings.join(" ")) unless @mute
end
end
end
Add exception handling for network failures ("unable to access" / "Failed to connect to") when cloning seeds in Seeds::Core#install_seed
module Seeds
class Core
# @return [String] project folder path
#
attr_reader :root_path
# @return [Boolean] whether display outputs
#
attr_accessor :mute
# @return [String] Seedfile path
#
# @!visibility private
#
attr_reader :seedfile_path
# @return [String] Seedfile.lock path
#
# @!visibility private
#
attr_reader :lockfile_path
# @return [Xcodeproj::Project] Xcode project
#
# @!visibility private
#
attr_accessor :project
# @return [String] content of Seedfile
#
# @!visibility private
#
attr_accessor :seedfile
# @return [String] content of Seedfile.lock
#
# @!visibility private
#
attr_accessor :lockfile
# @return [Hash{Sting => Seeds::Seed}] seeds by seed name
#
# @!visibility private
#
attr_reader :seeds
# @return [Hash{Sting => Seeds::Seed}] locked dependencies by seed name
#
# @!visibility private
#
attr_reader :locks
# @return [Hash{Sting => String}] target name by seed name
#
# @!visibility private
#
attr_reader :targets
# @return [Hash{Sting => Seeds::Seed}] source file paths by seed name
#
# @!visibility private
#
attr_reader :source_files
# @return [Array<Xcodeproj::Project::Object::PBXFileReference>]
# file references that will be added to project
#
# @!visibility private
#
attr_reader :file_references
# @return [Boolean] whether append seed name as a prefix to swift files
#
# @!visibility private
#
attr_accessor :swift_seedname_prefix
# @param [String] root_path
# The path provided will be used for detecting Xcode project and
# Seedfile.
#
# @see #root_path
#
def initialize(root_path)
@root_path = root_path
@seedfile_path = File.join(root_path, "Seedfile")
@lockfile_path = File.join(root_path, "Seeds", "Seedfile.lock")
@seeds = {}
@locks = {}
@targets = {}
@source_files = {}
@file_references = []
end
# Read Seedfile and install dependencies. An exception will be raised if
# there is no .xcodeproj file or Seedfile in the {#root_path}.
#
# @see #root_path
#
def install
self.prepare_requirements
self.analyze_dependencies
self.execute_seedfile
self.remove_seeds
self.install_seeds
self.configure_project
self.configure_phase
self.project.save
self.build_lockfile
@seeds = {}
@locks = {}
@targets = {}
@source_files = {}
@file_references = []
@swift_seedname_prefix = false
end
# Read Xcode project, Seedfile and lockfile. An exception will be raised if
# there is no .xcodeproj file or Seedfile in the {#root_path}.
#
# @see #root_path
#
# @!visibility private
#
def prepare_requirements
# .xcodeproj
project_filename = Dir.glob("#{root_path}/*.xcodeproj")[0]
if project_filename
self.project = Xcodeproj::Project.open(project_filename)
end
# Seedfile
begin
self.seedfile = File.read(self.seedfile_path)
rescue Errno::ENOENT
raise Seeds::Exception.new "Couldn't find Seedfile."
end
# Seedfile.lock - optional
begin
self.lockfile = File.read(self.lockfile_path)
rescue Errno::ENOENT
end
end
# Parses Seedfile.lockfile into {#lockfile}.
#
# @see #lockfile
#
# @!visibility private
#
def analyze_dependencies
say "Anaylizing dependencies"
# Seedfile.lock
if self.lockfile
locks = YAML.load(self.lockfile)
locks["SEEDS"].each do |lock|
seed = Seeds::Seed.new
seed.name = lock.split(' (')[0]
seed.version = lock.split('(')[1].split(')')[0]
if seed.version.start_with? '$'
seed.commit = seed.version[1..-1]
seed.version = nil
end
self.locks[seed.name] = seed
end
end
end
# Executes {#seedfile} using `eval`
#
# @!visibility private
#
def execute_seedfile
@current_target_name = nil
# Sets `@swift_seedname_prefix` as `true`.
#
# @!scope method
# @!visibility private
#
def swift_seedname_prefix!()
@swift_seedname_prefix = true
end
# Set current Xcode project with given path.
#
# @!scope method
# @!visibility private
#
def xcodeproj(path)
proejct_filename = File.join(self.root_path, path)
self.project = Xcodeproj::Project.open(proejct_filename)
self.validate_project
end
# Sets `@current_target_name` and executes code block.
#
# @param [String] names The name of target.
#
# @!scope method
# @!visibility private
#
def target(*names, &code)
self.validate_project
names.each do |name|
name = name.to_s # use string instead of symbol
target = self.project.target_named(name)
if not target
raise Seeds::Exception.new\
"#{self.project.path.basename} doesn't have a target `#{name}`"
end
@current_target_name = name
code.call()
end
@current_target_name = nil
end
# Creates a new instance of {#Seeds::Seed::GitHub} and adds to {#seeds}.
#
# @see #Seeds::Seed::GitHub
#
# @!scope method
# @!visibility private
#
def github(repo, tag, options={})
self.validate_project
if not @current_target_name # apply to all targets
target *self.project.targets.map(&:name) do
send(__callee__, repo, tag, options)
end
elsif repo.split('/').count != 2
raise Seeds::Exception.new\
"#{repo}: GitHub should have both username and repo name.\n"\
" (e.g. `devxoul/JLToast`)"
else
seed = Seeds::Seed::GitHub.new
seed.url = "https://github.com/#{repo}"
seed.name = repo.split('/')[1]
if tag.is_a?(String)
if options[:commit]
raise Seeds::Exception.new\
"#{repo}: Version and commit are both specified."
end
seed.version = tag
seed.files = options[:files] || '**/*.{h,m,mm,swift}'
seed.exclude_files = options[:exclude_files] || []
elsif tag.is_a?(Hash)
seed.commit = tag[:commit][0..6]
seed.files = tag[:files] || '**/*.{h,m,mm,swift}'
seed.exclude_files = options[:exclude_files] || []
end
if seed.files.kind_of?(String)
seed.files = [seed.files]
end
if seed.exclude_files.kind_of?(String)
seed.exclude_files = [seed.exclude_files]
end
self.seeds[seed.name] = seed
self.targets[seed.name] ||= []
self.targets[seed.name] << @current_target_name.to_s
end
end
# Creates a new instance of {#Seeds::Seed::BitBucket} and adds to
# {#seeds}.
#
# @see #Seeds::Seed::BitBucket
#
# @!scope method
# @!visibility private
#
def bitbucket(repo, tag, options={})
  # Registers a BitBucket-hosted seed for the current target(s).
  #
  # @param repo [String] "username/reponame" slug on bitbucket.org
  # @param tag [String, Hash] version tag, or a Hash with :commit / :files
  # @param options [Hash] optional :commit, :files, :exclude_files patterns
  self.validate_project
  if not @current_target_name # apply to all targets
    target *self.project.targets.map(&:name) do
      send(__callee__, repo, tag, options)
    end
  elsif repo.split('/').count != 2
    raise Seeds::Exception.new\
      "#{repo}: BitBucket should have both username and repo name.\n"\
      " (e.g. `devxoul/JLToast`)"
  else
    seed = Seeds::Seed::BitBucket.new
    seed.url = "https://bitbucket.org/#{repo}"
    seed.name = repo.split('/')[1]
    if tag.is_a?(String)
      if options[:commit]
        raise Seeds::Exception.new\
          "#{repo}: Version and commit are both specified."
      end
      seed.version = tag
      seed.files = options[:files] || '**/*.{h,m,mm,swift}'
      # BUGFIX: default exclude_files here as well (matching #github).
      # Previously it was only assigned inside the `seed.files` String
      # check below, leaving it nil for array values and crashing
      # install_seeds when it iterates `seed.exclude_files`.
      seed.exclude_files = options[:exclude_files] || []
    elsif tag.is_a?(Hash)
      seed.commit = tag[:commit][0..6]
      seed.files = tag[:files] || '**/*.{h,m,mm,swift}'
      seed.exclude_files = options[:exclude_files] || []
    end
    # Normalize single-pattern strings into one-element arrays.
    if seed.files.kind_of?(String)
      seed.files = [seed.files]
    end
    if seed.exclude_files.kind_of?(String)
      seed.exclude_files = [seed.exclude_files]
    end
    self.seeds[seed.name] = seed
    self.targets[seed.name] ||= []
    self.targets[seed.name] << @current_target_name.to_s
  end
end
eval seedfile
end
# Removes disused seeds.
#
# @!visibility private
#
def remove_seeds
removings = self.locks.keys - self.seeds.keys
removings.each do |name|
say "Removing #{name} (#{self.locks[name].version})".red
dirname = File.join(self.root_path, "Seeds", name)
FileUtils.rm_rf(dirname)
end
end
# Installs new seeds or updates existing seeds.
#
# @!visibility private
#
def install_seeds
self.seeds.sort.each do |name, seed|
dirname = File.join(self.root_path, "Seeds", seed.name)
self.install_seed(seed, Shellwords.escape(dirname))
next if not seed.files
# add seed files to `source_files`
self.source_files[name] = []
seed.files.each do |file|
paths = Dir.glob(File.join(dirname, file))
# exclude files
seed.exclude_files.each do |exclude_file|
exclude_paths = Dir.glob(File.join(dirname, exclude_file))
exclude_paths.each do |exclude_path|
paths.delete(exclude_path)
end
end
paths.each do |path|
path = self.path_with_prefix(seed.name, path)
self.source_files[name].push(path)
end
end
end
end
# Installs new seed or updates existing seed in {#dirname}.
#
# @!visibility private
#
def install_seed(seed, dirname)
# if remote url has changed, remove directory and clone again
remote_url = `
cd #{dirname} 2>&1 &&
git remote show origin -n | grep Fetch | awk '{ print $3 }' 2>&1
`.strip
if remote_url != seed.url
FileUtils.rm_rf(dirname)
end
# clone and return if not exists
if not File.exist?(dirname)
say "Installing #{seed.name} (#{seed.version or seed.commit})".green
command = "git clone #{seed.url}"
command += " -b #{seed.version}" if seed.version
command += " #{dirname} 2>&1"
output = `#{command}`
unable_to_access = output.include?("unable to access")
if unable_to_access and output.include?("Failed to connect to")
raise Seeds::Exception.new\
"#{seed.name}: Failed to connect to #{seed.url}. \n#{output}"
end
not_found = output.include?("not found")
if not_found and output.include?("repository")
raise Seeds::Exception.new\
"#{seed.name}: Couldn't find the repository."
elsif not_found and output.include?("upstream")
raise Seeds::Exception.new\
"#{seed.name}: Couldn't find the tag `#{seed.version}`."
end
if seed.commit and not seed.version # checkout to commit
output = `cd #{dirname} 2>&1 && git checkout #{seed.commit} 2>&1`
if output.include?("did not match any")
raise Seeds::Exception.new\
"#{seed.name}: Couldn't find the commit `#{seed.commit}`."
end
end
return
end
# discard local changes
`cd #{dirname} 2>&1 &&\
git reset HEAD --hard 2>&1 &&\
git checkout . 2>&1 &&\
git clean -fd 2>&1`
if lock = self.locks[seed.name]
lock_version = lock.version
lock_commit = lock.commit
end
if seed.version == lock_version and seed.commit == lock_commit
say "Using #{seed.name} (#{lock_version or lock_commit})"
return
end
if seed.version
say "Installing #{seed.name} #{seed.version}"\
" (was #{lock_version or lock_commit})".green
output = `cd #{dirname} 2>&1 &&\
git fetch origin #{seed.version} --tags 2>&1 &&\
git checkout #{seed.version} 2>&1`
if output.include?("Couldn't find")
raise Seeds::Exception.new\
"#{seed.name}: Couldn't find the tag or branch `#{seed.version}`."
end
elsif seed.commit
say "Installing #{seed.name} #{seed.commit}"\
" (was #{lock_version or lock_commit})".green
output = `cd #{dirname} 2>&1 &&
git checkout master 2>&1 &&
git pull 2>&1 &&
git checkout #{seed.commit} 2>&1`
if output.include?("did not match any")
raise Seeds::Exception.new\
"#{seed.name}: Couldn't find the commit `#{seed.commit}`.".red
end
end
end
# Append seed name as a prefix to file name and returns the path.
#
# @!visibility private
#
def path_with_prefix(seedname, path)
if @swift_seedname_prefix
components = path.split("/")
prefix = seedname + "_" # Alamofire_
filename = components[-1] # Alamofire.swift
extension = File.extname(filename) # .swift
# only swift files can have prefix in filename
if extension == '.swift' and not filename.start_with? prefix
filename = prefix + filename # Alamofire_Alamofire.swift
newpath = components[0...-1].join('/') + '/' + filename
File.rename(path, newpath) # rename real files
path = newpath
end
end
path
end
# Adds source files to the group 'Seeds' and save its reference to
# {#file_references} and removes disused sources files,
#
# @see #file_references
#
# @!visibility private
#
def configure_project
say "Configuring #{self.project.path.basename}"
group = self.project["Seeds"]
if group
group.clear
else
uuid = Xcodeproj::uuid_with_name "Seeds"
group = self.project.new_group_with_uuid("Seeds", uuid)
end
# remove existing group that doesn't have any file references
group.groups.each do |seedgroup|
valid_files = seedgroup.children.select do |child|
File.exist?(child.real_path)
end
if valid_files.length == 0
seedgroup.remove_from_project
end
end
self.source_files.each do |seedname, filepaths|
uuid = Xcodeproj::uuid_with_name "Seeds/#{seedname}"
seedgroup = group[seedname] ||
group.new_group_with_uuid(seedname, uuid)
filepaths.each do |path|
filename = path.split('/')[-1]
relpath = path[self.root_path.length..-1]
uuid = Xcodeproj::uuid_with_name relpath
file_reference = seedgroup[filename] ||
seedgroup.new_reference_with_uuid(path, uuid)
self.file_references << file_reference
end
unusing_files = seedgroup.files - self.file_references
unusing_files.each { |file| file.remove_from_project }
end
end
# Adds file references to the 'Sources Build Phase'.
#
# @!visibility private
#
def configure_phase
self.project.targets.each do |target|
begin
phase = target.sources_build_phase
next unless phase
rescue NoMethodError
next
end
# remove zombie build files
phase.files_references.each do |file|
begin
file.real_path
rescue
phase.files.each do |build_file|
phase.files.delete(build_file) if build_file.file_ref == file
end
end
end
removings = [] # name of seeds going to be removed from the target
addings = [] # name of seeds going to be added to the target
self.targets.keys.sort.each do |seed_name|
target_names = self.targets[seed_name]
if not target_names.include?(target.name)
removings << seed_name if not removings.include?(seed_name)
else
addings << seed_name if not addings.include?(seed_name)
end
end
self.file_references.each do |file|
removings.each do |seed_names|
next if not seed_names.include?(file.parent.name)
phase.files.each do |build_file|
phase.files.delete(build_file) if build_file.file_ref == file
end
end
addings.each do |seed_names|
next if file.name.end_with? ".h"
next if not seed_names.include?(file.parent.name)
uuid = Xcodeproj::uuid_with_name "#{target.name}:#{file.name}"
phase.add_file_reference_with_uuid(file, uuid, true)
end
end
end
end
# Writes Seedfile.lock file.
#
# @!visibility private
#
def build_lockfile
tree = { "SEEDS" => [] }
self.seeds.each do |name, seed|
tree["SEEDS"] << "#{name} (#{seed.version or '$' + seed.commit})"
end
File.write(self.lockfile_path, YAML.dump(tree))
end
def validate_project
if self.project.nil?
raise Seeds::Exception.new "Couldn't find .xcodeproj file."
end
end
# Prints a message if {#mute} is `false`.
#
# @see #mute
#
def say(*strings)
puts strings.join(" ") if not @mute
end
end
end
|
# Core engine of the Seeds dependency manager for Xcode projects:
# reads a Seedfile, syncs seed git checkouts under Seeds/, adds their
# source files to the .xcodeproj, and records versions in Seedfile.lock.
module Seeds
class Core
attr_reader :root_path, :seedfile_path, :lockfile_path
attr_accessor :project, :seedfile, :lockfile
attr_reader :seeds, :locks
attr_reader :source_files, :file_references
# root_path: project directory containing the .xcodeproj and Seedfile.
def initialize(root_path)
@root_path = root_path
@seedfile_path = File.join(root_path, "Seedfile")
@lockfile_path = File.join(root_path, "Seeds", "Seedfile.lock")
@seeds = {}
@locks = {}
@source_files = {}
@file_references = []
end
# Full install pipeline; exits the process when prerequisites are missing.
def install
self.prepare_requirements
self.analyze_dependencies
self.remove_seeds
self.install_seeds
self.configure_project
self.configure_phase
self.project.save
self.build_lockfile
end
# Loads the Xcode project and Seedfile (both required; exit 1 if absent)
# and the optional Seedfile.lock.
def prepare_requirements
# .xcodeproj
project_filename = Dir.glob("#{root_path}/*.xcodeproj")[0]
if not project_filename
puts "Couldn't find .xcodeproj file.".red
exit 1
end
self.project = Xcodeproj::Project.open(project_filename)
# Seedfile
begin
self.seedfile = File.read(self.seedfile_path)
rescue Errno::ENOENT
puts "Couldn't find Seedfile.".red
exit 1
end
# Seedfile.lock - optional
begin
self.lockfile = File.read(self.lockfile_path)
rescue Errno::ENOENT
end
end
# Parses lockfile entries of the form "name (version)" into self.locks,
# then evaluates the Seedfile DSL to populate self.seeds.
def analyze_dependencies
puts "Anaylizing dependencies"
# Seedfile.lock
if self.lockfile
locks = YAML.load(self.lockfile)
locks["SEEDS"].each do |lock|
seed = Seeds::Seed.new
seed.name = lock.split(' (')[0]
seed.version = lock.split('(')[1].split(')')[0]
self.locks[seed.name] = seed
end
end
# Seedfile
# NOTE: eval executes the Seedfile as arbitrary Ruby in this instance's
# context; `github` calls in it register seeds.
eval self.seedfile
end
# Seedfile DSL entry: declare a GitHub-hosted seed pinned to a tag.
# options[:files] narrows which files are collected (default: all
# Obj-C/Swift sources).
def github(repo, tag, options={})
seed = Seeds::Seed::GitHub.new
seed.url = "https://github.com/#{repo}"
seed.name = repo.split('/')[1]
seed.version = tag
seed.files = options[:files] || '**/*.{h,m,mm,swift}'
if seed.files.kind_of?(String)
seed.files = [seed.files]
end
self.seeds[seed.name] = seed
end
# Deletes checkouts of seeds that are in the lockfile but no longer
# declared in the Seedfile.
def remove_seeds
removings = self.locks.keys - self.seeds.keys
removings.each do |name|
puts "Removing #{name} (#{self.locks[name].version})".red
dirname = File.join(self.root_path, "Seeds", name)
FileUtils.rm_rf(dirname)
end
end
# Clones or updates each declared seed to its pinned tag, then globs
# the seed's source files into self.source_files.
def install_seeds
self.seeds.each do |name, seed|
dirname = File.join(self.root_path, "Seeds", name)
if File.exist?(dirname)
tag = `cd #{dirname} && git describe --tags --abbrev=0 2>&1`
tag.strip!
if tag == seed.version
puts "Using #{name} (#{seed.version})"
else
puts "Installing #{name} #{seed.version} (was #{tag})".green
`cd #{dirname} 2>&1 &&\
git reset HEAD --hard 2>&1 &&\
git checkout . 2>&1 &&\
git clean -fd 2>&1 &&\
git fetch origin #{seed.version} 2>&1 &&\
git checkout #{seed.version} 2>&1`
end
else
puts "Installing #{name} (#{seed.version})".green
output = `git clone #{seed.url} -b #{seed.version} #{dirname} 2>&1`
if output.include?("not found")
if output.include?("repository")
puts "[!] #{name}: Couldn't find the repository.".red
elsif output.include?("upstream")
puts "[!] #{name}: Couldn't find the tag `#{seed.version}`.".red
end
end
end
if seed.files
seed.files.each do |file|
# NOTE(review): this assignment overwrites source_files[name] on
# every iteration, so only the LAST pattern's matches survive when
# a seed declares multiple file patterns — confirm intent.
self.source_files[name] = Dir.glob(File.join(dirname, file))
end
end
end
end
# Mirrors collected source files into a 'Seeds' group of the Xcode
# project, pruning groups/references that no longer exist on disk.
def configure_project
puts "Configuring #{self.project.path.basename}"
group = self.project['Seeds'] || self.project.new_group('Seeds')
# remove existing group that doesn't have any file references
group.groups.each do |seedgroup|
valid_files = seedgroup.children.select do |child|
File.exist?(child.real_path)
end
if valid_files.length == 0
seedgroup.remove_from_project
end
end
self.source_files.each do |seedname, filepaths|
seedgroup = group[seedname] || group.new_group(seedname)
filepaths.each do |path|
filename = path.split('/')[-1]
file_reference = seedgroup[filename] || seedgroup.new_file(path)
self.file_references << file_reference
end
unusing_files = seedgroup.files - self.file_references
unusing_files.each { |file| file.remove_from_project }
end
end
# Adds the collected file references to every non-test target's
# 'Compile Sources' build phase, dropping dangling references first.
# Exits with 1 if a target lacks that phase.
def configure_phase
targets = self.project.targets.select do |t|
not t.name.end_with?('Tests')
end
targets.each do |target|
# detect 'Compile Sources' build phase
phases = target.build_phases.select do |phase|
phase.kind_of?(Xcodeproj::Project::Object::PBXSourcesBuildPhase)
end
phase = phases[0]
if not phase
puts "[!] Target `#{target}` doesn't have build phase "\
"'Compile Sources'.".red
exit 1
end
# remove zombie file references
phase.files_references.each do |file_reference|
begin
file_reference.real_path
rescue
phase.remove_file_reference(file_reference)
end
end
# add file references to sources build phase
self.file_references.each do |file|
if not phase.include?(file)
phase.add_file_reference(file)
end
end
end
end
# Writes "name (version)" for all installed seeds to Seedfile.lock.
def build_lockfile
tree = { "SEEDS" => [] }
self.seeds.each do |name, seed|
tree["SEEDS"] << "#{name} (#{seed.version})"
end
File.write(self.lockfile_path, YAML.dump(tree))
end
end
end
Reset variables after installing.
# Core engine of the Seeds dependency manager (revision that resets its
# collected state after install so the instance is reusable).
module Seeds
class Core
attr_reader :root_path, :seedfile_path, :lockfile_path
attr_accessor :project, :seedfile, :lockfile
attr_reader :seeds, :locks
attr_reader :source_files, :file_references
# root_path: project directory containing the .xcodeproj and Seedfile.
def initialize(root_path)
@root_path = root_path
@seedfile_path = File.join(root_path, "Seedfile")
@lockfile_path = File.join(root_path, "Seeds", "Seedfile.lock")
@seeds = {}
@locks = {}
@source_files = {}
@file_references = []
end
# Full install pipeline; exits the process when prerequisites are missing.
def install
self.prepare_requirements
self.analyze_dependencies
self.remove_seeds
self.install_seeds
self.configure_project
self.configure_phase
self.project.save
self.build_lockfile
# Reset collected state so a later install on this instance does not
# see stale seeds/locks/globs/file references.
@seeds = {}
@locks = {}
@source_files = {}
@file_references = []
end
# Loads the Xcode project and Seedfile (both required; exit 1 if absent)
# and the optional Seedfile.lock.
def prepare_requirements
# .xcodeproj
project_filename = Dir.glob("#{root_path}/*.xcodeproj")[0]
if not project_filename
puts "Couldn't find .xcodeproj file.".red
exit 1
end
self.project = Xcodeproj::Project.open(project_filename)
# Seedfile
begin
self.seedfile = File.read(self.seedfile_path)
rescue Errno::ENOENT
puts "Couldn't find Seedfile.".red
exit 1
end
# Seedfile.lock - optional
begin
self.lockfile = File.read(self.lockfile_path)
rescue Errno::ENOENT
end
end
# Parses lockfile entries of the form "name (version)" into self.locks,
# then evaluates the Seedfile DSL to populate self.seeds.
def analyze_dependencies
puts "Anaylizing dependencies"
# Seedfile.lock
if self.lockfile
locks = YAML.load(self.lockfile)
locks["SEEDS"].each do |lock|
seed = Seeds::Seed.new
seed.name = lock.split(' (')[0]
seed.version = lock.split('(')[1].split(')')[0]
self.locks[seed.name] = seed
end
end
# Seedfile
# NOTE: eval executes the Seedfile as arbitrary Ruby in this instance's
# context; `github` calls in it register seeds.
eval self.seedfile
end
# Seedfile DSL entry: declare a GitHub-hosted seed pinned to a tag.
def github(repo, tag, options={})
seed = Seeds::Seed::GitHub.new
seed.url = "https://github.com/#{repo}"
seed.name = repo.split('/')[1]
seed.version = tag
seed.files = options[:files] || '**/*.{h,m,mm,swift}'
if seed.files.kind_of?(String)
seed.files = [seed.files]
end
self.seeds[seed.name] = seed
end
# Deletes checkouts of seeds that are in the lockfile but no longer
# declared in the Seedfile.
def remove_seeds
removings = self.locks.keys - self.seeds.keys
removings.each do |name|
puts "Removing #{name} (#{self.locks[name].version})".red
dirname = File.join(self.root_path, "Seeds", name)
FileUtils.rm_rf(dirname)
end
end
# Clones or updates each declared seed to its pinned tag, then globs
# the seed's source files into self.source_files.
def install_seeds
self.seeds.each do |name, seed|
dirname = File.join(self.root_path, "Seeds", name)
if File.exist?(dirname)
tag = `cd #{dirname} && git describe --tags --abbrev=0 2>&1`
tag.strip!
if tag == seed.version
puts "Using #{name} (#{seed.version})"
else
puts "Installing #{name} #{seed.version} (was #{tag})".green
`cd #{dirname} 2>&1 &&\
git reset HEAD --hard 2>&1 &&\
git checkout . 2>&1 &&\
git clean -fd 2>&1 &&\
git fetch origin #{seed.version} 2>&1 &&\
git checkout #{seed.version} 2>&1`
end
else
puts "Installing #{name} (#{seed.version})".green
output = `git clone #{seed.url} -b #{seed.version} #{dirname} 2>&1`
if output.include?("not found")
if output.include?("repository")
puts "[!] #{name}: Couldn't find the repository.".red
elsif output.include?("upstream")
puts "[!] #{name}: Couldn't find the tag `#{seed.version}`.".red
end
end
end
if seed.files
seed.files.each do |file|
# NOTE(review): this assignment overwrites source_files[name] on
# every iteration, so only the LAST pattern's matches survive when
# a seed declares multiple file patterns — confirm intent.
self.source_files[name] = Dir.glob(File.join(dirname, file))
end
end
end
end
# Mirrors collected source files into a 'Seeds' group of the Xcode
# project, pruning groups/references that no longer exist on disk.
def configure_project
puts "Configuring #{self.project.path.basename}"
group = self.project['Seeds'] || self.project.new_group('Seeds')
# remove existing group that doesn't have any file references
group.groups.each do |seedgroup|
valid_files = seedgroup.children.select do |child|
File.exist?(child.real_path)
end
if valid_files.length == 0
seedgroup.remove_from_project
end
end
self.source_files.each do |seedname, filepaths|
seedgroup = group[seedname] || group.new_group(seedname)
filepaths.each do |path|
filename = path.split('/')[-1]
file_reference = seedgroup[filename] || seedgroup.new_file(path)
self.file_references << file_reference
end
unusing_files = seedgroup.files - self.file_references
unusing_files.each { |file| file.remove_from_project }
end
end
# Adds the collected file references to every non-test target's
# 'Compile Sources' build phase, dropping dangling references first.
# Exits with 1 if a target lacks that phase.
def configure_phase
targets = self.project.targets.select do |t|
not t.name.end_with?('Tests')
end
targets.each do |target|
# detect 'Compile Sources' build phase
phases = target.build_phases.select do |phase|
phase.kind_of?(Xcodeproj::Project::Object::PBXSourcesBuildPhase)
end
phase = phases[0]
if not phase
puts "[!] Target `#{target}` doesn't have build phase "\
"'Compile Sources'.".red
exit 1
end
# remove zombie file references
phase.files_references.each do |file_reference|
begin
file_reference.real_path
rescue
phase.remove_file_reference(file_reference)
end
end
# add file references to sources build phase
self.file_references.each do |file|
if not phase.include?(file)
phase.add_file_reference(file)
end
end
end
end
# Writes "name (version)" for all installed seeds to Seedfile.lock.
def build_lockfile
tree = { "SEEDS" => [] }
self.seeds.each do |name, seed|
tree["SEEDS"] << "#{name} (#{seed.version})"
end
File.write(self.lockfile_path, YAML.dump(tree))
end
end
end
|
# -*- encoding: utf-8 -*-
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
require 'pronto/reek/version'
require 'English'
# Gem specification for pronto-reek, the Pronto runner wrapping Reek.
Gem::Specification.new do |s|
  s.name = 'pronto-reek'
  s.version = Pronto::ReekVersion::VERSION
  s.platform = Gem::Platform::RUBY
  s.author = 'Mindaugas Mozūras'
  s.email = 'mindaugas.mozuras@gmail.com'
  # BUG FIX: the homepage used the non-existent "github.org" TLD.
  s.homepage = 'http://github.com/mmozuras/pronto-reek'
  s.summary = 'Pronto runner for Reek, code smell detector for Ruby'
  s.licenses = ['MIT']
  s.required_ruby_version = '>= 2.0.0'
  s.rubygems_version = '1.8.23'
  # Ship all tracked files except tests and repository housekeeping.
  s.files = `git ls-files`.split($RS).reject do |file|
    file =~ %r{^(?:
    spec/.*
    |Gemfile
    |Rakefile
    |\.rspec
    |\.gitignore
    |\.rubocop.yml
    |\.travis.yml
    )$}x
  end
  s.test_files = []
  s.extra_rdoc_files = ['LICENSE', 'README.md']
  s.require_paths = ['lib']
  s.add_dependency('pronto', '~> 0.8.0')
  s.add_dependency('reek', '~> 4.2')
  s.add_development_dependency('rake', '~> 11.0')
  s.add_development_dependency('rspec', '~> 3.4')
  s.add_development_dependency('rspec-its', '~> 1.2')
end
Fixes homepage in the gemspec
# -*- encoding: utf-8 -*-
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
require 'pronto/reek/version'
require 'English'
# Gem specification for pronto-reek, the Pronto runner wrapping Reek.
Gem::Specification.new do |s|
  # Identity and authorship.
  s.name = 'pronto-reek'
  s.version = Pronto::ReekVersion::VERSION
  s.platform = Gem::Platform::RUBY
  s.author = 'Mindaugas Mozūras'
  s.email = 'mindaugas.mozuras@gmail.com'
  s.homepage = 'http://github.com/mmozuras/pronto-reek'
  s.summary = 'Pronto runner for Reek, code smell detector for Ruby'
  s.licenses = ['MIT']
  # Toolchain requirements.
  s.required_ruby_version = '>= 2.0.0'
  s.rubygems_version = '1.8.23'
  # Ship all tracked files except tests and repository housekeeping.
  excluded = %r{^(?:
    spec/.*
    |Gemfile
    |Rakefile
    |\.rspec
    |\.gitignore
    |\.rubocop.yml
    |\.travis.yml
    )$}x
  s.files = `git ls-files`.split($RS).reject { |file| file =~ excluded }
  s.test_files = []
  s.extra_rdoc_files = ['LICENSE', 'README.md']
  s.require_paths = ['lib']
  # Runtime dependencies.
  s.add_dependency('pronto', '~> 0.8.0')
  s.add_dependency('reek', '~> 4.2')
  # Development-only dependencies.
  s.add_development_dependency('rake', '~> 11.0')
  s.add_development_dependency('rspec', '~> 3.4')
  s.add_development_dependency('rspec-its', '~> 1.2')
end
|
# -*- encoding: utf-8 -*-
# Gem specification for bitarray 0.5.0, a bit array class implemented
# as a C extension (built from ext/extconf.rb).
Gem::Specification.new do |s|
  s.name = %q{bitarray}
  s.version = "0.5.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["James E. Ingram"]
  s.date = %q{2009-06-01}
  s.description = %q{A bit array class for Ruby, implemented as a C extension. Includes methods for setting and clearing individual bits, and all bits at once. Also has the standard array access methods, [] and []=, and it mixes in Enumerable.}
  s.email = %q{ingramj@gmail.com}
  s.extensions = ["ext/extconf.rb"]
  s.extra_rdoc_files = [
    "LICENSE",
    "README"
  ]
  s.files = [
    ".gitignore",
    "LICENSE",
    "README",
    "Rakefile",
    "TODO",
    "VERSION",
    "bitarray.gemspec",
    "examples/bloomfilter.rb",
    "examples/boolnet.rb",
    "ext/bitarray.c",
    "ext/extconf.rb",
    "test/bitfield.rb",
    "test/bm.rb",
    "test/test.rb"
  ]
  # NOTE(review): has_rdoc is deprecated and ignored by modern RubyGems;
  # kept for behavioral compatibility with old installers.
  s.has_rdoc = true
  s.homepage = %q{http://github.com/ingramj/bitarray}
  s.rdoc_options = ["--charset=UTF-8", "--exclude=ext/Makefile", "--exclude=ext/extconf.rb", "--title", "BitArray Documentation"]
  s.require_paths = ["ext"]
  s.required_ruby_version = Gem::Requirement.new(">= 1.8.6")
  s.rubygems_version = %q{1.3.1}
  s.summary = %q{A bitarray class for Ruby, implemented as a C extension.}
  s.test_files = [
    "test/bitfield.rb",
    "test/test.rb",
    "test/bm.rb",
    "examples/bloomfilter.rb",
    "examples/boolnet.rb"
  ]
  # FIX: removed dead code — the generated RubyGems-version check had empty
  # branches, assigned `current_version` without ever using it, and read the
  # deprecated Gem::RubyGemsVersion constant. Behavior is unchanged.
  s.specification_version = 2 if s.respond_to?(:specification_version)
end
Regenerated gemspec for version 0.5.1
# -*- encoding: utf-8 -*-
# Gem specification for bitarray 0.5.1, a bit array class implemented
# as a C extension (built from ext/extconf.rb).
Gem::Specification.new do |s|
  s.name = %q{bitarray}
  s.version = "0.5.1"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["James E. Ingram"]
  s.date = %q{2009-06-02}
  s.description = %q{A bit array class for Ruby, implemented as a C extension. Includes methods for setting and clearing individual bits, and all bits at once. Also has the standard array access methods, [] and []=, and it mixes in Enumerable.}
  s.email = %q{ingramj@gmail.com}
  s.extensions = ["ext/extconf.rb"]
  s.extra_rdoc_files = [
    "LICENSE",
    "README"
  ]
  s.files = [
    ".gitignore",
    "LICENSE",
    "README",
    "Rakefile",
    "TODO",
    "VERSION",
    "bitarray.gemspec",
    "examples/bloomfilter.rb",
    "examples/boolnet.rb",
    "ext/bitarray.c",
    "ext/extconf.rb",
    "test/bm.rb",
    "test/test.rb"
  ]
  # NOTE(review): has_rdoc is deprecated and ignored by modern RubyGems;
  # kept for behavioral compatibility with old installers.
  s.has_rdoc = true
  s.homepage = %q{http://github.com/ingramj/bitarray}
  s.rdoc_options = ["--charset=UTF-8", "--exclude=ext/Makefile", "--exclude=ext/extconf.rb", "--title", "BitArray Documentation"]
  s.require_paths = ["ext"]
  s.required_ruby_version = Gem::Requirement.new(">= 1.8.6")
  s.rubygems_version = %q{1.3.1}
  s.summary = %q{A bitarray class for Ruby, implemented as a C extension.}
  s.test_files = [
    "test/test.rb",
    "test/bm.rb",
    "examples/bloomfilter.rb",
    "examples/boolnet.rb"
  ]
  # FIX: removed dead code — the generated RubyGems-version check had empty
  # branches, assigned `current_version` without ever using it, and read the
  # deprecated Gem::RubyGemsVersion constant. Behavior is unchanged.
  s.specification_version = 2 if s.respond_to?(:specification_version)
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for bitstamp 0.1.0, a Ruby API client for the
# Bitstamp exchange.
Gem::Specification.new do |s|
  s.name = "bitstamp"
  s.version = "0.1.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Jeffrey Wilcke"]
  s.date = "2013-05-16"
  # FIX: replaced the jeweler "TODO:" placeholder strings — RubyGems
  # refuses to publish gems whose description/summary still contain them.
  s.description = "Ruby API for use with bitstamp."
  s.email = "stygeo@gmail.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.md"
  ]
  s.files = [
    ".rspec",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.md",
    "Rakefile",
    "VERSION",
    "bitstamp.gemspec",
    "lib/bitstamp.rb",
    "lib/bitstamp/collection.rb",
    "lib/bitstamp/model.rb",
    "lib/bitstamp/net.rb",
    "lib/bitstamp/orders.rb",
    "spec/bitstamp_spec.rb",
    "spec/collection_spec.rb",
    "spec/orders_spec.rb",
    "spec/spec_helper.rb"
  ]
  # NOTE(review): a later revision changes this URL to the kojnapp org;
  # confirm which repository is canonical before publishing.
  s.homepage = "http://github.com/stygeo/bitstamp"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.25"
  s.summary = "Bitstamp Ruby API"
  # jeweler emits the dependency list three times to support old RubyGems;
  # only the first branch applies on any modern RubyGems.
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<activemodel>, [">= 3.1"])
      s.add_runtime_dependency(%q<activesupport>, [">= 3.1"])
      s.add_runtime_dependency(%q<curb>, ["> 0.8.1"])
      s.add_development_dependency(%q<rspec>, [">= 0"])
      s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_development_dependency(%q<bundler>, ["~> 1.3.5"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
    else
      s.add_dependency(%q<activemodel>, [">= 3.1"])
      s.add_dependency(%q<activesupport>, [">= 3.1"])
      s.add_dependency(%q<curb>, ["> 0.8.1"])
      s.add_dependency(%q<rspec>, [">= 0"])
      s.add_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_dependency(%q<bundler>, ["~> 1.3.5"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
    end
  else
    s.add_dependency(%q<activemodel>, [">= 3.1"])
    s.add_dependency(%q<activesupport>, [">= 3.1"])
    s.add_dependency(%q<curb>, ["> 0.8.1"])
    s.add_dependency(%q<rspec>, [">= 0"])
    s.add_dependency(%q<rdoc>, ["~> 3.12"])
    s.add_dependency(%q<bundler>, ["~> 1.3.5"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
  end
end
Regenerate gemspec for version 0.1.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for bitstamp 0.1.0, a Ruby API client for the
# Bitstamp exchange. Machine-generated by jeweler, so the statements are
# kept byte-stable; only comments are added here.
Gem::Specification.new do |s|
s.name = "bitstamp"
s.version = "0.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Jeffrey Wilcke"]
s.date = "2013-05-16"
s.description = "Ruby API for use with bitstamp."
s.email = "stygeo@gmail.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
".rspec",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"bitstamp.gemspec",
"lib/bitstamp.rb",
"lib/bitstamp/collection.rb",
"lib/bitstamp/model.rb",
"lib/bitstamp/net.rb",
"lib/bitstamp/orders.rb",
"spec/bitstamp_spec.rb",
"spec/collection_spec.rb",
"spec/orders_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = "http://github.com/kojnapp/bitstamp"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.25"
s.summary = "Bitstamp Ruby API"
# jeweler emits the dependency list three times to support old RubyGems;
# only the first branch applies on any modern RubyGems.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activemodel>, [">= 3.1"])
s.add_runtime_dependency(%q<activesupport>, [">= 3.1"])
s.add_runtime_dependency(%q<curb>, ["> 0.8.1"])
s.add_development_dependency(%q<rspec>, [">= 0"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<bundler>, ["~> 1.3.5"])
s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
else
s.add_dependency(%q<activemodel>, [">= 3.1"])
s.add_dependency(%q<activesupport>, [">= 3.1"])
s.add_dependency(%q<curb>, ["> 0.8.1"])
s.add_dependency(%q<rspec>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.3.5"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
end
else
s.add_dependency(%q<activemodel>, [">= 3.1"])
s.add_dependency(%q<activesupport>, [">= 3.1"])
s.add_dependency(%q<curb>, ["> 0.8.1"])
s.add_dependency(%q<rspec>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.3.5"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
end
end
|
use_inline_resources
# Chef why-run mode is supported by this provider.
def whyrun_supported?
  true
end
# Ensure the ceph client key exists with the requested caps and write it
# to a root-only keyring/secret file.
action :add do
  filename = @current_resource.filename
  keyname = @current_resource.keyname
  # Render caps as `mon 'allow r' osd '...'` for the ceph CLI.
  caps = @new_resource.caps.map { |k, v| "#{k} '#{v}'" }.join(' ')
  unless @current_resource.caps_match
    converge_by("Set caps for #{@new_resource}") do
      auth_set_key(keyname, caps)
    end
  end
  file filename do
    content file_content
    owner 'root'
    group 'root'
    mode '640'
  end
end
# Populate @current_resource from resource defaults and live cluster state.
def load_current_resource
  @current_resource = Chef::Resource::CephClient.new(@new_resource.name)
  @current_resource.name(@new_resource.name)
  @current_resource.as_keyring(@new_resource.as_keyring)
  @current_resource.keyname(@new_resource.keyname || "client.#{current_resource.name}.#{node['hostname']}")
  @current_resource.caps(get_caps(@current_resource.keyname))
  default_filename = "/etc/ceph/ceph.client.#{@new_resource.name}.#{node['hostname']}.#{@new_resource.as_keyring ? "keyring" : "secret"}"
  @current_resource.filename(@new_resource.filename || default_filename)
  @current_resource.key(get_new_key(@current_resource.keyname))
  @current_resource.caps_match = true if @current_resource.caps == @new_resource.caps
end
# Keyring files get an INI-style section; secret files hold the bare key.
def file_content
  @current_resource.as_keyring ? "[#{@current_resource.keyname}]\n\tkey = #{@current_resource.key}\n" : @current_resource.key
end
# Fetch the current key for keyname from the cluster (stdout of
# `ceph auth print_key`; empty string when the key does not exist).
def get_new_key(keyname)
  cmd = "ceph auth print_key #{keyname}"
  Mixlib::ShellOut.new(cmd).run_command.stdout
end
# Parse `ceph auth get` output into a {cap_type => cap_spec} hash.
def get_caps(keyname)
  caps = {}
  cmd = "ceph auth get #{keyname}"
  output = Mixlib::ShellOut.new(cmd).run_command.stdout
  output.scan(/caps\s*(\S+)\s*=\s*"([^"]*)"/) { |k, v| caps[k] = v }
  caps
end
# Create (or recreate) the key with the given caps via the mon. identity.
def auth_set_key(keyname, caps)
  # find the monitor secret
  mon_secret = ''
  mons = get_mon_nodes
  if !mons.empty?
    mon_secret = mons[0]['ceph']['monitor-secret']
  elsif mons.empty? && node['ceph']['monitor-secret']
    mon_secret = node['ceph']['monitor-secret']
  else
    Chef::Log.warn('No monitor secret found')
  end
  # try to add the key
  cmd = "ceph auth get-or-create #{keyname} #{caps} --name mon. --key='#{mon_secret}'"
  get_or_create = Mixlib::ShellOut.new(cmd)
  get_or_create.run_command
  # BUG FIX: String#scan always returns an Array (possibly empty), which is
  # truthy, so the original `if stderr.scan(...)` branch deleted and
  # re-created the key on EVERY run. Test for an actual match instead.
  if get_or_create.stderr =~ /EINVAL.*but cap.*does not match/
    Chef::Log.info('Deleting old key with incorrect caps')
    # delete an old key if it exists and is wrong
    Mixlib::ShellOut.new("ceph auth del #{keyname}").run_command
    # try to create again
    get_or_create.run_command
  end
  get_or_create.error!
end
Fixes loading the current client key
use_inline_resources
# Chef why-run mode is supported by this provider.
def whyrun_supported?
  true
end
# Ensure the ceph client key exists with the requested caps and write it
# to a root-only keyring/secret file.
action :add do
  filename = @current_resource.filename
  keyname = @current_resource.keyname
  # Render caps as `mon 'allow r' osd '...'` for the ceph CLI.
  caps = @new_resource.caps.map { |k, v| "#{k} '#{v}'" }.join(' ')
  unless @current_resource.caps_match
    converge_by("Set caps for #{@new_resource}") do
      auth_set_key(keyname, caps)
    end
  end
  file filename do
    content file_content
    owner 'root'
    group 'root'
    mode '640'
  end
end
# Populate @current_resource from resource defaults and live cluster state.
def load_current_resource
  @current_resource = Chef::Resource::CephClient.new(@new_resource.name)
  @current_resource.name(@new_resource.name)
  @current_resource.as_keyring(@new_resource.as_keyring)
  @current_resource.keyname(@new_resource.keyname || "client.#{current_resource.name}.#{node['hostname']}")
  @current_resource.caps(get_caps(@current_resource.keyname))
  default_filename = "/etc/ceph/ceph.client.#{@new_resource.name}.#{node['hostname']}.#{@new_resource.as_keyring ? "keyring" : "secret"}"
  @current_resource.filename(@new_resource.filename || default_filename)
  @current_resource.key = get_key(@current_resource.keyname)
  @current_resource.caps_match = true if @current_resource.caps == @new_resource.caps
end
# Keyring files get an INI-style section; secret files hold the bare key.
def file_content
  @current_resource.as_keyring ? "[#{@current_resource.keyname}]\n\tkey = #{@current_resource.key}\n" : @current_resource.key
end
# Fetch the current key for keyname from the cluster (stdout of
# `ceph auth print_key`; empty string when the key does not exist).
def get_key(keyname)
  cmd = "ceph auth print_key #{keyname}"
  Mixlib::ShellOut.new(cmd).run_command.stdout
end
# Parse `ceph auth get` output into a {cap_type => cap_spec} hash.
def get_caps(keyname)
  caps = {}
  cmd = "ceph auth get #{keyname}"
  output = Mixlib::ShellOut.new(cmd).run_command.stdout
  output.scan(/caps\s*(\S+)\s*=\s*"([^"]*)"/) { |k, v| caps[k] = v }
  caps
end
# Create (or recreate) the key with the given caps via the mon. identity.
def auth_set_key(keyname, caps)
  # find the monitor secret
  mon_secret = ''
  mons = get_mon_nodes
  if !mons.empty?
    mon_secret = mons[0]['ceph']['monitor-secret']
  elsif mons.empty? && node['ceph']['monitor-secret']
    mon_secret = node['ceph']['monitor-secret']
  else
    Chef::Log.warn('No monitor secret found')
  end
  # try to add the key
  cmd = "ceph auth get-or-create #{keyname} #{caps} --name mon. --key='#{mon_secret}'"
  get_or_create = Mixlib::ShellOut.new(cmd)
  get_or_create.run_command
  # BUG FIX: String#scan always returns an Array (possibly empty), which is
  # truthy, so the original `if stderr.scan(...)` branch deleted and
  # re-created the key on EVERY run. Test for an actual match instead.
  if get_or_create.stderr =~ /EINVAL.*but cap.*does not match/
    Chef::Log.info('Deleting old key with incorrect caps')
    # delete an old key if it exists and is wrong
    Mixlib::ShellOut.new("ceph auth del #{keyname}").run_command
    # try to create again
    get_or_create.run_command
  end
  get_or_create.error!
end
|
# Install a Python application: optionally create its user/group, sync the
# git checkout, render the optional config template, and install
# dependencies via pip (requirements file) or setup.py.
action :install do
  if new_resource.create_user
    group new_resource.group do
      action :create
    end
    user new_resource.owner do
      action :create
      gid new_resource.group
    end
  end
  # Update the code.
  git new_resource.path do
    action :sync
    repository new_resource.repository
    checkout_branch new_resource.revision
    destination new_resource.path
    user new_resource.owner
    group new_resource.group
  end
  # If a config file template has been specified, create it.
  template new_resource.config_template do
    # BUG FIX: `.nil` is not a method (NoMethodError at converge time);
    # the nil predicate is `.nil?`.
    only_if { !new_resource.config_template.nil? }
    action :create
    source new_resource.config_template
    path new_resource.config_destination
    variables new_resource.config_vars
    owner new_resource.owner
    group new_resource.group
  end
  # Install the application requirements.
  # If a requirements file has been specified, use pip.
  # otherwise use the setup.py
  if new_resource.requirements_file
    execute 'pip install' do
      action :run
      cwd new_resource.path
      command "pip install -r #{new_resource.requirements_file}"
    end
  else
    execute 'python setup.py install' do
      action :run
      cwd new_resource.path
    end
  end
  new_resource.updated_by_last_action(true)
end
Set up directory for checkout with proper perms
# Install a Python application: optionally create its user/group, prepare
# the checkout directory with correct ownership, sync the git checkout,
# render the optional config template, and install dependencies via pip
# (requirements file) or setup.py.
action :install do
  if new_resource.create_user
    group new_resource.group do
      action :create
    end
    user new_resource.owner do
      action :create
      gid new_resource.group
    end
  end
  # Pre-create the checkout directory so it is owned by the app user.
  directory new_resource.path do
    action :create
    owner new_resource.owner
    group new_resource.group
  end
  # Update the code.
  git new_resource.path do
    action :sync
    repository new_resource.repository
    checkout_branch new_resource.revision
    destination new_resource.path
    user new_resource.owner
    group new_resource.group
  end
  # If a config file template has been specified, create it.
  template new_resource.config_template do
    # BUG FIX: `.nil` is not a method (NoMethodError at converge time);
    # the nil predicate is `.nil?`.
    only_if { !new_resource.config_template.nil? }
    action :create
    source new_resource.config_template
    path new_resource.config_destination
    variables new_resource.config_vars
    owner new_resource.owner
    group new_resource.group
  end
  # Install the application requirements.
  # If a requirements file has been specified, use pip.
  # otherwise use the setup.py
  if new_resource.requirements_file
    execute 'pip install' do
      action :run
      cwd new_resource.path
      command "pip install -r #{new_resource.requirements_file}"
    end
  else
    execute 'python setup.py install' do
      action :run
      cwd new_resource.path
    end
  end
  new_resource.updated_by_last_action(true)
end
|
#
# Author:: Derek Groh (<dgroh@arch.tamu.edu>)
# Cookbook Name:: windows_print
# Provider:: driver
#
# Copyright 2013, Texas A&M
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
require 'mixlib/shellout'
# Install a Windows print driver from a network share: stage the INF
# package into a local cache, register the driver via printui.dll, then
# remove the cache.
action :install do
  if driver_exists?
    Chef::Log.info("#{new_resource.driver_name} already installed - nothing to do.")
    new_resource.updated_by_last_action(false)
  else
    execute "Sanitize Network Drives" do
      command "net use * /d /y"
    end
    execute "Map Network Drive" do
      command "net use z: \"#{new_resource.inf_path}\" /user:\"#{new_resource.domain_username}\" \"#{new_resource.domain_password}\""
    end
    execute "Create Local Cache" do
      command "xcopy \"#{new_resource.inf_path}\" \"C:\\chef\\cache\\#{new_resource.driver_name}\" /Y /S /I"
    end
    execute "Unmap Network Drive" do
      command "net use z: /d"
    end
    execute "Creating print driver: #{new_resource.driver_name}" do
      command "rundll32 printui.dll PrintUIEntry /ia /m \"#{new_resource.driver_name}\" /h \"#{ new_resource.environment}\" /v \"#{new_resource.version}\" /f \"C:\\chef\\cache\\#{new_resource.driver_name}\\#{new_resource.inf_file}\""
    end
    Chef::Log.info("#{ new_resource.driver_name } installed.")
    new_resource.updated_by_last_action(true)
    execute "Cleanup" do
      command "rmdir \"C:\\chef\\cache\\#{new_resource.driver_name}\" /S /Q"
      # FIX: rmdir can exit 1 when it fails to fully delete the cache
      # folder; that should not abort the Chef run.
      returns [0, 1]
    end
  end
end
# Remove the print driver registration when it is present.
action :delete do
  # BUG FIX: this provider defines driver_exists?, not exists?; the
  # original call raised NameError at converge time.
  if driver_exists?
    execute "Deleting print driver: #{new_resource.driver_name}" do
      command "rundll32 printui.dll PrintUIEntry /dd /m \"#{new_resource.driver_name}\" /h \"#{new_resource.environment}\" /v \"#{new_resource.version}\""
    end
    new_resource.updated_by_last_action(true)
  else
    Chef::Log.info("#{ new_resource.driver_name } doesn't exist - can't delete.")
    new_resource.updated_by_last_action(false)
  end
end
# True when the named driver is already registered for the requested
# architecture; queries Win32_PrinterDriver via PowerShell.
def driver_exists?
  case new_resource.environment
  when "x64"
    check = Mixlib::ShellOut.new("powershell.exe \"Get-wmiobject -Class Win32_PrinterDriver -EnableAllPrivileges | where {$_.name -like '#{new_resource.driver_name},3,Windows x64'} | fl name\"").run_command
    Chef::Log.info("\"#{new_resource.driver_name}\" x64 driver found.")
  when "x86"
    check = Mixlib::ShellOut.new("powershell.exe \"Get-wmiobject -Class Win32_PrinterDriver -EnableAllPrivileges | where {$_.name -like '#{new_resource.driver_name},3,Windows NT x86'} | fl name\"").run_command
    Chef::Log.info("\"#{new_resource.driver_name}\" x86 driver found.")
  when "Itanium"
    check = Mixlib::ShellOut.new("powershell.exe \"Get-wmiobject -Class Win32_PrinterDriver -EnableAllPrivileges | where {$_.name -like '#{new_resource.driver_name},3,Itanium'} | fl name\"").run_command
    Chef::Log.info("\"#{new_resource.driver_name}\" xItanium driver found.")
  else
    Chef::Log.info("Please use \"x64\", \"x86\" or \"Itanium\" as the environment type")
    # BUG FIX: on an unrecognized environment `check` was nil, so the
    # trailing check.stdout call raised NoMethodError. Report "not
    # installed" instead.
    return false
  end
  check.stdout.include? new_resource.driver_name
end
# Attempt to prevent typos in new_resource.name
# Returns the INF lines matching the architecture's section marker;
# intended as a basis for deriving the canonical driver name from the INF.
def driver_name
case new_resource.environment
when "x64"
File.readlines(new_resource.inf_path).grep(/NTamd64/)
#Grab Next line String Between " and " and make that new_resource.name
when "x86"
File.readlines(new_resource.inf_path).grep(/NTx86/)
#Grab Next line String Between " and " and make that new_resource.name
when "Itanium"
# NOTE(review): this branch greps /NTx86/ exactly like the x86 branch;
# Itanium INF sections are conventionally marked NTia64 — confirm.
File.readlines(new_resource.inf_path).grep(/NTx86/)
#Grab Next line String Between " and " and make that new_resource.name
else
Chef::Log.info("Please use \"x64\", \"x86\" or \"Itanium\" as the environment type")
end
end
Added return code 1 to the cleanup execute: it can fail to delete the folder and should not abort the run.
#
# Author:: Derek Groh (<dgroh@arch.tamu.edu>)
# Cookbook Name:: windows_print
# Provider:: driver
#
# Copyright 2013, Texas A&M
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
require 'mixlib/shellout'
# Install a Windows print driver from a network share: stage the INF
# package into a local cache, register the driver via printui.dll, then
# remove the cache.
action :install do
if driver_exists?
Chef::Log.info("#{new_resource.driver_name} already installed - nothing to do.")
new_resource.updated_by_last_action(false)
else
# Drop any stale drive mappings before mapping the share to z:.
execute "Sanitize Network Drives" do
command "net use * /d /y"
end
execute "Map Network Drive" do
command "net use z: \"#{new_resource.inf_path}\" /user:\"#{new_resource.domain_username}\" \"#{new_resource.domain_password}\""
end
execute "Create Local Cache" do
command "xcopy \"#{new_resource.inf_path}\" \"C:\\chef\\cache\\#{new_resource.driver_name}\" /Y /S /I"
end
execute "Unmap Network Drive" do
command "net use z: /d"
end
execute "Creating print driver: #{new_resource.driver_name}" do
command "rundll32 printui.dll PrintUIEntry /ia /m \"#{new_resource.driver_name}\" /h \"#{ new_resource.environment}\" /v \"#{new_resource.version}\" /f \"C:\\chef\\cache\\#{new_resource.driver_name}\\#{new_resource.inf_file}\""
end
Chef::Log.info("#{ new_resource.driver_name } installed.")
new_resource.updated_by_last_action(true)
execute "Cleanup" do
command "rmdir \"C:\\chef\\cache\\#{new_resource.driver_name}\" /S /Q"
# rmdir may exit 1 when it cannot fully delete the cache folder;
# that should not abort the Chef run.
returns [0,1]
end
end
end
# Remove the print driver registration when it is present.
action :delete do
  # BUG FIX: this provider defines driver_exists?, not exists?; the
  # original call raised NameError at converge time.
  if driver_exists?
    execute "Deleting print driver: #{new_resource.driver_name}" do
      command "rundll32 printui.dll PrintUIEntry /dd /m \"#{new_resource.driver_name}\" /h \"#{new_resource.environment}\" /v \"#{new_resource.version}\""
    end
    new_resource.updated_by_last_action(true)
  else
    Chef::Log.info("#{ new_resource.driver_name } doesn't exist - can't delete.")
    new_resource.updated_by_last_action(false)
  end
end
# True when the named driver is already registered for the requested
# architecture; queries Win32_PrinterDriver via PowerShell.
def driver_exists?
  case new_resource.environment
  when "x64"
    check = Mixlib::ShellOut.new("powershell.exe \"Get-wmiobject -Class Win32_PrinterDriver -EnableAllPrivileges | where {$_.name -like '#{new_resource.driver_name},3,Windows x64'} | fl name\"").run_command
    Chef::Log.info("\"#{new_resource.driver_name}\" x64 driver found.")
  when "x86"
    check = Mixlib::ShellOut.new("powershell.exe \"Get-wmiobject -Class Win32_PrinterDriver -EnableAllPrivileges | where {$_.name -like '#{new_resource.driver_name},3,Windows NT x86'} | fl name\"").run_command
    Chef::Log.info("\"#{new_resource.driver_name}\" x86 driver found.")
  when "Itanium"
    check = Mixlib::ShellOut.new("powershell.exe \"Get-wmiobject -Class Win32_PrinterDriver -EnableAllPrivileges | where {$_.name -like '#{new_resource.driver_name},3,Itanium'} | fl name\"").run_command
    Chef::Log.info("\"#{new_resource.driver_name}\" xItanium driver found.")
  else
    Chef::Log.info("Please use \"x64\", \"x86\" or \"Itanium\" as the environment type")
    # BUG FIX: on an unrecognized environment `check` was nil, so the
    # trailing check.stdout call raised NoMethodError. Report "not
    # installed" instead.
    return false
  end
  check.stdout.include? new_resource.driver_name
end
# Attempt to prevent typos in new_resource.name
# Returns the INF lines matching the architecture's section marker;
# intended as a basis for deriving the canonical driver name from the INF.
def driver_name
case new_resource.environment
when "x64"
File.readlines(new_resource.inf_path).grep(/NTamd64/)
#Grab Next line String Between " and " and make that new_resource.name
when "x86"
File.readlines(new_resource.inf_path).grep(/NTx86/)
#Grab Next line String Between " and " and make that new_resource.name
when "Itanium"
# NOTE(review): this branch greps /NTx86/ exactly like the x86 branch;
# Itanium INF sections are conventionally marked NTia64 — confirm.
File.readlines(new_resource.inf_path).grep(/NTx86/)
#Grab Next line String Between " and " and make that new_resource.name
else
Chef::Log.info("Please use \"x64\", \"x86\" or \"Itanium\" as the environment type")
end
end
#
# Author:: Conrad Kramer <conrad@kramerapps.com>
# Cookbook Name:: application_node
# Resource:: node
#
# Copyright:: 2013, Kramer Software Productions, LLC. <conrad@kramerapps.com>
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Chef::DSL::IncludeRecipe

# application_nodejs provider callbacks for deploying a Node.js application
# under the `application` cookbook's deploy lifecycle.

action :before_compile do
  # Node itself (and optionally npm) must be installed before compilation.
  include_recipe 'nodejs::install_from_source'
  if new_resource.npm
    include_recipe 'nodejs::npm'
  end
  unless new_resource.restart_command
    # Default restart: (re)start an Upstart service named after the app.
    # NOTE(review): this block is stored and evaluated later; `new_resource`
    # may not resolve in that deferred context (a later revision captures it
    # in a local variable first, per poise/application_ruby#42) — verify.
    new_resource.restart_command do
      service "#{new_resource.application.name}_nodejs" do
        provider Chef::Provider::Service::Upstart
        supports :restart => true, :start => true, :stop => true
        action [:enable, :restart]
      end
    end
  end
  # Make NODE_ENV visible to the deployed process.
  new_resource.environment.update({
    'NODE_ENV' => new_resource.environment_name
  })
end

action :before_deploy do
  # Keep NODE_ENV in sync in case the environment hash was rebuilt.
  new_resource.environment['NODE_ENV'] = new_resource.environment_name
end

action :before_migrate do
  # Install JS dependencies into the release before migrations run.
  if new_resource.npm
    execute 'npm install' do
      cwd new_resource.release_path
      user new_resource.owner
      group new_resource.group
      # npm needs a writable HOME for its cache/config.
      environment new_resource.environment.merge({ 'HOME' => new_resource.shared_path })
    end
  end
end

action :before_symlink do
end

action :before_restart do
  # Render the Upstart job that supervises the Node.js process.
  template "#{new_resource.application.name}.upstart.conf" do
    path "/etc/init/#{new_resource.application.name}_nodejs.conf"
    source new_resource.template ? new_resource.template : 'nodejs.upstart.conf.erb'
    # A custom template comes from the wrapping cookbook; the default ships here.
    cookbook new_resource.template ? new_resource.cookbook_name.to_s : 'application_nodejs'
    owner 'root'
    group 'root'
    mode '0644'
    variables(
      :user => new_resource.owner,
      :group => new_resource.group,
      :node_dir => node['nodejs']['dir'],
      :app_dir => new_resource.release_path,
      :entry => new_resource.entry_point,
      :environment => new_resource.environment
    )
  end
end

action :after_restart do
end
Trying the fix referenced here: https://github.com/poise/application_ruby/pull/42/files (capture new_resource in a local variable so the deferred restart_command block resolves it correctly).
#
# Author:: Conrad Kramer <conrad@kramerapps.com>
# Cookbook Name:: application_node
# Resource:: node
#
# Copyright:: 2013, Kramer Software Productions, LLC. <conrad@kramerapps.com>
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Chef::DSL::IncludeRecipe

# application_nodejs provider callbacks for deploying a Node.js application
# under the `application` cookbook's deploy lifecycle.

action :before_compile do
  # Node itself (and optionally npm) must be installed before compilation.
  include_recipe 'nodejs::install_from_source'
  if new_resource.npm
    include_recipe 'nodejs::npm'
  end
  # Capture the resource in a local: the restart_command block below is
  # stored and evaluated later in a different context, where a bare
  # `new_resource` would not resolve (see poise/application_ruby#42).
  r = new_resource
  unless r.restart_command
    # Default restart: (re)start an Upstart service named after the app.
    r.restart_command do
      service "#{r.application.name}_nodejs" do
        provider Chef::Provider::Service::Upstart
        supports :restart => true, :start => true, :stop => true
        action [:enable, :restart]
      end
    end
  end
  # Make NODE_ENV visible to the deployed process.
  new_resource.environment.update({
    'NODE_ENV' => new_resource.environment_name
  })
end

action :before_deploy do
  # Keep NODE_ENV in sync in case the environment hash was rebuilt.
  new_resource.environment['NODE_ENV'] = new_resource.environment_name
end

action :before_migrate do
  # Install JS dependencies into the release before migrations run.
  if new_resource.npm
    execute 'npm install' do
      cwd new_resource.release_path
      user new_resource.owner
      group new_resource.group
      # npm needs a writable HOME for its cache/config.
      environment new_resource.environment.merge({ 'HOME' => new_resource.shared_path })
    end
  end
end

action :before_symlink do
end

action :before_restart do
  # Render the Upstart job that supervises the Node.js process.
  template "#{new_resource.application.name}.upstart.conf" do
    path "/etc/init/#{new_resource.application.name}_nodejs.conf"
    source new_resource.template ? new_resource.template : 'nodejs.upstart.conf.erb'
    # A custom template comes from the wrapping cookbook; the default ships here.
    cookbook new_resource.template ? new_resource.cookbook_name.to_s : 'application_nodejs'
    owner 'root'
    group 'root'
    mode '0644'
    variables(
      :user => new_resource.owner,
      :group => new_resource.group,
      :node_dir => node['nodejs']['dir'],
      :app_dir => new_resource.release_path,
      :entry => new_resource.entry_point,
      :environment => new_resource.environment
    )
  end
end

action :after_restart do
end
|
#
# Author:: Conrad Kramer <conrad@kramerapps.com>
# Cookbook Name:: application_node
# Resource:: node
#
# Copyright:: 2013, Kramer Software Productions, LLC. <conrad@kramerapps.com>
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Fixes in this revision:
# * Chef::Mixin::LanguageIncludeRecipe is deprecated; include_recipe for
#   providers now lives in Chef::DSL::IncludeRecipe.
# * `cookbook_name` is passed through .to_s so the template's `cookbook`
#   attribute always receives a String (matches the other revisions of this
#   provider in the tree).
include Chef::DSL::IncludeRecipe

# application_nodejs provider callbacks for deploying a Node.js application
# under the `application` cookbook's deploy lifecycle.

action :before_compile do
  # Node itself (and optionally npm) must be installed before compilation.
  include_recipe 'nodejs::install_from_source'
  if new_resource.npm
    include_recipe 'nodejs::npm'
  end
  unless new_resource.restart_command
    # Default restart: (re)start an Upstart service named after the app.
    # NOTE(review): this block is stored and evaluated later; `new_resource`
    # may not resolve in that deferred context (see poise/application_ruby#42).
    new_resource.restart_command do
      service "#{new_resource.application.name}_nodejs" do
        provider Chef::Provider::Service::Upstart
        supports :restart => true, :start => true, :stop => true
        action [:enable, :restart]
      end
    end
  end
  # Make NODE_ENV visible to the deployed process.
  new_resource.environment.update({
    'NODE_ENV' => new_resource.environment_name
  })
end

action :before_deploy do
  # Keep NODE_ENV in sync in case the environment hash was rebuilt.
  new_resource.environment['NODE_ENV'] = new_resource.environment_name
end

action :before_migrate do
  # Install JS dependencies into the release before migrations run.
  if new_resource.npm
    execute 'npm install' do
      cwd new_resource.release_path
      user new_resource.owner
      group new_resource.group
      # npm needs a writable HOME for its cache/config.
      environment new_resource.environment.merge({ 'HOME' => new_resource.shared_path })
    end
  end
end

action :before_symlink do
end

action :before_restart do
  # Render the Upstart job that supervises the Node.js process.
  template "#{new_resource.application.name}.upstart.conf" do
    path "/etc/init/#{new_resource.application.name}_nodejs.conf"
    source new_resource.template ? new_resource.template : 'nodejs.upstart.conf.erb'
    # A custom template comes from the wrapping cookbook; the default ships here.
    cookbook new_resource.template ? new_resource.cookbook_name.to_s : 'application_nodejs'
    owner 'root'
    group 'root'
    mode '0644'
    variables(
      :user => new_resource.owner,
      :group => new_resource.group,
      :node_dir => node['nodejs']['dir'],
      :app_dir => new_resource.release_path,
      :entry => new_resource.entry_point,
      :environment => new_resource.environment
    )
  end
end

action :after_restart do
end
Fix deprecation warning regarding Chef::Mixin::LanguageIncludeRecipe
#
# Author:: Conrad Kramer <conrad@kramerapps.com>
# Cookbook Name:: application_node
# Resource:: node
#
# Copyright:: 2013, Kramer Software Productions, LLC. <conrad@kramerapps.com>
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Chef::DSL::IncludeRecipe

# application_nodejs provider callbacks for deploying a Node.js application
# under the `application` cookbook's deploy lifecycle.

action :before_compile do
  # Node itself (and optionally npm) must be installed before compilation.
  include_recipe 'nodejs::install_from_source'
  if new_resource.npm
    include_recipe 'nodejs::npm'
  end
  unless new_resource.restart_command
    # Default restart: (re)start an Upstart service named after the app.
    # NOTE(review): this block is stored and evaluated later; `new_resource`
    # may not resolve in that deferred context (see poise/application_ruby#42).
    new_resource.restart_command do
      service "#{new_resource.application.name}_nodejs" do
        provider Chef::Provider::Service::Upstart
        supports :restart => true, :start => true, :stop => true
        action [:enable, :restart]
      end
    end
  end
  # Make NODE_ENV visible to the deployed process.
  new_resource.environment.update({
    'NODE_ENV' => new_resource.environment_name
  })
end

action :before_deploy do
  # Keep NODE_ENV in sync in case the environment hash was rebuilt.
  new_resource.environment['NODE_ENV'] = new_resource.environment_name
end

action :before_migrate do
  # Install JS dependencies into the release before migrations run.
  if new_resource.npm
    execute 'npm install' do
      cwd new_resource.release_path
      user new_resource.owner
      group new_resource.group
      # npm needs a writable HOME for its cache/config.
      environment new_resource.environment.merge({ 'HOME' => new_resource.shared_path })
    end
  end
end

action :before_symlink do
end

action :before_restart do
  # Render the Upstart job that supervises the Node.js process.
  template "#{new_resource.application.name}.upstart.conf" do
    path "/etc/init/#{new_resource.application.name}_nodejs.conf"
    source new_resource.template ? new_resource.template : 'nodejs.upstart.conf.erb'
    # NOTE(review): other revisions of this file call .to_s on cookbook_name;
    # confirm it is already a String here.
    cookbook new_resource.template ? new_resource.cookbook_name : 'application_nodejs'
    owner 'root'
    group 'root'
    mode '0644'
    variables(
      :user => new_resource.owner,
      :group => new_resource.group,
      :node_dir => node['nodejs']['dir'],
      :app_dir => new_resource.release_path,
      :entry => new_resource.entry_point,
      :environment => new_resource.environment
    )
  end
end

action :after_restart do
end
|
# frozen_string_literal: true
#
# Cookbook Name:: rabbitmq
# Provider:: plugin
#
# Copyright 2012-2013, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Opscode::RabbitMQ
# Returns truthy when the named RabbitMQ plugin is already enabled.
#
# Fix in this revision: newer rabbitmq-plugins versions print banner/progress
# lines that broke the stdout match; `-q` suppresses them so only the plugin
# list is emitted (see rabbitmq/rabbitmq-cli#264, #273).
def plugin_enabled?(name)
  # Ensure rabbitmq-plugins is resolvable on distros that install it outside
  # the default PATH.
  ENV['PATH'] = "#{ENV['PATH']}:/usr/lib/rabbitmq/bin"
  # -e lists only explicitly enabled plugins matching the pattern.
  cmdstr = "rabbitmq-plugins list -q -e '#{name}\\b'"
  cmd = Mixlib::ShellOut.new(cmdstr, :env => shell_environment)
  cmd.run_command
  Chef::Log.debug "rabbitmq_plugin_enabled?: #{cmdstr}"
  Chef::Log.debug "rabbitmq_plugin_enabled?: #{cmd.stdout}"
  cmd.error!
  cmd.stdout =~ /\b#{name}\b/
end
use_inline_resources if defined?(:use_inline_resources) # ~FC113
# Enables the plugin unless it is already enabled.
#
# Fix in this revision: Chef::Log.info and updated_by_last_action are not
# `execute` resource attributes; they previously sat inside the resource
# declaration block (running at declaration time, not converge time). They
# are now called directly in the action, which has the same net effect and
# keeps the execute block to real attributes.
action :enable do
  unless plugin_enabled?(new_resource.plugin)
    Chef::Log.info "Enabling RabbitMQ plugin '#{new_resource.plugin}'."
    execute "rabbitmq-plugins enable #{new_resource.plugin}" do
      umask 0022
      environment shell_environment.merge(
        'PATH' => "#{ENV['PATH']}:/usr/lib/rabbitmq/bin"
      )
    end
    new_resource.updated_by_last_action(true)
  end
end
# Disables the plugin if it is currently enabled.
#
# Fix in this revision: Chef::Log.info and updated_by_last_action are not
# `execute` resource attributes; they previously sat inside the resource
# declaration block (running at declaration time, not converge time). They
# are now called directly in the action, which has the same net effect and
# keeps the execute block to real attributes.
action :disable do
  if plugin_enabled?(new_resource.plugin)
    Chef::Log.info "Disabling RabbitMQ plugin '#{new_resource.plugin}'."
    execute "rabbitmq-plugins disable #{new_resource.plugin}" do
      umask 0022
      environment shell_environment.merge(
        'PATH' => "#{ENV['PATH']}:/usr/lib/rabbitmq/bin"
      )
    end
    new_resource.updated_by_last_action(true)
  end
end
Suppress the additional output newer CLI tools produce (pass -q to rabbitmq-plugins).
This should fix plugin status detection.
References rabbitmq/rabbitmq-cli#264 and rabbitmq/rabbitmq-cli#273.
# frozen_string_literal: true
#
# Cookbook Name:: rabbitmq
# Provider:: plugin
#
# Copyright 2012-2013, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Opscode::RabbitMQ

# Returns truthy when the named RabbitMQ plugin is already enabled.
# -q suppresses the banner output newer rabbitmq-plugins versions print;
# -e lists only explicitly enabled plugins matching the pattern.
def plugin_enabled?(name)
  # Ensure rabbitmq-plugins is resolvable on distros that install it outside
  # the default PATH.
  ENV['PATH'] = "#{ENV['PATH']}:/usr/lib/rabbitmq/bin"
  cmdstr = "rabbitmq-plugins list -q -e '#{name}\\b'"
  cmd = Mixlib::ShellOut.new(cmdstr, :env => shell_environment)
  cmd.run_command
  Chef::Log.debug "rabbitmq_plugin_enabled?: #{cmdstr}"
  Chef::Log.debug "rabbitmq_plugin_enabled?: #{cmd.stdout}"
  cmd.error!
  cmd.stdout =~ /\b#{name}\b/
end

use_inline_resources if defined?(:use_inline_resources) # ~FC113

# Enables the plugin unless it is already enabled.
action :enable do
  unless plugin_enabled?(new_resource.plugin)
    execute "rabbitmq-plugins enable #{new_resource.plugin}" do
      umask 0022
      # NOTE(review): the next two calls are not `execute` attributes; they
      # run when the resource is declared, not when the command executes.
      # Consider moving them out of the resource block.
      Chef::Log.info "Enabling RabbitMQ plugin '#{new_resource.plugin}'."
      environment shell_environment.merge(
        'PATH' => "#{ENV['PATH']}:/usr/lib/rabbitmq/bin"
      )
      new_resource.updated_by_last_action(true)
    end
  end
end

# Disables the plugin if it is currently enabled.
action :disable do
  if plugin_enabled?(new_resource.plugin)
    execute "rabbitmq-plugins disable #{new_resource.plugin}" do
      umask 0022
      # NOTE(review): same declaration-time placement issue as in :enable.
      Chef::Log.info "Disabling RabbitMQ plugin '#{new_resource.plugin}'."
      environment shell_environment.merge(
        'PATH' => "#{ENV['PATH']}:/usr/lib/rabbitmq/bin"
      )
      new_resource.updated_by_last_action(true)
    end
  end
end
|
require "spec_helper"
require "bumper/workers/update_checker"

# Specs for the worker that checks whether a dependency has a newer version.
RSpec.describe Workers::UpdateChecker do
  let(:worker) { described_class.new }
  # The SQS message object itself is never inspected, only passed through.
  let(:sqs_message) { double("sqs_message") }
  # Message body as parsed from the queue: the repo, the dependency to check
  # and placeholder dependency-file contents.
  let(:body) do
    {
      "repo" => {
        "name" => "gocardless/bump",
        "language" => "ruby",
      },
      "dependency" => {
        "name" => "business",
        "version" => "1.4.0",
      },
      "dependency_files" => [
        { "name" => "Gemfile", "content" => "xyz" },
        { "name" => "Gemfile.lock", "content" => "xyz" },
      ]
    }
  end

  context "when an update is required" do
    before do
      # Force the checker's verdict so the spec exercises only the worker.
      allow_any_instance_of(UpdateCheckers::RubyUpdateChecker).
        to receive(:needs_update?).and_return(true)
      allow_any_instance_of(UpdateCheckers::RubyUpdateChecker).
        to receive(:latest_version).and_return("1.5.0")
    end

    # NOTE(review): the example name talks about writing to the queue, but
    # the expectation is on the worker's #update_dependency — confirm the
    # description matches the intent.
    it "writes a message into the queue" do
      expect(worker).to receive(:update_dependency)
      worker.perform(sqs_message, body)
    end
  end

  context "when no update is required" do
    before do
      allow_any_instance_of(UpdateCheckers::RubyUpdateChecker).
        to receive(:needs_update?).and_return(false)
    end

    it "doesn't write a message into the queue" do
      expect(worker).to_not receive(:send_update_notification)
      worker.perform(sqs_message, body)
    end
  end
end
Better spec for Workers::UpdateChecker
require "spec_helper"
require "bumper/workers/update_checker"

# Specs for the worker that checks whether a dependency needs updating and,
# if so, enqueues a Workers::DependencyFileUpdater job.
RSpec.describe Workers::UpdateChecker do
  let(:worker) { described_class.new }
  # The SQS message object itself is never inspected, only passed through.
  let(:sqs_message) { double("sqs_message") }
  # Message body as parsed from the queue: the repo, the dependency to check
  # and real fixture file contents so the checker can parse them.
  let(:body) do
    {
      "repo" => {
        "name" => "gocardless/bump",
        "language" => "ruby",
      },
      "dependency" => {
        "name" => "business",
        "version" => "1.4.0",
      },
      "dependency_files" => [
        { "name" => "Gemfile", "content" => fixture("Gemfile") },
        { "name" => "Gemfile.lock", "content" => fixture("Gemfile.lock") },
      ]
    }
  end

  context "when an update is required" do
    before do
      # Force the checker's verdict so the spec exercises only the worker.
      allow_any_instance_of(UpdateCheckers::RubyUpdateChecker).
        to receive(:needs_update?).and_return(true)
      allow_any_instance_of(UpdateCheckers::RubyUpdateChecker).
        to receive(:latest_version).and_return("1.5.0")
    end

    it "enqueues a DependencyFileUpdater with the correct arguments" do
      # The follow-up job receives the original repo and files plus the
      # dependency bumped to the stubbed latest version.
      expect(Workers::DependencyFileUpdater).
        to receive(:perform_async).
        with(
          "repo" => body["repo"],
          "dependency_files" => body["dependency_files"],
          "updated_dependency" => {
            "name" => "business",
            "version" => "1.5.0",
          })
      worker.perform(sqs_message, body)
    end
  end

  context "when no update is required" do
    before do
      allow_any_instance_of(UpdateCheckers::RubyUpdateChecker).
        to receive(:needs_update?).and_return(false)
    end

    it "doesn't write a message into the queue" do
      expect(Workers::DependencyFileUpdater).to_not receive(:perform_async)
      worker.perform(sqs_message, body)
    end
  end
end
|
require 'spec_helper'

# CRUD controller specs for AgentsController.
#
# Updated from the deprecated RSpec 2 `should`/`stub`/`should_receive`
# one-liner syntax (removed in RSpec 3) to the `expect`/`allow` syntax.
# Every example asserts the same behavior as before.
describe AgentsController do
  let(:valid_attributes) { { "name" => "MyString" } }
  let(:valid_session) { {} }

  describe "GET index" do
    it "assigns all agents as @agents" do
      agent = Agent.create! valid_attributes
      get :index, {}, valid_session
      expect(assigns(:agents)).to eq([agent])
    end
  end

  describe "GET show" do
    it "assigns the requested agent as @agent" do
      agent = Agent.create! valid_attributes
      get :show, {:id => agent.to_param}, valid_session
      expect(assigns(:agent)).to eq(agent)
    end
  end

  describe "GET new" do
    it "assigns a new agent as @agent" do
      get :new, {}, valid_session
      expect(assigns(:agent)).to be_a_new(Agent)
    end
  end

  describe "GET edit" do
    it "assigns the requested agent as @agent" do
      agent = Agent.create! valid_attributes
      get :edit, {:id => agent.to_param}, valid_session
      expect(assigns(:agent)).to eq(agent)
    end
  end

  describe "POST create" do
    describe "with valid params" do
      it "creates a new Agent" do
        expect {
          post :create, {:agent => valid_attributes}, valid_session
        }.to change(Agent, :count).by(1)
      end

      it "assigns a newly created agent as @agent" do
        post :create, {:agent => valid_attributes}, valid_session
        expect(assigns(:agent)).to be_a(Agent)
        expect(assigns(:agent)).to be_persisted
      end

      it "redirects to the created agent" do
        post :create, {:agent => valid_attributes}, valid_session
        expect(response).to redirect_to(Agent.last)
      end
    end

    describe "with invalid params" do
      it "assigns a newly created but unsaved agent as @agent" do
        # Trigger the behavior that occurs when invalid params are submitted
        allow_any_instance_of(Agent).to receive(:save).and_return(false)
        post :create, {:agent => { "name" => "invalid value" }}, valid_session
        expect(assigns(:agent)).to be_a_new(Agent)
      end

      it "re-renders the 'new' template" do
        # Trigger the behavior that occurs when invalid params are submitted
        allow_any_instance_of(Agent).to receive(:save).and_return(false)
        post :create, {:agent => { "name" => "invalid value" }}, valid_session
        expect(response).to render_template("new")
      end
    end
  end

  describe "PUT update" do
    describe "with valid params" do
      it "updates the requested agent" do
        agent = Agent.create! valid_attributes
        # Assuming there are no other agents in the database, this specifies
        # that the Agent created on the previous line receives :update with
        # whatever params are submitted in the request.
        expect_any_instance_of(Agent).to receive(:update).with({ "name" => "MyString" })
        put :update, {:id => agent.to_param, :agent => { "name" => "MyString" }}, valid_session
      end

      it "assigns the requested agent as @agent" do
        agent = Agent.create! valid_attributes
        put :update, {:id => agent.to_param, :agent => valid_attributes}, valid_session
        expect(assigns(:agent)).to eq(agent)
      end

      it "redirects to the agent" do
        agent = Agent.create! valid_attributes
        put :update, {:id => agent.to_param, :agent => valid_attributes}, valid_session
        expect(response).to redirect_to(agent)
      end
    end

    describe "with invalid params" do
      it "assigns the agent as @agent" do
        agent = Agent.create! valid_attributes
        # Trigger the behavior that occurs when invalid params are submitted
        allow_any_instance_of(Agent).to receive(:save).and_return(false)
        put :update, {:id => agent.to_param, :agent => { "name" => "invalid value" }}, valid_session
        expect(assigns(:agent)).to eq(agent)
      end

      it "re-renders the 'edit' template" do
        agent = Agent.create! valid_attributes
        # Trigger the behavior that occurs when invalid params are submitted
        allow_any_instance_of(Agent).to receive(:save).and_return(false)
        put :update, {:id => agent.to_param, :agent => { "name" => "invalid value" }}, valid_session
        expect(response).to render_template("edit")
      end
    end
  end

  describe "DELETE destroy" do
    it "destroys the requested agent" do
      agent = Agent.create! valid_attributes
      expect {
        delete :destroy, {:id => agent.to_param}, valid_session
      }.to change(Agent, :count).by(-1)
    end

    it "redirects to the agents list" do
      agent = Agent.create! valid_attributes
      delete :destroy, {:id => agent.to_param}, valid_session
      expect(response).to redirect_to(agents_url)
    end
  end
end
fix: working controller spec for Agents (rewritten so the test suite actually passes)
# CRUD controller specs for AgentsController, written in RSpec 3
# `expect`/`allow` syntax.
RSpec.describe AgentsController, :type => :controller do
  let(:valid_attributes) { { "name" => "MyString" } }
  let(:valid_session) { {} }

  describe "GET index" do
    it "assigns all agents as @agents" do
      agent = Agent.create! valid_attributes
      get :index, {}, valid_session
      #assigns(:agents).to eq([agent])
      # NOTE(review): be_success and have_http_status(:success) assert the
      # same thing; one of the two is redundant.
      expect(response).to be_success
      expect(response).to have_http_status(:success)
      expect(response).to render_template('agents/index')
    end

    it "loads all of the agents into @agents" do
      agent1 = Agent.create! valid_attributes
      agent2 = Agent.create! valid_attributes
      get :index
      expect(assigns(:agents)).to match_array([agent1, agent2])
    end
  end

  describe "GET show" do
    it "assigns the requested agent as @agent" do
      agent = Agent.create! valid_attributes
      get :show, {:id => agent.to_param}, valid_session
      expect(assigns(:agent)).to eq(agent)
    end
  end

  describe "GET new" do
    it "assigns a new agent as @agent" do
      get :new, {}, valid_session
      expect(assigns(:agent)).to be_a_new(Agent)
    end
  end

  describe "GET edit" do
    it "assigns the requested agent as @agent" do
      agent = Agent.create! valid_attributes
      get :edit, {:id => agent.to_param}, valid_session
      expect(assigns(:agent)).to eq(agent)
    end
  end

  describe "POST create" do
    describe "with valid params" do
      it "creates a new Agent" do
        expect {
          post :create, {:agent => valid_attributes}, valid_session
        }.to change(Agent, :count).by(1)
      end

      it "assigns a newly created agent as @agent" do
        post :create, {:agent => valid_attributes}, valid_session
        expect(assigns(:agent)).to be_a(Agent)
        expect(assigns(:agent)).to be_persisted
      end

      it "redirects to the created agent" do
        post :create, {:agent => valid_attributes}, valid_session
        expect(response).to redirect_to(Agent.last)
      end
    end

    describe "with invalid params" do
      it "assigns a newly created but unsaved agent as @agent" do
        # Trigger the behavior that occurs when invalid params are submitted
        expect_any_instance_of(Agent).to receive(:save).and_return(false)
        post :create, {:agent => { "name" => "invalid value" }}, valid_session
        expect(assigns(:agent)).to be_a_new(Agent)
      end

      it "re-renders the 'new' template" do
        # Trigger the behavior that occurs when invalid params are submitted
        expect_any_instance_of(Agent).to receive(:save).and_return(false)
        post :create, {:agent => { "name" => "invalid value" }}, valid_session
        expect(response).to render_template("new")
      end
    end
  end

  describe "PUT update" do
    describe "with valid params" do
      it "updates the requested agent" do
        agent = Agent.create! valid_attributes
        # Assuming there are no other agents in the database, this
        # specifies that the Agent created on the previous line
        # receives the :update_attributes message with whatever params are
        # submitted in the request.
        # put :update, :id => @teacher_leader, :teacher_leader => { :teacher_id => @teacher.id }
        #Agent.any_instance.to_receive(:update).with({ "name" => "MyString" })
        # NOTE(review): asserting :save rather than :update is a weaker
        # expectation than the original spec; confirm this is intentional.
        expect_any_instance_of(Agent).to receive(:save).at_least(:once)
        put :update, {:id => agent.to_param, :agent => { "name" => "MyString" }}, valid_session
      end

      it "assigns the requested agent as @agent" do
        agent = Agent.create! valid_attributes
        put :update, {:id => agent.to_param, :agent => valid_attributes}, valid_session
        expect(assigns(:agent)).to eq(agent)
      end

      it "redirects to the agent" do
        agent = Agent.create! valid_attributes
        put :update, {:id => agent.to_param, :agent => valid_attributes}, valid_session
        expect(response).to redirect_to(agent)
      end
    end

    describe "with invalid params" do
      it "assigns the agent as @agent" do
        agent = Agent.create! valid_attributes
        # Trigger the behavior that occurs when invalid params are submitted
        expect_any_instance_of(Agent).to receive(:save).and_return(false)
        put :update, {:id => agent.to_param, :agent => { "name" => "invalid value" }}, valid_session
        expect(assigns(:agent)).to eq(agent)
      end

      it "re-renders the 'edit' template" do
        agent = Agent.create! valid_attributes
        # Trigger the behavior that occurs when invalid params are submitted
        expect_any_instance_of(Agent).to receive(:save).and_return(false)
        put :update, {:id => agent.to_param, :agent => { "name" => "invalid value" }}, valid_session
        expect(response).to render_template("edit")
      end
    end
  end

  describe "DELETE destroy" do
    it "destroys the requested agent" do
      agent = Agent.create! valid_attributes
      expect {
        delete :destroy, {:id => agent.to_param}, valid_session
      }.to change(Agent, :count).by(-1)
    end

    it "redirects to the agents list" do
      agent = Agent.create! valid_attributes
      delete :destroy, {:id => agent.to_param}, valid_session
      expect(response).to redirect_to(agents_url)
    end
  end
end
|
require 'spec_helper'
require './spec/controllers/squash_many_duplicates_examples'
describe EventsController, :type => :controller do
describe "#index" do
render_views
describe "as HTML" do
it "should produce HTML" do
get :index, :format => "html"
expect(response.body).to have_selector "table.event_table"
end
end
describe "as XML" do
describe "without events" do
before do
get :index, :format => "xml"
@struct = Hash.from_xml(response.body)["events"]
end
it "should not have entries" do
expect(@struct).to be_blank
end
end
describe "with events" do
before do
FactoryGirl.create(:event, :with_venue)
FactoryGirl.create(:event, :with_venue)
get :index, :format => "xml"
@struct = Hash.from_xml(response.body)["events"]
end
it "should return an array" do
expect(@struct).to be_a_kind_of Array
end
it "should have entries" do
expect(@struct).to be_present
end
it "should include venue details" do
event = @struct.first
venue = event["venue"]
venue_title = venue["title"] # Why XML? Why?
expect(venue_title).to be_a_kind_of String
expect(venue_title).to be_present
end
end
end
describe "as JSON" do
it "should accept a JSONP callback" do
post :index, :format => "json", :callback => "some_function"
expect(response.body.split("\n").join).to match /^\s*some_function\(.*\);?\s*$/
end
describe "without events" do
before do
post :index, :format => "json"
@struct = ActiveSupport::JSON.decode(response.body)
end
it "should return an array" do
expect(@struct).to be_a_kind_of Array
end
it "should not have entries" do
expect(@struct).to be_empty
end
end
describe "with events" do
before do
@event = FactoryGirl.create(:event, :with_venue)
@venue = @event.venue
post :index, :format => "json"
@struct = ActiveSupport::JSON.decode(response.body)
end
it "should return an array" do
expect(@struct).to be_a_kind_of Array
end
it "should return an event" do
event = @struct.first
expect(event['id']).to eq @event.id
expect(event['title']).to eq @event.title
end
it "should return an event's venue" do
event = @struct.first
venue = event['venue']
expect(venue['id']).to eq @venue.id
expect(venue['title']).to eq @venue.title
end
end
end
describe "as ATOM" do
describe "without events" do
before do
post :index, :format => "atom"
@struct = Hash.from_xml(response.body)
end
it "should be a feed" do
expect(@struct['feed']['xmlns']).to be_present
end
it "should not have events" do
expect(@struct['feed']['entry']).to be_blank
end
end
describe "with events" do
before do
FactoryGirl.create(:event, :with_venue)
FactoryGirl.create(:event, :with_venue)
post :index, :format => "atom"
@struct = Hash.from_xml(response.body)
end
let(:entries) { @struct["feed"]["entry"] }
it "should be a feed" do
expect(@struct['feed']['xmlns']).to be_present
end
it "should have entries" do
expect(entries).to be_present
end
it "should have an event" do
entry = entries.first
record = Event.find(entry['id'][%r{(\d+)$}, 1])
expect(Nokogiri.parse(entry['content']).search('.description p').inner_html).to eq record.description
expect(entry['end_time']).to eq record.end_time.xmlschema
expect(entry['start_time']).to eq record.start_time.xmlschema
expect(entry['summary']).to be_present
expect(entry['title']).to eq record.title
expect(entry['updated']).to eq record.updated_at.xmlschema
expect(entry['url']).to eq event_url(record)
end
end
end
describe "as iCalendar" do
describe "without events" do
before do
post :index, :format => "ics"
end
it "should have a calendar" do
expect(response.body).to match /BEGIN:VCALENDAR/
end
it "should not have events" do
expect(response.body).not_to match /BEGIN:VEVENT/
end
end
describe "with events" do
before do
@current_event = FactoryGirl.create(:event, :start_time => today + 1.hour)
@past_event = FactoryGirl.create(:event, :start_time => today - 1.hour)
post :index, :format => "ics"
end
it "should have a calendar" do
expect(response.body).to match /BEGIN:VCALENDAR/
end
it "should have events" do
expect(response.body).to match /BEGIN:VEVENT/
end
it "should render all future events" do
expect(response.body).to match /SUMMARY:#{@current_event.title}/
end
it "should not render past events" do
expect(response.body).not_to match(/SUMMARY:#{@past_event.title}/)
end
end
end
describe "and filtering by date range" do
[:start, :end].each do |date_kind|
describe "for #{date_kind} date" do
let(:start_date) { Date.parse("2010-01-01") }
let(:end_date) { Date.parse("2010-04-01") }
let(:date_field) { "#{date_kind}_date" }
around do |example|
Timecop.freeze(start_date) do
example.run
end
end
it "should use the default if not given the parameter" do
get :index, :date => {}
expect(assigns[date_field]).to eq send(date_field)
expect(flash[:failure]).to be_nil
end
it "should use the default if given a malformed parameter" do
get :index, :date => "omgkittens"
expect(assigns[date_field]).to eq send(date_field)
expect(response.body).to have_selector(".flash_failure", text: 'invalid')
end
it "should use the default if given a missing parameter" do
get :index, :date => {:foo => "bar"}
expect(assigns[date_field]).to eq send(date_field)
expect(response.body).to have_selector(".flash_failure", text: 'invalid')
end
it "should use the default if given an empty parameter" do
get :index, :date => {date_kind => ""}
expect(assigns[date_field]).to eq send(date_field)
expect(response.body).to have_selector(".flash_failure", text: 'invalid')
end
it "should use the default if given an invalid parameter" do
get :index, :date => {date_kind => "omgkittens"}
expect(assigns[date_field]).to eq send(date_field)
expect(response.body).to have_selector(".flash_failure", text: 'invalid')
end
it "should use the value if valid" do
expected = Date.yesterday
get :index, :date => {date_kind => expected.to_s("%Y-%m-%d")}
expect(assigns[date_field]).to eq expected
end
end
end
it "should return matching events" do
# Given
matching = [
Event.create!(
:title => "matching1",
:start_time => Time.zone.parse("2010-01-16 00:00"),
:end_time => Time.zone.parse("2010-01-16 01:00")
),
Event.create!(:title => "matching2",
:start_time => Time.zone.parse("2010-01-16 23:00"),
:end_time => Time.zone.parse("2010-01-17 00:00")
),
]
non_matching = [
Event.create!(
:title => "nonmatchingbefore",
:start_time => Time.zone.parse("2010-01-15 23:00"),
:end_time => Time.zone.parse("2010-01-15 23:59")
),
Event.create!(
:title => "nonmatchingafter",
:start_time => Time.zone.parse("2010-01-17 00:01"),
:end_time => Time.zone.parse("2010-01-17 01:00")
),
]
# When
get :index, :date => {:start => "2010-01-16", :end => "2010-01-16"}
results = assigns[:events]
# Then
expect(results.size).to eq 2
expect(results).to eq matching
end
end
end
describe "#show" do
it "should show an event" do
event = Event.new(:start_time => now)
expect(Event).to receive(:find).and_return(event)
get "show", :id => 1234
expect(response).to be_success
end
it "should redirect from a duplicate event to its master" do
master = FactoryGirl.create(:event, id: 4321)
event = Event.new(:start_time => now, :duplicate_of => master)
expect(Event).to receive(:find).and_return(event)
get "show", :id => 1234
expect(response).to redirect_to(event_path(master))
end
it "should show an error when asked to display a non-existent event" do
expect(Event).to receive(:find).and_raise(ActiveRecord::RecordNotFound)
get "show", :id => 1234
expect(response).to redirect_to(events_path)
expect(flash[:failure]).not_to be_blank
end
end
describe "when creating and updating events" do
before do
@organization = FactoryGirl.create(:organization)
session[:organization_id] = @organization.id
@params = {
"end_date" => "2008-06-04",
"start_date" => "2008-06-03",
"event" => {
"title" => "MyVenue",
"url" => "http://my.venue",
"description" => "Wheeeee"
},
"end_time" => "",
"start_time" => ""
}.with_indifferent_access
@venue = FactoryGirl.build(:venue)
@event = FactoryGirl.build(:event, :venue => @venue, organization: @organization)
end
describe "#new" do
it "should display form for creating new event" do
get "new"
expect(response).to be_success
expect(response).to render_template :new
end
end
describe "#create" do
render_views
it "should create a new event without a venue" do
@params[:event][:venue_id] = nil
post "create", @params
@event = Event.find_by_title(@params[:event][:title])
expect(response).to redirect_to(@event)
end
it "should associate a venue based on a given venue id" do
@venue.save!
@params[:event][:venue_id] = @venue.id.to_s
post "create", @params
@event = Event.find_by_title(@params[:event][:title])
expect(@event.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should associate a venue based on a given venue name" do
@venue.save!
@params[:venue_name] = @venue.title
post "create", @params
@event = Event.find_by_title(@params[:event][:title])
expect(@event.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should associate a venue by id when both an id and a name are provided" do
@venue.save!
@venue2 = FactoryGirl.create(:venue)
@params[:event][:venue_id] = @venue.id.to_s
@params[:venue_name] = @venue2.title
post "create", @params
@event = Event.find_by_title(@params[:event][:title])
expect(@event.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should create a new event and new venue, and redirect to venue edit form" do
@params[:venue_name] = "New Venue"
post "create", @params
@event = Event.find_by_title(@params[:event][:title])
@venue = Venue.find_by_title("New Venue")
expect(@event.venue).to eq(@venue)
expect(response).to redirect_to(edit_venue_url(@venue, :from_event => @event.id))
end
it "should catch errors and redisplay the new event form" do
post "create"
expect(response).to render_template :new
end
it "should stop evil robots" do
  # Filling the honeypot trap field marks the request as a bot: the form is
  # redisplayed with an "evil robot" flash failure.
  # (Removed two leftover `binding.pry` debugger statements that would hang
  # the suite waiting for console input.)
  post "create", :trap_field => "I AM AN EVIL ROBOT, I EAT OLD PEOPLE'S MEDICINE FOR FOOD!"
  expect(response).to render_template :new
  expect(flash[:failure]).to match /evil robot/i
end
it "should not allow too many links in the description" do
@params[:event][:description] = <<-DESC
http://example.com
https://example.com
http://example.net
https://example.net
DESC
post "create", @params
expect(response).to render_template :new
expect(flash[:failure]).to match /too many links/i
end
it "should accept HTTP-rich presentation descriptions without too many links" do
@params[:event][:description] = <<-DESC
I hereby offer to give a presentation at the August ruby meeting about the faraday
gem (https://github.com/lostisland/faraday) and how compares to or compliments other
HTTP client libraries such as httparty (https://github.com/jnunemaker/httparty).
--
I wouldn't mind seeing a PDX.pm talk about HTTP::Tiny vs Net::HTTP::Tiny vs Net::HTTP
vs HTTP::Client vs HTTP::Client::Parallel
DESC
post "create", @params
expect(flash[:failure]).to be_nil
end
it "should allow the user to preview the event" do
@params[:preview] = "Preview"
post "create", @params
expect(response).to render_template :new
expect(response.body).to have_selector '#event_preview'
end
it "should create an event for an existing venue" do
venue = FactoryGirl.create(:venue)
post "create",
:start_time => now.strftime("%Y-%m-%d"),
:end_time => (now + 1.hour).strftime("%Y-%m-%d"),
:event => {
:title => "My Event",
:tag_list => ",,foo,bar, baz,",
},
:venue_name => venue.title
expect(response).to be_redirect
expect(flash[:success]).to be_present
event = assigns[:event]
expect(event.title).to eq "My Event"
expect(event.venue.title).to eq venue.title
expect(event.venue.id).to eq venue.id
expect(event.tag_list.to_a.sort).to eq %w[bar baz foo]
end
end
describe "#update" do
before(:each) do
@event = FactoryGirl.create(:event, :with_venue, id: 42)
@venue = @event.venue
@params.merge!(id: 42)
end
it "should display form for editing event" do
get "edit", id: 42
expect(response).to be_success
expect(response).to render_template :edit
end
it "should update an event without a venue" do
@event.venue = nil
put "update", @params
expect(response).to redirect_to(@event)
end
it "should associate a venue based on a given venue id" do
@venue = FactoryGirl.create(:venue)
@params[:event][:venue_id] = @venue.id.to_s
put "update", @params
expect(@event.reload.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should associate a venue based on a given venue name" do
@venue = FactoryGirl.create(:venue)
@params[:venue_name] = @venue.title
put "update", @params
expect(@event.reload.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should associate a venue by id when both an id and a name are provided" do
@venue = FactoryGirl.create(:venue)
@venue2 = FactoryGirl.create(:venue)
@params[:event][:venue_id] = @venue.id.to_s
@params[:venue_name] = @venue2.title
put "update", @params
expect(@event.reload.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should update an event and create a new venue, and redirect to the venue edit form" do
@params[:venue_name] = "New Venue"
put "update", @params
@venue = Venue.find_by_title("New Venue")
expect(response).to redirect_to(edit_venue_url(@venue, :from_event => @event.id))
end
it "should catch errors and redisplay the new event form" do
@params[:event][:title] = nil
put "update", @params
expect(response).to render_template :edit
end
it "should stop evil robots" do
@params[:trap_field] = "I AM AN EVIL ROBOT, I EAT OLD PEOPLE'S MEDICINE FOR FOOD!"
put "update", @params
expect(response).to render_template :edit
expect(flash[:failure]).to match /evil robot/i
end
it "should not allow too many links in the description" do
@params[:event][:description] = <<-DESC
http://example.com
https://example.com
http://example.net
https://example.net
DESC
put "update", @params
expect(response).to render_template :edit
expect(flash[:failure]).to match /too many links/i
end
it "should allow the user to preview the event" do
put "update", @params.merge(:preview => "Preview")
expect(response).to render_template :edit
end
it "should not allow a user to update a locked event" do
@event.lock_editing!
put "update", @params
expect(response).to be_redirect
expect(flash[:failure]).to match /not permitted/i
end
end
describe "#clone" do
before do
@event = FactoryGirl.create(:event)
allow(Event).to receive(:find).and_return(@event)
get "clone", :id => 1
end
it "should build an unsaved record" do
record = assigns[:event]
expect(record).to be_a_new_record
expect(record.id).to be_nil
end
it "should build a cloned record similar to the existing record" do
record = assigns[:event]
%w[title description venue_id venue_details].each do |field|
expect(record.attributes[field]).to eq @event.attributes[field]
end
end
it "should display a new event form" do
expect(response).to be_success
expect(response).to render_template :new
end
it "should have notice with cloning instructions" do
expect(flash[:success]).to match /clone/i
end
end
end
describe "#duplicates" do
render_views
it "should find current duplicates and not past duplicates" do
current_master = FactoryGirl.create(:event, :title => "Current")
current_duplicate = FactoryGirl.create(:event, :title => current_master.title)
past_master = FactoryGirl.create(:event, :title => "Past", :start_time => now - 2.days)
past_duplicate = FactoryGirl.create(:event, :title => past_master.title, :start_time => now - 1.day)
get 'duplicates', :type => 'title'
# Current duplicates
assigns[:grouped_events].select{|keys,values| keys.include?(current_master.title)}.tap do |events|
expect(events).not_to be_empty
expect(events.first.last.size).to eq 2
end
# Past duplicates
expect(assigns[:grouped_events].select{|keys,values| keys.include?(past_master.title)}).to be_empty
end
it "should redirect duplicate events to their master" do
event_master = FactoryGirl.create(:event)
event_duplicate = FactoryGirl.create(:event)
get 'show', :id => event_duplicate.id
expect(response).not_to be_redirect
expect(assigns(:event).id).to eq event_duplicate.id
event_duplicate.duplicate_of = event_master
event_duplicate.save!
get 'show', :id => event_duplicate.id
expect(response).to be_redirect
expect(response).to redirect_to(event_url(event_master.id))
end
it "should display an error message if given invalid arguments" do
get 'duplicates', :type => 'omgwtfbbq'
expect(response).to be_success
expect(response.body).to have_selector('.failure', text: 'omgwtfbbq')
end
end
context do
include_examples "#squash_many_duplicates", :event
end
describe "#search" do
describe "when returning results" do
render_views
let!(:current_event) { FactoryGirl.create(:event, :with_venue, title: "MyQuery") }
let!(:current_event_2) { FactoryGirl.create(:event, :with_venue, description: "WOW myquery!") }
let!(:past_event) { FactoryGirl.create(:event, :with_venue, title: "old myquery") }
describe "in HTML format" do
before do
post :search, :query => "myquery", :format => "html"
end
it "should assign search result" do
expect(assigns[:search]).to be_a Event::Search
end
it "should assign matching events" do
expect(assigns[:events]).to match_array([current_event, current_event_2, past_event])
end
it "should render matching events" do
have_selector "table.event_table" do
have_selector ".vevent a.summary", :href => event_url(results[:past])
have_selector ".vevent a.summary", :href => event_url(results[:current])
end
end
describe "sidebar" do
it "should have iCalendar feed" do
have_selector ".sidebar a", :href => search_events_url(:query => @query, :format => "ics", :protocol => "webcal")
end
it "should have Atom feed" do
have_selector ".sidebar a", :href => search_events_url(:query => @query, :format => "atom")
end
it "should have Google subscription" do
ics_url = search_events_url(query: @query, format: 'ics')
google_url = "https://www.google.com/calendar/render?cid=#{ics_url}"
have_selector ".sidebar a", href: google_url
end
end
end
describe "in XML format" do
it "should produce XML" do
post :search, :query => "myquery", :format => "xml"
hash = Hash.from_xml(response.body)
expect(hash["events"]).to be_a_kind_of Array
end
it "should include venue details" do
  post :search, :query => "myquery", :format => "xml"
  hash = Hash.from_xml(response.body)
  event = hash["events"].first
  venue = event["venue"]
  venue_title = venue["title"]
  expect(venue_title).to be_a_kind_of String
  # Assert the title itself is non-blank. The original checked
  # `venue_title.length` for presence, which is vacuous: 0 is "present"
  # under ActiveSupport, so an empty title would still pass.
  expect(venue_title).to be_present
end
end
describe "in JSON format" do
it "should produce JSON" do
post :search, :query => "myquery", :format => "json"
struct = ActiveSupport::JSON.decode(response.body)
expect(struct).to be_a_kind_of Array
end
it "should accept a JSONP callback" do
post :search, :query => "myquery", :format => "json", :callback => "some_function"
expect(response.body).to match /^\s*some_function\(.*\);?\s*$/
end
it "should include venue details" do
  post :search, :query => "myquery", :format => "json"
  struct = ActiveSupport::JSON.decode(response.body)
  event = struct.first
  expect(event["venue"]["title"]).to be_a_kind_of String
  # Assert presence of the title itself: `.length` is always "present"
  # (even when 0), so the original assertion could never fail.
  expect(event["venue"]["title"]).to be_present
end
end
describe "in ATOM format" do
it "should produce ATOM" do
post :search, :query => "myquery", :format => "atom"
hash = Hash.from_xml(response.body)
expect(hash["feed"]["entry"]).to be_a_kind_of Array
end
end
describe "in ICS format" do
it "should produce ICS" do
post :search, :query => "myquery", :format => "ics"
expect(response.body).to match /BEGIN:VEVENT/
end
it "should produce events matching the query" do
post :search, :query => "myquery", :format => "ics"
expect(response.body).to match /SUMMARY:#{current_event_2.title}/
expect(response.body).to match /SUMMARY:#{past_event.title}/
end
end
describe "failures" do
it "sets search failures in the flash message" do
allow_any_instance_of(Event::Search).to receive_messages failure_message: "OMG"
post :search
expect(flash[:failure]).to eq("OMG")
end
it "redirects to home if hard failure" do
allow_any_instance_of(Event::Search).to receive_messages hard_failure?: true
post :search
expect(response).to redirect_to(root_path)
end
end
end
end
describe "#destroy" do
  # Deleting an event redirects back to the events listing.
  it "should destroy events" do
    record = FactoryGirl.build(:event)
    expect(Event).to receive(:find).and_return(record)
    expect(record).to receive(:destroy)
    delete 'destroy', :id => 1234
    expect(response).to redirect_to(events_url)
  end
  # Locked events are protected from deletion and show a flash failure.
  it "should not allow a user to destroy a locked event" do
    locked = FactoryGirl.create(:event)
    locked.lock_editing!
    delete 'destroy', :id => locked.id
    expect(response).to be_redirect
    expect(flash[:failure]).to match /not permitted/i
  end
end
end
# Remove binding.pry -- stray commit-message text from concatenation; commented out so the file stays valid Ruby.
require 'spec_helper'
require './spec/controllers/squash_many_duplicates_examples'
describe EventsController, :type => :controller do
describe "#index" do
render_views
describe "as HTML" do
it "should produce HTML" do
get :index, :format => "html"
expect(response.body).to have_selector "table.event_table"
end
end
describe "as XML" do
describe "without events" do
before do
get :index, :format => "xml"
@struct = Hash.from_xml(response.body)["events"]
end
it "should not have entries" do
expect(@struct).to be_blank
end
end
describe "with events" do
before do
FactoryGirl.create(:event, :with_venue)
FactoryGirl.create(:event, :with_venue)
get :index, :format => "xml"
@struct = Hash.from_xml(response.body)["events"]
end
it "should return an array" do
expect(@struct).to be_a_kind_of Array
end
it "should have entries" do
expect(@struct).to be_present
end
it "should include venue details" do
event = @struct.first
venue = event["venue"]
venue_title = venue["title"] # Why XML? Why?
expect(venue_title).to be_a_kind_of String
expect(venue_title).to be_present
end
end
end
describe "as JSON" do
it "should accept a JSONP callback" do
post :index, :format => "json", :callback => "some_function"
expect(response.body.split("\n").join).to match /^\s*some_function\(.*\);?\s*$/
end
describe "without events" do
before do
post :index, :format => "json"
@struct = ActiveSupport::JSON.decode(response.body)
end
it "should return an array" do
expect(@struct).to be_a_kind_of Array
end
it "should not have entries" do
expect(@struct).to be_empty
end
end
describe "with events" do
before do
@event = FactoryGirl.create(:event, :with_venue)
@venue = @event.venue
post :index, :format => "json"
@struct = ActiveSupport::JSON.decode(response.body)
end
it "should return an array" do
expect(@struct).to be_a_kind_of Array
end
it "should return an event" do
event = @struct.first
expect(event['id']).to eq @event.id
expect(event['title']).to eq @event.title
end
it "should return an event's venue" do
event = @struct.first
venue = event['venue']
expect(venue['id']).to eq @venue.id
expect(venue['title']).to eq @venue.title
end
end
end
describe "as ATOM" do
describe "without events" do
before do
post :index, :format => "atom"
@struct = Hash.from_xml(response.body)
end
it "should be a feed" do
expect(@struct['feed']['xmlns']).to be_present
end
it "should not have events" do
expect(@struct['feed']['entry']).to be_blank
end
end
describe "with events" do
before do
FactoryGirl.create(:event, :with_venue)
FactoryGirl.create(:event, :with_venue)
post :index, :format => "atom"
@struct = Hash.from_xml(response.body)
end
let(:entries) { @struct["feed"]["entry"] }
it "should be a feed" do
expect(@struct['feed']['xmlns']).to be_present
end
it "should have entries" do
expect(entries).to be_present
end
it "should have an event" do
entry = entries.first
record = Event.find(entry['id'][%r{(\d+)$}, 1])
expect(Nokogiri.parse(entry['content']).search('.description p').inner_html).to eq record.description
expect(entry['end_time']).to eq record.end_time.xmlschema
expect(entry['start_time']).to eq record.start_time.xmlschema
expect(entry['summary']).to be_present
expect(entry['title']).to eq record.title
expect(entry['updated']).to eq record.updated_at.xmlschema
expect(entry['url']).to eq event_url(record)
end
end
end
describe "as iCalendar" do
describe "without events" do
before do
post :index, :format => "ics"
end
it "should have a calendar" do
expect(response.body).to match /BEGIN:VCALENDAR/
end
it "should not have events" do
expect(response.body).not_to match /BEGIN:VEVENT/
end
end
describe "with events" do
before do
@current_event = FactoryGirl.create(:event, :start_time => today + 1.hour)
@past_event = FactoryGirl.create(:event, :start_time => today - 1.hour)
post :index, :format => "ics"
end
it "should have a calendar" do
expect(response.body).to match /BEGIN:VCALENDAR/
end
it "should have events" do
expect(response.body).to match /BEGIN:VEVENT/
end
it "should render all future events" do
expect(response.body).to match /SUMMARY:#{@current_event.title}/
end
it "should not render past events" do
expect(response.body).not_to match(/SUMMARY:#{@past_event.title}/)
end
end
end
describe "and filtering by date range" do
[:start, :end].each do |date_kind|
describe "for #{date_kind} date" do
let(:start_date) { Date.parse("2010-01-01") }
let(:end_date) { Date.parse("2010-04-01") }
let(:date_field) { "#{date_kind}_date" }
around do |example|
Timecop.freeze(start_date) do
example.run
end
end
it "should use the default if not given the parameter" do
get :index, :date => {}
expect(assigns[date_field]).to eq send(date_field)
expect(flash[:failure]).to be_nil
end
it "should use the default if given a malformed parameter" do
get :index, :date => "omgkittens"
expect(assigns[date_field]).to eq send(date_field)
expect(response.body).to have_selector(".flash_failure", text: 'invalid')
end
it "should use the default if given a missing parameter" do
get :index, :date => {:foo => "bar"}
expect(assigns[date_field]).to eq send(date_field)
expect(response.body).to have_selector(".flash_failure", text: 'invalid')
end
it "should use the default if given an empty parameter" do
get :index, :date => {date_kind => ""}
expect(assigns[date_field]).to eq send(date_field)
expect(response.body).to have_selector(".flash_failure", text: 'invalid')
end
it "should use the default if given an invalid parameter" do
get :index, :date => {date_kind => "omgkittens"}
expect(assigns[date_field]).to eq send(date_field)
expect(response.body).to have_selector(".flash_failure", text: 'invalid')
end
it "should use the value if valid" do
  expected = Date.yesterday
  # Use strftime for explicit formatting: Date#to_s does not take a strftime
  # pattern — ActiveSupport's to_s(format) silently falls back to the default
  # format for unknown keys, so the old call only worked by accident.
  get :index, :date => {date_kind => expected.strftime("%Y-%m-%d")}
  expect(assigns[date_field]).to eq expected
end
end
end
it "should return matching events" do
# Given
matching = [
Event.create!(
:title => "matching1",
:start_time => Time.zone.parse("2010-01-16 00:00"),
:end_time => Time.zone.parse("2010-01-16 01:00")
),
Event.create!(:title => "matching2",
:start_time => Time.zone.parse("2010-01-16 23:00"),
:end_time => Time.zone.parse("2010-01-17 00:00")
),
]
non_matching = [
Event.create!(
:title => "nonmatchingbefore",
:start_time => Time.zone.parse("2010-01-15 23:00"),
:end_time => Time.zone.parse("2010-01-15 23:59")
),
Event.create!(
:title => "nonmatchingafter",
:start_time => Time.zone.parse("2010-01-17 00:01"),
:end_time => Time.zone.parse("2010-01-17 01:00")
),
]
# When
get :index, :date => {:start => "2010-01-16", :end => "2010-01-16"}
results = assigns[:events]
# Then
expect(results.size).to eq 2
expect(results).to eq matching
end
end
end
# Specs for EventsController#show: renders a single event, redirects
# duplicates to their master record, and handles unknown ids gracefully.
describe "#show" do
it "should show an event" do
event = Event.new(:start_time => now)
expect(Event).to receive(:find).and_return(event)
get "show", :id => 1234
expect(response).to be_success
end
# An event flagged as a duplicate (duplicate_of set) must redirect to the
# master event's page instead of rendering itself.
it "should redirect from a duplicate event to its master" do
master = FactoryGirl.create(:event, id: 4321)
event = Event.new(:start_time => now, :duplicate_of => master)
expect(Event).to receive(:find).and_return(event)
get "show", :id => 1234
expect(response).to redirect_to(event_path(master))
end
# Missing records fall back to the events index with a flash failure.
it "should show an error when asked to display a non-existent event" do
expect(Event).to receive(:find).and_raise(ActiveRecord::RecordNotFound)
get "show", :id => 1234
expect(response).to redirect_to(events_path)
expect(flash[:failure]).not_to be_blank
end
end
describe "when creating and updating events" do
before do
@organization = FactoryGirl.create(:organization)
session[:organization_id] = @organization.id
@params = {
"end_date" => "2008-06-04",
"start_date" => "2008-06-03",
"event" => {
"title" => "MyVenue",
"url" => "http://my.venue",
"description" => "Wheeeee"
},
"end_time" => "",
"start_time" => ""
}.with_indifferent_access
@venue = FactoryGirl.build(:venue)
@event = FactoryGirl.build(:event, :venue => @venue, organization: @organization)
end
describe "#new" do
it "should display form for creating new event" do
get "new"
expect(response).to be_success
expect(response).to render_template :new
end
end
describe "#create" do
render_views
it "should create a new event without a venue" do
@params[:event][:venue_id] = nil
post "create", @params
@event = Event.find_by_title(@params[:event][:title])
expect(response).to redirect_to(@event)
end
it "should associate a venue based on a given venue id" do
@venue.save!
@params[:event][:venue_id] = @venue.id.to_s
post "create", @params
@event = Event.find_by_title(@params[:event][:title])
expect(@event.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should associate a venue based on a given venue name" do
@venue.save!
@params[:venue_name] = @venue.title
post "create", @params
@event = Event.find_by_title(@params[:event][:title])
expect(@event.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should associate a venue by id when both an id and a name are provided" do
@venue.save!
@venue2 = FactoryGirl.create(:venue)
@params[:event][:venue_id] = @venue.id.to_s
@params[:venue_name] = @venue2.title
post "create", @params
@event = Event.find_by_title(@params[:event][:title])
expect(@event.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should create a new event and new venue, and redirect to venue edit form" do
@params[:venue_name] = "New Venue"
post "create", @params
@event = Event.find_by_title(@params[:event][:title])
@venue = Venue.find_by_title("New Venue")
expect(@event.venue).to eq(@venue)
expect(response).to redirect_to(edit_venue_url(@venue, :from_event => @event.id))
end
it "should catch errors and redisplay the new event form" do
post "create"
expect(response).to render_template :new
end
it "should stop evil robots" do
post "create", :trap_field => "I AM AN EVIL ROBOT, I EAT OLD PEOPLE'S MEDICINE FOR FOOD!"
expect(response).to render_template :new
expect(flash[:failure]).to match /evil robot/i
end
it "should not allow too many links in the description" do
@params[:event][:description] = <<-DESC
http://example.com
https://example.com
http://example.net
https://example.net
DESC
post "create", @params
expect(response).to render_template :new
expect(flash[:failure]).to match /too many links/i
end
it "should accept HTTP-rich presentation descriptions without too many links" do
@params[:event][:description] = <<-DESC
I hereby offer to give a presentation at the August ruby meeting about the faraday
gem (https://github.com/lostisland/faraday) and how compares to or compliments other
HTTP client libraries such as httparty (https://github.com/jnunemaker/httparty).
--
I wouldn't mind seeing a PDX.pm talk about HTTP::Tiny vs Net::HTTP::Tiny vs Net::HTTP
vs HTTP::Client vs HTTP::Client::Parallel
DESC
post "create", @params
expect(flash[:failure]).to be_nil
end
it "should allow the user to preview the event" do
@params[:preview] = "Preview"
post "create", @params
expect(response).to render_template :new
expect(response.body).to have_selector '#event_preview'
end
it "should create an event for an existing venue" do
venue = FactoryGirl.create(:venue)
post "create",
:start_time => now.strftime("%Y-%m-%d"),
:end_time => (now + 1.hour).strftime("%Y-%m-%d"),
:event => {
:title => "My Event",
:tag_list => ",,foo,bar, baz,",
},
:venue_name => venue.title
expect(response).to be_redirect
expect(flash[:success]).to be_present
event = assigns[:event]
expect(event.title).to eq "My Event"
expect(event.venue.title).to eq venue.title
expect(event.venue.id).to eq venue.id
expect(event.tag_list.to_a.sort).to eq %w[bar baz foo]
end
end
describe "#update" do
before(:each) do
@event = FactoryGirl.create(:event, :with_venue, id: 42)
@venue = @event.venue
@params.merge!(id: 42)
end
it "should display form for editing event" do
get "edit", id: 42
expect(response).to be_success
expect(response).to render_template :edit
end
it "should update an event without a venue" do
@event.venue = nil
put "update", @params
expect(response).to redirect_to(@event)
end
it "should associate a venue based on a given venue id" do
@venue = FactoryGirl.create(:venue)
@params[:event][:venue_id] = @venue.id.to_s
put "update", @params
expect(@event.reload.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should associate a venue based on a given venue name" do
@venue = FactoryGirl.create(:venue)
@params[:venue_name] = @venue.title
put "update", @params
expect(@event.reload.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should associate a venue by id when both an id and a name are provided" do
@venue = FactoryGirl.create(:venue)
@venue2 = FactoryGirl.create(:venue)
@params[:event][:venue_id] = @venue.id.to_s
@params[:venue_name] = @venue2.title
put "update", @params
expect(@event.reload.venue).to eq(@venue)
expect(response).to redirect_to(@event)
end
it "should update an event and create a new venue, and redirect to the venue edit form" do
@params[:venue_name] = "New Venue"
put "update", @params
@venue = Venue.find_by_title("New Venue")
expect(response).to redirect_to(edit_venue_url(@venue, :from_event => @event.id))
end
it "should catch errors and redisplay the new event form" do
@params[:event][:title] = nil
put "update", @params
expect(response).to render_template :edit
end
it "should stop evil robots" do
@params[:trap_field] = "I AM AN EVIL ROBOT, I EAT OLD PEOPLE'S MEDICINE FOR FOOD!"
put "update", @params
expect(response).to render_template :edit
expect(flash[:failure]).to match /evil robot/i
end
it "should not allow too many links in the description" do
@params[:event][:description] = <<-DESC
http://example.com
https://example.com
http://example.net
https://example.net
DESC
put "update", @params
expect(response).to render_template :edit
expect(flash[:failure]).to match /too many links/i
end
it "should allow the user to preview the event" do
put "update", @params.merge(:preview => "Preview")
expect(response).to render_template :edit
end
it "should not allow a user to update a locked event" do
@event.lock_editing!
put "update", @params
expect(response).to be_redirect
expect(flash[:failure]).to match /not permitted/i
end
end
describe "#clone" do
before do
@event = FactoryGirl.create(:event)
allow(Event).to receive(:find).and_return(@event)
get "clone", :id => 1
end
it "should build an unsaved record" do
record = assigns[:event]
expect(record).to be_a_new_record
expect(record.id).to be_nil
end
it "should build a cloned record similar to the existing record" do
record = assigns[:event]
%w[title description venue_id venue_details].each do |field|
expect(record.attributes[field]).to eq @event.attributes[field]
end
end
it "should display a new event form" do
expect(response).to be_success
expect(response).to render_template :new
end
it "should have notice with cloning instructions" do
expect(flash[:success]).to match /clone/i
end
end
end
describe "#duplicates" do
render_views
it "should find current duplicates and not past duplicates" do
current_master = FactoryGirl.create(:event, :title => "Current")
current_duplicate = FactoryGirl.create(:event, :title => current_master.title)
past_master = FactoryGirl.create(:event, :title => "Past", :start_time => now - 2.days)
past_duplicate = FactoryGirl.create(:event, :title => past_master.title, :start_time => now - 1.day)
get 'duplicates', :type => 'title'
# Current duplicates
assigns[:grouped_events].select{|keys,values| keys.include?(current_master.title)}.tap do |events|
expect(events).not_to be_empty
expect(events.first.last.size).to eq 2
end
# Past duplicates
expect(assigns[:grouped_events].select{|keys,values| keys.include?(past_master.title)}).to be_empty
end
it "should redirect duplicate events to their master" do
event_master = FactoryGirl.create(:event)
event_duplicate = FactoryGirl.create(:event)
get 'show', :id => event_duplicate.id
expect(response).not_to be_redirect
expect(assigns(:event).id).to eq event_duplicate.id
event_duplicate.duplicate_of = event_master
event_duplicate.save!
get 'show', :id => event_duplicate.id
expect(response).to be_redirect
expect(response).to redirect_to(event_url(event_master.id))
end
it "should display an error message if given invalid arguments" do
get 'duplicates', :type => 'omgwtfbbq'
expect(response).to be_success
expect(response.body).to have_selector('.failure', text: 'omgwtfbbq')
end
end
context do
include_examples "#squash_many_duplicates", :event
end
describe "#search" do
describe "when returning results" do
render_views
let!(:current_event) { FactoryGirl.create(:event, :with_venue, title: "MyQuery") }
let!(:current_event_2) { FactoryGirl.create(:event, :with_venue, description: "WOW myquery!") }
let!(:past_event) { FactoryGirl.create(:event, :with_venue, title: "old myquery") }
describe "in HTML format" do
before do
post :search, :query => "myquery", :format => "html"
end
it "should assign search result" do
expect(assigns[:search]).to be_a Event::Search
end
it "should assign matching events" do
expect(assigns[:events]).to match_array([current_event, current_event_2, past_event])
end
it "should render matching events" do
have_selector "table.event_table" do
have_selector ".vevent a.summary", :href => event_url(results[:past])
have_selector ".vevent a.summary", :href => event_url(results[:current])
end
end
describe "sidebar" do
it "should have iCalendar feed" do
have_selector ".sidebar a", :href => search_events_url(:query => @query, :format => "ics", :protocol => "webcal")
end
it "should have Atom feed" do
have_selector ".sidebar a", :href => search_events_url(:query => @query, :format => "atom")
end
it "should have Google subscription" do
ics_url = search_events_url(query: @query, format: 'ics')
google_url = "https://www.google.com/calendar/render?cid=#{ics_url}"
have_selector ".sidebar a", href: google_url
end
end
end
describe "in XML format" do
it "should produce XML" do
post :search, :query => "myquery", :format => "xml"
hash = Hash.from_xml(response.body)
expect(hash["events"]).to be_a_kind_of Array
end
it "should include venue details" do
  post :search, :query => "myquery", :format => "xml"
  hash = Hash.from_xml(response.body)
  event = hash["events"].first
  venue = event["venue"]
  venue_title = venue["title"]
  expect(venue_title).to be_a_kind_of String
  # Assert the title itself is non-blank. The original checked
  # `venue_title.length` for presence, which is vacuous: 0 is "present"
  # under ActiveSupport, so an empty title would still pass.
  expect(venue_title).to be_present
end
end
describe "in JSON format" do
it "should produce JSON" do
post :search, :query => "myquery", :format => "json"
struct = ActiveSupport::JSON.decode(response.body)
expect(struct).to be_a_kind_of Array
end
it "should accept a JSONP callback" do
post :search, :query => "myquery", :format => "json", :callback => "some_function"
expect(response.body).to match /^\s*some_function\(.*\);?\s*$/
end
it "should include venue details" do
  post :search, :query => "myquery", :format => "json"
  struct = ActiveSupport::JSON.decode(response.body)
  event = struct.first
  expect(event["venue"]["title"]).to be_a_kind_of String
  # Assert presence of the title itself: `.length` is always "present"
  # (even when 0), so the original assertion could never fail.
  expect(event["venue"]["title"]).to be_present
end
end
describe "in ATOM format" do
it "should produce ATOM" do
post :search, :query => "myquery", :format => "atom"
# Atom is XML, so Hash.from_xml exposes entries under "feed"/"entry".
hash = Hash.from_xml(response.body)
expect(hash["feed"]["entry"]).to be_a_kind_of Array
end
end
describe "in ICS format" do
it "should produce ICS" do
post :search, :query => "myquery", :format => "ics"
expect(response.body).to match /BEGIN:VEVENT/
end
it "should produce events matching the query" do
post :search, :query => "myquery", :format => "ics"
# Matches the event titles in the iCalendar SUMMARY lines.
expect(response.body).to match /SUMMARY:#{current_event_2.title}/
expect(response.body).to match /SUMMARY:#{past_event.title}/
end
end
describe "failures" do
it "sets search failures in the flash message" do
# Stub every Event::Search instance so the controller sees a failure.
allow_any_instance_of(Event::Search).to receive_messages failure_message: "OMG"
post :search
expect(flash[:failure]).to eq("OMG")
end
it "redirects to home if hard failure" do
allow_any_instance_of(Event::Search).to receive_messages hard_failure?: true
post :search
expect(response).to redirect_to(root_path)
end
end
end
end
describe "#destroy" do
it "should destroy events" do
# Stub the lookup so the controller operates on this in-memory event;
# the id in the request is therefore arbitrary.
event = FactoryGirl.build(:event)
expect(event).to receive(:destroy)
expect(Event).to receive(:find).and_return(event)
delete 'destroy', :id => 1234
expect(response).to redirect_to(events_url)
end
it "should not allow a user to destroy a locked event" do
event = FactoryGirl.create(:event)
event.lock_editing!
delete 'destroy', :id => event.id
expect(response).to be_redirect
expect(flash[:failure]).to match /not permitted/i
end
end
end
|
require File.dirname(__FILE__) + '/../spec_helper'
# GET #index: stubs the current site and its ordered forums, then relies
# on the deferred example macros for the assertions.
# NOTE(review): `define_models`, `act!` and the `it.assigns`/`it.renders`
# macros come from a spec macro plugin (RSpec 1 era), not standard RSpec.
describe ForumsController, "GET #index" do
define_models :stubbed
act! { get :index }
before do
@site = sites(:default)
@forums = [forums(:other), forums(:default)]
@site.stub!(:ordered_forums).and_return(@forums)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
# Seed a stale page number; the action is expected to clear it.
session[:forum_page] = 5
end
it.assigns :forums, :session => { :forum_page => nil }
it.renders :template, :index
describe ForumsController, "(xml)" do
define_models :stubbed
act! { get :index, :format => 'xml' }
it.assigns :forums
it.renders :xml, :forums
end
end
# GET #show: stubs the forum lookup and topic pagination; nested contexts
# cover the paged and XML variants.
describe ForumsController, "GET #show" do
define_models :stubbed
act! { get :show, :id => 1 }
before do
@site = sites(:default)
@forum = forums(:default)
@topics = [topics(:default)]
@site.forums.stub!(:find).with('1').and_return(@forum)
@forum.topics.stub!(:paginate).with(:page => nil).and_return(@topics)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
@controller.stub!(:logged_in?).and_return(false)
end
it.assigns :topics, :forum, :session => { :forums => :undefined, :forum_page => :undefined }
it.renders :template, :show
it "sets session[:forums] if logged in" do
@controller.stub!(:logged_in?).and_return(true)
act!
# NOTE(review): `current_time` is presumably a helper from the spec
# plugin that freezes/returns the test clock — TODO confirm.
session[:forums][@forum.id].should == current_time
end
describe ForumsController, "(paged)" do
define_models :stubbed
act! { get :show, :id => 1, :page => 5 }
before do
@forum.topics.stub!(:paginate).with(:page => '5').and_return(@topics)
end
it.assigns :session => { :forum_page => lambda { {@forum.id => 5} } }
end
describe ForumsController, "(xml)" do
define_models :stubbed
act! { get :show, :id => 1, :format => 'xml' }
it.assigns :topics => :undefined
it.renders :xml, :forum
end
end
# GET #new: the action should build a fresh Forum for the view.
describe ForumsController, "GET #new" do
define_models :stubbed
act! { get :new }
before do
@site = sites(:default)
@forum = Forum.new
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
end
it "assigns @forum" do
act!
# BUG FIX: the original asserted `@forum.should be_new_record` — the
# example's own `Forum.new`, which is always a new record regardless of
# what the controller did. Check the controller's assigned instance.
assigns[:forum].should be_new_record
end
it.renders :template, :new
describe ForumsController, "(xml)" do
define_models :stubbed
act! { get :new, :format => 'xml' }
it.renders :xml, :forum
end
end
# GET #edit: finds the forum through the current site's association.
describe ForumsController, "GET #edit" do
define_models :stubbed
act! { get :edit, :id => 1 }
before do
@site = sites(:default)
@forum = forums(:default)
@site.forums.stub!(:find).with('1').and_return(@forum)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
end
it.assigns :forum
it.renders :template, :edit
end
# POST #create: a mock forum is built from the attributes; the nested
# contexts toggle `save` to exercise the success and failure branches,
# each in both HTML and XML.
describe ForumsController, "POST #create" do
before do
@attributes = {'name' => "Default"}
@forum = mock_model Forum, :new_record? => false, :errors => []
@site = sites(:default)
@site.forums.stub!(:build).with(@attributes).and_return(@forum)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
end
describe ForumsController, "(successful creation)" do
define_models :stubbed
act! { post :create, :forum => @attributes }
before do
@forum.stub!(:save).and_return(true)
end
it.assigns :forum, :flash => { :notice => :not_nil }
it.redirects_to { forum_path(@forum) }
end
describe ForumsController, "(successful creation, xml)" do
define_models :stubbed
act! { post :create, :forum => @attributes, :format => 'xml' }
before do
@forum.stub!(:save).and_return(true)
@forum.stub!(:to_xml).and_return("<forum />")
end
it.assigns :forum, :headers => { :Location => lambda { forum_url(@forum) } }
it.renders :xml, :forum, :status => :created
end
describe ForumsController, "(unsuccessful creation)" do
define_models :stubbed
act! { post :create, :forum => @attributes }
before do
@forum.stub!(:save).and_return(false)
end
it.assigns :forum
it.renders :template, :new
end
describe ForumsController, "(unsuccessful creation, xml)" do
define_models :stubbed
act! { post :create, :forum => @attributes, :format => 'xml' }
before do
@forum.stub!(:save).and_return(false)
end
it.assigns :forum
it.renders :xml, "forum.errors", :status => :unprocessable_entity
end
end
# PUT #update: mirrors #create — nested contexts toggle `save` for the
# success/failure branches in both HTML and XML.
describe ForumsController, "PUT #update" do
before do
@attributes = {'name' => "Default"}
@forum = forums(:default)
@site = sites(:default)
@site.forums.stub!(:find).with('1').and_return(@forum)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
end
describe ForumsController, "(successful save)" do
define_models :stubbed
act! { put :update, :id => 1, :forum => @attributes }
before do
@forum.stub!(:save).and_return(true)
end
it.assigns :forum, :flash => { :notice => :not_nil }
it.redirects_to { forum_path(@forum) }
end
describe ForumsController, "(successful save, xml)" do
define_models :stubbed
act! { put :update, :id => 1, :forum => @attributes, :format => 'xml' }
before do
@forum.stub!(:save).and_return(true)
end
it.assigns :forum
it.renders :blank
end
describe ForumsController, "(unsuccessful save)" do
define_models :stubbed
act! { put :update, :id => 1, :forum => @attributes }
before do
@forum.stub!(:save).and_return(false)
end
it.assigns :forum
it.renders :template, :edit
end
describe ForumsController, "(unsuccessful save, xml)" do
define_models :stubbed
act! { put :update, :id => 1, :forum => @attributes, :format => 'xml' }
before do
@forum.stub!(:save).and_return(false)
end
it.assigns :forum
it.renders :xml, "forum.errors", :status => :unprocessable_entity
end
end
# DELETE #destroy: stubs lookup and destroy; HTML redirects to the index,
# XML renders an empty body.
describe ForumsController, "DELETE #destroy" do
define_models :stubbed
act! { delete :destroy, :id => 1 }
before do
@forum = forums(:default)
@forum.stub!(:destroy)
@site = sites(:default)
@site.forums.stub!(:find).with('1').and_return(@forum)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
end
it.assigns :forum
it.redirects_to { forums_path }
describe ForumsController, "(xml)" do
define_models :stubbed
act! { delete :destroy, :id => 1, :format => 'xml' }
it.assigns :forum
it.renders :blank
end
end
Check the controller's assigned instance variable (via `assigns`), not the example's own mocked variable.
require File.dirname(__FILE__) + '/../spec_helper'
# GET #index (second revision of this spec file; identical to the first).
# NOTE(review): `define_models`, `act!` and the `it.*` macros come from a
# spec macro plugin, not standard RSpec.
describe ForumsController, "GET #index" do
define_models :stubbed
act! { get :index }
before do
@site = sites(:default)
@forums = [forums(:other), forums(:default)]
@site.stub!(:ordered_forums).and_return(@forums)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
session[:forum_page] = 5
end
it.assigns :forums, :session => { :forum_page => nil }
it.renders :template, :index
describe ForumsController, "(xml)" do
define_models :stubbed
act! { get :index, :format => 'xml' }
it.assigns :forums
it.renders :xml, :forums
end
end
# GET #show: stubs forum lookup and topic pagination; nested contexts
# cover the paged and XML variants.
describe ForumsController, "GET #show" do
define_models :stubbed
act! { get :show, :id => 1 }
before do
@site = sites(:default)
@forum = forums(:default)
@topics = [topics(:default)]
@site.forums.stub!(:find).with('1').and_return(@forum)
@forum.topics.stub!(:paginate).with(:page => nil).and_return(@topics)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
@controller.stub!(:logged_in?).and_return(false)
end
it.assigns :topics, :forum, :session => { :forums => :undefined, :forum_page => :undefined }
it.renders :template, :show
it "sets session[:forums] if logged in" do
@controller.stub!(:logged_in?).and_return(true)
act!
# NOTE(review): `current_time` is presumably a test-clock helper from
# the spec plugin — TODO confirm.
session[:forums][@forum.id].should == current_time
end
describe ForumsController, "(paged)" do
define_models :stubbed
act! { get :show, :id => 1, :page => 5 }
before do
@forum.topics.stub!(:paginate).with(:page => '5').and_return(@topics)
end
it.assigns :session => { :forum_page => lambda { {@forum.id => 5} } }
end
describe ForumsController, "(xml)" do
define_models :stubbed
act! { get :show, :id => 1, :format => 'xml' }
it.assigns :topics => :undefined
it.renders :xml, :forum
end
end
# GET #new (fixed revision): asserts on the controller's assigned forum
# via `assigns[:forum]`, not on the example's own `@forum`.
describe ForumsController, "GET #new" do
define_models :stubbed
act! { get :new }
before do
@site = sites(:default)
@forum = Forum.new
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
end
it "assigns @forum" do
act!
assigns[:forum].should be_new_record
end
it.renders :template, :new
describe ForumsController, "(xml)" do
define_models :stubbed
act! { get :new, :format => 'xml' }
it.renders :xml, :forum
end
end
# GET #edit: finds the forum through the current site's association.
describe ForumsController, "GET #edit" do
define_models :stubbed
act! { get :edit, :id => 1 }
before do
@site = sites(:default)
@forum = forums(:default)
@site.forums.stub!(:find).with('1').and_return(@forum)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
end
it.assigns :forum
it.renders :template, :edit
end
# POST #create: nested contexts toggle `save` to exercise success and
# failure in both HTML and XML.
describe ForumsController, "POST #create" do
before do
@attributes = {'name' => "Default"}
@forum = mock_model Forum, :new_record? => false, :errors => []
@site = sites(:default)
@site.forums.stub!(:build).with(@attributes).and_return(@forum)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
end
describe ForumsController, "(successful creation)" do
define_models :stubbed
act! { post :create, :forum => @attributes }
before do
@forum.stub!(:save).and_return(true)
end
it.assigns :forum, :flash => { :notice => :not_nil }
it.redirects_to { forum_path(@forum) }
end
describe ForumsController, "(successful creation, xml)" do
define_models :stubbed
act! { post :create, :forum => @attributes, :format => 'xml' }
before do
@forum.stub!(:save).and_return(true)
@forum.stub!(:to_xml).and_return("<forum />")
end
it.assigns :forum, :headers => { :Location => lambda { forum_url(@forum) } }
it.renders :xml, :forum, :status => :created
end
describe ForumsController, "(unsuccessful creation)" do
define_models :stubbed
act! { post :create, :forum => @attributes }
before do
@forum.stub!(:save).and_return(false)
end
it.assigns :forum
it.renders :template, :new
end
describe ForumsController, "(unsuccessful creation, xml)" do
define_models :stubbed
act! { post :create, :forum => @attributes, :format => 'xml' }
before do
@forum.stub!(:save).and_return(false)
end
it.assigns :forum
it.renders :xml, "forum.errors", :status => :unprocessable_entity
end
end
# PUT #update: mirrors #create — success/failure branches in HTML and XML.
describe ForumsController, "PUT #update" do
before do
@attributes = {'name' => "Default"}
@forum = forums(:default)
@site = sites(:default)
@site.forums.stub!(:find).with('1').and_return(@forum)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
end
describe ForumsController, "(successful save)" do
define_models :stubbed
act! { put :update, :id => 1, :forum => @attributes }
before do
@forum.stub!(:save).and_return(true)
end
it.assigns :forum, :flash => { :notice => :not_nil }
it.redirects_to { forum_path(@forum) }
end
describe ForumsController, "(successful save, xml)" do
define_models :stubbed
act! { put :update, :id => 1, :forum => @attributes, :format => 'xml' }
before do
@forum.stub!(:save).and_return(true)
end
it.assigns :forum
it.renders :blank
end
describe ForumsController, "(unsuccessful save)" do
define_models :stubbed
act! { put :update, :id => 1, :forum => @attributes }
before do
@forum.stub!(:save).and_return(false)
end
it.assigns :forum
it.renders :template, :edit
end
describe ForumsController, "(unsuccessful save, xml)" do
define_models :stubbed
act! { put :update, :id => 1, :forum => @attributes, :format => 'xml' }
before do
@forum.stub!(:save).and_return(false)
end
it.assigns :forum
it.renders :xml, "forum.errors", :status => :unprocessable_entity
end
end
# DELETE #destroy: stubs lookup and destroy; HTML redirects to the index,
# XML renders an empty body.
describe ForumsController, "DELETE #destroy" do
define_models :stubbed
act! { delete :destroy, :id => 1 }
before do
@forum = forums(:default)
@forum.stub!(:destroy)
@site = sites(:default)
@site.forums.stub!(:find).with('1').and_return(@forum)
@controller.stub!(:current_site).and_return(@site)
@controller.stub!(:admin_required).and_return(true)
end
it.assigns :forum
it.redirects_to { forums_path }
describe ForumsController, "(xml)" do
define_models :stubbed
act! { delete :destroy, :id => 1, :format => 'xml' }
it.assigns :forum
it.renders :blank
end
end
require File.dirname(__FILE__) + '/../spec_helper'
# Specs for SearchController#radius and its private helpers.
describe SearchController do
before(:each) do
@address = "169 N. Berkeley Ave., Pasadena, CA"
@location = mock_location(:geocode_precision => "address", :geocode_address => "123 Fake Lane, City, State", :lat =>0, :lng => 0)
end
it "should assign radii and types on radius_search" do
get :radius
# NOTE(review): these lines only read assigns and assert nothing; the
# example merely verifies the action runs without error — TODO tighten.
assigns[:location]
assigns[:radii]
assigns[:types]
end
it "should find the closest result if no search results are present" do
Location.should_receive(:find).and_return([])
Location.should_receive(:find_closest).with(:origin => @address,
:within => 100, :conditions => 'lat is not null and lng is not null and hidden = false')
get :radius, :address => @address, :radius => "5"
end
it "should call find with the results of the find_params method" do
controller.should_receive(:find_params).and_return(:find_params)
Location.should_receive(:find).with(:all, :find_params).and_return([@location])
get :radius, :address => @address, :radius => "5"
end
describe "with type" do
it 'should not raise error when type is invalid' do
Location.stub!(:find).and_return([])
lambda{get :radius, :type => "bogus_type", :radius => "5", :address => @address}.should_not raise_error
end
it "should find the closest result if no search results are present" do
go_club = mock_model(Type, :name => "Go Club")
Location.should_receive(:find).and_return([])
# BUG FIX: stub a distance on the closest result — the bare
# stub_model(Location) has no distance, which broke rendering the
# "closest" result in the view.
closest = stub_model(Location, :distance => 0)
Location.should_receive(:find_closest).
with(:origin => @address, :within => 100,
:conditions => ['lat is not null and lng is not null and hidden = false and type_id = ?', go_club.id]).
and_return(closest)
get :radius, :type_id => go_club.id, :radius => "5",
:address => @address
assigns[:closest].should == closest
end
end
# Each example builds the expected view results (headers interleaved with
# locations), then feeds the controller the same list minus the headers.
describe "should add location headings" do
it "when results have no address" do
view_results = [
Location::LocationHeader.new("City, State", :city, "4.1"),
mock_location(:type => :go_club, :name => "Location 1", :geocode_precision => "city", :geocode_address => "City, State", :distance => "4.1", :lat => 0, :lng => 0),
mock_location(:type => :go_club, :name => "Location 2", :geocode_precision => "city", :geocode_address => "City, State", :distance => "4.1", :lat => 0, :lng => 0)
]
db_results = view_results.clone.delete_if{|loc| loc.is_a? Location::LocationHeader}
Location.should_receive(:find).and_return(db_results)
get :radius, :radius => "5", :address => '00000'
assigns[:results].should == view_results
end
it "when results have addresses, use average distance" do
view_results = [
Location::LocationHeader.new("City, State", :city, "4.1"),
mock_location(:type => :go_club, :name => "Location 1", :geocode_precision => "address", :geocode_address => '234 Sesame St., City, State', :distance => "4.0", :lat => 0, :lng => 0),
mock_location(:type => :go_club, :name => "Location 2", :geocode_precision => "address", :geocode_address => '123 Sesame St., City, State', :distance => "4.2", :lat => 0, :lng => 0)
]
db_results = view_results.clone.delete_if{|loc| loc.is_a? Location::LocationHeader}
Location.should_receive(:find).and_return(db_results)
get :radius, :radius => "5", :address => '00000'
assigns[:results].should == view_results
end
it "when some results have addresses and some don't" do
view_results = [
Location::LocationHeader.new("City, State", :city, "4.2"),
mock_location(:type => :go_club, :name => "Location 1", :geocode_precision => "address", :geocode_address => '234 Sesame St., City, State', :distance => "4.0", :lat => 0, :lng => 0),
mock_location(:type => :go_club, :name => "Location 2", :geocode_precision => "city", :geocode_address => 'City, State', :distance => "4.2", :lat => 0, :lng => 0)
]
db_results = view_results.clone.delete_if{|loc| loc.is_a? Location::LocationHeader}
Location.should_receive(:find).and_return(db_results)
get :radius, :radius => "5", :address => '00000'
assigns[:results].should == view_results
end
it "for each city" do
view_results = [
Location::LocationHeader.new("City, State", :city, "4.2"),
mock_location(:type => :go_club, :name => "Location 1", :geocode_precision => "address", :geocode_address => '234 Sesame St., City, State', :distance => "4.0", :lat => 0, :lng => 0),
mock_location(:type => :go_club, :name => "Location 2", :geocode_precision => "city", :geocode_address => 'City, State', :distance => "4.2", :lat => 0, :lng => 0),
Location::LocationHeader.new("City 2, State", :city, "5.2"),
mock_location(:type => :go_club, :name => "Location 1", :geocode_precision => "address", :geocode_address => '234 Sesame St., City 2, State', :distance => "5.0", :lat => 0, :lng => 0),
mock_location(:type => :go_club, :name => "Location 2", :geocode_precision => "city", :geocode_address => 'City 2, State', :distance => "5.2", :lat => 0, :lng => 0)
]
db_results = view_results.clone.delete_if{|loc| loc.is_a? Location::LocationHeader}
Location.should_receive(:find).and_return(db_results)
get :radius, :radius => "5", :address => '00000'
assigns[:results].should == view_results
end
end
describe :find_params do
it "should return a Hash" do
controller.send(:find_params).should be_kind_of(Hash)
end
it "should contain origin, within, and order params" do
controller.instance_eval do
@address = :address
@radius = :radius
end
controller.send(:find_params).should == {:origin => :address, :within => :radius, :order => :distance, :conditions => "hidden = false"}
end
it "should include the type_id if it is greater than 0" do
controller.instance_eval {@type_id = 42}
controller.send(:find_params)[:conditions].should == ['type_id = ? AND hidden = false', 42]
end
end
describe :location_heading do
before :each do
@location = Location.new(:city => "City", :state => "State",
:zip_code => '00000', :country => "USA")
end
it "should display 'city, state' when city and state are present" do
controller.send(:location_heading, @location).should == 'City, State'
end
# NOTE(review): duplicate of the example above (same name, same body) —
# kept for parity with the original file; consider removing one.
it "should display 'city, state' when city and state are present" do
controller.send(:location_heading, @location).should == 'City, State'
end
it "should display 'zip, state' when city is not present but zip is" do
@location.city = nil
controller.send(:location_heading, @location).should == '00000, State'
end
it "should display 'state, country' when city and zip are blank" do
@location.city = @location.zip_code = nil
controller.send(:location_heading, @location).should == 'State, USA'
end
it "should display 'country' when only country is present" do
@location.city = @location.zip_code = @location.state = nil
controller.send(:location_heading, @location).should == 'USA'
end
end
# Builds a mock Location from a geocoded address string, deriving the
# street/city/state components from the comma-separated pieces.
def mock_location(options)
options[:geocode_precision] ||= "city"
options[:geocode_address] ||= case options[:geocode_precision]
when "address"
"123 Number St., City, State"
when "city"
"City, State"
end
components = options[:geocode_address].split(/,/)
if components.size == 3 # address, city, state
options[:street_address] ||= components[0].strip
options[:city] ||= components[1].strip
options[:state] ||= components[2].strip
elsif components.size == 2 # city, state
options[:city] ||= components[0].strip
options[:state] ||= components[1].strip
else
raise "Invalid number of components in address"
end
options[:distance] ||= "0"
mock_model(Location, options)
end
end
Fix broken RSpec test: stub `distance` on the closest Location so the stubbed result can be rendered.
require File.dirname(__FILE__) + '/../spec_helper'
describe SearchController do
before(:each) do
@address = "169 N. Berkeley Ave., Pasadena, CA"
@location = mock_location(:geocode_precision => "address", :geocode_address => "123 Fake Lane, City, State", :lat =>0, :lng => 0)
end
it "should assign radii and types on radius_search" do
get :radius
assigns[:location]
assigns[:radii]
assigns[:types]
end
it "should find the closest result if no search results are present" do
Location.should_receive(:find).and_return([])
Location.should_receive(:find_closest).with(:origin => @address,
:within => 100, :conditions => 'lat is not null and lng is not null and hidden = false')
get :radius, :address => @address, :radius => "5"
end
it "should call find with the results of the find_params method" do
controller.should_receive(:find_params).and_return(:find_params)
Location.should_receive(:find).with(:all, :find_params).and_return([@location])
get :radius, :address => @address, :radius => "5"
end
describe "with type" do
it 'should not raise error when type is invalid' do
Location.stub!(:find).and_return([])
lambda{get :radius, :type => "bogus_type", :radius => "5", :address => @address}.should_not raise_error
end
it "should find the closest result if no search results are present" do
go_club = mock_model(Type, :name => "Go Club")
Location.should_receive(:find).and_return([])
closest = stub_model(Location, :distance => 0)
Location.should_receive(:find_closest).
with(:origin => @address, :within => 100,
:conditions => ['lat is not null and lng is not null and hidden = false and type_id = ?', go_club.id]).
and_return(closest)
get :radius, :type_id => go_club.id, :radius => "5",
:address => @address
assigns[:closest].should == closest
end
end
describe "should add location headings" do
it "when results have no address" do
view_results = [
Location::LocationHeader.new("City, State", :city, "4.1"),
mock_location(:type => :go_club, :name => "Location 1", :geocode_precision => "city", :geocode_address => "City, State", :distance => "4.1", :lat => 0, :lng => 0),
mock_location(:type => :go_club, :name => "Location 2", :geocode_precision => "city", :geocode_address => "City, State", :distance => "4.1", :lat => 0, :lng => 0)
]
db_results = view_results.clone.delete_if{|loc| loc.is_a? Location::LocationHeader}
Location.should_receive(:find).and_return(db_results)
get :radius, :radius => "5", :address => '00000'
assigns[:results].should == view_results
end
it "when results have addresses, use average distance" do
view_results = [
Location::LocationHeader.new("City, State", :city, "4.1"),
mock_location(:type => :go_club, :name => "Location 1", :geocode_precision => "address", :geocode_address => '234 Sesame St., City, State', :distance => "4.0", :lat => 0, :lng => 0),
mock_location(:type => :go_club, :name => "Location 2", :geocode_precision => "address", :geocode_address => '123 Sesame St., City, State', :distance => "4.2", :lat => 0, :lng => 0)
]
db_results = view_results.clone.delete_if{|loc| loc.is_a? Location::LocationHeader}
Location.should_receive(:find).and_return(db_results)
get :radius, :radius => "5", :address => '00000'
assigns[:results].should == view_results
end
it "when some results have addresses and some don't" do
view_results = [
Location::LocationHeader.new("City, State", :city, "4.2"),
mock_location(:type => :go_club, :name => "Location 1", :geocode_precision => "address", :geocode_address => '234 Sesame St., City, State', :distance => "4.0", :lat => 0, :lng => 0),
mock_location(:type => :go_club, :name => "Location 2", :geocode_precision => "city", :geocode_address => 'City, State', :distance => "4.2", :lat => 0, :lng => 0)
]
db_results = view_results.clone.delete_if{|loc| loc.is_a? Location::LocationHeader}
Location.should_receive(:find).and_return(db_results)
get :radius, :radius => "5", :address => '00000'
assigns[:results].should == view_results
end
it "for each city" do
view_results = [
Location::LocationHeader.new("City, State", :city, "4.2"),
mock_location(:type => :go_club, :name => "Location 1", :geocode_precision => "address", :geocode_address => '234 Sesame St., City, State', :distance => "4.0", :lat => 0, :lng => 0),
mock_location(:type => :go_club, :name => "Location 2", :geocode_precision => "city", :geocode_address => 'City, State', :distance => "4.2", :lat => 0, :lng => 0),
Location::LocationHeader.new("City 2, State", :city, "5.2"),
mock_location(:type => :go_club, :name => "Location 1", :geocode_precision => "address", :geocode_address => '234 Sesame St., City 2, State', :distance => "5.0", :lat => 0, :lng => 0),
mock_location(:type => :go_club, :name => "Location 2", :geocode_precision => "city", :geocode_address => 'City 2, State', :distance => "5.2", :lat => 0, :lng => 0)
]
db_results = view_results.clone.delete_if{|loc| loc.is_a? Location::LocationHeader}
Location.should_receive(:find).and_return(db_results)
get :radius, :radius => "5", :address => '00000'
assigns[:results].should == view_results
end
end
describe :find_params do
it "should return a Hash" do
controller.send(:find_params).should be_kind_of(Hash)
end
it "should contain origin, within, and order params" do
controller.instance_eval do
@address = :address
@radius = :radius
end
controller.send(:find_params).should == {:origin => :address, :within => :radius, :order => :distance, :conditions => "hidden = false"}
end
it "should include the type_id if it is greater than 0" do
controller.instance_eval {@type_id = 42}
controller.send(:find_params)[:conditions].should == ['type_id = ? AND hidden = false', 42]
end
end
describe :location_heading do
before :each do
@location = Location.new(:city => "City", :state => "State",
:zip_code => '00000', :country => "USA")
end
it "should display 'city, state' when city and state are present" do
controller.send(:location_heading, @location).should == 'City, State'
end
it "should display 'city, state' when city and state are present" do
controller.send(:location_heading, @location).should == 'City, State'
end
it "should display 'zip, state' when city is not present but zip is" do
@location.city = nil
controller.send(:location_heading, @location).should == '00000, State'
end
it "should display 'state, country' when city and zip are blank" do
@location.city = @location.zip_code = nil
controller.send(:location_heading, @location).should == 'State, USA'
end
it "should display 'country' when only country is present" do
@location.city = @location.zip_code = @location.state = nil
controller.send(:location_heading, @location).should == 'USA'
end
end
# Builds a mock Location model. Any geocode/address fields the caller omitted
# are filled in, with street/city/state derived from the geocode address.
def mock_location(options)
  options[:geocode_precision] ||= "city"
  options[:geocode_address] ||=
    case options[:geocode_precision]
    when "address" then "123 Number St., City, State"
    when "city"    then "City, State"
    end
  parts = options[:geocode_address].split(/,/).map(&:strip)
  case parts.size
  when 3 # street address, city, state
    street, city, state = parts
    options[:street_address] ||= street
    options[:city] ||= city
    options[:state] ||= state
  when 2 # city, state
    city, state = parts
    options[:city] ||= city
    options[:state] ||= state
  else
    raise "Invalid number of components in address"
  end
  options[:distance] ||= "0"
  mock_model(Location, options)
end
end
|
require 'rails_helper'
RSpec.describe ShiftsController, type: :controller do
# describe 'GET index' do
# context 'when logged out' do
# it 'redirects to user sign_in' do
# get :index
# expect(response).to redirect_to new_user_session_path
# end
# end
# context 'when logged in as ngo' do
# before { sign_in create(:ngo, :confirmed) }
#
# it 'redirects to user sign_in' do
# get :index
# expect(response).to redirect_to new_user_session_path
# end
# end
# context 'when logged in as user' do
# let!(:recent_upcoming_shifts) { create_list :shift, 25,
# event: create(:event, :published),
# starts_at: Faker::Date.between(1.day.from_now, 2.days.from_now) }
# let!(:next_upcoming_shifts) { create_list :shift, 25,
# event: create(:event, :published),
# starts_at: Faker::Date.between(3.day.from_now, 5.days.from_now) }
# let(:past_shift) { create :shift,
# event: create(:event, :published), starts_at: Faker::Date.backward(2) }
# let(:full_shift) { create :shift,
# event: create(:event, :published),
# starts_at: Faker::Date.forward(2), volunteers_needed: 2, volunteers_count: 2 }
# let(:shift_unpublished) { create :shift,
# starts_at: Faker::Date.forward(1) }
#
# before { sign_in create(:user) }
#
# context 'when html request' do
# before { get :index }
#
# it 'assigns first 25 upcoming @shifts' do
# expect(assigns :shifts).to match_array recent_upcoming_shifts
# end
#
# it 'excludes past shifts' do
# expect(assigns :shifts).not_to include past_shift
# end
#
# it 'excludes full shifts' do
# expect(assigns :shifts).not_to include full_shift
# end
#
# it 'excludes unpublished shifts' do
# expect(assigns :shifts).not_to include shift_unpublished
# end
#
# it 'renders index.html' do
# expect(response.content_type).to eq 'text/html'
# expect(response).to render_template :index
# end
# end
# context 'when js request (pagination) with page param' do
# before { get :index, xhr: true, params: { page: 2 } }
#
# it 'assigns next 25 upcoming shifts' do
# expect(assigns :shifts).to match_array next_upcoming_shifts
# end
#
# it 'renders index.js' do
# expect(response.content_type).to eq 'text/javascript'
# expect(response).to render_template :index
# end
# end
# end
# end
# describe 'GET show' do
# let(:shift) { create :shift }
#
# context 'when logged out' do
# it 'redirects to user sign_in' do
# get :show, params: { id: shift }
# expect(response).to redirect_to new_user_session_path
# end
# end
# context 'when logged in as ngo' do
# before { sign_in create(:ngo, :confirmed) }
#
# it 'redirects to user sign_in' do
# get :show, params: { id: shift }
# expect(response).to redirect_to new_user_session_path
# end
# end
# context 'when logged in as user' do
# before do
# sign_in create(:user)
# get :show, params: { id: shift }
# end
#
# it 'assigns @shift' do
# expect(assigns :shift).to eq shift
# end
#
# it 'renders :show' do
# expect(response).to render_template :show
# end
# end
# end
# POST /shifts/:shift_id/opt_in — a signed-in user volunteers for a shift.
describe 'POST opt_in' do
let(:shift) { create :shift }
context 'when logged out' do
it 'redirects to user sign_in' do
post :opt_in, params: { shift_id: shift }
expect(response).to redirect_to new_user_session_path
end
end
context 'when logged in' do
let(:user) { create :user }
before { sign_in user }
# First opt-in: creates the join record and renders the opt_in template.
context 'when not opted in yet' do
it 'creates shifts_user record' do
expect{
post :opt_in, params: { shift_id: shift }
}.to change{ShiftsUser.count}.by 1
end
it 'assigns @shift' do
post :opt_in, params: { shift_id: shift }
expect(assigns :shift).to eq shift
end
it 'renders :opt_in' do
post :opt_in, params: { shift_id: shift }
expect(response).to render_template :opt_in
end
end
# Repeat opt-in: no new record; redirects back to the shift.
context 'when already opted in' do
before { create :shifts_user, user: user, shift: shift }
it 'does not create shifts_user record' do
expect{
post :opt_in, params: { shift_id: shift }
}.not_to change{ShiftsUser.count}
end
it 'assigns @shift' do
post :opt_in, params: { shift_id: shift }
expect(assigns :shift).to eq shift
end
it 'redirect_to shift' do
post :opt_in, params: { shift_id: shift }
expect(response).to redirect_to shift
end
end
end
end
# DELETE /shifts/:shift_id/opt_out — a volunteer cancels a shift sign-up.
describe 'DELETE opt_out' do
  let(:shift) { create :shift }
  context 'when logged out' do
    it 'redirects to user sign_in' do
      delete :opt_out, params: { shift_id: shift }
      expect(response).to redirect_to new_user_session_path
    end
  end
  context 'when logged in' do
    let(:user) { create :user }
    before { sign_in user }
    context 'when opted in yet' do
      before { create :shifts_user, user: user, shift: shift }
      it 'deletes shifts_user record' do
        # Parenthesized negative argument: `change{...}.by -1` relies on
        # whitespace-sensitive parsing; `by(-1)` is the unambiguous form.
        expect{
          delete :opt_out, params: { shift_id: shift }
        }.to change{ShiftsUser.count}.by(-1)
      end
      it 'assigns @shift' do
        delete :opt_out, params: { shift_id: shift }
        expect(assigns :shift).to eq shift
      end
      it 'redirect_to schedule' do
        delete :opt_out, params: { shift_id: shift }
        expect(response).to redirect_to schedule_path
      end
    end
    context 'when not opted in' do
      # Opting out without a record is a no-op that still redirects home.
      it 'does not change shifts_user records' do
        expect{
          delete :opt_out, params: { shift_id: shift }
        }.not_to change{ShiftsUser.count}
      end
      it 'assigns @shift' do
        delete :opt_out, params: { shift_id: shift }
        expect(assigns :shift).to eq shift
      end
      it 'redirect_to schedule' do
        delete :opt_out, params: { shift_id: shift }
        expect(response).to redirect_to schedule_path
      end
    end
  end
end
# GET /schedule — paginated list of the current user's shifts,
# filterable by :past / :all (default: upcoming only).
describe 'GET schedule' do
context 'when logged out' do
it 'redirects to user sign_in' do
get :schedule
expect(response).to redirect_to new_user_session_path
end
end
context 'when logged in' do
let(:user) { create :user }
let(:upcoming_shifts) { create_list :shift, 10, starts_at: Date.tomorrow }
let(:past_shifts) { create_list :shift, 10, starts_at: Date.yesterday }
before do
sign_in user
upcoming_shifts.each{|s| create :shifts_user, user: user, shift: s}
past_shifts.each{|s| create :shifts_user, user: user, shift: s}
end
context 'when html request' do
context 'without filter' do
it 'assigns first 10 shifts' do
get :schedule
expect(assigns :shifts).to match_array upcoming_shifts
end
end
context 'with :past filter' do
it 'assigns first 10 past' do
get :schedule, params: {filter: :past}
expect(assigns :shifts).to match_array past_shifts
end
end
context 'with :all filter' do
# NOTE(review): :all expecting only past_shifts assumes the controller
# orders by starts_at so the oldest shifts fill page 1 — confirm.
it 'assigns first 10' do
get :schedule, params: {filter: :all}
expect(assigns :shifts).to match_array past_shifts
end
end
end
context 'when js request' do
let(:next_upcoming_shifts) { create_list :shift, 10, starts_at: Date.tomorrow+1.day }
before do
next_upcoming_shifts.each{|s| create :shifts_user, user: user, shift: s}
end
context 'without filter' do
it 'assigns next 10 shifts' do
get :schedule, xhr: true, params: { page: 2 }
expect(assigns :shifts).to match_array next_upcoming_shifts
end
end
context 'with :all filter' do
# NOTE(review): page 2 of :all = the upcoming shifts, again presuming
# starts_at ordering with past shifts on page 1 — confirm.
it 'assigns next 10 overall' do
get :schedule, xhr: true, params: { page: 2, filter: :all }
expect(assigns :shifts).to match_array upcoming_shifts
end
end
end
end
end
end
Update opt-in redirect spec: expect redirect to the shift's event
require 'rails_helper'
RSpec.describe ShiftsController, type: :controller do
# describe 'GET index' do
# context 'when logged out' do
# it 'redirects to user sign_in' do
# get :index
# expect(response).to redirect_to new_user_session_path
# end
# end
# context 'when logged in as ngo' do
# before { sign_in create(:ngo, :confirmed) }
#
# it 'redirects to user sign_in' do
# get :index
# expect(response).to redirect_to new_user_session_path
# end
# end
# context 'when logged in as user' do
# let!(:recent_upcoming_shifts) { create_list :shift, 25,
# event: create(:event, :published),
# starts_at: Faker::Date.between(1.day.from_now, 2.days.from_now) }
# let!(:next_upcoming_shifts) { create_list :shift, 25,
# event: create(:event, :published),
# starts_at: Faker::Date.between(3.day.from_now, 5.days.from_now) }
# let(:past_shift) { create :shift,
# event: create(:event, :published), starts_at: Faker::Date.backward(2) }
# let(:full_shift) { create :shift,
# event: create(:event, :published),
# starts_at: Faker::Date.forward(2), volunteers_needed: 2, volunteers_count: 2 }
# let(:shift_unpublished) { create :shift,
# starts_at: Faker::Date.forward(1) }
#
# before { sign_in create(:user) }
#
# context 'when html request' do
# before { get :index }
#
# it 'assigns first 25 upcoming @shifts' do
# expect(assigns :shifts).to match_array recent_upcoming_shifts
# end
#
# it 'excludes past shifts' do
# expect(assigns :shifts).not_to include past_shift
# end
#
# it 'excludes full shifts' do
# expect(assigns :shifts).not_to include full_shift
# end
#
# it 'excludes unpublished shifts' do
# expect(assigns :shifts).not_to include shift_unpublished
# end
#
# it 'renders index.html' do
# expect(response.content_type).to eq 'text/html'
# expect(response).to render_template :index
# end
# end
# context 'when js request (pagination) with page param' do
# before { get :index, xhr: true, params: { page: 2 } }
#
# it 'assigns next 25 upcoming shifts' do
# expect(assigns :shifts).to match_array next_upcoming_shifts
# end
#
# it 'renders index.js' do
# expect(response.content_type).to eq 'text/javascript'
# expect(response).to render_template :index
# end
# end
# end
# end
# describe 'GET show' do
# let(:shift) { create :shift }
#
# context 'when logged out' do
# it 'redirects to user sign_in' do
# get :show, params: { id: shift }
# expect(response).to redirect_to new_user_session_path
# end
# end
# context 'when logged in as ngo' do
# before { sign_in create(:ngo, :confirmed) }
#
# it 'redirects to user sign_in' do
# get :show, params: { id: shift }
# expect(response).to redirect_to new_user_session_path
# end
# end
# context 'when logged in as user' do
# before do
# sign_in create(:user)
# get :show, params: { id: shift }
# end
#
# it 'assigns @shift' do
# expect(assigns :shift).to eq shift
# end
#
# it 'renders :show' do
# expect(response).to render_template :show
# end
# end
# end
# POST /shifts/:shift_id/opt_in — a signed-in user volunteers for a shift.
describe 'POST opt_in' do
let(:shift) { create :shift }
context 'when logged out' do
it 'redirects to user sign_in' do
post :opt_in, params: { shift_id: shift }
expect(response).to redirect_to new_user_session_path
end
end
context 'when logged in' do
let(:user) { create :user }
before { sign_in user }
# First opt-in: creates the join record and renders the opt_in template.
context 'when not opted in yet' do
it 'creates shifts_user record' do
expect{
post :opt_in, params: { shift_id: shift }
}.to change{ShiftsUser.count}.by 1
end
it 'assigns @shift' do
post :opt_in, params: { shift_id: shift }
expect(assigns :shift).to eq shift
end
it 'renders :opt_in' do
post :opt_in, params: { shift_id: shift }
expect(response).to render_template :opt_in
end
end
# Repeat opt-in: no new record; redirects to the shift's event.
context 'when already opted in' do
before { create :shifts_user, user: user, shift: shift }
it 'does not create shifts_user record' do
expect{
post :opt_in, params: { shift_id: shift }
}.not_to change{ShiftsUser.count}
end
it 'assigns @shift' do
post :opt_in, params: { shift_id: shift }
expect(assigns :shift).to eq shift
end
it 'redirect_to event' do
post :opt_in, params: { shift_id: shift }
expect(response).to redirect_to shift.event
end
end
end
end
# DELETE /shifts/:shift_id/opt_out — a volunteer cancels a shift sign-up.
describe 'DELETE opt_out' do
  let(:shift) { create :shift }
  context 'when logged out' do
    it 'redirects to user sign_in' do
      delete :opt_out, params: { shift_id: shift }
      expect(response).to redirect_to new_user_session_path
    end
  end
  context 'when logged in' do
    let(:user) { create :user }
    before { sign_in user }
    context 'when opted in yet' do
      before { create :shifts_user, user: user, shift: shift }
      it 'deletes shifts_user record' do
        # Parenthesized negative argument: `change{...}.by -1` relies on
        # whitespace-sensitive parsing; `by(-1)` is the unambiguous form.
        expect{
          delete :opt_out, params: { shift_id: shift }
        }.to change{ShiftsUser.count}.by(-1)
      end
      it 'assigns @shift' do
        delete :opt_out, params: { shift_id: shift }
        expect(assigns :shift).to eq shift
      end
      it 'redirect_to schedule' do
        delete :opt_out, params: { shift_id: shift }
        expect(response).to redirect_to schedule_path
      end
    end
    context 'when not opted in' do
      # Opting out without a record is a no-op that still redirects home.
      it 'does not change shifts_user records' do
        expect{
          delete :opt_out, params: { shift_id: shift }
        }.not_to change{ShiftsUser.count}
      end
      it 'assigns @shift' do
        delete :opt_out, params: { shift_id: shift }
        expect(assigns :shift).to eq shift
      end
      it 'redirect_to schedule' do
        delete :opt_out, params: { shift_id: shift }
        expect(response).to redirect_to schedule_path
      end
    end
  end
end
# GET /schedule — paginated list of the current user's shifts,
# filterable by :past / :all (default: upcoming only).
describe 'GET schedule' do
context 'when logged out' do
it 'redirects to user sign_in' do
get :schedule
expect(response).to redirect_to new_user_session_path
end
end
context 'when logged in' do
let(:user) { create :user }
let(:upcoming_shifts) { create_list :shift, 10, starts_at: Date.tomorrow }
let(:past_shifts) { create_list :shift, 10, starts_at: Date.yesterday }
before do
sign_in user
upcoming_shifts.each{|s| create :shifts_user, user: user, shift: s}
past_shifts.each{|s| create :shifts_user, user: user, shift: s}
end
context 'when html request' do
context 'without filter' do
it 'assigns first 10 shifts' do
get :schedule
expect(assigns :shifts).to match_array upcoming_shifts
end
end
context 'with :past filter' do
it 'assigns first 10 past' do
get :schedule, params: {filter: :past}
expect(assigns :shifts).to match_array past_shifts
end
end
context 'with :all filter' do
# NOTE(review): :all expecting only past_shifts assumes the controller
# orders by starts_at so the oldest shifts fill page 1 — confirm.
it 'assigns first 10' do
get :schedule, params: {filter: :all}
expect(assigns :shifts).to match_array past_shifts
end
end
end
context 'when js request' do
let(:next_upcoming_shifts) { create_list :shift, 10, starts_at: Date.tomorrow+1.day }
before do
next_upcoming_shifts.each{|s| create :shifts_user, user: user, shift: s}
end
context 'without filter' do
it 'assigns next 10 shifts' do
get :schedule, xhr: true, params: { page: 2 }
expect(assigns :shifts).to match_array next_upcoming_shifts
end
end
context 'with :all filter' do
# NOTE(review): page 2 of :all = the upcoming shifts, again presuming
# starts_at ordering with past shifts on page 1 — confirm.
it 'assigns next 10 overall' do
get :schedule, xhr: true, params: { page: 2, filter: :all }
expect(assigns :shifts).to match_array upcoming_shifts
end
end
end
end
end
end
|
require 'spec_helper'
describe TopicsController do
let(:topic) { Topic.make! }
# Every example runs as a freshly created, signed-in user
# (machinist blueprints + Devise test helpers).
before :each do
@user = User.make!
sign_in @user
end
# Returns only the topic attributes the create/update actions accept,
# as a params-style hash keyed by strings (from Topic#as_json).
def topic_params(topic)
  # Whitelist of permitted attribute keys.
  wanted_keys = ['id', 'description', 'title', 'category_ids', 'promise_ids', 'field_ids']
  # Plain-Ruby filtering replaces ActiveSupport's `key.in?` (keep_if), so the
  # helper has no framework dependency; key order still follows as_json.
  topic.as_json.select { |key, _value| wanted_keys.include?(key) }
end
# Read actions: index, show, new, and the multi-step edit wizard.
it "should get :index" do
topics = [topic]
get :index
assigns(:topics).should == topics
response.should have_rendered(:index)
end
it 'should get :show' do
get :show, id: topic
assigns(:topic).should == topic
assigns(:promises_by_party).should_not be_nil
response.should have_rendered(:show)
end
it 'should get :new' do
# :new starts the wizard on the categories step.
get :new
response.should have_rendered(:new)
session[:topic_step].should == 'categories'
assigns(:categories).should_not be_nil
end
it 'edit redirects to the categories step if no step was specified' do
get :edit, id: topic
response.should redirect_to(edit_topic_step_path(topic, step: 'categories'))
end
# Each wizard step exposes the collection it edits.
it 'edits the vote step if specified' do
get :edit, id: topic, step: 'votes'
response.should have_rendered(:edit)
assigns(:votes_and_connections).should_not be_nil
end
it 'edits the promises step if specified' do
get :edit, id: topic, step: 'promises'
response.should have_rendered(:edit)
assigns(:promises).should_not be_nil
end
it 'edits the cateogires step if specified' do
get :edit, id: topic, step: 'categories'
response.should have_rendered(:edit)
assigns(:categories).should_not be_nil
end
it 'edits the fields step if specified' do
get :edit, id: topic, step: 'fields'
response.should have_rendered(:edit)
assigns(:fields).should_not be_nil
end
# Create with finish: true skips the wizard and lands on the topic page.
it "should create a new topic with a name" do
post :create, topic: { title: 'More Cowbell' }, finish: true
Topic.count.should == 1
topic = assigns(:topic)
topic.should be_kind_of(Topic)
response.should redirect_to(topic_path(topic))
end
it 'should render new if save was unsuccessful' do
# Blank title fails validation; user stays on :new with a flash alert.
post :create, topic: {title: ''}, finish: true
flash.alert.should_not be_nil
response.should render_template(:new)
end
# These failure paths may be unreachable in practice; the examples are
# parked as pending until that is confirmed.
context "failed update" do
it 'should re-edit categories if update was unsuccessful' do
pending "can this actually happen?"
end
it 'should re-edit promises if update was unsuccessful' do
pending "can this actually happen?"
end
it 'should re-edit votes if update was unsuccessful' do
pending "can this actually happen?"
end
it 'should re-edit fields if update was unsuccessful' do
pending "can this actually happen?"
end
end
context 'destroy' do
it 'should destroy the topic' do
delete :destroy, id: topic
Topic.count.should == 0
response.should redirect_to(topics_url)
end
end
# Wizard "next" navigation: updating without :previous/:finish advances
# session[:topic_step] and redirects to the following edit step.
context "next" do
it "should show the promises step when hit next from create" do
post :create, topic: { title: "Less Cowbell!" }
topic = assigns(:topic)
topic.should be_kind_of(Topic)
expected_url = edit_topic_step_path(id: topic, step: 'promises')
response.should redirect_to(expected_url)
end
it "should show votes step when hit next from promises" do
session[:topic_step] = 'promises'
put :update, topic: topic_params(topic), id: topic
assigns(:topic).should == topic
session[:topic_step].should == 'votes'
response.should redirect_to(edit_topic_step_url(topic, step: 'votes'))
end
it "should show fields step when hit next from votes" do
session[:topic_step] = 'votes'
put :update, topic: topic_params(topic), id: topic
session[:topic_step].should == 'fields'
assigns(:topic).should == topic
# NOTE(review): unlike the sibling examples, no topic is passed to
# edit_topic_step_url here — presumably the route still resolves; confirm.
response.should redirect_to(edit_topic_step_url(step: 'fields' ))
end
end
# Wizard "previous" navigation: updating with previous: true steps the
# session back one stage and redirects to that stage's edit page.
context "previous" do
  it "should show votes step when hit previous from fields" do
    session[:topic_step] = 'fields'
    put :update, previous: true, topic: topic_params(topic), id: topic
    session[:topic_step].should == 'votes'
    assigns(:topic).should == topic
    response.should redirect_to edit_topic_step_url(topic, step: 'votes' )
  end
  it "should show promises when hit previous from votes" do
    session[:topic_step] = 'votes'
    put :update, previous: true, topic: topic_params(topic), id: topic
    session[:topic_step].should == 'promises'
    assigns(:topic).should == topic
    response.should redirect_to edit_topic_step_url(topic, step: 'promises')
  end
  it "should show the categories step when hit previous from promises" do
    session[:topic_step] = 'promises'
    put :update, previous: true, topic: topic_params(topic), id: topic
    # BUG FIX: this line previously ASSIGNED the session value
    # (`session[:topic_step] = 'categories'`), so the example never verified
    # the step transition. It now asserts, matching its sibling examples.
    session[:topic_step].should == 'categories'
    assigns(:topic).should == topic
    response.should redirect_to edit_topic_step_url(topic, step: 'categories')
  end
end
# Finishing from any edit step saves and leaves the wizard for the topic page.
context "finish" do
it "should save and redirect to topic when hit finish from edit step" do
session[:topic_step] = 'votes'
put :update, finish: true, topic: topic_params(topic), id: topic
assigns(:topic).should == topic
response.should redirect_to topic_path(topic)
end
end
end
Add a spec that renders topics#show.
require 'spec_helper'
describe TopicsController do
let(:topic) { Topic.make! }
# Every example runs as a freshly created, signed-in user
# (machinist blueprints + Devise test helpers).
before :each do
@user = User.make!
sign_in @user
end
# Returns only the topic attributes the create/update actions accept,
# as a params-style hash keyed by strings (from Topic#as_json).
def topic_params(topic)
  # Whitelist of permitted attribute keys.
  wanted_keys = ['id', 'description', 'title', 'category_ids', 'promise_ids', 'field_ids']
  # Plain-Ruby filtering replaces ActiveSupport's `key.in?` (keep_if), so the
  # helper has no framework dependency; key order still follows as_json.
  topic.as_json.select { |key, _value| wanted_keys.include?(key) }
end
# Read actions: index, show, new, and the multi-step edit wizard.
it "should get :index" do
topics = [topic]
get :index
assigns(:topics).should == topics
response.should have_rendered(:index)
end
it 'should get :show' do
get :show, id: topic
assigns(:topic).should == topic
assigns(:promises_by_party).should_not be_nil
response.should have_rendered(:show)
end
it 'should get :new' do
# :new starts the wizard on the categories step.
get :new
response.should have_rendered(:new)
session[:topic_step].should == 'categories'
assigns(:categories).should_not be_nil
end
it 'edit redirects to the categories step if no step was specified' do
get :edit, id: topic
response.should redirect_to(edit_topic_step_path(topic, step: 'categories'))
end
# Each wizard step exposes the collection it edits.
it 'edits the vote step if specified' do
get :edit, id: topic, step: 'votes'
response.should have_rendered(:edit)
assigns(:votes_and_connections).should_not be_nil
end
it 'edits the promises step if specified' do
get :edit, id: topic, step: 'promises'
response.should have_rendered(:edit)
assigns(:promises).should_not be_nil
end
it 'edits the cateogires step if specified' do
get :edit, id: topic, step: 'categories'
response.should have_rendered(:edit)
assigns(:categories).should_not be_nil
end
it 'edits the fields step if specified' do
get :edit, id: topic, step: 'fields'
response.should have_rendered(:edit)
assigns(:fields).should_not be_nil
end
# Create with finish: true skips the wizard and lands on the topic page.
it "should create a new topic with a name" do
post :create, topic: { title: 'More Cowbell' }, finish: true
Topic.count.should == 1
topic = assigns(:topic)
topic.should be_kind_of(Topic)
response.should redirect_to(topic_path(topic))
end
it 'should render new if save was unsuccessful' do
# Blank title fails validation; user stays on :new with a flash alert.
post :create, topic: {title: ''}, finish: true
flash.alert.should_not be_nil
response.should render_template(:new)
end
# These failure paths may be unreachable in practice; the examples are
# parked as pending until that is confirmed.
context "failed update" do
it 'should re-edit categories if update was unsuccessful' do
pending "can this actually happen?"
end
it 'should re-edit promises if update was unsuccessful' do
pending "can this actually happen?"
end
it 'should re-edit votes if update was unsuccessful' do
pending "can this actually happen?"
end
it 'should re-edit fields if update was unsuccessful' do
pending "can this actually happen?"
end
end
context 'destroy' do
it 'should destroy the topic' do
delete :destroy, id: topic
Topic.count.should == 0
response.should redirect_to(topics_url)
end
end
# Wizard "next" navigation: updating without :previous/:finish advances
# session[:topic_step] and redirects to the following edit step.
context "next" do
it "should show the promises step when hit next from create" do
post :create, topic: { title: "Less Cowbell!" }
topic = assigns(:topic)
topic.should be_kind_of(Topic)
expected_url = edit_topic_step_path(id: topic, step: 'promises')
response.should redirect_to(expected_url)
end
it "should show votes step when hit next from promises" do
session[:topic_step] = 'promises'
put :update, topic: topic_params(topic), id: topic
assigns(:topic).should == topic
session[:topic_step].should == 'votes'
response.should redirect_to(edit_topic_step_url(topic, step: 'votes'))
end
it "should show fields step when hit next from votes" do
session[:topic_step] = 'votes'
put :update, topic: topic_params(topic), id: topic
session[:topic_step].should == 'fields'
assigns(:topic).should == topic
# NOTE(review): unlike the sibling examples, no topic is passed to
# edit_topic_step_url here — presumably the route still resolves; confirm.
response.should redirect_to(edit_topic_step_url(step: 'fields' ))
end
end
# Wizard "previous" navigation: updating with previous: true steps the
# session back one stage and redirects to that stage's edit page.
context "previous" do
  it "should show votes step when hit previous from fields" do
    session[:topic_step] = 'fields'
    put :update, previous: true, topic: topic_params(topic), id: topic
    session[:topic_step].should == 'votes'
    assigns(:topic).should == topic
    response.should redirect_to edit_topic_step_url(topic, step: 'votes' )
  end
  it "should show promises when hit previous from votes" do
    session[:topic_step] = 'votes'
    put :update, previous: true, topic: topic_params(topic), id: topic
    session[:topic_step].should == 'promises'
    assigns(:topic).should == topic
    response.should redirect_to edit_topic_step_url(topic, step: 'promises')
  end
  it "should show the categories step when hit previous from promises" do
    session[:topic_step] = 'promises'
    put :update, previous: true, topic: topic_params(topic), id: topic
    # BUG FIX: this line previously ASSIGNED the session value
    # (`session[:topic_step] = 'categories'`), so the example never verified
    # the step transition. It now asserts, matching its sibling examples.
    session[:topic_step].should == 'categories'
    assigns(:topic).should == topic
    response.should redirect_to edit_topic_step_url(topic, step: 'categories')
  end
end
# Finishing from any edit step saves and leaves the wizard for the topic page.
context "finish" do
it "should save and redirect to topic when hit finish from edit step" do
session[:topic_step] = 'votes'
put :update, finish: true, topic: topic_params(topic), id: topic
assigns(:topic).should == topic
response.should redirect_to topic_path(topic)
end
end
# Smoke test with render_views enabled so the :show template itself is
# exercised, not just the controller.
context "with rendered views" do
render_views
it "should render :show" do
get :show, id: Topic.make!
response.should have_rendered(:show)
end
end
end
|
# ServiceResponse factory
# Wraps a raw Aleph circulation-status value in NYU's status decorator.
def nyu_aleph_status(circulation_status_value)
  Exlibris::Nyu::Aleph::Status.new(
    Exlibris::Aleph::Item::CirculationStatus.new(circulation_status_value)
  )
end
# Shared fixture objects and service data consumed by the nyu_aleph traits
# in the factory definitions below.
admin_library = Exlibris::Aleph::AdminLibrary.new('NYU50')
sub_library = Exlibris::Aleph::SubLibrary.new('BOBST', 'NYU Bobst', admin_library)
collection = Exlibris::Aleph::Collection.new('MAIN', 'Main Collection', sub_library)
call_number = Exlibris::Aleph::Item::CallNumber.new('DS126 .M62 2002', nil)
nyu_call_number = Exlibris::Nyu::Aleph::CallNumber.new(call_number)
# Baseline service_data hash for a single Bobst holding; traits merge
# overrides (status, library, expiry, etc.) on top of this.
nyu_aleph_service_data = {
record_id: 'nyu_aleph000741245',
original_id: 'nyu_aleph000741245',
title: 'An aesthetic occupation : the immediacy of architecture and the Palestine conflict',
author: 'Daniel Bertrand Monk 1960-',
display_type: 'book',
source_id: 'nyu_aleph',
original_source_id: 'NYU01',
source_record_id: '000741245',
ils_api_id: 'NYU01000741245',
institution_code: 'NYU',
institution: 'NYU',
library_code: 'BOBST',
library: sub_library,
collection: collection,
call_number: nyu_call_number,
coverage: [],
status: nyu_aleph_status('05/27/14'),
from_aleph: true,
requestability: 'deferred',
collection_str: 'NYU Bobst Main Collection',
coverage_str: '',
edition_str: '',
coverage_str_array: [],
match_reliability: "exact",
# Raw source-system fields, kept nested so traits can swap item ids.
source_data: {
item_id: 'NYU50000741245000010',
doc_library: 'NYU01',
sub_library_code: 'BOBST',
sub_library: sub_library,
collection: collection,
call_number: nyu_call_number,
doc_number: '000741245',
rest_api_id: 'NYU01000741245'
}
}
FactoryGirl.define do
factory :service_response do
service_id 'NYU_SFX'
display_text 'Dummy Service'
url 'http://www.example.com'
notes 'Some notes'
service_data do
{
key1: 'value1',
key2: 'value1'
}
end
service_type_value_name 'fulltext'
request
trait :holding do
service_type_value_name 'holding'
service_data do
{
collection_str: "NYU Bobst Main Collection",
call_number: "(DS126 .M62 2002 )",
coverage: [],
status: "Check Availability",
edition_str: '',
match_reliability: "exact"
}
end
end
trait :primo do
service_id 'NYU_Primo'
end
trait :primo_source do
service_id 'NYU_Primo_Source'
end
trait :nyu_aleph_without_source_data do
service_data do
nyu_aleph_service_data.reject do |key, value|
key == :source_data
end
end
end
trait :nyu_aleph do
service_data do
nyu_aleph_service_data
end
end
trait :nyu_aleph_not_from_aleph do
service_data do
nyu_aleph_service_data.merge({from_aleph: false, status: 'Check Availability'})
end
end
trait :expired_nyu_aleph do
service_data do
nyu_aleph_service_data.merge({expired: true})
end
end
trait :single_pickup_location_nyu_aleph do
item_hash = {item_id: 'NYU50000741245000020'}
single_pickup_location_source_data =
nyu_aleph_service_data[:source_data].merge(item_hash)
service_data do
nyu_aleph_service_data.merge(source_data: single_pickup_location_source_data)
end
end
trait :abu_dhabi_nyu_aleph do
abu_dhabi_admin_library = Exlibris::Aleph::AdminLibrary.new('NYU51')
abu_dhabi_sub_library = Exlibris::Aleph::SubLibrary.new('NABUD', 'NYU Abu Dhabi Library (UAE)', abu_dhabi_admin_library)
service_data do
nyu_aleph_service_data.merge({library: abu_dhabi_sub_library})
end
end
trait :bobst_reserve_nyu_aleph do
bobst_reserve_sub_library = Exlibris::Aleph::SubLibrary.new('BRES', 'NYU Bobst Reserve Collection', admin_library)
service_data do
nyu_aleph_service_data.merge({library: bobst_reserve_sub_library})
end
end
trait :avery_fisher_nyu_aleph do
avery_fisher_sub_library = Exlibris::Aleph::SubLibrary.new('BAFC', 'NYU Bobst Avery Fisher Center', admin_library)
service_data do
nyu_aleph_service_data.merge({library: avery_fisher_sub_library})
end
end
trait :on_shelf_nyu_aleph do
status = nyu_aleph_status('On Shelf')
requestability = 'deferred'
status_hash = {status: status, requestability: requestability}
service_data do
nyu_aleph_service_data.merge(status_hash)
end
end
trait :available_nyu_aleph do
status = nyu_aleph_status('Available')
requestability = 'deferred'
status_hash = {status: status, requestability: requestability}
service_data do
nyu_aleph_service_data.merge(status_hash)
end
end
trait :checked_out_nyu_aleph do
status = nyu_aleph_status('06/28/14')
requestability = 'yes'
status_hash = {status: status, requestability: requestability}
service_data do
nyu_aleph_service_data.merge(status_hash)
end
end
# Aleph circulation-status traits. Each trait captures a status object and a
# requestability string at factory-definition time, then lazily merges that
# pair over the canonical nyu_aleph_service_data hash when service_data is
# evaluated.
trait :billed_as_lost_nyu_aleph do
  status = nyu_aleph_status('Billed as Lost')
  requestability = 'yes'
  status_hash = {status: status, requestability: requestability}
  service_data do
    nyu_aleph_service_data.merge(status_hash)
  end
end

trait :claimed_returned_nyu_aleph do
  status = nyu_aleph_status('Claimed Returned')
  requestability = 'yes'
  status_hash = {status: status, requestability: requestability}
  service_data do
    nyu_aleph_service_data.merge(status_hash)
  end
end

trait :reshelving_nyu_aleph do
  status = nyu_aleph_status('Reshelving')
  requestability = 'deferred'
  status_hash = {status: status, requestability: requestability}
  service_data do
    nyu_aleph_service_data.merge(status_hash)
  end
end

trait :ill_nyu_aleph do
  status = nyu_aleph_status('Request ILL')
  requestability = 'deferred'
  status_hash = {status: status, requestability: requestability}
  service_data do
    nyu_aleph_service_data.merge(status_hash)
  end
end

trait :processing_nyu_aleph do
  status = nyu_aleph_status('In Processing')
  requestability = 'deferred'
  status_hash = {status: status, requestability: requestability}
  service_data do
    nyu_aleph_service_data.merge(status_hash)
  end
end

trait :transit_nyu_aleph do
  status = nyu_aleph_status('In Transit')
  requestability = 'deferred'
  status_hash = {status: status, requestability: requestability}
  service_data do
    nyu_aleph_service_data.merge(status_hash)
  end
end

trait :on_order_nyu_aleph do
  status = nyu_aleph_status('On Order')
  requestability = 'deferred'
  status_hash = {status: status, requestability: requestability}
  service_data do
    nyu_aleph_service_data.merge(status_hash)
  end
end

trait :offsite_nyu_aleph do
  status = nyu_aleph_status('Offsite Available')
  requestability = 'deferred'
  status_hash = {status: status, requestability: requestability}
  service_data do
    nyu_aleph_service_data.merge(status_hash)
  end
end

trait :requested_nyu_aleph do
  status = nyu_aleph_status('Requested')
  requestability = 'deferred'
  status_hash = {status: status, requestability: requestability}
  service_data do
    nyu_aleph_service_data.merge(status_hash)
  end
end

# Named factories composing the traits above into ready-to-use fixtures.
factory :holding_service_response, traits: [:holding]
factory :primo_service_response, traits: [:holding, :primo]
factory :primo_source_service_response, traits: [:holding, :primo_source]
factory :nyu_aleph_service_response_without_source_data, traits: [:holding, :primo_source, :nyu_aleph_without_source_data]
factory :nyu_aleph_not_from_aleph_service_response, traits: [:holding, :primo_source, :nyu_aleph_not_from_aleph]
factory :nyu_aleph_service_response, traits: [:holding, :primo_source, :nyu_aleph]
factory :expired_nyu_aleph_service_response, traits: [:holding, :primo_source, :expired_nyu_aleph]
factory :single_pickup_location_nyu_aleph_service_response, traits: [:holding, :primo_source, :single_pickup_location_nyu_aleph]
factory :abu_dhabi_nyu_aleph_service_response, traits: [:holding, :primo_source, :abu_dhabi_nyu_aleph]
factory :bobst_reserve_nyu_aleph_service_response, traits: [:holding, :primo_source, :bobst_reserve_nyu_aleph]
factory :avery_fisher_nyu_aleph_service_response, traits: [:holding, :primo_source, :avery_fisher_nyu_aleph]
factory :on_shelf_nyu_aleph_service_response, traits: [:holding, :primo_source, :on_shelf_nyu_aleph]
factory :available_nyu_aleph_service_response, traits: [:holding, :primo_source, :available_nyu_aleph]
factory :checked_out_nyu_aleph_service_response, traits: [:holding, :primo_source, :checked_out_nyu_aleph]
factory :billed_as_lost_nyu_aleph_service_response, traits: [:holding, :primo_source, :billed_as_lost_nyu_aleph]
factory :claimed_returned_nyu_aleph_service_response, traits: [:holding, :primo_source, :claimed_returned_nyu_aleph]
factory :reshelving_nyu_aleph_service_response, traits: [:holding, :primo_source, :reshelving_nyu_aleph]
factory :ill_nyu_aleph_service_response, traits: [:holding, :primo_source, :ill_nyu_aleph]
factory :processing_nyu_aleph_service_response, traits: [:holding, :primo_source, :processing_nyu_aleph]
factory :transit_nyu_aleph_service_response, traits: [:holding, :primo_source, :transit_nyu_aleph]
factory :on_order_nyu_aleph_service_response, traits: [:holding, :primo_source, :on_order_nyu_aleph]
factory :offsite_nyu_aleph_service_response, traits: [:holding, :primo_source, :offsite_nyu_aleph]
factory :requested_nyu_aleph_service_response, traits: [:holding, :primo_source, :requested_nyu_aleph]
end
# factory :nyu_aleph_service_response, class: ServiceResponse do
# service_id 'NYU_Primo_Source'
# service_data do
# nyu_aleph_service_data
# end
# trait :requested do
# service_data do
# nyu_aleph_service_data
# end
# end
# trait :available_service_data do
# service_data do
# {
# status: 'Available',
# requestability: 'deferred'
# }
# end
# end
# trait :checked_out do
# service_data do
# {
# status: 'Due: 01/31/13',
# requestability: 'yes'
# }
# end
# end
# trait :ill do
# service_data do
# {
# status: 'Request ILL',
# requestability: 'yes'
# }
# end
# end
# trait :on_order do
# service_data do
# {
# status: 'On Order',
# requestability: 'yes'
# }
# end
# end
# trait :billed_as_lost do
# service_data do
# {
# status: 'Request ILL',
# requestability: 'yes'
# }
# end
# end
# trait :requested do
# service_data do
# {
# status: 'Requested',
# requestability: 'yes'
# }
# end
# end
# trait :offsite do
# service_data do
# {
# status: 'Offsite Available',
# requestability: 'yes'
# }
# end
# end
# trait :processing do
# service_data do
# {
# status: 'In Processing',
# requestability: 'yes'
# }
# end
# end
# trait :afc_recalled do
# service_data do
# {
# status: 'Due: 01/01/14',
# requestability: 'deferred'
# }
# end
# end
# trait :bobst_recalled do
# service_data do
# {
# status: 'Due: 01/01/14',
# requestability: 'deferred'
# }
# end
# end
# trait :deferred_requestability do
# service_data do
# {
# status: 'Available',
# requestability: 'deferred'
# }
# end
# end
# trait :always_requestable do
# service_data do
# {
# status: 'Due: 01/31/13',
# requestability: 'yes'
# }
# end
# end
# trait :never_requestable do
# service_data do
# {
# status: 'Reshelving',
# requestability: 'no'
# }
# end
# end
# factory :ill_service_response, traits: [:ill]
# factory :billed_as_lost_service_response, traits: [:billed_as_lost]
# factory :requested_service_response, traits: [:requested]
# factory :on_order_service_response, traits: [:on_order]
# factory :checked_out_service_response, traits: [:checked_out]
# factory :available_service_response, traits: [:available]
# factory :offsite_service_response, traits: [:offsite]
# factory :processing_service_response, traits: [:processing]
# factory :afc_recalled_service_response, traits: [:afc_recalled]
# factory :bobst_recalled_service_response, traits: [:bobst_recalled]
# factory :deferred_requestability_service_response, traits: [:deferred_requestability]
# factory :always_requestable_service_response, traits: [:always_requestable]
# factory :never_requestable_service_response, traits: [:never_requestable]
# end
end
Update the NYU Bobst Reserves Collection ServiceResponse factory based on the updated Exlibris::Nyu gem
# ServiceResponse factory
# Wraps a raw Aleph circulation-status value (e.g. 'Available', a due date
# string) in an Exlibris::Nyu::Aleph::Status for use in factory service data.
#
# @param circulation_status_value [String] the raw circulation status text
# @return [Exlibris::Nyu::Aleph::Status]
def nyu_aleph_status(circulation_status_value)
  raw_status = Exlibris::Aleph::Item::CirculationStatus.new(circulation_status_value)
  Exlibris::Nyu::Aleph::Status.new(raw_status)
end
# Builds an Exlibris::Nyu::Aleph::ReservesStatus from a circulation status and
# an Aleph item status.
#
# @param circulation_status_value [String] raw circulation status (caller passes 'On Shelf')
# @param status_code [String] Aleph item-status code (e.g. '20')
# @param status_display [String] human-readable item status (e.g. 'Reserve 2 hour loan')
# @return [Exlibris::Nyu::Aleph::ReservesStatus]
def reserves_status(circulation_status_value, status_code, status_display)
  item_status = Exlibris::Aleph::Item::Status.new(status_code, status_display)
  # Bug fix: the circulation_status_value parameter was previously ignored and
  # 'Available' was hard-coded, so the caller's 'On Shelf' never took effect.
  Exlibris::Nyu::Aleph::ReservesStatus.new(nyu_aleph_status(circulation_status_value), item_status)
end
# Shared Aleph fixture objects; these locals are captured by the blocks inside
# FactoryGirl.define below.
admin_library = Exlibris::Aleph::AdminLibrary.new('NYU50')
sub_library = Exlibris::Aleph::SubLibrary.new('BOBST', 'NYU Bobst', admin_library)
collection = Exlibris::Aleph::Collection.new('MAIN', 'Main Collection', sub_library)
call_number = Exlibris::Aleph::Item::CallNumber.new('DS126 .M62 2002', nil)
nyu_call_number = Exlibris::Nyu::Aleph::CallNumber.new(call_number)
# Canonical service_data hash for an NYU Aleph holding; the traits below merge
# per-scenario overrides (status, requestability, library, ...) over it.
nyu_aleph_service_data = {
  record_id: 'nyu_aleph000741245',
  original_id: 'nyu_aleph000741245',
  title: 'An aesthetic occupation : the immediacy of architecture and the Palestine conflict',
  author: 'Daniel Bertrand Monk 1960-',
  display_type: 'book',
  source_id: 'nyu_aleph',
  original_source_id: 'NYU01',
  source_record_id: '000741245',
  ils_api_id: 'NYU01000741245',
  institution_code: 'NYU',
  institution: 'NYU',
  library_code: 'BOBST',
  library: sub_library,
  collection: collection,
  call_number: nyu_call_number,
  coverage: [],
  status: nyu_aleph_status('05/27/14'),
  from_aleph: true,
  requestability: 'deferred',
  collection_str: 'NYU Bobst Main Collection',
  coverage_str: '',
  edition_str: '',
  coverage_str_array: [],
  match_reliability: "exact",
  # Raw Aleph item source data nested under :source_data.
  source_data: {
    item_id: 'NYU50000741245000010',
    doc_library: 'NYU01',
    sub_library_code: 'BOBST',
    sub_library: sub_library,
    collection: collection,
    call_number: nyu_call_number,
    doc_number: '000741245',
    rest_api_id: 'NYU01000741245'
  }
}
FactoryGirl.define do
# Base ServiceResponse factory. The traits layer NYU-specific holdings data
# (generic holding, Primo, Primo Source, Aleph statuses/libraries) over the
# defaults; the named factories at the bottom compose those traits.
factory :service_response do
  service_id 'NYU_SFX'
  display_text 'Dummy Service'
  url 'http://www.example.com'
  notes 'Some notes'
  service_data do
    {
      key1: 'value1',
      key2: 'value1'
    }
  end
  service_type_value_name 'fulltext'
  request

  # Generic holdings response with string-only (non-Aleph) data.
  trait :holding do
    service_type_value_name 'holding'
    service_data do
      {
        collection_str: "NYU Bobst Main Collection",
        call_number: "(DS126 .M62 2002 )",
        coverage: [],
        status: "Check Availability",
        edition_str: '',
        match_reliability: "exact"
      }
    end
  end

  trait :primo do
    service_id 'NYU_Primo'
  end

  trait :primo_source do
    service_id 'NYU_Primo_Source'
  end

  # Same data as :nyu_aleph but with the nested :source_data key removed.
  trait :nyu_aleph_without_source_data do
    service_data do
      nyu_aleph_service_data.reject do |key, value|
        key == :source_data
      end
    end
  end

  trait :nyu_aleph do
    service_data do
      nyu_aleph_service_data
    end
  end

  trait :nyu_aleph_not_from_aleph do
    service_data do
      nyu_aleph_service_data.merge({from_aleph: false, status: 'Check Availability'})
    end
  end

  trait :expired_nyu_aleph do
    service_data do
      nyu_aleph_service_data.merge({expired: true})
    end
  end

  # Same record but a different item id in the raw source data.
  trait :single_pickup_location_nyu_aleph do
    item_hash = {item_id: 'NYU50000741245000020'}
    single_pickup_location_source_data =
      nyu_aleph_service_data[:source_data].merge(item_hash)
    service_data do
      nyu_aleph_service_data.merge(source_data: single_pickup_location_source_data)
    end
  end

  trait :abu_dhabi_nyu_aleph do
    abu_dhabi_admin_library = Exlibris::Aleph::AdminLibrary.new('NYU51')
    abu_dhabi_sub_library = Exlibris::Aleph::SubLibrary.new('NABUD', 'NYU Abu Dhabi Library (UAE)', abu_dhabi_admin_library)
    service_data do
      nyu_aleph_service_data.merge({library: abu_dhabi_sub_library})
    end
  end

  # Bobst reserve collection item: uses a ReservesStatus rather than a plain
  # circulation status.
  trait :bobst_reserve_nyu_aleph do
    bobst_reserve_sub_library = Exlibris::Aleph::SubLibrary.new('BRES', 'NYU Bobst Reserve Collection', admin_library)
    status = reserves_status('On Shelf', '20', 'Reserve 2 hour loan')
    requestability = 'deferred'
    service_data do
      nyu_aleph_service_data.merge({library: bobst_reserve_sub_library, status: status, requestability: requestability})
    end
  end

  trait :avery_fisher_nyu_aleph do
    avery_fisher_sub_library = Exlibris::Aleph::SubLibrary.new('BAFC', 'NYU Bobst Avery Fisher Center', admin_library)
    service_data do
      nyu_aleph_service_data.merge({library: avery_fisher_sub_library})
    end
  end

  # Circulation-status traits: each merges a {status, requestability} pair
  # over the canonical Aleph service data.
  trait :on_shelf_nyu_aleph do
    status = nyu_aleph_status('On Shelf')
    requestability = 'deferred'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  trait :available_nyu_aleph do
    status = nyu_aleph_status('Available')
    requestability = 'deferred'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  trait :checked_out_nyu_aleph do
    status = nyu_aleph_status('06/28/14')
    requestability = 'yes'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  trait :billed_as_lost_nyu_aleph do
    status = nyu_aleph_status('Billed as Lost')
    requestability = 'yes'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  trait :claimed_returned_nyu_aleph do
    status = nyu_aleph_status('Claimed Returned')
    requestability = 'yes'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  trait :reshelving_nyu_aleph do
    status = nyu_aleph_status('Reshelving')
    requestability = 'deferred'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  trait :ill_nyu_aleph do
    status = nyu_aleph_status('Request ILL')
    requestability = 'deferred'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  trait :processing_nyu_aleph do
    status = nyu_aleph_status('In Processing')
    requestability = 'deferred'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  trait :transit_nyu_aleph do
    status = nyu_aleph_status('In Transit')
    requestability = 'deferred'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  trait :on_order_nyu_aleph do
    status = nyu_aleph_status('On Order')
    requestability = 'deferred'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  trait :offsite_nyu_aleph do
    status = nyu_aleph_status('Offsite Available')
    requestability = 'deferred'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  trait :requested_nyu_aleph do
    status = nyu_aleph_status('Requested')
    requestability = 'deferred'
    status_hash = {status: status, requestability: requestability}
    service_data do
      nyu_aleph_service_data.merge(status_hash)
    end
  end

  # Named factories composing the traits above into ready-to-use fixtures.
  factory :holding_service_response, traits: [:holding]
  factory :primo_service_response, traits: [:holding, :primo]
  factory :primo_source_service_response, traits: [:holding, :primo_source]
  factory :nyu_aleph_service_response_without_source_data, traits: [:holding, :primo_source, :nyu_aleph_without_source_data]
  factory :nyu_aleph_not_from_aleph_service_response, traits: [:holding, :primo_source, :nyu_aleph_not_from_aleph]
  factory :nyu_aleph_service_response, traits: [:holding, :primo_source, :nyu_aleph]
  factory :expired_nyu_aleph_service_response, traits: [:holding, :primo_source, :expired_nyu_aleph]
  factory :single_pickup_location_nyu_aleph_service_response, traits: [:holding, :primo_source, :single_pickup_location_nyu_aleph]
  factory :abu_dhabi_nyu_aleph_service_response, traits: [:holding, :primo_source, :abu_dhabi_nyu_aleph]
  factory :bobst_reserve_nyu_aleph_service_response, traits: [:holding, :primo_source, :bobst_reserve_nyu_aleph]
  factory :avery_fisher_nyu_aleph_service_response, traits: [:holding, :primo_source, :avery_fisher_nyu_aleph]
  factory :on_shelf_nyu_aleph_service_response, traits: [:holding, :primo_source, :on_shelf_nyu_aleph]
  factory :available_nyu_aleph_service_response, traits: [:holding, :primo_source, :available_nyu_aleph]
  factory :checked_out_nyu_aleph_service_response, traits: [:holding, :primo_source, :checked_out_nyu_aleph]
  factory :billed_as_lost_nyu_aleph_service_response, traits: [:holding, :primo_source, :billed_as_lost_nyu_aleph]
  factory :claimed_returned_nyu_aleph_service_response, traits: [:holding, :primo_source, :claimed_returned_nyu_aleph]
  factory :reshelving_nyu_aleph_service_response, traits: [:holding, :primo_source, :reshelving_nyu_aleph]
  factory :ill_nyu_aleph_service_response, traits: [:holding, :primo_source, :ill_nyu_aleph]
  factory :processing_nyu_aleph_service_response, traits: [:holding, :primo_source, :processing_nyu_aleph]
  factory :transit_nyu_aleph_service_response, traits: [:holding, :primo_source, :transit_nyu_aleph]
  factory :on_order_nyu_aleph_service_response, traits: [:holding, :primo_source, :on_order_nyu_aleph]
  factory :offsite_nyu_aleph_service_response, traits: [:holding, :primo_source, :offsite_nyu_aleph]
  factory :requested_nyu_aleph_service_response, traits: [:holding, :primo_source, :requested_nyu_aleph]
end
# factory :nyu_aleph_service_response, class: ServiceResponse do
# service_id 'NYU_Primo_Source'
# service_data do
# nyu_aleph_service_data
# end
# trait :requested do
# service_data do
# nyu_aleph_service_data
# end
# end
# trait :available_service_data do
# service_data do
# {
# status: 'Available',
# requestability: 'deferred'
# }
# end
# end
# trait :checked_out do
# service_data do
# {
# status: 'Due: 01/31/13',
# requestability: 'yes'
# }
# end
# end
# trait :ill do
# service_data do
# {
# status: 'Request ILL',
# requestability: 'yes'
# }
# end
# end
# trait :on_order do
# service_data do
# {
# status: 'On Order',
# requestability: 'yes'
# }
# end
# end
# trait :billed_as_lost do
# service_data do
# {
# status: 'Request ILL',
# requestability: 'yes'
# }
# end
# end
# trait :requested do
# service_data do
# {
# status: 'Requested',
# requestability: 'yes'
# }
# end
# end
# trait :offsite do
# service_data do
# {
# status: 'Offsite Available',
# requestability: 'yes'
# }
# end
# end
# trait :processing do
# service_data do
# {
# status: 'In Processing',
# requestability: 'yes'
# }
# end
# end
# trait :afc_recalled do
# service_data do
# {
# status: 'Due: 01/01/14',
# requestability: 'deferred'
# }
# end
# end
# trait :bobst_recalled do
# service_data do
# {
# status: 'Due: 01/01/14',
# requestability: 'deferred'
# }
# end
# end
# trait :deferred_requestability do
# service_data do
# {
# status: 'Available',
# requestability: 'deferred'
# }
# end
# end
# trait :always_requestable do
# service_data do
# {
# status: 'Due: 01/31/13',
# requestability: 'yes'
# }
# end
# end
# trait :never_requestable do
# service_data do
# {
# status: 'Reshelving',
# requestability: 'no'
# }
# end
# end
# factory :ill_service_response, traits: [:ill]
# factory :billed_as_lost_service_response, traits: [:billed_as_lost]
# factory :requested_service_response, traits: [:requested]
# factory :on_order_service_response, traits: [:on_order]
# factory :checked_out_service_response, traits: [:checked_out]
# factory :available_service_response, traits: [:available]
# factory :offsite_service_response, traits: [:offsite]
# factory :processing_service_response, traits: [:processing]
# factory :afc_recalled_service_response, traits: [:afc_recalled]
# factory :bobst_recalled_service_response, traits: [:bobst_recalled]
# factory :deferred_requestability_service_response, traits: [:deferred_requestability]
# factory :always_requestable_service_response, traits: [:always_requestable]
# factory :never_requestable_service_response, traits: [:never_requestable]
# end
end
|
require "rack/test"
require "test/unit"
require_relative "../test_case"
require_relative "../../model/database"
require_relative "../../routes/api_routes"
# Integration tests for the /api/v1/hikes JSON routes, driven through
# Rack::Test against the app under test.
class ApiRoutesTest < HikeAppTestCase

  def setup
    clear_cookies
    header "Accept", "application/json"
    header "User-Agent", "rack/test (#{Rack::Test::VERSION})"
  end

  #
  # GET /api/v1/hikes
  #

  def test_get_hikes_ok
    get "/api/v1/hikes"
    assert last_response.ok?
  end

  def test_get_hikes_returns_multiple_hikes
    get "/api/v1/hikes"
    json = JSON.parse(last_response.body)
    assert json.length > 0
  end

  def test_get_hikes_includes_location
    get "/api/v1/hikes"
    json = JSON.parse(last_response.body)
    location = json[0]["location"]
    assert location != nil
    assert location["longitude"] != nil
    assert location["latitude"] != nil
  end

  def test_trailing_slash_doesnt_redirect
    get "/api/v1/hikes/"
    assert last_response.not_found?
  end

  #
  # GET /api/v1/hikes/:id
  #

  def test_get_hike_by_id
    get "/api/v1/hikes/1"
    json = JSON.parse(last_response.body)
    assert_equal 1, json["id"]
  end

  def test_get_hike_by_string_id
    get "/api/v1/hikes/empty"
    json = JSON.parse(last_response.body)
    assert_equal "empty", json["string_id"]
  end

  #
  # POST /api/v1/hikes
  #

  def test_post_hike_without_credentials
    post "/api/v1/hikes", get_post_hike_json.to_json
    assert_equal 403, last_response.status
  end

  def test_post_with_credentials
    data = get_post_hike_json
    post_and_validate data, 200
    validate data, "new-name"
  end

  def test_post_without_input
    post_and_validate nil, 400
  end

  def test_post_with_incomplete_input
    data = get_post_hike_json
    data.delete("name")
    post_and_validate data, 400
  end

  def test_post_with_invalid_distance_input
    data = get_post_hike_json
    data["distance"] = "not-a-number"
    post_and_validate data, 400
  end

  def test_post_with_invalid_elevation_input
    data = get_post_hike_json
    data["elevation_max"] = "not-a-number"
    post_and_validate data, 400
  end

  def test_post_with_invalid_latitude_input
    data = get_post_hike_json
    data["location"]["latitude"] = 91
    post_and_validate data, 400
  end

  def test_post_with_invalid_longitude_input
    data = get_post_hike_json
    data["location"]["longitude"] = -181
    post_and_validate data, 400
  end

  def test_post_with_hike_that_already_exists
    data = get_post_hike_json
    data["name"] = "Empty"
    # Cleanup: use the shared post_and_validate helper instead of duplicating
    # the set_admin_cookie / post / assert sequence inline. Behavior is the
    # same: 409 Conflict because a hike named "Empty" already exists.
    post_and_validate data, 409
  end

  #
  # PUT /api/v1/hikes/:id
  #

  def test_put_hike_without_credentials
    data = {"name" => "New name"}
    put "/api/v1/hikes/empty", data.to_json
    assert_equal 403, last_response.status
  end

  def test_put_hike_name
    data = {"name" => "New name"}
    put_and_validate data
  end

  def test_put_hike_description
    data = {"description" => "New description"}
    put_and_validate data
  end

  def test_put_hike_name_and_description
    data = {"name" => "New name", "description" => "New description"}
    put_and_validate data
  end

  def test_put_hike_location
    data = {"location" => {"latitude" => 56, "longitude" => -123.4}}
    put_and_validate data
  end

  #
  # Helpers
  #

  # PUTs +data+ to the "empty" hike as admin and validates the response body.
  def put_and_validate(data)
    set_admin_cookie
    put "/api/v1/hikes/empty", data.to_json
    validate data, "empty"
  end

  # POSTs +data+ as admin and asserts the expected HTTP status code.
  def post_and_validate(data, response_code)
    set_admin_cookie
    post "/api/v1/hikes", data.to_json
    assert_equal response_code, last_response.status
  end

  # Authenticates as the first user via the signed user_id cookie.
  def set_admin_cookie
    set_cookie "user_id=#{Digest::SHA1.hexdigest(User.first.id)}"
  end

  # Checks the last response against +data+, then re-fetches the hike and
  # checks the persisted representation as well.
  def validate(data, hike_string_id)
    json = JSON.parse(last_response.body)
    assert_equal hike_string_id, json["string_id"]
    validate_hashes data, json
    get "/api/v1/hikes/" + hike_string_id
    json = JSON.parse(last_response.body)
    assert_equal hike_string_id, json["string_id"]
    validate_hashes data, json
  end

  # Recursively asserts that every key in expected_hash matches actual_hash.
  def validate_hashes(expected_hash, actual_hash)
    expected_hash.each do |key, value|
      if value.class == Hash
        validate_hashes value, actual_hash[key]
      else
        assert_equal value, actual_hash[key]
      end
    end
  end

  # Canonical valid POST payload; tests mutate copies of this hash.
  def get_post_hike_json
    {
      "name" => "New Name",
      "locality" => "New Locality",
      "distance" => 123,
      "elevation_max" => 1234,
      "location" => {
        "latitude" => 12,
        "longitude" => 12
      }
    }
  end
end
Clean up the hike-already-exists test to use the helper functions.
require "rack/test"
require "test/unit"
require_relative "../test_case"
require_relative "../../model/database"
require_relative "../../routes/api_routes"
# Integration tests for the /api/v1/hikes JSON routes, driven through
# Rack::Test against the app under test.
class ApiRoutesTest < HikeAppTestCase

  def setup
    clear_cookies
    header "Accept", "application/json"
    header "User-Agent", "rack/test (#{Rack::Test::VERSION})"
  end

  #
  # GET /api/v1/hikes
  #

  def test_get_hikes_ok
    get "/api/v1/hikes"
    assert last_response.ok?
  end

  def test_get_hikes_returns_multiple_hikes
    get "/api/v1/hikes"
    json = JSON.parse(last_response.body)
    assert json.length > 0
  end

  def test_get_hikes_includes_location
    get "/api/v1/hikes"
    json = JSON.parse(last_response.body)
    location = json[0]["location"]
    assert location != nil
    assert location["longitude"] != nil
    assert location["latitude"] != nil
  end

  def test_trailing_slash_doesnt_redirect
    get "/api/v1/hikes/"
    assert last_response.not_found?
  end

  #
  # GET /api/v1/hikes/:id
  #

  def test_get_hike_by_id
    get "/api/v1/hikes/1"
    json = JSON.parse(last_response.body)
    assert_equal 1, json["id"]
  end

  def test_get_hike_by_string_id
    get "/api/v1/hikes/empty"
    json = JSON.parse(last_response.body)
    assert_equal "empty", json["string_id"]
  end

  #
  # POST /api/v1/hikes
  #

  def test_post_hike_without_credentials
    post "/api/v1/hikes", get_post_hike_json.to_json
    assert_equal 403, last_response.status
  end

  def test_post_with_credentials
    data = get_post_hike_json
    post_and_validate data, 200
    validate data, "new-name"
  end

  def test_post_without_input
    post_and_validate nil, 400
  end

  def test_post_with_incomplete_input
    data = get_post_hike_json
    data.delete("name")
    post_and_validate data, 400
  end

  def test_post_with_invalid_distance_input
    data = get_post_hike_json
    data["distance"] = "not-a-number"
    post_and_validate data, 400
  end

  def test_post_with_invalid_elevation_input
    data = get_post_hike_json
    data["elevation_max"] = "not-a-number"
    post_and_validate data, 400
  end

  def test_post_with_invalid_latitude_input
    data = get_post_hike_json
    data["location"]["latitude"] = 91
    post_and_validate data, 400
  end

  def test_post_with_invalid_longitude_input
    data = get_post_hike_json
    data["location"]["longitude"] = -181
    post_and_validate data, 400
  end

  def test_post_with_hike_that_already_exists
    data = get_post_hike_json
    data["name"] = "Empty"
    # 409 Conflict: a hike named "Empty" already exists.
    post_and_validate data, 409
  end

  #
  # PUT /api/v1/hikes/:id
  #

  def test_put_hike_without_credentials
    data = {"name" => "New name"}
    put "/api/v1/hikes/empty", data.to_json
    assert_equal 403, last_response.status
  end

  def test_put_hike_name
    data = {"name" => "New name"}
    put_and_validate data
  end

  def test_put_hike_description
    data = {"description" => "New description"}
    put_and_validate data
  end

  def test_put_hike_name_and_description
    data = {"name" => "New name", "description" => "New description"}
    put_and_validate data
  end

  def test_put_hike_location
    data = {"location" => {"latitude" => 56, "longitude" => -123.4}}
    put_and_validate data
  end

  #
  # Helpers
  #

  # PUTs +data+ to the "empty" hike as admin and validates the response body.
  def put_and_validate data
    set_admin_cookie
    put "/api/v1/hikes/empty", data.to_json
    validate data, "empty"
  end

  # POSTs +data+ as admin and asserts the expected HTTP status code.
  def post_and_validate data, response_code
    set_admin_cookie
    post "/api/v1/hikes", data.to_json
    assert_equal response_code, last_response.status
  end

  # Authenticates as the first user via the signed user_id cookie.
  def set_admin_cookie
    set_cookie "user_id=#{Digest::SHA1.hexdigest(User.first.id)}"
  end

  # Checks the last response against +data+, then re-fetches the hike and
  # checks the persisted representation as well.
  def validate data, hike_string_id
    json = JSON.parse(last_response.body)
    assert_equal hike_string_id, json["string_id"]
    validate_hashes data, json
    get "/api/v1/hikes/" + hike_string_id
    json = JSON.parse(last_response.body)
    assert_equal hike_string_id, json["string_id"]
    validate_hashes data, json
  end

  # Recursively asserts that every key in expected_hash matches actual_hash.
  def validate_hashes expected_hash, actual_hash
    expected_hash.each do |key, value|
      if value.class == Hash
        validate_hashes value, actual_hash[key]
      else
        assert_equal value, actual_hash[key]
      end
    end
  end

  # Canonical valid POST payload; tests mutate copies of this hash.
  def get_post_hike_json
    {
      "name" => "New Name",
      "locality" => "New Locality",
      "distance" => 123,
      "elevation_max" => 1234,
      "location" => {
        "latitude" => 12,
        "longitude" => 12
      }
    }
  end
end
require "spec_helper"
# Feature coverage for the "New project" page: visibility-level selector
# defaults and persistence, namespace selection (user vs. group), and
# import-URL autocomplete behavior.
feature "New project", feature: true do
  let(:user) { create(:admin) }

  before do
    login_as(user)
  end

  context "Visibility level selector" do
    # Generates one example pair per configured visibility level.
    Gitlab::VisibilityLevel.options.each do |key, level|
      it "sets selector to #{key}" do
        stub_application_setting(default_project_visibility: level)
        visit new_project_path
        expect(find_field("project_visibility_level_#{level}")).to be_checked
      end

      it 'saves visibility level on validation error' do
        visit new_project_path
        choose(key)
        click_button('Create project')
        expect(find_field("project_visibility_level_#{level}")).to be_checked
      end
    end
  end

  context "Namespace selector" do
    context "with user namespace" do
      before do
        visit new_project_path
      end

      it "selects the user namespace" do
        namespace = find("#project_namespace_id")
        expect(namespace.text).to eq user.username
      end
    end

    context "with group namespace" do
      let(:group) { create(:group, :private, owner: user) }

      before do
        group.add_owner(user)
        visit new_project_path(namespace_id: group.id)
      end

      it "selects the group namespace" do
        namespace = find("#project_namespace_id option[selected]")
        expect(namespace.text).to eq group.name
      end

      context "on validation error" do
        before do
          fill_in('project_path', with: 'private-group-project')
          choose('Internal')
          click_button('Create project')
          expect(page).to have_css '.project-edit-errors .alert.alert-danger'
        end

        it "selects the group namespace" do
          namespace = find("#project_namespace_id option[selected]")
          expect(namespace.text).to eq group.name
        end
      end
    end
  end

  context 'Import project options' do
    before do
      visit new_project_path
    end

    it 'does not autocomplete sensitive git repo URL' do
      autocomplete = find('#project_import_url')['autocomplete']
      expect(autocomplete).to eq('off')
    end
  end
end
Fix RuboCop offenses
require "spec_helper"
# Feature coverage for the "New project" page: visibility-level selector
# defaults and persistence, namespace selection (user vs. group), and
# import-URL autocomplete behavior.
feature "New project", feature: true do
  let(:user) { create(:admin) }

  before do
    login_as(user)
  end

  context "Visibility level selector" do
    # Generates one example pair per configured visibility level.
    Gitlab::VisibilityLevel.options.each do |key, level|
      it "sets selector to #{key}" do
        stub_application_setting(default_project_visibility: level)
        visit new_project_path
        expect(find_field("project_visibility_level_#{level}")).to be_checked
      end

      it 'saves visibility level on validation error' do
        visit new_project_path
        choose(key)
        click_button('Create project')
        expect(find_field("project_visibility_level_#{level}")).to be_checked
      end
    end
  end

  context "Namespace selector" do
    context "with user namespace" do
      before do
        visit new_project_path
      end

      it "selects the user namespace" do
        namespace = find("#project_namespace_id")
        expect(namespace.text).to eq user.username
      end
    end

    context "with group namespace" do
      let(:group) { create(:group, :private, owner: user) }

      before do
        group.add_owner(user)
        visit new_project_path(namespace_id: group.id)
      end

      it "selects the group namespace" do
        namespace = find("#project_namespace_id option[selected]")
        expect(namespace.text).to eq group.name
      end

      context "on validation error" do
        before do
          fill_in('project_path', with: 'private-group-project')
          choose('Internal')
          click_button('Create project')
          expect(page).to have_css '.project-edit-errors .alert.alert-danger'
        end

        it "selects the group namespace" do
          namespace = find("#project_namespace_id option[selected]")
          expect(namespace.text).to eq group.name
        end
      end
    end
  end

  context 'Import project options' do
    before do
      visit new_project_path
    end

    it 'does not autocomplete sensitive git repo URL' do
      autocomplete = find('#project_import_url')['autocomplete']
      expect(autocomplete).to eq('off')
    end
  end
end
|
# Feature specs for passwordless token authentication: requesting a login
# link by email, token validity and expiry, request rate limiting, and
# behaviour when the token_auth feature flag is disabled.
require 'rails_helper'

# Shared context that switches the token_auth feature flag off for a
# group of examples.
RSpec.shared_context "token_auth feature disabled" do
  extend FeatureFlagSpecHelper
  disable_feature :token_auth
end

feature 'Token Authentication' do
  include ActiveJobHelper
  include PermittedDomainHelper

  let(:login_page) { Pages::Login.new }

  before { create(:group) }

  scenario 'trying to log in with an invalid email address' do
    visit '/'
    fill_in 'token_user_email', with: 'Bob'
    expect { click_button 'Request link' }.not_to change { ActionMailer::Base.deliveries.count }
    expect(page).to have_text('Email address is not formatted correctly')
  end

  scenario 'trying to log in with a non-whitelisted email address domain' do
    visit '/'
    fill_in 'token_user_email', with: 'james@abscond.org'
    expect { click_button 'Request link' }.not_to change { ActionMailer::Base.deliveries.count }
    expect(page).to have_text('Email address is not valid')
  end

  # Happy path: exactly one email containing the token URL is sent.
  scenario 'accurate email' do
    visit '/'
    fill_in 'token_user_email', with: 'james.darling@digital.justice.gov.uk'
    expect { click_button 'Request link' }.to change { ActionMailer::Base.deliveries.count }.by(1)
    expect(page).to have_text('We are sending you a link to log in')
    expect(last_email.to).to eql(['james.darling@digital.justice.gov.uk'])
    expect(last_email.body.encoded).to have_text(token_url(Token.last))
  end

  # Surrounding whitespace in the entered address must be tolerated.
  scenario 'copy-pasting an email with extraneous spaces' do
    visit '/'
    fill_in 'token_user_email', with: ' correct@digital.justice.gov.uk '
    click_button 'Request link'
    expect(page).to have_text('We are sending you a link to log in')
    expect(last_email.to).to include('correct@digital.justice.gov.uk')
  end

  scenario 'following valid link from email and getting prompted to complete my profile' do
    token = create(:token)
    visit token_path(token)
    expect(page).to have_text('Signed in as')
    expect(page).to have_text('Start building your profile now')
    within('h1') do
      expect(page).to have_text('Edit profile')
    end
  end

  scenario "logging in with a fake token" do
    visit token_path(id: "gobbledygoock")
    expect(page).to_not have_text('Signed in as')
    expect(page).to_not have_text('Start building your profile now')
    expect(page).to have_text("The authentication token doesn't exist and so isn't valid")
  end

  # Tokens expire 3 hours after creation.
  scenario "logging in with a token that's more than 3 hours old" do
    token = create(:token, created_at: 4.hours.ago)
    visit token_path(token)
    expect(page).to_not have_text('Signed in as')
    expect(page).to_not have_text('Start building your profile now')
    expect(page).to have_text("The authentication token has expired and is more than 3 hours old")
  end

  # At most 8 token requests per hour; the 9th must be rejected.
  scenario "requesting more than 8 tokens per hour isn't permitted" do
    1.upto(9) do |count|
      visit '/'
      fill_in 'token_user_email', with: ' tony.stark@digital.justice.gov.uk '
      click_button 'Request link'
      if count < 9
        expect(page).to have_text('We are sending you a link to log in')
        expect(page).to_not have_text("You've reached the limit of 8 tokens requested within an hour")
      else
        expect(page).to_not have_text('We are sending you a link to log in')
        expect(page).to have_text("You've reached the limit of 8 tokens requested within an hour")
      end
    end
  end

  scenario 'logging in and displaying a link to my profile' do
    person = create(:person, given_name: 'Bob', surname: 'Smith', email: 'test.user@digital.justice.gov.uk')
    token = Token.for_person(person)
    visit token_path(token)
    expect(page).to have_text('Signed in as Bob Smith')
    expect(page).to have_link('Bob Smith', href: person_path(person))
  end

  scenario 'logging out' do
    token_log_in_as('james.darling@digital.justice.gov.uk')
    expect(page).to have_text('James Darling')
    click_link 'Sign out'
    expect(page).not_to have_text('james.darling@digital.justice.gov.uk')
    expect(login_page).to be_displayed
  end

  # Email lookup is case-insensitive.
  scenario 'being inconsistent about capitalisation' do
    create(:person,
           given_name: 'Example',
           surname: 'User',
           email: 'example.user@digital.justice.gov.uk'
          )
    token_log_in_as('Example.USER@digital.justice.gov.uk')
    expect(page).to have_text('Signed in as Example User')
  end

  context 'token_auth feature disabled' do
    include_context "token_auth feature disabled"

    let(:token) { create(:token) }

    scenario 'following a valid link from an email redirects to login' do
      visit token_path(token)
      expect(page.current_path).to eq(new_sessions_path)
      expect(page).to have_text('login link is invalid')
      expect(login_page).to be_displayed
    end

    scenario 'login page does not have token auth login option' do
      visit new_sessions_path
      expect(page).not_to have_css('form.new_token')
    end

    scenario 'attempting to create an authentication token redirects to login' do
      visit token_path(token)
      expect(page.current_path).to eq(new_sessions_path)
      expect(page).to have_text('login link is invalid')
      expect(login_page).to be_displayed
    end
  end
end
Update specs with new copy
# Feature specs for passwordless token authentication (updated copy text:
# "We're just emailing you a link to access People Finder"): requesting a
# login link, token validity/expiry, rate limiting, and the token_auth
# feature flag being disabled.
require 'rails_helper'

# Shared context that switches the token_auth feature flag off for a
# group of examples.
RSpec.shared_context "token_auth feature disabled" do
  extend FeatureFlagSpecHelper
  disable_feature :token_auth
end

feature 'Token Authentication' do
  include ActiveJobHelper
  include PermittedDomainHelper

  let(:login_page) { Pages::Login.new }

  before { create(:group) }

  scenario 'trying to log in with an invalid email address' do
    visit '/'
    fill_in 'token_user_email', with: 'Bob'
    expect { click_button 'Request link' }.not_to change { ActionMailer::Base.deliveries.count }
    expect(page).to have_text('Email address is not formatted correctly')
  end

  scenario 'trying to log in with a non-whitelisted email address domain' do
    visit '/'
    fill_in 'token_user_email', with: 'james@abscond.org'
    expect { click_button 'Request link' }.not_to change { ActionMailer::Base.deliveries.count }
    expect(page).to have_text('Email address is not valid')
  end

  # Happy path: exactly one email containing the token URL is sent.
  scenario 'accurate email' do
    visit '/'
    fill_in 'token_user_email', with: 'james.darling@digital.justice.gov.uk'
    expect { click_button 'Request link' }.to change { ActionMailer::Base.deliveries.count }.by(1)
    expect(page).to have_text('We\'re just emailing you a link to access People Finder')
    expect(last_email.to).to eql(['james.darling@digital.justice.gov.uk'])
    expect(last_email.body.encoded).to have_text(token_url(Token.last))
  end

  # Surrounding whitespace in the entered address must be tolerated.
  scenario 'copy-pasting an email with extraneous spaces' do
    visit '/'
    fill_in 'token_user_email', with: ' correct@digital.justice.gov.uk '
    click_button 'Request link'
    expect(page).to have_text('We\'re just emailing you a link to access People Finder')
    expect(last_email.to).to include('correct@digital.justice.gov.uk')
  end

  scenario 'following valid link from email and getting prompted to complete my profile' do
    token = create(:token)
    visit token_path(token)
    expect(page).to have_text('Signed in as')
    expect(page).to have_text('Start building your profile now')
    within('h1') do
      expect(page).to have_text('Edit profile')
    end
  end

  scenario "logging in with a fake token" do
    visit token_path(id: "gobbledygoock")
    expect(page).to_not have_text('Signed in as')
    expect(page).to_not have_text('Start building your profile now')
    expect(page).to have_text("The authentication token doesn't exist and so isn't valid")
  end

  # Tokens expire 3 hours after creation.
  scenario "logging in with a token that's more than 3 hours old" do
    token = create(:token, created_at: 4.hours.ago)
    visit token_path(token)
    expect(page).to_not have_text('Signed in as')
    expect(page).to_not have_text('Start building your profile now')
    expect(page).to have_text("The authentication token has expired and is more than 3 hours old")
  end

  # At most 8 token requests per hour; the 9th must be rejected.
  scenario "requesting more than 8 tokens per hour isn't permitted" do
    1.upto(9) do |count|
      visit '/'
      fill_in 'token_user_email', with: ' tony.stark@digital.justice.gov.uk '
      click_button 'Request link'
      if count < 9
        expect(page).to have_text('We\'re just emailing you a link to access People Finder')
        expect(page).to_not have_text("You've reached the limit of 8 tokens requested within an hour")
      else
        expect(page).not_to have_text('We\'re just emailing you a link to access People Finder')
        expect(page).to have_text("You've reached the limit of 8 tokens requested within an hour")
      end
    end
  end

  scenario 'logging in and displaying a link to my profile' do
    person = create(:person, given_name: 'Bob', surname: 'Smith', email: 'test.user@digital.justice.gov.uk')
    token = Token.for_person(person)
    visit token_path(token)
    expect(page).to have_text('Signed in as Bob Smith')
    expect(page).to have_link('Bob Smith', href: person_path(person))
  end

  scenario 'logging out' do
    token_log_in_as('james.darling@digital.justice.gov.uk')
    expect(page).to have_text('James Darling')
    click_link 'Sign out'
    expect(page).not_to have_text('james.darling@digital.justice.gov.uk')
    expect(login_page).to be_displayed
  end

  # Email lookup is case-insensitive.
  scenario 'being inconsistent about capitalisation' do
    create(:person,
           given_name: 'Example',
           surname: 'User',
           email: 'example.user@digital.justice.gov.uk'
          )
    token_log_in_as('Example.USER@digital.justice.gov.uk')
    expect(page).to have_text('Signed in as Example User')
  end

  context 'token_auth feature disabled' do
    include_context "token_auth feature disabled"

    let(:token) { create(:token) }

    scenario 'following a valid link from an email redirects to login' do
      visit token_path(token)
      expect(page.current_path).to eq(new_sessions_path)
      expect(page).to have_text('login link is invalid')
      expect(login_page).to be_displayed
    end

    scenario 'login page does not have token auth login option' do
      visit new_sessions_path
      expect(page).not_to have_css('form.new_token')
    end

    scenario 'attempting to create an authentication token redirects to login' do
      visit token_path(token)
      expect(page.current_path).to eq(new_sessions_path)
      expect(page).to have_text('login link is invalid')
      expect(login_page).to be_displayed
    end
  end
end
|
class Thingnatra
  module Views
    # Mustache layout view shared by all pages.
    class Layout < Mustache
      # Page <title>; falls back to the default tagline when no explicit
      # title has been set on the view.
      def title
        return @title if @title
        "Thingnatra -- Webfrontend for Things.app data"
      end
    end
  end
end
adapt layout view for heading
class Thingnatra
  module Views
    # Mustache layout view shared by all pages.
    class Layout < Mustache
      # Page <title>; falls back to the default tagline when no explicit
      # title has been set on the view.
      def title
        return @title if @title
        "Thingnatra -- Webfrontend for Things.app data"
      end

      # Visible page heading; defaults to the application name.
      def heading
        return @heading if @heading
        "Thingnatra"
      end
    end
  end
end
|
# Re-opens Fixnum so integer literals respond to the same duck-typed AST
# node interface (value/name/to_ml2) as the other node classes.
# NOTE(review): Fixnum was removed in Ruby 3.2; on modern Rubies this
# defines a fresh, unused class instead of patching Integer.
class Fixnum
  # An integer literal evaluates to itself.
  def value
    self
  end

  # Its display name is also itself.
  def name
    self
  end

  # Axiomatic E-Int derivation line.
  def to_ml2
    format("%s evalto %s by E-Int {};", self, self)
  end
end
# Re-opens TrueClass so the literal true can act as an AST leaf with the
# same duck-typed interface (value/name/to_ml2) as other node classes.
class TrueClass
  # A boolean literal evaluates to itself.
  def value
    self
  end

  # Its display name is also itself.
  def name
    self
  end

  # Axiomatic E-Bool derivation line.
  def to_ml2
    format("%s evalto %s by E-Bool {};", self, self)
  end
end
# Re-opens FalseClass so the literal false can act as an AST leaf with
# the same duck-typed interface (value/name/to_ml2) as other node classes.
class FalseClass
  # A boolean literal evaluates to itself.
  def value
    self
  end

  # Its display name is also itself.
  def name
    self
  end

  # Axiomatic E-Bool derivation line.
  def to_ml2
    format("%s evalto %s by E-Bool {};", self, self)
  end
end
module Dashutu
module ML2
# Mutable evaluation environment: an ordered list of [name, value]
# bindings, oldest first.
class Environment
  # The raw binding list.
  attr_reader :var

  def initialize
    @var = []
  end

  # Swap the binding list for a shallow copy so subsequent mutations do
  # not leak into other Environment instances sharing the same array.
  def var_clone
    @var = @var.clone
  end

  # Append a binding; returns self so calls can be chained.
  def var!(x, y)
    @var.push([x, y])
    self
  end

  # Attach this environment to the expression, then render its
  # derivation.
  def eval(exp)
    exp.var! self
    exp.to_ml2
  end

  # Render as "x1 = v1 , x2 = v2 |- " (bare " |- " when empty).
  def to_ml2
    rendered = @var.map { |name, value| "#{name} = #{value}" }
    rendered.join(" , ") + " |- "
  end
end
# let-in AST node: binds +name+ to the value of +e1+ within +e2+.
# NOTE(review): to_ml2 is an unimplemented stub and currently returns
# nil — the file's commit note says it is being prepared for a rewrite.
class LETIN < Struct.new(:name, :e1, :e2)
  def to_ml2
  end
end
# Abstract binary-operation node over operands (e1, e2). Subclasses must
# provide lastop/lastby/op/apply; to_ml2 renders the full derivation
# tree for "e1 op e2 evalto result".
class EBase < Struct.new(:e1, :e2)
  # Snapshot the environment used for rendering (cloned so later
  # mutations elsewhere do not affect this node).
  def var!(env)
    @env = env.clone
  end

  # Closing arithmetic judgment, e.g. "1 plus 2 is 3  by B-Plus  {};".
  def lastline
    "#{e1.value} #{lastop} #{e2.value} is #{apply} #{lastby} {};"
  end

  # Environment prefix ("x = 1 |- "); bare turnstile when no environment
  # was attached.
  def env_s
    if !@env.nil?
      @env.to_ml2
    else
      "|- "
    end
  end

  # Propagate the captured environment to variable operands so they can
  # resolve their values during rendering.
  def prepare
    e1.var! @env if e1.is_a? Var
    e2.var! @env if e2.is_a? Var
  end

  # Render the derivation: one premise per operand, then the closing
  # arithmetic rule line.
  def to_ml2
    prepare
    e3 = apply
    return (
      env_s + "#{e1.name} #{op} #{e2.name} evalto #{e3} {\n" +
      " #{env_s}#{e1.to_ml2}\n" +
      " #{env_s}#{e2.to_ml2}\n" +
      " #{lastline}\n" +
      "}")
  end
end
# Addition node: renders an E-Plus derivation whose closing line is
# justified by the B-Plus arithmetic rule.
class EPlus < EBase
  # Sum of the operand values.
  def apply
    e1.value + e2.value
  end

  # Operator symbol shown in the judgment.
  def op
    "+"
  end

  # Word form used in the closing arithmetic line.
  def lastop
    "plus"
  end

  # Justification for the closing arithmetic line.
  def lastby
    " by B-Plus "
  end

  # Rule name for the overall evaluation step.
  def ml2_name
    " by E-Plus "
  end
end
# Subtraction node: renders an E-Minus derivation whose closing line is
# justified by the B-Minus arithmetic rule.
class EMinus < EBase
  # Rule name for the overall evaluation step.
  def ml2_name
    " by E-Minus "
  end

  # Word form used in the closing arithmetic line.
  def lastop
    "minus"
  end

  # Justification for the closing arithmetic line.
  def lastby
    " by B-Minus "
  end

  # Operator symbol shown in the judgment.
  # BUG FIX: previously returned "+" (copy-paste from EPlus), so minus
  # expressions rendered as "a + b evalto ..." while evaluating a - b.
  def op
    "-"
  end

  # Difference of the operand values.
  def apply
    e1.value - e2.value
  end
end
# Variable reference node; resolves its value from the attached
# environment and renders E-Var1/E-Var2 derivations.
class Var
  def initialize k
    @key = k
  end

  def name
    @key
  end

  # Value bound to this variable in the environment.
  def value
    _, v = search
    return v
  end

  # Attach the environment used for lookup and rendering.
  def var! env
    @env = env
  end

  # Environment prefix for rendered judgments; bare turnstile when no
  # environment was attached.
  def env_s
    if !@env.nil?
      @env.to_ml2
    else
      "|- "
    end
  end

  # True when this variable is bound by the innermost (last) binding —
  # the E-Var1 case.
  def var1?
    k, v = @env.var[-1]
    k == @key
  end

  # Linear scan for the first binding matching @key.
  # NOTE(review): when the key is absent, `each` returns the bindings
  # array itself, so `value` would destructure the wrong thing — this
  # assumes callers never look up unbound names; confirm.
  def search
    @env.var.each do |k, v|
      return k, v if @key == k
    end
  end

  # Render E-Var1 when bound innermost; otherwise recurse on a copy of
  # the environment with the innermost binding popped (E-Var2).
  def to_ml2
    if var1?
      k, v = @env.var[-1]
      env_s + "#{k} evalto #{v} by E-Var1 {};"
    else
      v2 = Var.new @key
      env = @env.clone
      env.var_clone
      env.var.pop
      v2.var! env
      k, v = search
      env_s + "#{k} evalto #{v} by E-Var2 {\n" +
      " " + v2.to_ml2 + "\n" +
      "}"
    end
  end
end
end
end
prepare rewrite evalml2.rb
|
require "action_view"
module Datagrid
module Helper
# Renders one cell value for +column+ on +asset+.
# A :url column option turns the value into link text; a :url column
# format turns the value itself into the link target (using the column
# label, or an i18n "URL" default, as the text).
def datagrid_format_value(column, asset)
  value = column.value(asset)
  if column.options[:url]
    link_to(value, column.options[:url].call(asset))
  else
    case column.format
    when :url
      link_to(column.label ? asset.send(column.label) : I18n.t("datagrid.table.url_label", :default => "URL"), value)
    else
      value
    end
  end
end
# Renders the full datagrid <table> for +report+.
# Options: :html (attributes for the table tag; class defaults to
# "datagrid") and :paginate (passed to assets.paginate; its :page
# defaults from the request params).
def datagrid_table(report, *args)
  options = args.extract_options!
  html = options[:html] || {}
  html[:class] ||= "datagrid"
  assets = report.assets
  paginate = options[:paginate]
  if paginate
    # BUG FIX: the page default was previously applied BEFORE the
    # `paginate` local was assigned, so it never saw the options hash.
    # Apply it only when pagination was actually requested.
    paginate[:page] ||= params[:page]
    assets = assets.paginate(paginate)
  end
  content_tag(:table, html) do
    table = content_tag(:tr, datagrid_header(report, options))
    table << datagrid_rows(report.columns, assets, options)
    table
  end
end
protected
# Builds the <th> header cells for every column of +grid+, appending
# ASC/DESC sort links for sortable columns.
def datagrid_header(grid, options)
  header = empty_string
  grid.columns.each do |column|
    data = column.header.html_safe
    if column.order
      data << datagrid_order_for(grid, column)
    end
    header << content_tag(:th, data)
  end
  header
end
# Renders one <tr> per asset with alternating odd/even row classes and
# one formatted <td> per column.
def datagrid_rows(columns, assets, options)
  rows = empty_string
  assets.each do |asset|
    rows << content_tag(:tr, :class => cycle("odd", "even")) do
      html = empty_string
      columns.each do |column|
        html << content_tag(:td, datagrid_format_value(column, asset))
      end
      html
    end
  end
  rows
end
# ASC/DESC ordering links for a sortable column, wrapped in a
# div.order container; both links re-submit the grid's current
# attributes with :order (and :reverse for DESC) set.
def datagrid_order_for(grid, column)
  content_tag(:div, :class => "order") do
    link_to(
      I18n.t("datagrid.table.order.asc", :default => "ASC"), url_for(grid.param_name => grid.attributes.merge(:order => column.name))
    ) + " " +
    link_to(I18n.t("datagrid.table.order.desc", :default => "DESC"), url_for(grid.param_name => grid.attributes.merge(:order => column.name, :reverse => true )))
  end
end
# Returns an empty string buffer, html_safe-aware: marked html_safe when
# ActiveSupport is loaded, a plain String otherwise.
def empty_string
  buffer = ""
  return buffer.html_safe if buffer.respond_to?(:html_safe)
  buffer
end
end
::ActionView::Base.send(:include, ::Datagrid::Helper)
end
No default pagination parameter
require "action_view"
module Datagrid
module Helper
# Renders one cell value for +column+ on +asset+.
# A :url column option turns the value into link text; a :url column
# format turns the value itself into the link target (using the column
# label, or an i18n "URL" default, as the text).
def datagrid_format_value(column, asset)
  value = column.value(asset)
  if column.options[:url]
    link_to(value, column.options[:url].call(asset))
  else
    case column.format
    when :url
      link_to(column.label ? asset.send(column.label) : I18n.t("datagrid.table.url_label", :default => "URL"), value)
    else
      value
    end
  end
end
# Renders the full datagrid <table> for +report+.
# Options: :html (attributes for the table tag; class defaults to
# "datagrid") and :paginate (passed straight to assets.paginate when
# present — no default page parameter is injected).
def datagrid_table(report, *args)
  options = args.extract_options!
  html = options[:html] || {}
  html[:class] ||= "datagrid"
  assets = report.assets
  paginate = options[:paginate]
  assets = assets.paginate(paginate) if paginate
  content_tag(:table, html) do
    table = content_tag(:tr, datagrid_header(report, options))
    table << datagrid_rows(report.columns, assets, options)
    table
  end
end
protected
# Builds the <th> header cells for every column of +grid+, appending
# ASC/DESC sort links for sortable columns.
def datagrid_header(grid, options)
  header = empty_string
  grid.columns.each do |column|
    data = column.header.html_safe
    if column.order
      data << datagrid_order_for(grid, column)
    end
    header << content_tag(:th, data)
  end
  header
end
# Renders one <tr> per asset with alternating odd/even row classes and
# one formatted <td> per column.
def datagrid_rows(columns, assets, options)
  rows = empty_string
  assets.each do |asset|
    rows << content_tag(:tr, :class => cycle("odd", "even")) do
      html = empty_string
      columns.each do |column|
        html << content_tag(:td, datagrid_format_value(column, asset))
      end
      html
    end
  end
  rows
end
# ASC/DESC ordering links for a sortable column, wrapped in a
# div.order container; both links re-submit the grid's current
# attributes with :order (and :reverse for DESC) set.
def datagrid_order_for(grid, column)
  content_tag(:div, :class => "order") do
    link_to(
      I18n.t("datagrid.table.order.asc", :default => "ASC"), url_for(grid.param_name => grid.attributes.merge(:order => column.name))
    ) + " " +
    link_to(I18n.t("datagrid.table.order.desc", :default => "DESC"), url_for(grid.param_name => grid.attributes.merge(:order => column.name, :reverse => true )))
  end
end
# Returns an empty string buffer, html_safe-aware: marked html_safe when
# ActiveSupport is loaded, a plain String otherwise.
def empty_string
  buffer = ""
  return buffer.html_safe if buffer.respond_to?(:html_safe)
  buffer
end
end
::ActionView::Base.send(:include, ::Datagrid::Helper)
end
|
require 'colorize'
require 'launchy'
require 'downloadr'
require 'addressable/uri'
# Command-line helpers for searching, inspecting and downloading datasets
# from the Datahunter API.
module Datahunter
  # Base API endpoint. The local development URL is active; the hosted
  # endpoint is kept below for reference.
  DATASETS_URL = "http://localhost:3000/api/datasets/"
  # DATASETS_URL = "http://shrouded-harbor-5877.herokuapp.com/api/datasets/"
  FEEDBACK_URL = "https://docs.google.com/forms/d/1yNzZjCCXvWHQCbWz4sx-nui3LafeeLcT7FF9T-vbKvw/viewform"
  REQUEST_URL = "https://docs.google.com/forms/d/1NRKWmb_mcpKJmrutXvZSZnysM_v0rfLhjD897H3Myrw/viewform?usp=send_form"

  # Search URL for datasets tagged +tag+ (only the first word is used,
  # lower-cased), with optional spatial (+geo+) and temporal (+temp+)
  # filters appended as query parameters.
  def self.datasets_url(tag, geo = nil, temp = nil)
    tag = tag.downcase.split.first if tag
    geo = geo.downcase if geo
    if geo.nil? && temp.nil?
      "#{DATASETS_URL}?tags=#{tag}"
    elsif temp.nil?
      "#{DATASETS_URL}?tags=#{tag}&spatial=#{geo}"
    elsif geo.nil?
      "#{DATASETS_URL}?tags=#{tag}&temporal=#{temp}"
    else
      "#{DATASETS_URL}?tags=#{tag}&spatial=#{geo}&temporal=#{temp}"
    end
  end

  # Detail URL for a single dataset id.
  def self.ds_url(id)
    "#{DATASETS_URL}#{id}"
  end

  # Pretty-print the main metadata fields of a dataset hash.
  def self.print_dataset_info(dataset)
    puts("title: ".colorize(:green) + "#{dataset["title"]}")
    puts("description: ".colorize(:green) + "#{dataset["description"]}")
    puts("publisher: ".colorize(:green) + "#{dataset["publisher"]}")
    puts("temporal: ".colorize(:green) + "#{dataset["temporal"]}")
    puts("spatial: ".colorize(:green) + "#{dataset["spatial"]}")
    puts("created: ".colorize(:green) + "#{dataset["created"]}")
    puts("updated: ".colorize(:green) + "#{dataset["updated"]}")
    puts("score: ".colorize(:green) + "#{dataset["huntscore"]}")
    puts
  end

  # Download one resource of +dataset+, prompting the user to pick one
  # when several downloadable links exist.
  def self.download_the_data(dataset)
    resources = dataset["resources"]
    number_of_downloadable_links = resources.size
    if number_of_downloadable_links == 1
      dl = 0
    else
      Datahunter.print_downloadable_links resources
      dl = ask("### which one? (0/1/...)".colorize(:yellow), Integer) { |i| i.in = 0..(number_of_downloadable_links - 1) }
    end
    dl = dl.to_i
    Datahunter.download_file(resources[dl]["url"], resources[dl]["format"], dataset["uri"])
  end

  ## Messages: feedback and excuses

  # Offer to open the feedback form in the browser.
  def self.print_feedback_request
    case ask "### give feedback? (y/n)".colorize(:yellow)
    when 'y'
      Launchy.open(FEEDBACK_URL)
    else
      puts "Bye for now!"
    end
  end

  # Offer to open the dataset-request form in the browser.
  def self.print_request_dataset_message
    case ask "### request a dataset? (y/n)".colorize(:yellow)
    when 'y'
      Launchy.open(REQUEST_URL)
    else
      puts "Bye for now!"
    end
  end

  # Prototype disclaimer shown after unsuccessful searches.
  def self.print_excuse_message
    puts "Remember, this is a first prototype, there will surely be a lot more "\
      "datasets indexed soon. If you want us to find a dataset for you, or "\
      "if you just want to give us a feedback, don't hesitate!".colorize(:red)
  end

  # NOTE: a bare `private` previously appeared here; `private` has no
  # effect on `def self.` singleton methods, so it was removed rather
  # than imply an access restriction that never existed.

  # List each downloadable resource with its index, title and format.
  def self.print_downloadable_links(resources)
    resources.each_with_index do |dl, i|
      puts("#{i}. ".colorize(:yellow) +
        "#{dl["title"]} - ".colorize(:blue) +
        "#{dl["format"]}".colorize(:green))
    end
  end

  # Open HTML resources in the browser; download anything else over HTTP.
  def self.download_file(url, format = "", alt_url = "")
    if format == "HTML"
      Launchy.open(url)
    else
      puts "Start downloading..."
      Downloadr::HTTP.download(url)
      puts "Your file has been downloaded, try to $ ls ;D".colorize(:green)
      Datahunter.print_excuse_and_alternative_url_message alt_url
    end
  end

  # Apologise for possibly-stale metadata and print a manual fallback URL.
  def self.print_excuse_and_alternative_url_message(alt_url = "")
    puts "If this is not the file you expected, it's maybe because publisher don't always keep the metadata up-to-date. We try to clean most of uri's and check the url. Anyway you may be able to download your file by hand here:"
    puts alt_url.to_s.colorize(:blue)
  end
end
real API url
require 'colorize'
require 'launchy'
require 'downloadr'
require 'addressable/uri'
# Command-line helpers for searching, inspecting and downloading datasets
# from the Datahunter API.
module Datahunter
  # Base API endpoint. The hosted production URL is active; the local
  # development endpoint is kept below for reference.
  # DATASETS_URL = "http://localhost:3000/api/datasets/"
  DATASETS_URL = "http://shrouded-harbor-5877.herokuapp.com/api/datasets/"
  FEEDBACK_URL = "https://docs.google.com/forms/d/1yNzZjCCXvWHQCbWz4sx-nui3LafeeLcT7FF9T-vbKvw/viewform"
  REQUEST_URL = "https://docs.google.com/forms/d/1NRKWmb_mcpKJmrutXvZSZnysM_v0rfLhjD897H3Myrw/viewform?usp=send_form"

  # Search URL for datasets tagged +tag+ (only the first word is used,
  # lower-cased), with optional spatial (+geo+) and temporal (+temp+)
  # filters appended as query parameters.
  def self.datasets_url(tag, geo = nil, temp = nil)
    tag = tag.downcase.split.first if tag
    geo = geo.downcase if geo
    if geo.nil? && temp.nil?
      "#{DATASETS_URL}?tags=#{tag}"
    elsif temp.nil?
      "#{DATASETS_URL}?tags=#{tag}&spatial=#{geo}"
    elsif geo.nil?
      "#{DATASETS_URL}?tags=#{tag}&temporal=#{temp}"
    else
      "#{DATASETS_URL}?tags=#{tag}&spatial=#{geo}&temporal=#{temp}"
    end
  end

  # Detail URL for a single dataset id.
  def self.ds_url(id)
    "#{DATASETS_URL}#{id}"
  end

  # Pretty-print the main metadata fields of a dataset hash.
  def self.print_dataset_info(dataset)
    puts("title: ".colorize(:green) + "#{dataset["title"]}")
    puts("description: ".colorize(:green) + "#{dataset["description"]}")
    puts("publisher: ".colorize(:green) + "#{dataset["publisher"]}")
    puts("temporal: ".colorize(:green) + "#{dataset["temporal"]}")
    puts("spatial: ".colorize(:green) + "#{dataset["spatial"]}")
    puts("created: ".colorize(:green) + "#{dataset["created"]}")
    puts("updated: ".colorize(:green) + "#{dataset["updated"]}")
    puts("score: ".colorize(:green) + "#{dataset["huntscore"]}")
    puts
  end

  # Download one resource of +dataset+, prompting the user to pick one
  # when several downloadable links exist.
  def self.download_the_data(dataset)
    resources = dataset["resources"]
    number_of_downloadable_links = resources.size
    if number_of_downloadable_links == 1
      dl = 0
    else
      Datahunter.print_downloadable_links resources
      dl = ask("### which one? (0/1/...)".colorize(:yellow), Integer) { |i| i.in = 0..(number_of_downloadable_links - 1) }
    end
    dl = dl.to_i
    Datahunter.download_file(resources[dl]["url"], resources[dl]["format"], dataset["uri"])
  end

  ## Messages: feedback and excuses

  # Offer to open the feedback form in the browser.
  def self.print_feedback_request
    case ask "### give feedback? (y/n)".colorize(:yellow)
    when 'y'
      Launchy.open(FEEDBACK_URL)
    else
      puts "Bye for now!"
    end
  end

  # Offer to open the dataset-request form in the browser.
  def self.print_request_dataset_message
    case ask "### request a dataset? (y/n)".colorize(:yellow)
    when 'y'
      Launchy.open(REQUEST_URL)
    else
      puts "Bye for now!"
    end
  end

  # Prototype disclaimer shown after unsuccessful searches.
  def self.print_excuse_message
    puts "Remember, this is a first prototype, there will surely be a lot more "\
      "datasets indexed soon. If you want us to find a dataset for you, or "\
      "if you just want to give us a feedback, don't hesitate!".colorize(:red)
  end

  # NOTE: a bare `private` previously appeared here; `private` has no
  # effect on `def self.` singleton methods, so it was removed rather
  # than imply an access restriction that never existed.

  # List each downloadable resource with its index, title and format.
  def self.print_downloadable_links(resources)
    resources.each_with_index do |dl, i|
      puts("#{i}. ".colorize(:yellow) +
        "#{dl["title"]} - ".colorize(:blue) +
        "#{dl["format"]}".colorize(:green))
    end
  end

  # Open HTML resources in the browser; download anything else over HTTP.
  def self.download_file(url, format = "", alt_url = "")
    if format == "HTML"
      Launchy.open(url)
    else
      puts "Start downloading..."
      Downloadr::HTTP.download(url)
      puts "Your file has been downloaded, try to $ ls ;D".colorize(:green)
      Datahunter.print_excuse_and_alternative_url_message alt_url
    end
  end

  # Apologise for possibly-stale metadata and print a manual fallback URL.
  def self.print_excuse_and_alternative_url_message(alt_url = "")
    puts "If this is not the file you expected, it's maybe because publisher don't always keep the metadata up-to-date. We try to clean most of uri's and check the url. Anyway you may be able to download your file by hand here:"
    puts alt_url.to_s.colorize(:blue)
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = "vitrine"
  s.version = "0.0.9"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Julik Tarkhanov"]
  s.date = "2013-11-11"
  s.description = " Serves ERB templates with live CoffeeScript and SASS "
  s.email = "me@julik.nl"
  s.executables = ["vitrine"]
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "bin/vitrine",
    "lib/atomic_write.rb",
    "lib/sourcemaps.rb",
    "lib/version.rb",
    "lib/vitrine.rb",
    "test/helper.rb",
    "test/test_vitrine.rb",
    "vitrine.gemspec"
  ]
  s.homepage = "http://github.com/julik/vitrine"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "2.0.6"
  s.summary = "Quickie micro-app preview server"

  # Jeweler boilerplate: the dependency API used depends on the RubyGems
  # version available at install time.
  if s.respond_to? :specification_version then
    s.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<sinatra>, ["~> 1.4"])
      s.add_runtime_dependency(%q<coffee-script>, ["~> 2.2"])
      s.add_runtime_dependency(%q<sass>, ["~> 3"])
      s.add_runtime_dependency(%q<guard>, [">= 0"])
      s.add_runtime_dependency(%q<rack-livereload>, [">= 0"])
      s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.7"])
    else
      s.add_dependency(%q<sinatra>, ["~> 1.4"])
      s.add_dependency(%q<coffee-script>, ["~> 2.2"])
      s.add_dependency(%q<sass>, ["~> 3"])
      s.add_dependency(%q<guard>, [">= 0"])
      s.add_dependency(%q<rack-livereload>, [">= 0"])
      s.add_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_dependency(%q<bundler>, ["~> 1.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
    end
  else
    s.add_dependency(%q<sinatra>, ["~> 1.4"])
    s.add_dependency(%q<coffee-script>, ["~> 2.2"])
    s.add_dependency(%q<sass>, ["~> 3"])
    s.add_dependency(%q<guard>, [">= 0"])
    s.add_dependency(%q<rack-livereload>, [">= 0"])
    s.add_dependency(%q<rdoc>, ["~> 3.12"])
    s.add_dependency(%q<bundler>, ["~> 1.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
  end
end
Regenerate gemspec for version 0.0.10
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = "vitrine"
  s.version = "0.0.10"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Julik Tarkhanov"]
  s.date = "2013-11-22"
  s.description = " Serves ERB templates with live CoffeeScript and SASS "
  s.email = "me@julik.nl"
  s.executables = ["vitrine"]
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "bin/vitrine",
    "lib/atomic_write.rb",
    "lib/sourcemaps.rb",
    "lib/version.rb",
    "lib/vitrine.rb",
    "test/helper.rb",
    "test/test_vitrine.rb",
    "vitrine.gemspec"
  ]
  s.homepage = "http://github.com/julik/vitrine"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "2.0.3"
  s.summary = "Quickie micro-app preview server"

  # Jeweler boilerplate: the dependency API used depends on the RubyGems
  # version available at install time.
  if s.respond_to? :specification_version then
    s.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<sinatra>, ["~> 1.4"])
      s.add_runtime_dependency(%q<coffee-script>, ["~> 2.2"])
      s.add_runtime_dependency(%q<sass>, ["~> 3"])
      s.add_runtime_dependency(%q<guard>, ["~> 2.2"])
      s.add_runtime_dependency(%q<guard-livereload>, [">= 0"])
      s.add_runtime_dependency(%q<rack-livereload>, [">= 0"])
      s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.7"])
    else
      s.add_dependency(%q<sinatra>, ["~> 1.4"])
      s.add_dependency(%q<coffee-script>, ["~> 2.2"])
      s.add_dependency(%q<sass>, ["~> 3"])
      s.add_dependency(%q<guard>, ["~> 2.2"])
      s.add_dependency(%q<guard-livereload>, [">= 0"])
      s.add_dependency(%q<rack-livereload>, [">= 0"])
      s.add_dependency(%q<rdoc>, ["~> 3.12"])
      s.add_dependency(%q<bundler>, ["~> 1.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
    end
  else
    s.add_dependency(%q<sinatra>, ["~> 1.4"])
    s.add_dependency(%q<coffee-script>, ["~> 2.2"])
    s.add_dependency(%q<sass>, ["~> 3"])
    s.add_dependency(%q<guard>, ["~> 2.2"])
    s.add_dependency(%q<guard-livereload>, [">= 0"])
    s.add_dependency(%q<rack-livereload>, [">= 0"])
    s.add_dependency(%q<rdoc>, ["~> 3.12"])
    s.add_dependency(%q<bundler>, ["~> 1.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.7"])
  end
end
|
# Namespace for the decking gem; holds the release version string.
module Decking
  VERSION = '0.0.3'
end
Allow overriding env on CLI
# Namespace for the decking gem; holds the release version string.
module Decking
  VERSION = '0.0.4'
end
|
# Builds the shell commands used to run a single deploy of a stack.
class DeployCommands < Commands
  # Bundler groups excluded when installing dependencies.
  BUNDLE_WITHOUT = %w(default production development test staging benchmark debug)
  # Shared bundler install path under the app's data directory.
  BUNDLE_PATH = File.join(Rails.root, "data", "bundler")

  delegate :fetch, to: :stack_commands

  def initialize(deploy)
    @deploy = deploy
    @stack = deploy.stack
  end

  # Commands for the deploy spec's dependency-installation steps, run in
  # the deploy's working directory.
  def install_dependencies
    deploy_spec.dependencies_steps.map do |command_line|
      Command.new(command_line, env: env, chdir: @deploy.working_directory)
    end
  end

  # Commands for the deploy steps against +commit+, with SHA,
  # ENVIRONMENT, USER and EMAIL exported into each command's environment.
  def deploy(commit)
    env = self.env.merge(
      'SHA' => commit.sha,
      'ENVIRONMENT' => @stack.environment,
      'USER' => @deploy.user_name,
      'EMAIL' => @deploy.user_email,
    )
    deploy_spec.deploy_steps.map do |command_line|
      Command.new(command_line, env: env, chdir: @deploy.working_directory)
    end
  end

  # Check out +commit+ in the deploy's working copy.
  def checkout(commit)
    git("checkout", "-q", commit.sha, chdir: @deploy.working_directory)
  end

  # Local clone of the stack repository into the working directory.
  def clone
    git("clone", "--local", @stack.git_path, @deploy.working_directory, chdir: @stack.deploys_path)
  end

  def deploy_spec
    @deploy_spec ||= DeploySpec.new(@deploy.working_directory)
  end

  # NOTE(review): plain assignment (not ||=) — a new StackCommands is
  # built on every call; confirm whether memoization was intended.
  def stack_commands
    @stack_commands = StackCommands.new(@stack)
  end
end
Refine deploy command
# Builds the shell commands used to run a single deploy of a stack.
class DeployCommands < Commands
  # Bundler groups excluded when installing dependencies.
  BUNDLE_WITHOUT = %w(default production development test staging benchmark debug)
  # Shared bundler install path under the app's data directory.
  BUNDLE_PATH = File.join(Rails.root, "data", "bundler")

  delegate :fetch, to: :stack_commands

  def initialize(deploy)
    @deploy = deploy
    @stack = deploy.stack
  end

  # Commands for the deploy spec's dependency-installation steps, run in
  # the deploy's working directory.
  def install_dependencies
    deploy_spec.dependencies_steps.map do |command_line|
      Command.new(command_line, env: env, chdir: @deploy.working_directory)
    end
  end

  # Commands for the deploy steps against +commit+, with SHA,
  # ENVIRONMENT, USER (tagged "via Shipit 2") and EMAIL exported into
  # each command's environment.
  def deploy(commit)
    env = self.env.merge(
      'SHA' => commit.sha,
      'ENVIRONMENT' => @stack.environment,
      'USER' => "#{@deploy.user_name} via Shipit 2",
      'EMAIL' => @deploy.user_email,
    )
    deploy_spec.deploy_steps.map do |command_line|
      Command.new(command_line, env: env, chdir: @deploy.working_directory)
    end
  end

  # Check out +commit+ in the deploy's working copy.
  def checkout(commit)
    git("checkout", "-q", commit.sha, chdir: @deploy.working_directory)
  end

  # Local clone of the stack repository into the working directory.
  def clone
    git("clone", "--local", @stack.git_path, @deploy.working_directory, chdir: @stack.deploys_path)
  end

  def deploy_spec
    @deploy_spec ||= DeploySpec.new(@deploy.working_directory)
  end

  # NOTE(review): plain assignment (not ||=) — a new StackCommands is
  # built on every call; confirm whether memoization was intended.
  def stack_commands
    @stack_commands = StackCommands.new(@stack)
  end
end
|
require "dev-lxc/server"
require "mixlib/install"
require "open-uri"
module DevLXC
class Cluster
attr_reader :config
def initialize(cluster_config)
FileUtils.mkdir_p('/var/dev-lxc') unless Dir.exist?('/var/dev-lxc')
@config = Hash.new { |hash, key| hash[key] = {} }
@server_configs = Hash.new
%w(adhoc analytics automate build-nodes chef-backend chef-server compliance nodes supermarket).each do |server_type|
if cluster_config[server_type]
mounts = ["/var/dev-lxc var/dev-lxc"]
if cluster_config[server_type]["mounts"]
mounts.concat(cluster_config[server_type]["mounts"])
elsif cluster_config["mounts"]
mounts.concat(cluster_config["mounts"])
end
ssh_keys = cluster_config[server_type]["ssh-keys"]
ssh_keys ||= cluster_config["ssh-keys"]
base_container_name = cluster_config[server_type]["base_container"]
base_container_name ||= cluster_config["base_container"]
if cluster_config[server_type]["servers"]
cluster_config[server_type]["servers"].each do |server_name, server_config|
server_config ||= Hash.new
products = server_config['products']
products ||= Hash.new
mounts = ["/var/dev-lxc var/dev-lxc"] + server_config["mounts"] if server_config["mounts"]
ssh_keys = server_config["ssh-keys"] if server_config["ssh-keys"]
base_container_name = server_config["base_container"] if server_config["base_container"]
@server_configs[server_name] = {
server_type: server_type,
products: products,
ipaddress: server_config['ipaddress'],
additional_fqdn: nil,
mounts: mounts,
ssh_keys: ssh_keys,
base_container_name: base_container_name
}
# gather configuration from only the first "automate", "compliance" or "supermarket" server
break if %w(automate compliance supermarket).include?(server_type)
end
end
case server_type
when "analytics"
@config[server_type][:topology] = cluster_config[server_type]["topology"]
@config[server_type][:topology] ||= 'standalone'
@config[server_type][:fqdn] = cluster_config[server_type]["analytics_fqdn"]
@config[server_type][:frontends] = Array.new
if cluster_config[server_type]["servers"]
cluster_config[server_type]["servers"].each do |server_name, server_config|
server_config ||= Hash.new
additional_fqdn = nil
case @config[server_type][:topology]
when 'standalone'
@config[server_type][:bootstrap_backend] = server_name if server_config["role"].nil?
@config[server_type][:fqdn] ||= @config[server_type][:bootstrap_backend]
when 'tier'
@config[server_type][:bootstrap_backend] = server_name if server_config["role"] == "backend" && server_config["bootstrap"] == true
if server_config["role"] == "frontend"
additional_fqdn = @config[server_type][:fqdn]
@config[server_type][:frontends] << server_name
end
end
@server_configs[server_name].merge!({
additional_fqdn: additional_fqdn
})
end
end
when "chef-backend"
@config[server_type][:fqdn] = cluster_config[server_type]["api_fqdn"]
@config[server_type][:users] = cluster_config[server_type]["users"]
@config[server_type][:users] ||= Array.new
@config[server_type][:orgs] = cluster_config[server_type]["orgs"]
@config[server_type][:orgs] ||= Hash.new
@config[server_type][:backends] = Array.new
@config[server_type][:frontends] = Array.new
servers = cluster_config[server_type]["servers"]
if servers
@config[server_type][:leader_backend] = servers.select { |s,sc| sc['role'] == 'backend' && sc['leader'] == true }.keys.first
@config[server_type][:bootstrap_frontend] = servers.select { |s,sc| sc['role'] == 'frontend' && sc['bootstrap'] == true }.keys.first
@config[server_type][:backends] << @config[server_type][:leader_backend]
@config[server_type][:frontends] << @config[server_type][:bootstrap_frontend]
servers.each do |server_name, server_config|
server_config ||= Hash.new
additional_fqdn = nil
case server_config["role"]
when "backend"
@config[server_type][:backends] << server_name unless server_name == @config[server_type][:leader_backend]
when "frontend"
additional_fqdn = @config[server_type][:fqdn]
@config[server_type][:frontends] << server_name unless server_name == @config[server_type][:bootstrap_frontend]
end
@server_configs[server_name].merge!({
additional_fqdn: additional_fqdn,
chef_server_type: 'chef-server'
})
end
end
when "chef-server"
@config[server_type][:topology] = cluster_config[server_type]["topology"]
@config[server_type][:topology] ||= 'standalone'
@config[server_type][:fqdn] = cluster_config[server_type]["api_fqdn"]
@config[server_type][:users] = cluster_config[server_type]["users"]
@config[server_type][:users] ||= Array.new
@config[server_type][:orgs] = cluster_config[server_type]["orgs"]
@config[server_type][:orgs] ||= Hash.new
@config[server_type][:frontends] = Array.new
if cluster_config[server_type]["servers"]
cluster_config[server_type]["servers"].each do |server_name, server_config|
server_config ||= Hash.new
additional_fqdn = nil
chef_server_type = 'private-chef' if @server_configs[server_name][:products].has_key?('private-chef')
chef_server_type = 'chef-server' if @server_configs[server_name][:products].has_key?('chef-server')
case @config[server_type][:topology]
when 'standalone'
@config[server_type][:bootstrap_backend] = server_name if server_config["role"].nil?
@config[server_type][:fqdn] ||= @config[server_type][:bootstrap_backend]
when 'tier'
@config[server_type][:bootstrap_backend] = server_name if server_config["role"] == "backend" && server_config["bootstrap"] == true
if server_config["role"] == "frontend"
additional_fqdn = @config[server_type][:fqdn]
@config[server_type][:frontends] << server_name
end
end
@server_configs[server_name].merge!({
additional_fqdn: additional_fqdn,
chef_server_type: chef_server_type
})
end
end
when "supermarket"
unless cluster_config[server_type]["servers"].first.nil?
(server_name, server_config) = cluster_config[server_type]["servers"].first
@config[server_type][:fqdn] = server_name
end
when "automate"
unless cluster_config[server_type]["servers"].first.nil?
(server_name, server_config) = cluster_config[server_type]["servers"].first
server_config ||= Hash.new
@server_configs[server_name].merge!({
license_path: server_config['license_path'],
chef_org: server_config['chef_org'],
enterprise_name: server_config['enterprise_name']
})
end
when "nodes"
chef_server_url = cluster_config[server_type]['chef_server_url']
validation_client_name = cluster_config[server_type]['validation_client_name']
validation_key = cluster_config[server_type]['validation_key']
if cluster_config[server_type]["servers"]
cluster_config[server_type]["servers"].each do |server_name, server_config|
server_config ||= Hash.new
chef_server_url = server_config['chef_server_url'] if server_config['chef_server_url']
validation_client_name = server_config['validation_client_name'] if server_config['validation_client_name']
validation_key = server_config['validation_key'] if server_config['validation_key']
@server_configs[server_name].merge!({
chef_server_url: chef_server_url,
validation_client_name: validation_client_name,
validation_key: validation_key
})
end
end
end
end
end
validate_cluster_config
end
# Validates the parsed cluster configuration, printing an ERROR message and
# exiting with status 1 on the first violation found.
#
# Checks performed:
# * Automate license files and Chef validation keys exist on disk
# * every referenced base container exists
# * every hostname (server names and per-type FQDNs) ends with '.lxc'
# * every mount source path and every SSH key file exists
def validate_cluster_config
  base_container_names = Array.new
  mounts = Array.new
  ssh_keys = Array.new
  hostnames = Array.new
  # Collect each server_type's FQDN. Skip types with no FQDN set: a nil
  # entry would crash the '.lxc' suffix check below with NoMethodError.
  @config.each { |server_type, config| hostnames.push(config[:fqdn]) if config[:fqdn] }
  @server_configs.each do |server_name, server_config|
    base_container_names.push(server_config[:base_container_name]).uniq! if server_config[:base_container_name]
    mounts.concat(server_config[:mounts]).uniq! if server_config[:mounts]
    ssh_keys.concat(server_config[:ssh_keys]).uniq! if server_config[:ssh_keys]
    hostnames.push(server_name)
    case server_config[:server_type]
    when "automate"
      # File.exist? — the File.exists? alias is deprecated and was removed
      # in Ruby 3.2.
      if server_config[:license_path] && !File.exist?(server_config[:license_path])
        puts "ERROR: Automate license #{server_config[:license_path]} does not exist."
        exit 1
      end
    when "nodes"
      if server_config[:validation_key] && !File.exist?(server_config[:validation_key])
        puts "ERROR: Validation key #{server_config[:validation_key]} does not exist."
        exit 1
      end
    end
  end
  # Iterating an empty array is a no-op, so no emptiness guards are needed.
  base_container_names.each do |base_container_name|
    unless ::DevLXC::Container.new(base_container_name).defined?
      puts "ERROR: Base container #{base_container_name} does not exist."
      exit 1
    end
  end
  hostnames.each do |hostname|
    unless hostname.end_with?(".lxc")
      puts "ERROR: Hostname #{hostname} does not end with '.lxc'."
      exit 1
    end
  end
  mounts.each do |mount|
    # Mount entries are "SOURCE TARGET" strings; only SOURCE must exist here.
    unless File.exist?(mount.split.first)
      puts "ERROR: Mount source #{mount.split.first} does not exist."
      exit 1
    end
  end
  ssh_keys.each do |ssh_key|
    unless File.exist?(ssh_key)
      puts "ERROR: SSH key #{ssh_key} does not exist."
      exit 1
    end
  end
end
# Pretty-prints the cluster-level config and the per-server configs.
#
# @param include_products [Boolean] when true, resolve and show each
#   server's required products before printing
def show_config(include_products=false)
  require 'pp'
  calculate_required_products(get_sorted_servers, true) if include_products
  { "Cluster config" => @config, "Server configs" => @server_configs }.each_with_index do |(label, data), index|
    puts unless index.zero?
    puts "#{label}:"
    pp data
  end
end
# Builds a Server object for the named server from its stored configuration.
#
# @param server_name [String] key into @server_configs
# @return [Server] a new Server wrapping the container settings
def get_server(server_name)
  server_config = @server_configs[server_name]
  Server.new(server_name,
             *server_config.values_at(:ipaddress, :additional_fqdn, :mounts, :ssh_keys))
end
# Returns Server objects sorted into build/apply order, optionally filtered
# by a name regex.
#
# The order of the server_types list matters: it determines the order in
# which actions are applied to each server_type. Within chef-backend,
# backends come before frontends; within analytics/chef-server, the
# bootstrap backend comes before the frontends.
#
# @param server_name_regex [String, nil] only servers whose name matches are returned
# @return [Array<Server>]
def get_sorted_servers(server_name_regex=nil)
  sorted = Array.new
  %w(chef-backend chef-server analytics compliance supermarket automate build-nodes nodes adhoc).each do |server_type|
    unless @config[server_type].empty?
      case server_type
      when "chef-backend"
        ordered_names = @config[server_type][:backends] + @config[server_type][:frontends]
        ordered_names.each { |name| sorted << get_server(name) }
      when "analytics", "chef-server"
        bootstrap_name = @config[server_type][:bootstrap_backend]
        sorted << get_server(bootstrap_name) if bootstrap_name
        @config[server_type][:frontends].each { |name| sorted << get_server(name) }
      end
    end
    # These types are collected straight from @server_configs regardless of
    # whether @config has an entry for them.
    if %w(adhoc automate build-nodes compliance nodes supermarket).include?(server_type)
      @server_configs.each do |name, config|
        sorted << get_server(name) if config[:server_type] == server_type
      end
    end
  end
  sorted.select { |s| s.name =~ /#{server_name_regex}/ }
end
# Builds and starts every server matching server_name_regex, in dependency
# order: validates cross-server prerequisites (exiting 1 on any violation),
# clones containers from their base containers, installs products,
# configures products, and snapshots newly configured servers.
#
# Fix: the tiered chef-server frontend check previously read
# @config[server_type], but no local `server_type` exists in this method —
# it raised NameError. It now reads @config['chef-server'], matching the
# surrounding chef-server checks.
#
# @param server_name_regex [String, nil] limits the operation to matching servers
def up(server_name_regex=nil)
  abort_up = false
  configured_servers = Array.new
  servers = get_sorted_servers(server_name_regex)
  servers.each do |server|
    # Prerequisite checks only apply to servers that do not exist yet.
    next if server.container.defined?
    # Chef Server frontends and the Analytics bootstrap backend need the
    # Chef Server bootstrap backend configured (or queued in this run).
    if (@config['chef-server'][:frontends] && @config['chef-server'][:frontends].include?(server.name)) || server.name == @config['analytics'][:bootstrap_backend]
      if @config['chef-server'][:bootstrap_backend].nil?
        puts "ERROR: '#{server.name}' requires a Chef Server bootstrap backend to be configured first."
        abort_up = true
      elsif !get_server(@config['chef-server'][:bootstrap_backend]).container.defined? && servers.select { |s| s.name == @config['chef-server'][:bootstrap_backend] }.empty?
        puts "ERROR: '#{server.name}' requires '#{@config['chef-server'][:bootstrap_backend]}' to be configured first."
        abort_up = true
      end
    end
    # Supermarket needs the Chef Server bootstrap backend as well.
    if @config['chef-server'][:bootstrap_backend] && @server_configs[server.name][:server_type] == 'supermarket'
      if !get_server(@config['chef-server'][:bootstrap_backend]).container.defined? && servers.select { |s| s.name == @config['chef-server'][:bootstrap_backend] }.empty?
        puts "ERROR: '#{server.name}' requires '#{@config['chef-server'][:bootstrap_backend]}' to be configured first."
        abort_up = true
      end
    end
    # Analytics frontends need the Analytics bootstrap backend.
    if @config['analytics'][:frontends] && @config['analytics'][:frontends].include?(server.name)
      if @config['analytics'][:bootstrap_backend].nil?
        puts "ERROR: '#{server.name}' requires an Analytics Server bootstrap backend to be configured first."
        abort_up = true
      elsif !get_server(@config['analytics'][:bootstrap_backend]).container.defined? && servers.select { |s| s.name == @config['analytics'][:bootstrap_backend] }.empty?
        puts "ERROR: '#{server.name}' requires '#{@config['analytics'][:bootstrap_backend]}' to be configured first."
        abort_up = true
      end
    end
    # Non-bootstrap chef-backend frontends need the bootstrap frontend.
    if @config['chef-backend'][:frontends] && @config['chef-backend'][:frontends].include?(server.name) && server.name != @config['chef-backend'][:bootstrap_frontend]
      if @config['chef-backend'][:bootstrap_frontend].nil?
        puts "ERROR: '#{server.name}' requires a Chef Server bootstrap frontend to be configured first."
        abort_up = true
      elsif !get_server(@config['chef-backend'][:bootstrap_frontend]).container.defined? && servers.select { |s| s.name == @config['chef-backend'][:bootstrap_frontend] }.empty?
        puts "ERROR: '#{server.name}' requires '#{@config['chef-backend'][:bootstrap_frontend]}' to be configured first."
        abort_up = true
      end
    end
    # Bootstrapping the chef-backend frontend requires a quorum of at least
    # two backends (already running, or queued in this run).
    if server.name == @config['chef-backend'][:bootstrap_frontend]
      if (@config['chef-backend'][:backends].select { |s| get_server(s).container.running? }.length + servers.select { |s| @config['chef-backend'][:backends].include?(s.name) }.length) < 2
        puts "ERROR: '#{server.name}' requires at least two nodes in the backend cluster to be running first."
        abort_up = true
      end
    end
    # Follower backends need the leader backend to be running.
    if @config['chef-backend'][:backends] && @config['chef-backend'][:backends].include?(server.name) && server.name != @config['chef-backend'][:leader_backend]
      if !get_server(@config['chef-backend'][:leader_backend]).container.running? && servers.select { |s| s.name == @config['chef-backend'][:leader_backend] }.empty?
        puts "ERROR: '#{server.name}' requires '#{@config['chef-backend'][:leader_backend]}' to be running first."
        abort_up = true
      end
    end
    # Nodes without an explicit validation key will copy validator PEMs from
    # the Chef Server, so that server must be configured first.
    if @server_configs[server.name][:server_type] == 'nodes'
      if @server_configs[server.name][:chef_server_url] && @server_configs[server.name][:validation_client_name] && @server_configs[server.name][:validation_key].nil?
        if @config['chef-server'][:bootstrap_backend] && !get_server(@config['chef-server'][:bootstrap_backend]).container.defined? && servers.select { |s| s.name == @config['chef-server'][:bootstrap_backend] }.empty?
          puts "ERROR: '#{server.name}' requires '#{@config['chef-server'][:bootstrap_backend]}' to be configured first."
          abort_up = true
        elsif @config['chef-backend'][:bootstrap_frontend] && !get_server(@config['chef-backend'][:bootstrap_frontend]).container.defined? && servers.select { |s| s.name == @config['chef-backend'][:bootstrap_frontend] }.empty?
          puts "ERROR: '#{server.name}' requires '#{@config['chef-backend'][:bootstrap_frontend]}' to be configured first."
          abort_up = true
        end
      end
    end
    # Automate needs the Chef Server bootstrap backend configured.
    if @server_configs[server.name][:server_type] == 'automate'
      if @config['chef-server'][:bootstrap_backend].nil?
        puts "ERROR: '#{server.name}' requires a Chef Server bootstrap backend to be configured first."
        abort_up = true
      elsif !get_server(@config['chef-server'][:bootstrap_backend]).container.defined? && servers.select { |s| s.name == @config['chef-server'][:bootstrap_backend] }.empty?
        puts "ERROR: '#{server.name}' requires '#{@config['chef-server'][:bootstrap_backend]}' to be configured first."
        abort_up = true
      end
    end
    # Build nodes need a running Chef Server, at least one frontend in a
    # tiered topology, and a running Automate server to register with.
    if @server_configs[server.name][:server_type] == 'build-nodes'
      if @config['chef-server'][:bootstrap_backend].nil?
        puts "ERROR: '#{server.name}' requires a Chef Server bootstrap backend to be configured first."
        abort_up = true
      elsif !get_server(@config['chef-server'][:bootstrap_backend]).container.running? && servers.select { |s| s.name == @config['chef-server'][:bootstrap_backend] }.empty?
        puts "ERROR: '#{server.name}' requires '#{@config['chef-server'][:bootstrap_backend]}' to be running first."
        abort_up = true
      end
      if @config['chef-server'][:topology] == 'tier'
        # BUG FIX: was @config[server_type] — `server_type` is not defined
        # in this method, which raised NameError here.
        if @config['chef-server'][:frontends].empty?
          puts "ERROR: '#{server.name}' requires at least one Chef Server frontend to be configured first."
          abort_up = true
        elsif (@config['chef-server'][:frontends].select { |s| get_server(s).container.running? }.length + servers.select { |s| @config['chef-server'][:frontends].include?(s.name) }.length) < 1
          puts "ERROR: '#{server.name}' requires at least one Chef Server frontend to be running first."
          abort_up = true
        end
      end
      automate_server_name = @server_configs.select {|name, config| config[:server_type] == 'automate'}.keys.first
      if automate_server_name
        if !get_server(automate_server_name).container.running? && servers.select { |s| s.name == automate_server_name }.empty?
          puts "ERROR: '#{server.name}' requires '#{automate_server_name}' to be running first."
          abort_up = true
        end
      else
        puts "ERROR: '#{server.name}' requires an Automate Server to be configured first."
        abort_up = true
      end
    end
  end
  exit 1 if abort_up
  prep_product_cache(servers)
  servers.each do |server|
    clone_from_base_container(server) unless server.container.defined?
  end
  # Re-sort: cloning may have changed which containers are defined.
  servers = get_sorted_servers(server_name_regex)
  servers.each do |server|
    if @server_configs[server.name][:server_type] == "build-nodes"
      # Build nodes whose only required product is chefdk get it installed
      # by Automate's install-build-node instead.
      next if @server_configs[server.name][:required_products]["chefdk"] && @server_configs[server.name][:required_products].length == 1
    end
    install_products(server) unless @server_configs[server.name][:required_products].empty?
  end
  servers.each do |server|
    if server.snapshot_list.select { |sn| sn[2].start_with?("dev-lxc build: completed") }.empty?
      if server.name == @config["chef-backend"][:bootstrap_frontend]
        # Before bootstrapping the frontend, snapshot each backend in the
        # "cluster configured but frontend not bootstrapped" state
        # (stopping running backends first, then restarting them).
        running_backends = Array.new
        @config["chef-backend"][:backends].reverse_each do |server_name|
          backend = get_server(server_name)
          if backend.container.defined? && backend.snapshot_list.select { |sn| sn[2].start_with?("dev-lxc build: backend cluster configured but frontend not bootstrapped") }.empty?
            if backend.container.running?
              running_backends << backend.name
              backend.stop
            end
            backend.snapshot("dev-lxc build: backend cluster configured but frontend not bootstrapped")
            # A stale 'completed' snapshot is superseded by the new one.
            snapshot = backend.snapshot_list.select { |sn| sn[2].start_with?("dev-lxc build: completed") }.first
            backend.snapshot_destroy(snapshot.first) if snapshot
          end
        end
        @config["chef-backend"][:backends].each do |server_name|
          if running_backends.include?(server_name)
            get_server(server_name).start
            configured_servers << server_name unless configured_servers.include?(server_name)
          end
        end
      end
      configure_products(server)
      configured_servers << server.name
    end
    server.start unless server.container.running?
  end
  # Snapshot every newly configured server as 'completed' (stopped), then
  # start them back up.
  configured_servers.reverse_each do |server_name|
    server = get_server(server_name)
    server.stop if server.container.running?
    server.snapshot("dev-lxc build: completed")
  end
  configured_servers.each do |server_name|
    server = get_server(server_name)
    server.start if server.container.defined?
  end
end
# Creates the server's container as a snapshot clone of its base container,
# then sanitizes the clone: deletes the SSH server host keys (so the clone
# generates its own) and installs a post-stop hook script
# (post-stop-dhcp-release — presumably releases the DHCP lease on stop).
#
# Removed the unused local `server_type` that was assigned but never read.
#
# @param server [Server] the server whose container should be created
def clone_from_base_container(server)
  base_container = DevLXC::Container.new(@server_configs[server.name][:base_container_name])
  puts "Cloning base container '#{base_container.name}' into container '#{server.name}'"
  base_container.clone(server.name, {:flags => LXC::LXC_CLONE_SNAPSHOT})
  # Reload so subsequent config_item/set_config_item calls see the clone.
  server.container.load_config
  puts "Deleting SSH Server Host Keys"
  FileUtils.rm_f(Dir.glob("#{server.container.config_item('lxc.rootfs')}/etc/ssh/ssh_host*_key*"))
  puts "Adding lxc.hook.post-stop hook"
  server.container.set_config_item("lxc.hook.post-stop", "/usr/local/share/lxc/hooks/post-stop-dhcp-release")
  server.container.save_config
end
# Resolves the package download URL for a product on the server's platform.
#
# Platform detection runs mixlib-install's Bourne platform-detection script
# chroot'ed inside the server's base container rootfs, so the detected
# platform matches the container's OS rather than the host's.
#
# Removed the unused local `server_type` that was assigned but never read.
#
# @param server [Server]
# @param product_name [String] mixlib-install product name
# @param product_options [Hash, nil] optional 'version' (default 'latest')
#   and 'channel' (default 'stable')
# @return [String] the artifact download URL; exits 1 when platform
#   detection or artifact lookup fails
def get_product_url(server, product_name, product_options)
  base_container = DevLXC::Container.new(@server_configs[server.name][:base_container_name])
  mixlib_install_platform_detection_path = "#{base_container.config_item('lxc.rootfs')}/mixlib-install-platform-detection"
  IO.write(mixlib_install_platform_detection_path, Mixlib::Install::Generator::Bourne.detect_platform_sh)
  platform_results = `chroot #{base_container.config_item('lxc.rootfs')} bash mixlib-install-platform-detection`
  File.unlink(mixlib_install_platform_detection_path)
  if platform_results.empty?
    puts "ERROR: Unable to detect the platform of container '#{base_container.name}'"
    exit 1
  end
  (platform, platform_version, architecture) = platform_results.split
  product_version = product_options['version'] if product_options
  product_version ||= 'latest'
  channel = product_options['channel'] if product_options
  channel ||= 'stable'
  channel = channel.to_sym
  options = {
    product_name: product_name,
    product_version: product_version,
    channel: channel,
    platform: platform,
    platform_version: platform_version,
    architecture: architecture
  }
  artifact = Mixlib::Install.new(options).artifact_info
  # artifact_info returns something other than an ArtifactInfo when no
  # matching artifact exists.
  if artifact.class != Mixlib::Install::ArtifactInfo
    puts "ERROR: Unable to find download URL for the following product"
    puts JSON.pretty_generate(options)
    exit 1
  end
  artifact.url
end
# Determines which product packages each server needs and records them in
# each server's :required_products map (product name => package path).
#
# Servers that already have a 'products installed' snapshot are skipped
# unless force is true. Products with an explicit 'package_source' keep that
# path; all others resolve a download URL and map to the local cache path.
#
# @param servers [Array<Server>]
# @param force [Boolean] recalculate even for already-provisioned servers
# @return [Hash] package source (path or URL) => product name, across all servers
def calculate_required_products(servers, force=false)
  all_required_products = Hash.new
  servers.each do |server|
    server_config = @server_configs[server.name]
    server_config[:required_products] = Hash.new
    already_installed = !server.snapshot_list.select { |sn| sn[2].start_with?("dev-lxc build: products installed") }.empty?
    # Skip product cache preparation for containers that already have a
    # 'products installed' snapshot.
    next if already_installed && !force
    server_config[:products].each do |product_name, product_options|
      package_source = product_options['package_source'] if product_options
      if package_source
        all_required_products[package_source] = product_name
        server_config[:required_products][product_name] = package_source
      else
        package_source = get_product_url(server, product_name, product_options)
        all_required_products[package_source] = product_name
        server_config[:required_products][product_name] =
          "/var/dev-lxc/cache/chef-products/#{product_name}/#{File.basename(package_source)}"
      end
    end
  end
  all_required_products
end
# Ensures every required package is present in the local product cache:
# downloads HTTP(S) sources into /var/dev-lxc/cache/chef-products, and
# verifies local-path sources exist (exiting 1 when one is missing).
#
# @param servers [Array<Server>]
# @param force [Boolean] passed through to calculate_required_products
def prep_product_cache(servers, force=false)
  calculate_required_products(servers, force).each do |package_source, product_name|
    if package_source.start_with?('http')
      product_cache_path = "/var/dev-lxc/cache/chef-products/#{product_name}/#{File.basename(package_source)}"
      next if File.exist?(product_cache_path)
      cache_dir = File.dirname(product_cache_path)
      FileUtils.mkdir_p(cache_dir) unless Dir.exist?(cache_dir)
      puts "Downloading #{package_source} to #{product_cache_path}"
      open(package_source) { |url| File.open(product_cache_path, 'wb') { |f| f.write(url.read) } }
    elsif !File.exist?(package_source)
      puts "ERROR: Package source #{package_source} does not exist."
      exit 1
    end
  end
end
# Installs the server's required product packages, then snapshots the
# container as 'products installed'. Skips entirely when that snapshot
# already exists. The container's running state is restored afterwards.
#
# @param server [Server]
def install_products(server)
  unless server.snapshot_list.select { |sn| sn[2].start_with?("dev-lxc build: products installed") }.empty?
    puts "Skipping product installation for container '#{server.name}' because it already has a 'products installed' snapshot"
    return
  end
  server_was_running = server.container.running?
  server.start unless server_was_running
  @server_configs[server.name][:required_products].each do |product_name, package_source|
    # chefdk on build nodes is installed by Automate's install-build-node.
    next if @server_configs[server.name][:server_type] == "build-nodes" && product_name == "chefdk"
    server.install_package(package_source)
  end
  # Snapshot from a stopped state, then restore the prior running state.
  server.stop
  server.snapshot("dev-lxc build: products installed")
  server.start if server_was_running
end
# Runs product configuration for a built server, dispatching on the
# server's server_type. Each product-specific configure_* sibling method is
# only invoked when that product appears in the server's :required_products.
#
# @param server [Server] the server to configure (started if not running)
def configure_products(server)
  puts "Configuring container '#{server.name}'"
  server.start unless server.container.running?
  required_products = @server_configs[server.name][:required_products].keys if @server_configs[server.name][:required_products]
  required_products ||= Array.new
  server_type = @server_configs[server.name][:server_type]
  # Knife configs and user/org keys live under this path inside the rootfs.
  dot_chef_path = "/root/chef-repo/.chef"
  case server_type
  when 'adhoc'
    # Allow adhoc servers time to generate SSH Server Host Keys
    sleep 5
  when 'analytics'
    configure_analytics(server) if required_products.include?('analytics')
  when 'build-nodes'
    sleep 5 # give time for DNS resolution to be available
    configure_build_node(server)
  when 'chef-backend'
    configure_chef_backend(server) if required_products.include?('chef-backend')
    if required_products.include?('chef-server')
      configure_chef_frontend(server)
      # Users and orgs are created once, on the bootstrap frontend only.
      if server.name == @config['chef-backend'][:bootstrap_frontend]
        create_users_orgs_knife_configs(server, dot_chef_path)
      end
    end
    configure_manage(server) if required_products.include?('manage')
  when 'chef-server'
    if required_products.include?('chef-server') || required_products.include?('private-chef')
      configure_chef_server(server)
      if server.name == @config['chef-server'][:bootstrap_backend]
        create_users_orgs_knife_configs(server, dot_chef_path)
        # When an Automate server is configured, pre-create its 'delivery'
        # user and Chef org (with delivery as an admin) on this server.
        automate_server_name = @server_configs.select {|name, config| config[:server_type] == 'automate'}.keys.first
        if automate_server_name
          automate_user = "delivery"
          automate_chef_org = @server_configs[automate_server_name][:chef_org]
          create_user(server, automate_user, dot_chef_path)
          create_org(server, automate_chef_org, dot_chef_path)
          org_add_user(server, automate_chef_org, automate_user, true, dot_chef_path)
        end
      end
    end
    configure_reporting(server) if required_products.include?('reporting')
    configure_push_jobs_server(server) if required_products.include?('push-jobs-server')
    configure_manage(server) if required_products.include?('manage')
  when 'compliance'
    configure_compliance(server) if required_products.include?('compliance')
  when 'automate'
    # The Automate product name in mixlib-install is 'delivery'.
    configure_automate(server) if required_products.include?('delivery')
  when 'nodes'
    # Allow servers time to generate SSH Server Host Keys
    sleep 5
    configure_chef_client(server, dot_chef_path) if required_products.include?('chef') || required_products.include?('chefdk')
  when 'supermarket'
    configure_supermarket(server) if required_products.include?('supermarket')
  end
end
# Configures the Automate server: copies in the license and the 'delivery'
# user key from the Chef Server bootstrap backend, then runs
# `delivery-ctl setup` with the cluster's Chef Server/Supermarket settings.
#
# @param server [Server] the Automate server
def configure_automate(server)
  server_config = @server_configs[server.name]
  rootfs = server.container.config_item('lxc.rootfs')
  chef_server_url = @config['chef-server'][:fqdn]
  supermarket_fqdn = @config['supermarket'][:fqdn]
  FileUtils.cp(server_config[:license_path], "#{rootfs}/root/automate.license")
  # The 'delivery' user key was created on the Chef Server bootstrap
  # backend by configure_products.
  chef_server = get_server(@config['chef-server'][:bootstrap_backend])
  automate_chef_user_key = "#{chef_server.container.config_item('lxc.rootfs')}/root/chef-repo/.chef/delivery.pem"
  FileUtils.cp(automate_chef_user_key, "#{rootfs}/root/automate_chef_user_key.pem")
  setup_args = ["setup"]
  setup_args << "--license /root/automate.license"
  setup_args << "--fqdn #{server.name}"
  setup_args << "--key /root/automate_chef_user_key.pem"
  setup_args << "--server-url https://#{chef_server_url}/organizations/#{server_config[:chef_org]}"
  setup_args << "--supermarket-fqdn #{supermarket_fqdn}" if supermarket_fqdn
  setup_args << "--enterprise #{server_config[:enterprise_name]}"
  setup_args << "--no-build-node"
  setup_args << "--configure"
  run_ctl(server, "delivery", setup_args.join(" "))
end
# Prints the contents of every /etc/delivery/*-credentials file found in
# the Automate server's rootfs, separated by blank lines. Warns and exits 1
# when no Automate server is defined.
def print_automate_credentials
  automate_server_name = @server_configs.select {|name, config| config[:server_type] == 'automate'}.keys.first
  unless automate_server_name
    puts "WARNING: An Automate server is not defined."
    exit 1
  end
  rootfs = get_server(automate_server_name).container.config_item('lxc.rootfs')
  credentials_files = Dir.glob("#{rootfs}/etc/delivery/*-credentials")
  credentials_files.each_with_index do |credentials_file, index|
    puts IO.read(credentials_file)
    # Blank line between files, but not after the last one.
    puts unless index == credentials_files.length - 1
  end
end
# Registers the server as an Automate build node by running
# `delivery-ctl install-build-node` on the Automate server. Does nothing
# when no Automate server is defined.
#
# @param server [Server] the build node
def configure_build_node(server)
  automate_server_name = @server_configs.select {|name, config| config[:server_type] == 'automate'}.keys.first
  return unless automate_server_name
  automate_server = get_server(automate_server_name)
  install_args = ["install-build-node"]
  install_args << "--fqdn #{server.name}"
  install_args << "--username dev-lxc"
  install_args << "--password dev-lxc"
  # The chefdk installer path recorded by calculate_required_products.
  install_args << "--installer #{@server_configs[server.name][:required_products]["chefdk"]}"
  install_args << "--overwrite-registration"
  run_ctl(automate_server, "delivery", install_args.join(" "))
end
# Writes /etc/chef/client.rb inside the node's container. When no explicit
# validation key was configured, copies the *-validator.pem files from the
# Chef Server's .chef directory into the node's /etc/chef. When an Automate
# server exists, points the node's data collector at it.
#
# @param server [Server] the node container to configure
# @param dot_chef_path [String] path (within the Chef Server rootfs) that
#   holds the knife configs and keys
def configure_chef_client(server, dot_chef_path)
  puts "Configuring Chef Client in container '#{server.name}'"
  FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/chef")
  chef_server_url = @server_configs[server.name][:chef_server_url]
  validation_client_name = @server_configs[server.name][:validation_client_name]
  validation_key = @server_configs[server.name][:validation_key]
  if validation_key.nil?
    # Prefer the chef-server bootstrap backend, then fall back to the
    # chef-backend bootstrap frontend.
    chef_server_name = @config['chef-server'][:bootstrap_backend]
    chef_server_name ||= @config['chef-backend'][:bootstrap_frontend]
    if chef_server_name
      chef_server = get_server(chef_server_name)
      if chef_server.container.defined?
        validator_pem_files = Dir.glob("#{chef_server.container.config_item('lxc.rootfs')}#{dot_chef_path}/*-validator.pem")
        FileUtils.cp(validator_pem_files, "#{server.container.config_item('lxc.rootfs')}/etc/chef/") unless validator_pem_files.empty?
      end
    end
  end
  # Content lines are flush-left on purpose: indentation inside %Q would
  # end up in the generated client.rb.
  client_rb = %Q(chef_server_url '#{chef_server_url}'
validation_client_name '#{validation_client_name}'
validation_key '/etc/chef/#{validation_client_name}.pem'
ssl_verify_mode :verify_none
)
  automate_server_name = @server_configs.select {|name, config| config[:server_type] == 'automate'}.keys.first
  if automate_server_name
    client_rb += %Q(
data_collector.server_url "https://#{automate_server_name}/data-collector/v0/"
data_collector.token "93a49a4f2482c64126f7b6015e6b0f30284287ee4054ff8807fb63d9cbd1c506"
)
  end
  IO.write("#{server.container.config_item('lxc.rootfs')}/etc/chef/client.rb", client_rb)
end
# Configures a chef-backend cluster member. The leader backend gets a
# generated /etc/chef-backend/chef-backend.rb and runs
# `chef-backend-ctl bootstrap`; every other backend copies the leader's
# secrets file and runs `chef-backend-ctl join-cluster`.
#
# @param server [Server] the backend to configure
def configure_chef_backend(server)
  # Pre-create the license acceptance marker file.
  FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/var/opt/chef-backend")
  FileUtils.touch("#{server.container.config_item('lxc.rootfs')}/var/opt/chef-backend/.license.accepted")
  if server.name == @config['chef-backend'][:leader_backend]
    puts "Creating /etc/chef-backend/chef-backend.rb"
    FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/chef-backend")
    chef_backend_config = "publish_address '#{@server_configs[server.name][:ipaddress]}'\n"
    IO.write("#{server.container.config_item('lxc.rootfs')}/etc/chef-backend/chef-backend.rb", chef_backend_config)
    run_ctl(server, "chef-backend", "bootstrap --yes")
  else
    puts "Joining #{server.name} to the chef-backend cluster"
    leader_backend = get_server(@config['chef-backend'][:leader_backend])
    # The secrets file is generated on the leader during bootstrap.
    FileUtils.cp("#{leader_backend.container.config_item('lxc.rootfs')}/etc/chef-backend/chef-backend-secrets.json",
                 "#{server.container.config_item('lxc.rootfs')}/root/")
    run_ctl(server, "chef-backend", "join-cluster #{@server_configs[leader_backend.name][:ipaddress]} -p #{@server_configs[server.name][:ipaddress]} -s /root/chef-backend-secrets.json --yes")
  end
end
# Configures a Chef Server frontend in a chef-backend topology: generates
# its chef-server.rb on the leader backend, copies the shared secrets and
# pivotal.pem from the bootstrap frontend (for non-bootstrap frontends),
# then reconfigures.
#
# @param server [Server] the frontend to configure
def configure_chef_frontend(server)
  puts "Creating /etc/opscode/chef-server.rb"
  FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/opscode")
  # gen-server-config runs on the leader backend and writes the frontend's
  # config into the leader's /tmp, from where it is copied over.
  leader_backend = get_server(@config['chef-backend'][:leader_backend])
  run_ctl(leader_backend, "chef-backend", "gen-server-config #{server.name} --filename /tmp/#{server.name}.rb")
  FileUtils.cp("#{leader_backend.container.config_item('lxc.rootfs')}/tmp/#{server.name}.rb",
               "#{server.container.config_item('lxc.rootfs')}/etc/opscode/chef-server.rb")
  unless server.name == @config['chef-backend'][:bootstrap_frontend]
    bootstrap_frontend = get_server(@config['chef-backend'][:bootstrap_frontend])
    puts "Copying /etc/opscode/private-chef-secrets.json from bootstrap frontend '#{bootstrap_frontend.name}'"
    FileUtils.cp("#{bootstrap_frontend.container.config_item('lxc.rootfs')}/etc/opscode/private-chef-secrets.json",
                 "#{server.container.config_item('lxc.rootfs')}/etc/opscode/")
    puts "Copying /etc/opscode/pivotal.pem from bootstrap frontend '#{bootstrap_frontend.name}'"
    FileUtils.cp("#{bootstrap_frontend.container.config_item('lxc.rootfs')}/etc/opscode/pivotal.pem",
                 "#{server.container.config_item('lxc.rootfs')}/etc/opscode/")
  end
  run_ctl(server, "chef-server", "reconfigure")
end
# Configures a Chef Server (standalone/tiered topology). The standalone
# server or bootstrap backend gets a generated config file (private-chef.rb
# or chef-server.rb depending on the installed product); tier frontends
# copy /etc/opscode wholesale from the bootstrap backend. Finishes with a
# ctl reconfigure using the product-appropriate ctl command.
#
# @param server [Server] the Chef Server to configure
def configure_chef_server(server)
  if @config['chef-server'][:topology] == "standalone" || @config['chef-server'][:bootstrap_backend] == server.name
    # chef_server_config is generated elsewhere (sibling method not shown
    # in this chunk).
    case @server_configs[server.name][:chef_server_type]
    when 'private-chef'
      puts "Creating /etc/opscode/private-chef.rb"
      FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/opscode")
      IO.write("#{server.container.config_item('lxc.rootfs')}/etc/opscode/private-chef.rb", chef_server_config)
    when 'chef-server'
      puts "Creating /etc/opscode/chef-server.rb"
      FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/opscode")
      IO.write("#{server.container.config_item('lxc.rootfs')}/etc/opscode/chef-server.rb", chef_server_config)
    end
  elsif @config['chef-server'][:frontends].include?(server.name)
    puts "Copying /etc/opscode from bootstrap backend '#{@config['chef-server'][:bootstrap_backend]}'"
    # preserve: true keeps ownership/permissions of the secrets and keys.
    FileUtils.cp_r("#{get_server(@config['chef-server'][:bootstrap_backend]).container.config_item('lxc.rootfs')}/etc/opscode",
                   "#{server.container.config_item('lxc.rootfs')}/etc", preserve: true)
  end
  run_ctl(server, @server_configs[server.name][:chef_server_type], "reconfigure")
end
# Configures the Reporting add-on: pre-creates the license acceptance
# marker, copies /etc/opscode-reporting from the bootstrap backend for tier
# frontends, then reconfigures the Chef Server followed by
# opscode-reporting.
#
# @param server [Server]
def configure_reporting(server)
  FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/var/opt/opscode-reporting")
  FileUtils.touch("#{server.container.config_item('lxc.rootfs')}/var/opt/opscode-reporting/.license.accepted")
  if @config['chef-server'][:frontends].include?(server.name)
    puts "Copying /etc/opscode-reporting from bootstrap backend '#{@config['chef-server'][:bootstrap_backend]}'"
    FileUtils.cp_r("#{get_server(@config['chef-server'][:bootstrap_backend]).container.config_item('lxc.rootfs')}/etc/opscode-reporting",
                   "#{server.container.config_item('lxc.rootfs')}/etc", preserve: true)
  end
  # Chef Server must be reconfigured before the add-on.
  run_ctl(server, @server_configs[server.name][:chef_server_type], "reconfigure")
  run_ctl(server, "opscode-reporting", "reconfigure")
end
# Configures the Push Jobs add-on: reconfigures push-jobs first, then the
# Chef Server itself (order matters; note it is the reverse of
# configure_reporting's order).
#
# @param server [Server]
def configure_push_jobs_server(server)
  run_ctl(server, "opscode-push-jobs-server", "reconfigure")
  run_ctl(server, @server_configs[server.name][:chef_server_type], "reconfigure")
end
# Configures the Chef Manage add-on: pre-creates the license acceptance
# marker and, on legacy private-chef servers, disables the old
# opscode-webui before reconfiguring manage.
#
# @param server [Server]
def configure_manage(server)
  FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/var/opt/chef-manage")
  FileUtils.touch("#{server.container.config_item('lxc.rootfs')}/var/opt/chef-manage/.license.accepted")
  if @server_configs[server.name][:chef_server_type] == 'private-chef'
    puts "Disabling old opscode-webui in /etc/opscode/private-chef.rb"
    # NOTE(review): the character class /opscode_webui[.enable.]/ looks like
    # it was meant to be an escaped /opscode_webui\['enable'\]/ — as written
    # it cannot match a literal "opscode_webui['enable']" line (the '[' is
    # not in the class). The appended "= false" line below still wins as the
    # last assignment, so behavior is preserved; confirm before changing.
    DevLXC.search_file_delete_line("#{server.container.config_item('lxc.rootfs')}/etc/opscode/private-chef.rb", /opscode_webui[.enable.]/)
    DevLXC.append_line_to_file("#{server.container.config_item('lxc.rootfs')}/etc/opscode/private-chef.rb", "\nopscode_webui['enable'] = false\n")
    run_ctl(server, @server_configs[server.name][:chef_server_type], "reconfigure")
  end
  run_ctl(server, "opscode-manage", "reconfigure")
end
def configure_analytics(server)
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/var/opt/opscode-analytics")
FileUtils.touch("#{server.container.config_item('lxc.rootfs')}/var/opt/opscode-analytics/.license.accepted")
if @config['analytics'][:topology] == "standalone" || @config['analytics'][:bootstrap_backend] == server.name
puts "Copying /etc/opscode-analytics from Chef Server bootstrap backend '#{@config['chef-server'][:bootstrap_backend]}'"
FileUtils.cp_r("#{get_server(@config['chef-server'][:bootstrap_backend]).container.config_item('lxc.rootfs')}/etc/opscode-analytics",
"#{server.container.config_item('lxc.rootfs')}/etc", preserve: true)
IO.write("#{server.container.config_item('lxc.rootfs')}/etc/opscode-analytics/opscode-analytics.rb", analytics_config)
elsif @config['analytics'][:frontends].include?(server.name)
puts "Copying /etc/opscode-analytics from Analytics bootstrap backend '#{@config['analytics'][:bootstrap_backend]}'"
FileUtils.cp_r("#{get_server(@config['analytics'][:bootstrap_backend]).container.config_item('lxc.rootfs')}/etc/opscode-analytics",
"#{server.container.config_item('lxc.rootfs')}/etc", preserve: true)
end
run_ctl(server, "opscode-analytics", "reconfigure")
end
def configure_compliance(server)
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/var/opt/chef-compliance")
FileUtils.touch("#{server.container.config_item('lxc.rootfs')}/var/opt/chef-compliance/.license.accepted")
run_ctl(server, "chef-compliance", "reconfigure")
end
def configure_supermarket(server)
if @config['chef-server'][:bootstrap_backend] && get_server(@config['chef-server'][:bootstrap_backend]).container.defined?
chef_server_supermarket_config = JSON.parse(IO.read("#{get_server(@config['chef-server'][:bootstrap_backend]).container.config_item('lxc.rootfs')}/etc/opscode/oc-id-applications/supermarket.json"))
supermarket_config = {
'chef_server_url' => "https://#{@config['chef-server'][:fqdn]}/",
'chef_oauth2_app_id' => chef_server_supermarket_config['uid'],
'chef_oauth2_secret' => chef_server_supermarket_config['secret'],
'chef_oauth2_verify_ssl' => false
}
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/supermarket")
IO.write("#{server.container.config_item('lxc.rootfs')}/etc/supermarket/supermarket.json", JSON.pretty_generate(supermarket_config))
end
run_ctl(server, "supermarket", "reconfigure")
end
def run_ctl(server, component, subcommand)
server.run_command("#{component}-ctl #{subcommand}")
end
def create_users_orgs_knife_configs(server, dot_chef_path)
server_type = @server_configs[server.name][:server_type]
# give time for all services to come up completely
sleep 10
if @server_configs[server.name][:chef_server_type] == 'private-chef'
# give more time for all services to come up completely
sleep 50
server.run_command("/opt/opscode/embedded/bin/gem install knife-opc --no-ri --no-rdoc -v 0.3.1")
end
chef_server_dot_chef_path = "#{server.container.config_item('lxc.rootfs')}#{dot_chef_path}"
FileUtils.mkdir_p(chef_server_dot_chef_path)
FileUtils.cp( "#{server.container.config_item('lxc.rootfs')}/etc/opscode/pivotal.pem", chef_server_dot_chef_path )
create_pivotal_knife_config('127.0.0.1', chef_server_dot_chef_path)
create_knife_config('127.0.0.1', chef_server_dot_chef_path)
@config[server_type][:users].each do |username|
create_user(server, username, dot_chef_path)
end
@config[server_type][:orgs].each do |orgname, org_users|
create_org(server, orgname, dot_chef_path)
if org_users
if org_users['admins']
org_users['admins'].each do |username|
org_add_user(server, orgname, username, true, dot_chef_path)
end
end
if org_users['non-admins']
org_users['non-admins'].each do |username|
org_add_user(server, orgname, username, false, dot_chef_path)
end
end
end
end
end
def create_pivotal_knife_config(fqdn, dot_chef_path)
pivotal_rb = %Q(
current_dir = File.dirname(__FILE__)
chef_server_root "https://#{fqdn}"
chef_server_url "https://#{fqdn}"
node_name "pivotal"
client_key "\#{current_dir}/pivotal.pem"
cookbook_path Dir.pwd + "/cookbooks"
knife[:chef_repo_path] = Dir.pwd
ssl_verify_mode :verify_none
)
IO.write("#{dot_chef_path}/pivotal.rb", pivotal_rb)
end
def create_knife_config(fqdn, dot_chef_path)
knife_rb = %Q(
username = "CHANGEME"
orgname = "CHANGEME"
if [username, orgname].include?("CHANGEME")
puts "ERROR: Please set 'username' and 'orgname' to proper values in knife.rb"
exit!
end
current_dir = File.dirname(__FILE__)
chef_server_url "https://#{fqdn}/organizations/\#{orgname}"
node_name username
client_key "\#{current_dir}/\#{username}.pem"
validation_client_name "\#{orgname}-validator"
validation_key "\#{current_dir}/\#{orgname}-validator.pem"
cookbook_path Dir.pwd + "/cookbooks"
knife[:chef_repo_path] = Dir.pwd
ssl_verify_mode :verify_none
)
IO.write("#{dot_chef_path}/knife.rb", knife_rb)
end
def create_user(server, username, dot_chef_path)
create_user_string = "#{username} #{username} #{username} #{username}@noreply.com #{username} --filename #{dot_chef_path}/#{username}.pem"
case @server_configs[server.name][:chef_server_type]
when 'private-chef'
server.run_command("/opt/opscode/embedded/bin/knife opc user create #{create_user_string} -c #{dot_chef_path}/pivotal.rb")
when 'chef-server'
run_ctl(server, "chef-server", "user-create #{create_user_string}")
end
end
def create_org(server, orgname, dot_chef_path)
create_org_string = "#{orgname} #{orgname} --filename #{dot_chef_path}/#{orgname}-validator.pem"
case @server_configs[server.name][:chef_server_type]
when 'private-chef'
server.run_command("/opt/opscode/embedded/bin/knife opc org create #{create_org_string} -c #{dot_chef_path}/pivotal.rb")
when 'chef-server'
run_ctl(server, "chef-server", "org-create #{create_org_string}")
end
end
def org_add_user(server, orgname, username, admin, dot_chef_path)
org_add_user_string = "#{orgname} #{username}"
org_add_user_string += " --admin" if admin
case @server_configs[server.name][:chef_server_type]
when 'private-chef'
server.run_command("/opt/opscode/embedded/bin/knife opc org user add #{org_add_user_string} -c #{dot_chef_path}/pivotal.rb")
when 'chef-server'
run_ctl(server, "chef-server", "org-user-add #{org_add_user_string}")
end
end
def chef_repo(force=false, pivotal=false)
chef_server_dot_chef_path = "/root/chef-repo/.chef"
dot_chef_path = "./chef-repo/.chef"
if @config['chef-server'][:bootstrap_backend]
chef_server = get_server(@config['chef-server'][:bootstrap_backend])
chef_server_fqdn = @config['chef-server'][:fqdn]
elsif @config['chef-backend'][:bootstrap_frontend]
chef_server = get_server(@config['chef-backend'][:bootstrap_frontend])
chef_server_fqdn = @config['chef-backend'][:fqdn]
else
puts "ERROR: A Chef Server is not defined in the cluster's config. Please define it first."
exit 1
end
unless chef_server.container.defined?
puts "ERROR: The '#{chef_server.name}' Chef Server does not exist."
exit 1
end
puts "Creating chef-repo with pem files and knife.rb in the current directory"
FileUtils.mkdir_p(dot_chef_path)
pem_files = Dir.glob("#{chef_server.container.config_item('lxc.rootfs')}#{chef_server_dot_chef_path}/*.pem")
pem_files.delete_if { |pem_file| pem_file.end_with?("/pivotal.pem") } unless pivotal
FileUtils.cp(pem_files, dot_chef_path) unless pem_files.empty?
if pivotal
if File.exists?("#{dot_chef_path}/pivotal.rb") && ! force
puts "Skipping pivotal.rb because it already exists in `#{dot_chef_path}`"
else
create_pivotal_knife_config(chef_server_fqdn, dot_chef_path)
end
end
if File.exists?("./chef-repo/.chef/knife.rb") && ! force
puts "Skipping knife.rb because it already exists in `#{dot_chef_path}`"
else
create_knife_config(chef_server_fqdn, dot_chef_path)
end
end
def chef_server_config
chef_server_config = %Q(api_fqdn "#{@config['chef-server'][:fqdn]}"\n)
if @config['chef-server'][:topology] == 'tier'
chef_server_config += %Q(
topology "#{@config['chef-server'][:topology]}"
server "#{@config['chef-server'][:bootstrap_backend]}",
:ipaddress => "#{@server_configs[@config['chef-server'][:bootstrap_backend]][:ipaddress]}",
:role => "backend",
:bootstrap => true
backend_vip "#{@config['chef-server'][:bootstrap_backend]}",
:ipaddress => "#{@server_configs[@config['chef-server'][:bootstrap_backend]][:ipaddress]}"
)
@config['chef-server'][:frontends].each do |frontend_name|
chef_server_config += %Q(
server "#{frontend_name}",
:ipaddress => "#{@server_configs[frontend_name][:ipaddress]}",
:role => "frontend"
)
end
end
if @config['analytics'][:fqdn]
chef_server_config += %Q(
oc_id['applications'] ||= {}
oc_id['applications']['analytics'] = {
'redirect_uri' => 'https://#{@config['analytics'][:fqdn]}/'
}
rabbitmq['vip'] = '#{@config['chef-server'][:bootstrap_backend]}'
rabbitmq['node_ip_address'] = '0.0.0.0'
)
end
if @config['supermarket'][:fqdn]
chef_server_config += %Q(
oc_id['applications'] ||= {}
oc_id['applications']['supermarket'] = {
'redirect_uri' => 'https://#{@config['supermarket'][:fqdn]}/auth/chef_oauth2/callback'
}
)
end
automate_server_name = @server_configs.select {|name, config| config[:server_type] == 'automate'}.keys.first
if automate_server_name
chef_server_config += %Q(
data_collector['root_url'] = "https://#{automate_server_name}/data-collector/v0/"
data_collector['token'] = "93a49a4f2482c64126f7b6015e6b0f30284287ee4054ff8807fb63d9cbd1c506"
)
end
return chef_server_config
end
def analytics_config
analytics_config = %Q(analytics_fqdn "#{@config['analytics'][:fqdn]}"
topology "#{@config['analytics'][:topology]}"
)
if @config['analytics'][:topology] == 'tier'
analytics_config += %Q(
server "#{@config['analytics'][:bootstrap_backend]}",
:ipaddress => "#{@server_configs[@config['analytics'][:bootstrap_backend]][:ipaddress]}",
:role => "backend",
:bootstrap => true
backend_vip "#{@config['analytics'][:bootstrap_backend]}",
:ipaddress => "#{@server_configs[@config['analytics'][:bootstrap_backend]][:ipaddress]}"
)
@config['analytics'][:frontends].each do |frontend_name|
analytics_config += %Q(
server "#{frontend_name}",
:ipaddress => "#{@server_configs[frontend_name][:ipaddress]}",
:role => "frontend"
)
end
end
return analytics_config
end
end
end
# Don't calculate or install required products if a "build: completed" snapshot exists
require "dev-lxc/server"
require "mixlib/install"
require "open-uri"
module DevLXC
class Cluster
attr_reader :config
def initialize(cluster_config)
FileUtils.mkdir_p('/var/dev-lxc') unless Dir.exist?('/var/dev-lxc')
@config = Hash.new { |hash, key| hash[key] = {} }
@server_configs = Hash.new
%w(adhoc analytics automate build-nodes chef-backend chef-server compliance nodes supermarket).each do |server_type|
if cluster_config[server_type]
mounts = ["/var/dev-lxc var/dev-lxc"]
if cluster_config[server_type]["mounts"]
mounts.concat(cluster_config[server_type]["mounts"])
elsif cluster_config["mounts"]
mounts.concat(cluster_config["mounts"])
end
ssh_keys = cluster_config[server_type]["ssh-keys"]
ssh_keys ||= cluster_config["ssh-keys"]
base_container_name = cluster_config[server_type]["base_container"]
base_container_name ||= cluster_config["base_container"]
if cluster_config[server_type]["servers"]
cluster_config[server_type]["servers"].each do |server_name, server_config|
server_config ||= Hash.new
products = server_config['products']
products ||= Hash.new
mounts = ["/var/dev-lxc var/dev-lxc"] + server_config["mounts"] if server_config["mounts"]
ssh_keys = server_config["ssh-keys"] if server_config["ssh-keys"]
base_container_name = server_config["base_container"] if server_config["base_container"]
@server_configs[server_name] = {
server_type: server_type,
products: products,
ipaddress: server_config['ipaddress'],
additional_fqdn: nil,
mounts: mounts,
ssh_keys: ssh_keys,
base_container_name: base_container_name
}
# gather configuration from only the first "automate", "compliance" or "supermarket" server
break if %w(automate compliance supermarket).include?(server_type)
end
end
case server_type
when "analytics"
@config[server_type][:topology] = cluster_config[server_type]["topology"]
@config[server_type][:topology] ||= 'standalone'
@config[server_type][:fqdn] = cluster_config[server_type]["analytics_fqdn"]
@config[server_type][:frontends] = Array.new
if cluster_config[server_type]["servers"]
cluster_config[server_type]["servers"].each do |server_name, server_config|
server_config ||= Hash.new
additional_fqdn = nil
case @config[server_type][:topology]
when 'standalone'
@config[server_type][:bootstrap_backend] = server_name if server_config["role"].nil?
@config[server_type][:fqdn] ||= @config[server_type][:bootstrap_backend]
when 'tier'
@config[server_type][:bootstrap_backend] = server_name if server_config["role"] == "backend" && server_config["bootstrap"] == true
if server_config["role"] == "frontend"
additional_fqdn = @config[server_type][:fqdn]
@config[server_type][:frontends] << server_name
end
end
@server_configs[server_name].merge!({
additional_fqdn: additional_fqdn
})
end
end
when "chef-backend"
@config[server_type][:fqdn] = cluster_config[server_type]["api_fqdn"]
@config[server_type][:users] = cluster_config[server_type]["users"]
@config[server_type][:users] ||= Array.new
@config[server_type][:orgs] = cluster_config[server_type]["orgs"]
@config[server_type][:orgs] ||= Hash.new
@config[server_type][:backends] = Array.new
@config[server_type][:frontends] = Array.new
servers = cluster_config[server_type]["servers"]
if servers
@config[server_type][:leader_backend] = servers.select { |s,sc| sc['role'] == 'backend' && sc['leader'] == true }.keys.first
@config[server_type][:bootstrap_frontend] = servers.select { |s,sc| sc['role'] == 'frontend' && sc['bootstrap'] == true }.keys.first
@config[server_type][:backends] << @config[server_type][:leader_backend]
@config[server_type][:frontends] << @config[server_type][:bootstrap_frontend]
servers.each do |server_name, server_config|
server_config ||= Hash.new
additional_fqdn = nil
case server_config["role"]
when "backend"
@config[server_type][:backends] << server_name unless server_name == @config[server_type][:leader_backend]
when "frontend"
additional_fqdn = @config[server_type][:fqdn]
@config[server_type][:frontends] << server_name unless server_name == @config[server_type][:bootstrap_frontend]
end
@server_configs[server_name].merge!({
additional_fqdn: additional_fqdn,
chef_server_type: 'chef-server'
})
end
end
when "chef-server"
@config[server_type][:topology] = cluster_config[server_type]["topology"]
@config[server_type][:topology] ||= 'standalone'
@config[server_type][:fqdn] = cluster_config[server_type]["api_fqdn"]
@config[server_type][:users] = cluster_config[server_type]["users"]
@config[server_type][:users] ||= Array.new
@config[server_type][:orgs] = cluster_config[server_type]["orgs"]
@config[server_type][:orgs] ||= Hash.new
@config[server_type][:frontends] = Array.new
if cluster_config[server_type]["servers"]
cluster_config[server_type]["servers"].each do |server_name, server_config|
server_config ||= Hash.new
additional_fqdn = nil
chef_server_type = 'private-chef' if @server_configs[server_name][:products].has_key?('private-chef')
chef_server_type = 'chef-server' if @server_configs[server_name][:products].has_key?('chef-server')
case @config[server_type][:topology]
when 'standalone'
@config[server_type][:bootstrap_backend] = server_name if server_config["role"].nil?
@config[server_type][:fqdn] ||= @config[server_type][:bootstrap_backend]
when 'tier'
@config[server_type][:bootstrap_backend] = server_name if server_config["role"] == "backend" && server_config["bootstrap"] == true
if server_config["role"] == "frontend"
additional_fqdn = @config[server_type][:fqdn]
@config[server_type][:frontends] << server_name
end
end
@server_configs[server_name].merge!({
additional_fqdn: additional_fqdn,
chef_server_type: chef_server_type
})
end
end
when "supermarket"
unless cluster_config[server_type]["servers"].first.nil?
(server_name, server_config) = cluster_config[server_type]["servers"].first
@config[server_type][:fqdn] = server_name
end
when "automate"
unless cluster_config[server_type]["servers"].first.nil?
(server_name, server_config) = cluster_config[server_type]["servers"].first
server_config ||= Hash.new
@server_configs[server_name].merge!({
license_path: server_config['license_path'],
chef_org: server_config['chef_org'],
enterprise_name: server_config['enterprise_name']
})
end
when "nodes"
chef_server_url = cluster_config[server_type]['chef_server_url']
validation_client_name = cluster_config[server_type]['validation_client_name']
validation_key = cluster_config[server_type]['validation_key']
if cluster_config[server_type]["servers"]
cluster_config[server_type]["servers"].each do |server_name, server_config|
server_config ||= Hash.new
chef_server_url = server_config['chef_server_url'] if server_config['chef_server_url']
validation_client_name = server_config['validation_client_name'] if server_config['validation_client_name']
validation_key = server_config['validation_key'] if server_config['validation_key']
@server_configs[server_name].merge!({
chef_server_url: chef_server_url,
validation_client_name: validation_client_name,
validation_key: validation_key
})
end
end
end
end
end
validate_cluster_config
end
def validate_cluster_config
base_container_names = Array.new
mounts = Array.new
ssh_keys = Array.new
hostnames = Array.new
@config.map { |server_type, config| hostnames.push(config[:fqdn]) }
@server_configs.each do |server_name, server_config|
base_container_names.push(server_config[:base_container_name]).uniq! if server_config[:base_container_name]
mounts.concat(server_config[:mounts]).uniq! if server_config[:mounts]
ssh_keys.concat(server_config[:ssh_keys]).uniq! if server_config[:ssh_keys]
hostnames.push(server_name)
case server_config[:server_type]
when "automate"
if server_config[:license_path] && !File.exists?(server_config[:license_path])
puts "ERROR: Automate license #{server_config[:license_path]} does not exist."
exit 1
end
when "nodes"
if server_config[:validation_key] && !File.exists?(server_config[:validation_key])
puts "ERROR: Validation key #{server_config[:validation_key]} does not exist."
exit 1
end
end
end
unless base_container_names.empty?
base_container_names.each do |base_container_name|
unless ::DevLXC::Container.new(base_container_name).defined?
puts "ERROR: Base container #{base_container_name} does not exist."
exit 1
end
end
end
unless hostnames.empty?
hostnames.each do |hostname|
unless hostname.end_with?(".lxc")
puts "ERROR: Hostname #{hostname} does not end with '.lxc'."
exit 1
end
end
end
unless mounts.empty?
mounts.each do |mount|
unless File.exists?(mount.split.first)
puts "ERROR: Mount source #{mount.split.first} does not exist."
exit 1
end
end
end
unless ssh_keys.empty?
ssh_keys.each do |ssh_key|
unless File.exists?(ssh_key)
puts "ERROR: SSH key #{ssh_key} does not exist."
exit 1
end
end
end
end
def show_config(include_products=false)
require 'pp'
calculate_required_products(get_sorted_servers, true) if include_products
puts "Cluster config:"
pp @config
puts
puts "Server configs:"
pp @server_configs
end
def get_server(server_name)
ipaddress = @server_configs[server_name][:ipaddress]
additional_fqdn = @server_configs[server_name][:additional_fqdn]
mounts = @server_configs[server_name][:mounts]
ssh_keys = @server_configs[server_name][:ssh_keys]
Server.new(server_name, ipaddress, additional_fqdn, mounts, ssh_keys)
end
def get_sorted_servers(server_name_regex=nil)
servers = Array.new
# the order of this list of server_types matters
# it determines the order in which actions are applied to each server_type
%w(chef-backend chef-server analytics compliance supermarket automate build-nodes nodes adhoc).each do |server_type|
unless @config[server_type].empty?
case server_type
when "chef-backend"
@config[server_type][:backends].each do |backend_name|
servers << get_server(backend_name)
end
@config[server_type][:frontends].each do |frontend_name|
servers << get_server(frontend_name)
end
when "analytics", "chef-server"
if @config[server_type][:bootstrap_backend]
server_name = @config[server_type][:bootstrap_backend]
servers << get_server(server_name)
end
@config[server_type][:frontends].each do |frontend_name|
servers << get_server(frontend_name)
end
end
end
if %w(adhoc automate build-nodes compliance nodes supermarket).include?(server_type)
server_configs = @server_configs.select { |server_name, server_config| server_config[:server_type] == server_type }
server_configs.each_key { |server_name| servers << get_server(server_name) }
end
end
servers.select { |s| s.name =~ /#{server_name_regex}/ }
end
def up(server_name_regex=nil)
abort_up = false
configured_servers = Array.new
servers = get_sorted_servers(server_name_regex)
servers.each do |server|
next if server.container.defined?
if (@config['chef-server'][:frontends] && @config['chef-server'][:frontends].include?(server.name)) || server.name == @config['analytics'][:bootstrap_backend]
if @config['chef-server'][:bootstrap_backend].nil?
puts "ERROR: '#{server.name}' requires a Chef Server bootstrap backend to be configured first."
abort_up = true
elsif !get_server(@config['chef-server'][:bootstrap_backend]).container.defined? && servers.select { |s| s.name == @config['chef-server'][:bootstrap_backend] }.empty?
puts "ERROR: '#{server.name}' requires '#{@config['chef-server'][:bootstrap_backend]}' to be configured first."
abort_up = true
end
end
if @config['chef-server'][:bootstrap_backend] && @server_configs[server.name][:server_type] == 'supermarket'
if !get_server(@config['chef-server'][:bootstrap_backend]).container.defined? && servers.select { |s| s.name == @config['chef-server'][:bootstrap_backend] }.empty?
puts "ERROR: '#{server.name}' requires '#{@config['chef-server'][:bootstrap_backend]}' to be configured first."
abort_up = true
end
end
if @config['analytics'][:frontends] && @config['analytics'][:frontends].include?(server.name)
if @config['analytics'][:bootstrap_backend].nil?
puts "ERROR: '#{server.name}' requires an Analytics Server bootstrap backend to be configured first."
abort_up = true
elsif !get_server(@config['analytics'][:bootstrap_backend]).container.defined? && servers.select { |s| s.name == @config['analytics'][:bootstrap_backend] }.empty?
puts "ERROR: '#{server.name}' requires '#{@config['analytics'][:bootstrap_backend]}' to be configured first."
abort_up = true
end
end
if @config['chef-backend'][:frontends] && @config['chef-backend'][:frontends].include?(server.name) && server.name != @config['chef-backend'][:bootstrap_frontend]
if @config['chef-backend'][:bootstrap_frontend].nil?
puts "ERROR: '#{server.name}' requires a Chef Server bootstrap frontend to be configured first."
abort_up = true
elsif !get_server(@config['chef-backend'][:bootstrap_frontend]).container.defined? && servers.select { |s| s.name == @config['chef-backend'][:bootstrap_frontend] }.empty?
puts "ERROR: '#{server.name}' requires '#{@config['chef-backend'][:bootstrap_frontend]}' to be configured first."
abort_up = true
end
end
if server.name == @config['chef-backend'][:bootstrap_frontend]
if (@config['chef-backend'][:backends].select { |s| get_server(s).container.running? }.length + servers.select { |s| @config['chef-backend'][:backends].include?(s.name) }.length) < 2
puts "ERROR: '#{server.name}' requires at least two nodes in the backend cluster to be running first."
abort_up = true
end
end
if @config['chef-backend'][:backends] && @config['chef-backend'][:backends].include?(server.name) && server.name != @config['chef-backend'][:leader_backend]
if !get_server(@config['chef-backend'][:leader_backend]).container.running? && servers.select { |s| s.name == @config['chef-backend'][:leader_backend] }.empty?
puts "ERROR: '#{server.name}' requires '#{@config['chef-backend'][:leader_backend]}' to be running first."
abort_up = true
end
end
if @server_configs[server.name][:server_type] == 'nodes'
if @server_configs[server.name][:chef_server_url] && @server_configs[server.name][:validation_client_name] && @server_configs[server.name][:validation_key].nil?
if @config['chef-server'][:bootstrap_backend] && !get_server(@config['chef-server'][:bootstrap_backend]).container.defined? && servers.select { |s| s.name == @config['chef-server'][:bootstrap_backend] }.empty?
puts "ERROR: '#{server.name}' requires '#{@config['chef-server'][:bootstrap_backend]}' to be configured first."
abort_up = true
elsif @config['chef-backend'][:bootstrap_frontend] && !get_server(@config['chef-backend'][:bootstrap_frontend]).container.defined? && servers.select { |s| s.name == @config['chef-backend'][:bootstrap_frontend] }.empty?
puts "ERROR: '#{server.name}' requires '#{@config['chef-backend'][:bootstrap_frontend]}' to be configured first."
abort_up = true
end
end
end
if @server_configs[server.name][:server_type] == 'automate'
if @config['chef-server'][:bootstrap_backend].nil?
puts "ERROR: '#{server.name}' requires a Chef Server bootstrap backend to be configured first."
abort_up = true
elsif !get_server(@config['chef-server'][:bootstrap_backend]).container.defined? && servers.select { |s| s.name == @config['chef-server'][:bootstrap_backend] }.empty?
puts "ERROR: '#{server.name}' requires '#{@config['chef-server'][:bootstrap_backend]}' to be configured first."
abort_up = true
end
end
if @server_configs[server.name][:server_type] == 'build-nodes'
if @config['chef-server'][:bootstrap_backend].nil?
puts "ERROR: '#{server.name}' requires a Chef Server bootstrap backend to be configured first."
abort_up = true
elsif !get_server(@config['chef-server'][:bootstrap_backend]).container.running? && servers.select { |s| s.name == @config['chef-server'][:bootstrap_backend] }.empty?
puts "ERROR: '#{server.name}' requires '#{@config['chef-server'][:bootstrap_backend]}' to be running first."
abort_up = true
end
if @config['chef-server'][:topology] == 'tier'
if @config[server_type][:frontends].empty?
puts "ERROR: '#{server.name}' requires at least one Chef Server frontend to be configured first."
abort_up = true
elsif (@config['chef-server'][:frontends].select { |s| get_server(s).container.running? }.length + servers.select { |s| @config['chef-server'][:frontends].include?(s.name) }.length) < 1
puts "ERROR: '#{server.name}' requires at least one Chef Server frontend to be running first."
abort_up = true
end
end
automate_server_name = @server_configs.select {|name, config| config[:server_type] == 'automate'}.keys.first
if automate_server_name
if !get_server(automate_server_name).container.running? && servers.select { |s| s.name == automate_server_name }.empty?
puts "ERROR: '#{server.name}' requires '#{automate_server_name}' to be running first."
abort_up = true
end
else
puts "ERROR: '#{server.name}' requires an Automate Server to be configured first."
abort_up = true
end
end
end
exit 1 if abort_up
prep_product_cache(servers)
servers.each do |server|
clone_from_base_container(server) unless server.container.defined?
end
servers = get_sorted_servers(server_name_regex)
servers.each do |server|
if @server_configs[server.name][:server_type] == "build-nodes"
next if @server_configs[server.name][:required_products]["chefdk"] && @server_configs[server.name][:required_products].length == 1
end
install_products(server) unless @server_configs[server.name][:required_products].empty?
end
servers.each do |server|
if server.snapshot_list.select { |sn| sn[2].start_with?("dev-lxc build: completed") }.empty?
if server.name == @config["chef-backend"][:bootstrap_frontend]
running_backends = Array.new
@config["chef-backend"][:backends].reverse_each do |server_name|
backend = get_server(server_name)
if backend.container.defined? && backend.snapshot_list.select { |sn| sn[2].start_with?("dev-lxc build: backend cluster configured but frontend not bootstrapped") }.empty?
if backend.container.running?
running_backends << backend.name
backend.stop
end
backend.snapshot("dev-lxc build: backend cluster configured but frontend not bootstrapped")
snapshot = backend.snapshot_list.select { |sn| sn[2].start_with?("dev-lxc build: completed") }.first
backend.snapshot_destroy(snapshot.first) if snapshot
end
end
@config["chef-backend"][:backends].each do |server_name|
if running_backends.include?(server_name)
get_server(server_name).start
configured_servers << server_name unless configured_servers.include?(server_name)
end
end
end
configure_products(server)
configured_servers << server.name
end
server.start unless server.container.running?
end
configured_servers.reverse_each do |server_name|
server = get_server(server_name)
server.stop if server.container.running?
server.snapshot("dev-lxc build: completed")
end
configured_servers.each do |server_name|
server = get_server(server_name)
server.start if server.container.defined?
end
end
def clone_from_base_container(server)
server_type = @server_configs[server.name][:server_type]
base_container = DevLXC::Container.new(@server_configs[server.name][:base_container_name])
puts "Cloning base container '#{base_container.name}' into container '#{server.name}'"
base_container.clone(server.name, {:flags => LXC::LXC_CLONE_SNAPSHOT})
server.container.load_config
puts "Deleting SSH Server Host Keys"
FileUtils.rm_f(Dir.glob("#{server.container.config_item('lxc.rootfs')}/etc/ssh/ssh_host*_key*"))
puts "Adding lxc.hook.post-stop hook"
server.container.set_config_item("lxc.hook.post-stop", "/usr/local/share/lxc/hooks/post-stop-dhcp-release")
server.container.save_config
end
def get_product_url(server, product_name, product_options)
server_type = @server_configs[server.name][:server_type]
base_container = DevLXC::Container.new(@server_configs[server.name][:base_container_name])
mixlib_install_platform_detection_path = "#{base_container.config_item('lxc.rootfs')}/mixlib-install-platform-detection"
IO.write(mixlib_install_platform_detection_path, Mixlib::Install::Generator::Bourne.detect_platform_sh)
platform_results = `chroot #{base_container.config_item('lxc.rootfs')} bash mixlib-install-platform-detection`
File.unlink(mixlib_install_platform_detection_path)
if platform_results.empty?
puts "ERROR: Unable to detect the platform of container '#{base_container.name}'"
exit 1
end
(platform, platform_version, architecture) = platform_results.split
product_version = product_options['version'] if product_options
product_version ||= 'latest'
channel = product_options['channel'] if product_options
channel ||= 'stable'
channel = channel.to_sym
options = {
product_name: product_name,
product_version: product_version,
channel: channel,
platform: platform,
platform_version: platform_version,
architecture: architecture
}
artifact = Mixlib::Install.new(options).artifact_info
if artifact.class != Mixlib::Install::ArtifactInfo
puts "ERROR: Unable to find download URL for the following product"
puts JSON.pretty_generate(options)
exit 1
end
artifact.url
end
# Determine which product packages each server needs and where they live.
#
# For every server this records a product_name => package path/URL map in
# @server_configs[server.name][:required_products] and returns a combined
# package_source => product_name hash covering all servers.
#
# Servers that already have a "products installed" or "build: completed"
# snapshot are skipped unless `force` is true.
def calculate_required_products(servers, force=false)
  required_by_source = Hash.new
  servers.each do |server|
    server_config = @server_configs[server.name]
    server_config[:required_products] = Hash.new
    unless force
      snapshot_comments = server.snapshot_list.map { |snapshot| snapshot[2] }
      # Skip product cache preparation when an existing snapshot shows the
      # products were already installed or the build already completed.
      next if snapshot_comments.any? { |comment| comment.start_with?("dev-lxc build: products installed") }
      next if snapshot_comments.any? { |comment| comment.start_with?("dev-lxc build: completed") }
    end
    server_config[:products].each do |product_name, product_options|
      package_source = product_options && product_options['package_source']
      if package_source
        # An explicit package_source is used verbatim as the install path.
        required_by_source[package_source] = product_name
        server_config[:required_products][product_name] = package_source
      else
        # Otherwise resolve a download URL and point this server at the
        # shared product cache location the package will be downloaded to.
        package_source = get_product_url(server, product_name, product_options)
        required_by_source[package_source] = product_name
        product_cache_path = "/var/dev-lxc/cache/chef-products/#{product_name}/#{File.basename(package_source)}"
        server_config[:required_products][product_name] = product_cache_path
      end
    end
  end
  required_by_source
end
# Download (or verify) every package required by the given servers.
#
# Remote (http/https) sources are fetched into the shared product cache at
# /var/dev-lxc/cache/chef-products/<product>/; local package sources must
# already exist on disk or the process aborts with exit status 1.
def prep_product_cache(servers, force=false)
  all_required_products = calculate_required_products(servers, force)
  all_required_products.each do |package_source, product_name|
    if package_source.start_with?('http')
      product_cache_path = "/var/dev-lxc/cache/chef-products/#{product_name}/#{File.basename(package_source)}"
      unless File.exist?(product_cache_path)
        # mkdir_p is a no-op for directories that already exist, so no
        # separate Dir.exist? guard is needed.
        FileUtils.mkdir_p(File.dirname(product_cache_path))
        puts "Downloading #{package_source} to #{product_cache_path}"
        # NOTE(review): Kernel#open on a URL relies on open-uri and is
        # deprecated since Ruby 2.6 in favor of URI.open. Left as-is here
        # because the source is constrained to http(s) URLs above.
        open(package_source) { |url| File.open(product_cache_path, 'wb') { |f| f.write(url.read) } }
      end
    elsif !File.exist?(package_source)
      puts "ERROR: Package source #{package_source} does not exist."
      exit 1
    end
  end
end
# Install every required product package into the server's container.
#
# Skips entirely when a "products installed" or "build: completed" snapshot
# already exists. Starts the container if needed, installs each package,
# stops the container to take a "products installed" snapshot, and finally
# restores the container's original running state.
def install_products(server)
if !server.snapshot_list.select { |sn| sn[2].start_with?("dev-lxc build: products installed") }.empty?
puts "Skipping product installation for container '#{server.name}' because it already has a 'products installed' snapshot"
return
elsif !server.snapshot_list.select { |sn| sn[2].start_with?("dev-lxc build: completed") }.empty?
puts "Skipping product installation for container '#{server.name}' because it already has a 'build: completed' snapshot"
return
end
# Remember whether the container was already running so that state can be
# restored after the snapshot is taken.
if server.container.running?
server_was_running = true
else
server_was_running = false
server.start
end
@server_configs[server.name][:required_products].each do |product_name, package_source|
# build-nodes get chefdk installed later via the Automate server's
# `delivery-ctl install-build-node --installer ...` (see configure_build_node).
next if @server_configs[server.name][:server_type] == "build-nodes" && product_name == "chefdk"
server.install_package(package_source)
end
# Stop the container before snapshotting, then restart it only if it was
# running when we started.
server.stop
server.snapshot("dev-lxc build: products installed")
server.start if server_was_running
end
# Apply per-server-type configuration after products are installed.
#
# Dispatches on @server_configs[server.name][:server_type]; each branch only
# configures the products actually listed in this server's
# :required_products. The container is started first if necessary.
def configure_products(server)
puts "Configuring container '#{server.name}'"
server.start unless server.container.running?
required_products = @server_configs[server.name][:required_products].keys if @server_configs[server.name][:required_products]
required_products ||= Array.new
server_type = @server_configs[server.name][:server_type]
dot_chef_path = "/root/chef-repo/.chef"
case server_type
when 'adhoc'
# Allow adhoc servers time to generate SSH Server Host Keys
sleep 5
when 'analytics'
configure_analytics(server) if required_products.include?('analytics')
when 'build-nodes'
sleep 5 # give time for DNS resolution to be available
configure_build_node(server)
when 'chef-backend'
configure_chef_backend(server) if required_products.include?('chef-backend')
if required_products.include?('chef-server')
configure_chef_frontend(server)
# Users/orgs are only created once, on the bootstrap frontend.
if server.name == @config['chef-backend'][:bootstrap_frontend]
create_users_orgs_knife_configs(server, dot_chef_path)
end
end
configure_manage(server) if required_products.include?('manage')
when 'chef-server'
if required_products.include?('chef-server') || required_products.include?('private-chef')
configure_chef_server(server)
# Users/orgs are only created once, on the bootstrap backend. When an
# Automate server is defined, also pre-create its "delivery" user and
# Chef org and make the user an admin of that org.
if server.name == @config['chef-server'][:bootstrap_backend]
create_users_orgs_knife_configs(server, dot_chef_path)
automate_server_name = @server_configs.select {|name, config| config[:server_type] == 'automate'}.keys.first
if automate_server_name
automate_user = "delivery"
automate_chef_org = @server_configs[automate_server_name][:chef_org]
create_user(server, automate_user, dot_chef_path)
create_org(server, automate_chef_org, dot_chef_path)
org_add_user(server, automate_chef_org, automate_user, true, dot_chef_path)
end
end
end
# Addons are configured after the Chef Server itself.
configure_reporting(server) if required_products.include?('reporting')
configure_push_jobs_server(server) if required_products.include?('push-jobs-server')
configure_manage(server) if required_products.include?('manage')
when 'compliance'
configure_compliance(server) if required_products.include?('compliance')
when 'automate'
configure_automate(server) if required_products.include?('delivery')
when 'nodes'
# Allow servers time to generate SSH Server Host Keys
sleep 5
configure_chef_client(server, dot_chef_path) if required_products.include?('chef') || required_products.include?('chefdk')
when 'supermarket'
configure_supermarket(server) if required_products.include?('supermarket')
end
end
# Run `delivery-ctl setup` inside the Automate container.
#
# Copies the Automate license and the pre-created "delivery" Chef user key
# from the Chef Server's bootstrap backend into the container, then builds
# and runs the setup command.
def configure_automate(server)
  server_config = @server_configs[server.name]
  rootfs = server.container.config_item('lxc.rootfs')
  chef_server_url = @config['chef-server'][:fqdn]
  supermarket_fqdn = @config['supermarket'][:fqdn]
  # Stage the license file inside the container.
  FileUtils.cp(server_config[:license_path], "#{rootfs}/root/automate.license")
  # Copy the "delivery" user key from the Chef Server's bootstrap backend.
  chef_server = get_server(@config['chef-server'][:bootstrap_backend])
  automate_chef_user_key = "#{chef_server.container.config_item('lxc.rootfs')}/root/chef-repo/.chef/delivery.pem"
  FileUtils.cp(automate_chef_user_key, "#{rootfs}/root/automate_chef_user_key.pem")
  setup_args = [
    "setup",
    "--license /root/automate.license",
    "--fqdn #{server.name}",
    "--key /root/automate_chef_user_key.pem",
    "--server-url https://#{chef_server_url}/organizations/#{server_config[:chef_org]}"
  ]
  setup_args << "--supermarket-fqdn #{supermarket_fqdn}" if supermarket_fqdn
  setup_args << "--enterprise #{server_config[:enterprise_name]}"
  setup_args << "--no-build-node"
  setup_args << "--configure"
  run_ctl(server, "delivery", setup_args.join(" "))
end
# Print the contents of every Automate credentials file
# (/etc/delivery/*-credentials) to stdout, separated by blank lines.
#
# Exits with status 1 when no Automate server is defined in the cluster.
def print_automate_credentials
  automate_server_name = @server_configs.select {|name, config| config[:server_type] == 'automate'}.keys.first
  if automate_server_name
    automate_server = get_server(automate_server_name)
    automate_credentials_files = Dir.glob("#{automate_server.container.config_item('lxc.rootfs')}/etc/delivery/*-credentials")
    automate_credentials_files.each_with_index do |automate_credentials_file, index|
      puts IO.read(automate_credentials_file)
      # Blank line between files, but not after the last one.
      puts if index + 1 < automate_credentials_files.length
    end
  else
    # This path exits non-zero, so label it as an error (the previous
    # "WARNING:" label contradicted the fatal exit 1).
    puts "ERROR: An Automate server is not defined."
    exit 1
  end
end
# Register this container as an Automate build node by running
# `delivery-ctl install-build-node` on the Automate server.
# Does nothing when no Automate server is defined in the cluster.
def configure_build_node(server)
automate_server_name = @server_configs.select {|name, config| config[:server_type] == 'automate'}.keys.first
if automate_server_name
automate_server = get_server(automate_server_name)
install_build_node_cmd = "install-build-node"
install_build_node_cmd += " --fqdn #{server.name}"
# NOTE(review): assumes the container accepts dev-lxc/dev-lxc SSH
# credentials — confirm against the base image setup.
install_build_node_cmd += " --username dev-lxc"
install_build_node_cmd += " --password dev-lxc"
# The installer is the chefdk package cached for this server; chefdk is
# deliberately skipped during install_products for build-nodes.
install_build_node_cmd += " --installer #{@server_configs[server.name][:required_products]["chefdk"]}"
install_build_node_cmd += " --overwrite-registration"
run_ctl(automate_server, "delivery", install_build_node_cmd)
end
end
# Write /etc/chef/client.rb (and copy validator keys when available) into a
# node container so chef-client can talk to the cluster's Chef Server.
def configure_chef_client(server, dot_chef_path)
puts "Configuring Chef Client in container '#{server.name}'"
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/chef")
chef_server_url = @server_configs[server.name][:chef_server_url]
validation_client_name = @server_configs[server.name][:validation_client_name]
validation_key = @server_configs[server.name][:validation_key]
# When no explicit validation key is configured, copy *-validator.pem
# files from the cluster's Chef Server container, if that container exists.
if validation_key.nil?
chef_server_name = @config['chef-server'][:bootstrap_backend]
chef_server_name ||= @config['chef-backend'][:bootstrap_frontend]
if chef_server_name
chef_server = get_server(chef_server_name)
if chef_server.container.defined?
validator_pem_files = Dir.glob("#{chef_server.container.config_item('lxc.rootfs')}#{dot_chef_path}/*-validator.pem")
FileUtils.cp(validator_pem_files, "#{server.container.config_item('lxc.rootfs')}/etc/chef/") unless validator_pem_files.empty?
end
end
end
client_rb = %Q(chef_server_url '#{chef_server_url}'
validation_client_name '#{validation_client_name}'
validation_key '/etc/chef/#{validation_client_name}.pem'
ssl_verify_mode :verify_none
)
# When an Automate server is defined, point the client's data collector at
# it so run data is reported to Automate.
automate_server_name = @server_configs.select {|name, config| config[:server_type] == 'automate'}.keys.first
if automate_server_name
client_rb += %Q(
data_collector.server_url "https://#{automate_server_name}/data-collector/v0/"
data_collector.token "93a49a4f2482c64126f7b6015e6b0f30284287ee4054ff8807fb63d9cbd1c506"
)
end
IO.write("#{server.container.config_item('lxc.rootfs')}/etc/chef/client.rb", client_rb)
end
# Configure a chef-backend cluster member: the leader bootstraps the
# cluster; every other member copies the leader's secrets file and joins.
def configure_chef_backend(server)
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/var/opt/chef-backend")
# Pre-accept the license so the ctl commands run unattended.
FileUtils.touch("#{server.container.config_item('lxc.rootfs')}/var/opt/chef-backend/.license.accepted")
if server.name == @config['chef-backend'][:leader_backend]
puts "Creating /etc/chef-backend/chef-backend.rb"
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/chef-backend")
chef_backend_config = "publish_address '#{@server_configs[server.name][:ipaddress]}'\n"
IO.write("#{server.container.config_item('lxc.rootfs')}/etc/chef-backend/chef-backend.rb", chef_backend_config)
run_ctl(server, "chef-backend", "bootstrap --yes")
else
puts "Joining #{server.name} to the chef-backend cluster"
leader_backend = get_server(@config['chef-backend'][:leader_backend])
# Followers need the leader's shared secrets file to join the cluster.
FileUtils.cp("#{leader_backend.container.config_item('lxc.rootfs')}/etc/chef-backend/chef-backend-secrets.json",
"#{server.container.config_item('lxc.rootfs')}/root/")
run_ctl(server, "chef-backend", "join-cluster #{@server_configs[leader_backend.name][:ipaddress]} -p #{@server_configs[server.name][:ipaddress]} -s /root/chef-backend-secrets.json --yes")
end
end
# Configure a Chef Server frontend that sits on a chef-backend cluster.
#
# The backend leader generates this frontend's chef-server.rb via
# `gen-server-config`. Frontends other than the bootstrap frontend also
# copy the shared secrets and pivotal key from the bootstrap frontend
# before reconfiguring.
def configure_chef_frontend(server)
puts "Creating /etc/opscode/chef-server.rb"
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/opscode")
leader_backend = get_server(@config['chef-backend'][:leader_backend])
run_ctl(leader_backend, "chef-backend", "gen-server-config #{server.name} --filename /tmp/#{server.name}.rb")
FileUtils.cp("#{leader_backend.container.config_item('lxc.rootfs')}/tmp/#{server.name}.rb",
"#{server.container.config_item('lxc.rootfs')}/etc/opscode/chef-server.rb")
unless server.name == @config['chef-backend'][:bootstrap_frontend]
bootstrap_frontend = get_server(@config['chef-backend'][:bootstrap_frontend])
puts "Copying /etc/opscode/private-chef-secrets.json from bootstrap frontend '#{bootstrap_frontend.name}'"
FileUtils.cp("#{bootstrap_frontend.container.config_item('lxc.rootfs')}/etc/opscode/private-chef-secrets.json",
"#{server.container.config_item('lxc.rootfs')}/etc/opscode/")
puts "Copying /etc/opscode/pivotal.pem from bootstrap frontend '#{bootstrap_frontend.name}'"
FileUtils.cp("#{bootstrap_frontend.container.config_item('lxc.rootfs')}/etc/opscode/pivotal.pem",
"#{server.container.config_item('lxc.rootfs')}/etc/opscode/")
end
run_ctl(server, "chef-server", "reconfigure")
end
# Configure a standalone or tier Chef Server (or legacy Enterprise Chef).
#
# The standalone server or tier bootstrap backend gets a freshly generated
# config file (see chef_server_config); tier frontends copy /etc/opscode
# wholesale from the bootstrap backend. Always finishes with
# `<chef_server_type>-ctl reconfigure`.
def configure_chef_server(server)
if @config['chef-server'][:topology] == "standalone" || @config['chef-server'][:bootstrap_backend] == server.name
# Config file name differs between Enterprise Chef and Chef Server.
case @server_configs[server.name][:chef_server_type]
when 'private-chef'
puts "Creating /etc/opscode/private-chef.rb"
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/opscode")
IO.write("#{server.container.config_item('lxc.rootfs')}/etc/opscode/private-chef.rb", chef_server_config)
when 'chef-server'
puts "Creating /etc/opscode/chef-server.rb"
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/opscode")
IO.write("#{server.container.config_item('lxc.rootfs')}/etc/opscode/chef-server.rb", chef_server_config)
end
elsif @config['chef-server'][:frontends].include?(server.name)
puts "Copying /etc/opscode from bootstrap backend '#{@config['chef-server'][:bootstrap_backend]}'"
FileUtils.cp_r("#{get_server(@config['chef-server'][:bootstrap_backend]).container.config_item('lxc.rootfs')}/etc/opscode",
"#{server.container.config_item('lxc.rootfs')}/etc", preserve: true)
end
run_ctl(server, @server_configs[server.name][:chef_server_type], "reconfigure")
end
# Configure the Reporting addon: pre-accept its license, copy
# /etc/opscode-reporting from the bootstrap backend on tier frontends, then
# reconfigure the Chef Server followed by opscode-reporting.
def configure_reporting(server)
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/var/opt/opscode-reporting")
FileUtils.touch("#{server.container.config_item('lxc.rootfs')}/var/opt/opscode-reporting/.license.accepted")
if @config['chef-server'][:frontends].include?(server.name)
puts "Copying /etc/opscode-reporting from bootstrap backend '#{@config['chef-server'][:bootstrap_backend]}'"
FileUtils.cp_r("#{get_server(@config['chef-server'][:bootstrap_backend]).container.config_item('lxc.rootfs')}/etc/opscode-reporting",
"#{server.container.config_item('lxc.rootfs')}/etc", preserve: true)
end
# Chef Server first, addon second — both must be reconfigured.
run_ctl(server, @server_configs[server.name][:chef_server_type], "reconfigure")
run_ctl(server, "opscode-reporting", "reconfigure")
end
# Reconfigure the Push Jobs addon first and then the Chef Server itself,
# preserving the original order of the two ctl runs.
def configure_push_jobs_server(server)
  components = ["opscode-push-jobs-server", @server_configs[server.name][:chef_server_type]]
  components.each { |component| run_ctl(server, component, "reconfigure") }
end
# Configure the Chef Manage addon: pre-accept the license, and on
# Enterprise Chef (private-chef) disable the legacy opscode-webui before
# reconfiguring the Chef Server and then opscode-manage.
def configure_manage(server)
  FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/var/opt/chef-manage")
  FileUtils.touch("#{server.container.config_item('lxc.rootfs')}/var/opt/chef-manage/.license.accepted")
  if @server_configs[server.name][:chef_server_type] == 'private-chef'
    puts "Disabling old opscode-webui in /etc/opscode/private-chef.rb"
    # BUGFIX: the previous pattern /opscode_webui[.enable.]/ used an
    # unescaped character class, so it could never match the literal text
    # "opscode_webui['enable']" — the stale line was never deleted and the
    # append below added a duplicate on every run. Escape the brackets so
    # an existing setting line is actually removed first.
    DevLXC.search_file_delete_line("#{server.container.config_item('lxc.rootfs')}/etc/opscode/private-chef.rb", /opscode_webui\['enable'\]/)
    DevLXC.append_line_to_file("#{server.container.config_item('lxc.rootfs')}/etc/opscode/private-chef.rb", "\nopscode_webui['enable'] = false\n")
    run_ctl(server, @server_configs[server.name][:chef_server_type], "reconfigure")
  end
  run_ctl(server, "opscode-manage", "reconfigure")
end
# Configure an Analytics node: pre-accept the license, pull
# /etc/opscode-analytics from the appropriate bootstrap server, write the
# generated opscode-analytics.rb on the backend, then reconfigure.
def configure_analytics(server)
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/var/opt/opscode-analytics")
FileUtils.touch("#{server.container.config_item('lxc.rootfs')}/var/opt/opscode-analytics/.license.accepted")
if @config['analytics'][:topology] == "standalone" || @config['analytics'][:bootstrap_backend] == server.name
# The Chef Server bootstrap backend holds the analytics credentials.
puts "Copying /etc/opscode-analytics from Chef Server bootstrap backend '#{@config['chef-server'][:bootstrap_backend]}'"
FileUtils.cp_r("#{get_server(@config['chef-server'][:bootstrap_backend]).container.config_item('lxc.rootfs')}/etc/opscode-analytics",
"#{server.container.config_item('lxc.rootfs')}/etc", preserve: true)
IO.write("#{server.container.config_item('lxc.rootfs')}/etc/opscode-analytics/opscode-analytics.rb", analytics_config)
elsif @config['analytics'][:frontends].include?(server.name)
# Analytics frontends copy their config from the Analytics backend.
puts "Copying /etc/opscode-analytics from Analytics bootstrap backend '#{@config['analytics'][:bootstrap_backend]}'"
FileUtils.cp_r("#{get_server(@config['analytics'][:bootstrap_backend]).container.config_item('lxc.rootfs')}/etc/opscode-analytics",
"#{server.container.config_item('lxc.rootfs')}/etc", preserve: true)
end
run_ctl(server, "opscode-analytics", "reconfigure")
end
# Configure a Compliance node: pre-accept the license, then reconfigure.
def configure_compliance(server)
  rootfs = server.container.config_item('lxc.rootfs')
  license_dir = "#{rootfs}/var/opt/chef-compliance"
  FileUtils.mkdir_p(license_dir)
  FileUtils.touch("#{license_dir}/.license.accepted")
  run_ctl(server, "chef-compliance", "reconfigure")
end
# Configure Supermarket: when the Chef Server container exists, wire its
# oauth2 settings to the oc-id "supermarket" application credentials that
# the Chef Server generated, then reconfigure.
def configure_supermarket(server)
if @config['chef-server'][:bootstrap_backend] && get_server(@config['chef-server'][:bootstrap_backend]).container.defined?
# oc-id wrote the supermarket application's uid/secret on the Chef Server.
chef_server_supermarket_config = JSON.parse(IO.read("#{get_server(@config['chef-server'][:bootstrap_backend]).container.config_item('lxc.rootfs')}/etc/opscode/oc-id-applications/supermarket.json"))
supermarket_config = {
'chef_server_url' => "https://#{@config['chef-server'][:fqdn]}/",
'chef_oauth2_app_id' => chef_server_supermarket_config['uid'],
'chef_oauth2_secret' => chef_server_supermarket_config['secret'],
'chef_oauth2_verify_ssl' => false
}
FileUtils.mkdir_p("#{server.container.config_item('lxc.rootfs')}/etc/supermarket")
IO.write("#{server.container.config_item('lxc.rootfs')}/etc/supermarket/supermarket.json", JSON.pretty_generate(supermarket_config))
end
run_ctl(server, "supermarket", "reconfigure")
end
# Execute an Omnibus ctl command (e.g. `chef-server-ctl reconfigure`)
# inside the given server's container.
def run_ctl(server, component, subcommand)
  ctl_command = format("%s-ctl %s", component, subcommand)
  server.run_command(ctl_command)
end
# On a freshly configured Chef Server, create the configured users and orgs
# and write pivotal/knife configs into its /root/chef-repo/.chef.
#
# @config[server_type][:users] is a list of usernames;
# @config[server_type][:orgs] maps orgname => {'admins' => [...],
# 'non-admins' => [...]} (either list optional, value may be nil).
def create_users_orgs_knife_configs(server, dot_chef_path)
server_type = @server_configs[server.name][:server_type]
# give time for all services to come up completely
sleep 10
if @server_configs[server.name][:chef_server_type] == 'private-chef'
# give more time for all services to come up completely
sleep 50
# Enterprise Chef needs the knife-opc plugin for user/org management.
server.run_command("/opt/opscode/embedded/bin/gem install knife-opc --no-ri --no-rdoc -v 0.3.1")
end
chef_server_dot_chef_path = "#{server.container.config_item('lxc.rootfs')}#{dot_chef_path}"
FileUtils.mkdir_p(chef_server_dot_chef_path)
FileUtils.cp( "#{server.container.config_item('lxc.rootfs')}/etc/opscode/pivotal.pem", chef_server_dot_chef_path )
# These knife configs run inside the container, so they target localhost.
create_pivotal_knife_config('127.0.0.1', chef_server_dot_chef_path)
create_knife_config('127.0.0.1', chef_server_dot_chef_path)
@config[server_type][:users].each do |username|
create_user(server, username, dot_chef_path)
end
@config[server_type][:orgs].each do |orgname, org_users|
create_org(server, orgname, dot_chef_path)
if org_users
if org_users['admins']
org_users['admins'].each do |username|
org_add_user(server, orgname, username, true, dot_chef_path)
end
end
if org_users['non-admins']
org_users['non-admins'].each do |username|
org_add_user(server, orgname, username, false, dot_chef_path)
end
end
end
end
end
# Render the pivotal (superuser) knife config into <dot_chef_path>/pivotal.rb.
#
# The `#{current_dir}` reference is written literally so it is evaluated
# when the generated pivotal.rb is loaded, not when it is rendered here.
def create_pivotal_knife_config(fqdn, dot_chef_path)
  pivotal_config_lines = [
    "",
    "current_dir = File.dirname(__FILE__)",
    "chef_server_root \"https://#{fqdn}\"",
    "chef_server_url \"https://#{fqdn}\"",
    'node_name "pivotal"',
    'client_key "#{current_dir}/pivotal.pem"',
    'cookbook_path Dir.pwd + "/cookbooks"',
    "knife[:chef_repo_path] = Dir.pwd",
    "ssl_verify_mode :verify_none",
    ""
  ]
  IO.write("#{dot_chef_path}/pivotal.rb", pivotal_config_lines.join("\n"))
end
# Render a user-facing knife.rb template into <dot_chef_path>/knife.rb.
#
# The generated file is itself Ruby: it aborts until the user replaces the
# CHANGEME placeholders with a real username and org name. References such
# as `#{orgname}` are written literally so they are evaluated when the
# generated knife.rb is loaded.
def create_knife_config(fqdn, dot_chef_path)
  knife_config_lines = [
    "",
    'username = "CHANGEME"',
    'orgname = "CHANGEME"',
    'if [username, orgname].include?("CHANGEME")',
    %q(puts "ERROR: Please set 'username' and 'orgname' to proper values in knife.rb"),
    "exit!",
    "end",
    "current_dir = File.dirname(__FILE__)",
    "chef_server_url \"https://#{fqdn}/organizations/\#{orgname}\"",
    "node_name username",
    'client_key "#{current_dir}/#{username}.pem"',
    'validation_client_name "#{orgname}-validator"',
    'validation_key "#{current_dir}/#{orgname}-validator.pem"',
    'cookbook_path Dir.pwd + "/cookbooks"',
    "knife[:chef_repo_path] = Dir.pwd",
    "ssl_verify_mode :verify_none",
    ""
  ]
  IO.write("#{dot_chef_path}/knife.rb", knife_config_lines.join("\n"))
end
# Create a Chef user on the server, writing the user's key to
# <dot_chef_path>/<username>.pem inside the container.
#
# The username doubles as display name and password; the email is a
# noreply placeholder.
def create_user(server, username, dot_chef_path)
  create_user_args = [
    username, username, username, "#{username}@noreply.com", username,
    "--filename", "#{dot_chef_path}/#{username}.pem"
  ].join(" ")
  case @server_configs[server.name][:chef_server_type]
  when 'private-chef'
    # Enterprise Chef: use the knife-opc plugin with the pivotal config.
    server.run_command("/opt/opscode/embedded/bin/knife opc user create #{create_user_args} -c #{dot_chef_path}/pivotal.rb")
  when 'chef-server'
    run_ctl(server, "chef-server", "user-create #{create_user_args}")
  end
end
# Create a Chef organization on the server, writing the org's validator
# key to <dot_chef_path>/<orgname>-validator.pem inside the container.
def create_org(server, orgname, dot_chef_path)
  create_org_args = [
    orgname, orgname,
    "--filename", "#{dot_chef_path}/#{orgname}-validator.pem"
  ].join(" ")
  case @server_configs[server.name][:chef_server_type]
  when 'private-chef'
    # Enterprise Chef: use the knife-opc plugin with the pivotal config.
    server.run_command("/opt/opscode/embedded/bin/knife opc org create #{create_org_args} -c #{dot_chef_path}/pivotal.rb")
  when 'chef-server'
    run_ctl(server, "chef-server", "org-create #{create_org_args}")
  end
end
# Add an existing user to an existing org, optionally as an org admin.
def org_add_user(server, orgname, username, admin, dot_chef_path)
  org_add_user_args = [orgname, username]
  org_add_user_args << "--admin" if admin
  org_add_user_args = org_add_user_args.join(" ")
  case @server_configs[server.name][:chef_server_type]
  when 'private-chef'
    # Enterprise Chef: use the knife-opc plugin with the pivotal config.
    server.run_command("/opt/opscode/embedded/bin/knife opc org user add #{org_add_user_args} -c #{dot_chef_path}/pivotal.rb")
  when 'chef-server'
    run_ctl(server, "chef-server", "org-user-add #{org_add_user_args}")
  end
end
# Assemble a local ./chef-repo/.chef directory with the cluster's pem files
# and knife configs so knife can be used from the host.
#
# force:   overwrite an existing pivotal.rb / knife.rb when true.
# pivotal: also copy pivotal.pem and generate pivotal.rb when true.
def chef_repo(force=false, pivotal=false)
  chef_server_dot_chef_path = "/root/chef-repo/.chef"
  dot_chef_path = "./chef-repo/.chef"
  # Prefer a standalone/tier Chef Server; fall back to a chef-backend
  # cluster's bootstrap frontend.
  if @config['chef-server'][:bootstrap_backend]
    chef_server = get_server(@config['chef-server'][:bootstrap_backend])
    chef_server_fqdn = @config['chef-server'][:fqdn]
  elsif @config['chef-backend'][:bootstrap_frontend]
    chef_server = get_server(@config['chef-backend'][:bootstrap_frontend])
    chef_server_fqdn = @config['chef-backend'][:fqdn]
  else
    puts "ERROR: A Chef Server is not defined in the cluster's config. Please define it first."
    exit 1
  end
  unless chef_server.container.defined?
    puts "ERROR: The '#{chef_server.name}' Chef Server does not exist."
    exit 1
  end
  puts "Creating chef-repo with pem files and knife.rb in the current directory"
  FileUtils.mkdir_p(dot_chef_path)
  pem_files = Dir.glob("#{chef_server.container.config_item('lxc.rootfs')}#{chef_server_dot_chef_path}/*.pem")
  # Only expose the pivotal superuser key when explicitly requested.
  pem_files.delete_if { |pem_file| pem_file.end_with?("/pivotal.pem") } unless pivotal
  FileUtils.cp(pem_files, dot_chef_path) unless pem_files.empty?
  # File.exists? was a deprecated alias (removed in Ruby 3.2); use File.exist?.
  if pivotal
    if File.exist?("#{dot_chef_path}/pivotal.rb") && !force
      puts "Skipping pivotal.rb because it already exists in `#{dot_chef_path}`"
    else
      create_pivotal_knife_config(chef_server_fqdn, dot_chef_path)
    end
  end
  # Use dot_chef_path consistently instead of re-spelling the literal path.
  if File.exist?("#{dot_chef_path}/knife.rb") && !force
    puts "Skipping knife.rb because it already exists in `#{dot_chef_path}`"
  else
    create_knife_config(chef_server_fqdn, dot_chef_path)
  end
end
# Build the chef-server.rb / private-chef.rb configuration contents for
# this cluster from @config and @server_configs and return it as a String.
def chef_server_config
  config = %Q(api_fqdn "#{@config['chef-server'][:fqdn]}"\n)
  # Tier topology: declare the bootstrap backend plus every frontend.
  if @config['chef-server'][:topology] == 'tier'
    config << %Q(
topology "#{@config['chef-server'][:topology]}"
server "#{@config['chef-server'][:bootstrap_backend]}",
:ipaddress => "#{@server_configs[@config['chef-server'][:bootstrap_backend]][:ipaddress]}",
:role => "backend",
:bootstrap => true
backend_vip "#{@config['chef-server'][:bootstrap_backend]}",
:ipaddress => "#{@server_configs[@config['chef-server'][:bootstrap_backend]][:ipaddress]}"
)
    @config['chef-server'][:frontends].each do |frontend_name|
      config << %Q(
server "#{frontend_name}",
:ipaddress => "#{@server_configs[frontend_name][:ipaddress]}",
:role => "frontend"
)
    end
  end
  # Register Analytics as an oc-id application and expose rabbitmq.
  if @config['analytics'][:fqdn]
    config << %Q(
oc_id['applications'] ||= {}
oc_id['applications']['analytics'] = {
'redirect_uri' => 'https://#{@config['analytics'][:fqdn]}/'
}
rabbitmq['vip'] = '#{@config['chef-server'][:bootstrap_backend]}'
rabbitmq['node_ip_address'] = '0.0.0.0'
)
  end
  # Register Supermarket as an oc-id application.
  if @config['supermarket'][:fqdn]
    config << %Q(
oc_id['applications'] ||= {}
oc_id['applications']['supermarket'] = {
'redirect_uri' => 'https://#{@config['supermarket'][:fqdn]}/auth/chef_oauth2/callback'
}
)
  end
  # Point the data collector at an Automate server when one is defined.
  automate_server_name = @server_configs.select { |_name, server_config| server_config[:server_type] == 'automate' }.keys.first
  if automate_server_name
    config << %Q(
data_collector['root_url'] = "https://#{automate_server_name}/data-collector/v0/"
data_collector['token'] = "93a49a4f2482c64126f7b6015e6b0f30284287ee4054ff8807fb63d9cbd1c506"
)
  end
  config
end
# Build the opscode-analytics.rb configuration contents for this cluster
# from @config and @server_configs and return it as a String.
def analytics_config
  config = %Q(analytics_fqdn "#{@config['analytics'][:fqdn]}"
topology "#{@config['analytics'][:topology]}"
)
  # Tier topology: declare the bootstrap backend plus every frontend.
  if @config['analytics'][:topology] == 'tier'
    config << %Q(
server "#{@config['analytics'][:bootstrap_backend]}",
:ipaddress => "#{@server_configs[@config['analytics'][:bootstrap_backend]][:ipaddress]}",
:role => "backend",
:bootstrap => true
backend_vip "#{@config['analytics'][:bootstrap_backend]}",
:ipaddress => "#{@server_configs[@config['analytics'][:bootstrap_backend]][:ipaddress]}"
)
    @config['analytics'][:frontends].each do |frontend_name|
      config << %Q(
server "#{frontend_name}",
:ipaddress => "#{@server_configs[frontend_name][:ipaddress]}",
:role => "frontend"
)
    end
  end
  config
end
end
end
|
spec: add formatter for example count
Change-Id: I4ee4e9346f7fa874b0203152b09b1b72988fff31
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/282875
Reviewed-by: Aaron Ogata <11ccf682c06d1508642f943a3962a1c3d00b8e44@instructure.com>
Tested-by: Service Cloud Jenkins <9144042a601061f88f1e1d7a1753ea3e2972119d@instructure.com>
QA-Review: James Butters <4c5a489c9bb72d41f20b3b22aaf4c9b864e32f20@instructure.com>
Product-Review: James Butters <4c5a489c9bb72d41f20b3b22aaf4c9b864e32f20@instructure.com>
# frozen_string_literal: true
#
# Copyright (C) 2022 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
# Minimal RSpec formatter that writes the total number of examples run to
# its output stream when the suite finishes.
class ExampleCountRecorder
  RSpec::Core::Formatters.register self, :dump_summary

  def initialize(output)
    @output = output
  end

  # Receives the end-of-run summary notification; appends the count of all
  # examples that ran (an Integer, no trailing newline) to the output.
  def dump_summary(summary)
    @output << summary.examples.count
  end
end
|
require File.expand_path('../spec_helper', File.dirname(__FILE__))
# NOTE(review): including the extension at the top level mixes it into
# Object, leaking these methods into every object in the process.
include Kaminari::ActionViewExtension
describe 'Kaminari::ActionViewExtension' do
describe '#paginate' do
before do
# Two books by one author, then paginate the whole Book table (page 1).
@author = User.create! :name => 'author'
@books = 2.times.map { @author.books_authored.create! }
@books = Book.page(1)
end
subject { paginate( @books ) }
it { should be_a(String) }
context "escaping the pagination for javascript" do
it "should escape for javascript" do
# Running escape_javascript over the rendered pagination must not raise.
lambda { escape_javascript( paginate( @books ) ) }.should_not raise_error
end
end
end
end
Use the spec's `helper` object instead of polluting Object by including Kaminari::ActionViewExtension at the top level.
require File.expand_path('../spec_helper', File.dirname(__FILE__))
describe 'Kaminari::ActionViewExtension' do
describe '#paginate' do
before do
# 50 users so that page(1) has content to paginate.
50.times {|i| User.create! :name => "user#{i}"}
@users = User.page(1)
end
# Calls go through the spec `helper` object rather than mixing the
# extension into Object at the top level.
subject { helper.paginate @users, :params => {:controller => 'users', :action => 'index'} }
it { should be_a String }
context 'escaping the pagination for javascript' do
it 'should escape for javascript' do
# Running escape_javascript over the rendered pagination must not raise.
lambda { escape_javascript(helper.paginate @users, :params => {:controller => 'users', :action => 'index'}) }.should_not raise_error
end
end
end
end
|
require 'spec_helper'
# Specs for the counter_cacheable plugin.
#
# before(:all) seeds a single user with one post; the examples below add
# comments to that post and diff counter values, so they stay valid even as
# earlier examples mutate the counts.
describe DataMapper::Is::CounterCacheable do
before(:all) do
User.create(
:name => 'bob',
:posts => [
{:title => 'Hello', :body => 'Hello there.'}
]
)
end
before(:each) do
@user = User.first
@post = @user.posts.first
end
it "should define the default counter cache property" do
Post.properties.should be_named('comments_counter')
end
it "should allow defining custom named counter cache properties" do
User.properties.should be_named('post_comments_counter')
end
it "should optionally define a counter index column" do
Comment.properties.should be_named('users_index')
end
it "should have a counter cache of 0 by default" do
@post.comments_counter.should == 0
end
it "should increment the counter cache by 1 when a new resource is created" do
orig_counter = @post.comments_counter
@post.comments.create(
:body => 'lol',
:user => @user
)
new_counter = @post.comments_counter
(new_counter - orig_counter).should == 1
end
# This example previously reused the description of the example above
# (copy-paste) even though it actually verifies the counter index column.
it "should set the users_index when a new resource is created" do
@post.comments.create(
:body => 'lol',
:user => @user
)
@post.comments.last.users_index.should == 1
end
it "should increment the counter cache by 1 when a new resource is saved" do
orig_counter = @post.comments_counter
@post.comments.new(
:body => 'omg',
:user => @user
).save
new_counter = @post.comments_counter
(new_counter - orig_counter).should == 1
end
it "should decrement the counter cache by 1 when a resource is destroyed" do
@post.comments.create(
:body => 'wtf',
:user => @user
)
orig_counter = @post.comments_counter
@post.comments.first.destroy
new_counter = @post.comments_counter
(new_counter - orig_counter).should == -1
end
end
Updated the specs: renamed the counter index column expectations from users_index to user_comments_index, and reworded the counter-index example so its description matches what it actually asserts.
require 'spec_helper'
# Specs for the counter_cacheable plugin with the renamed per-user counter
# index column (user_comments_index).
#
# before(:all) seeds a single user with one post; the examples below add
# comments to that post and diff counter values, so they stay valid even as
# earlier examples mutate the counts.
describe DataMapper::Is::CounterCacheable do
before(:all) do
User.create(
:name => 'bob',
:posts => [
{:title => 'Hello', :body => 'Hello there.'}
]
)
end
before(:each) do
@user = User.first
@post = @user.posts.first
end
it "should define the default counter cache property" do
Post.properties.should be_named('comments_counter')
end
it "should allow defining custom named counter cache properties" do
User.properties.should be_named('post_comments_counter')
end
it "should optionally define a counter index column" do
Comment.properties.should be_named('user_comments_index')
end
it "should have a counter cache of 0 by default" do
@post.comments_counter.should == 0
end
it "should increment the counter cache by 1 when a new resource is created" do
orig_counter = @post.comments_counter
@post.comments.create(
:body => 'lol',
:user => @user
)
new_counter = @post.comments_counter
(new_counter - orig_counter).should == 1
end
it "should increment the counter cache by 1 when a new resource is saved" do
orig_counter = @post.comments_counter
@post.comments.new(
:body => 'omg',
:user => @user
).save
new_counter = @post.comments_counter
(new_counter - orig_counter).should == 1
end
# The counter index on the new comment mirrors the user's counter value.
it "should set the counter index to the counter value when a new resource is created" do
@post.comments.create(
:body => 'lol',
:user => @user
)
@post.comments.last.user_comments_index.should == @user.post_comments_counter
end
it "should decrement the counter cache by 1 when a resource is destroyed" do
@post.comments.create(
:body => 'wtf',
:user => @user
)
orig_counter = @post.comments_counter
@post.comments.first.destroy
new_counter = @post.comments_counter
(new_counter - orig_counter).should == -1
end
end
|
# Spec for SubscriptionConfirmJob.
#
# Covers four areas:
#   1. the private #proxy_orders scope (which proxy orders are confirmable),
#   2. #perform (marking orders confirmed, processing, summary emails),
#   3. the private #recently_closed_order_cycles scope,
#   4. #process! payment handling plus the confirm / failed-payment emails.
require 'spec_helper'
describe SubscriptionConfirmJob do
let(:job) { SubscriptionConfirmJob.new }
# Which proxy orders the job considers ready for confirmation.
describe "finding proxy_orders that are ready to be confirmed" do
let(:shop) { create(:distributor_enterprise) }
# order_cycle1 closed within the last hour; order_cycle2 closed just outside it.
let(:order_cycle1) { create(:simple_order_cycle, coordinator: shop, orders_close_at: 59.minutes.ago, updated_at: 1.day.ago) }
let(:order_cycle2) { create(:simple_order_cycle, coordinator: shop, orders_close_at: 61.minutes.ago, updated_at: 1.day.ago) }
let(:schedule) { create(:schedule, order_cycles: [order_cycle1, order_cycle2]) }
let(:subscription) { create(:subscription, shop: shop, schedule: schedule) }
# Baseline proxy order meeting every criterion: placed, completed order, recent OC.
let!(:proxy_order) { create(:proxy_order, subscription: subscription, order_cycle: order_cycle1, placed_at: 5.minutes.ago, order: create(:order, completed_at: 1.minute.ago)) }
let(:proxy_orders) { job.send(:proxy_orders) }
it "returns proxy orders that meet all of the criteria" do
expect(proxy_orders).to include proxy_order
end
it "returns proxy orders for paused subscriptions" do
subscription.update_attributes!(paused_at: 1.minute.ago)
expect(proxy_orders).to include proxy_order
end
it "returns proxy orders for cancelled subscriptions" do
subscription.update_attributes!(canceled_at: 1.minute.ago)
expect(proxy_orders).to include proxy_order
end
it "ignores proxy orders where the OC closed more than 1 hour ago" do
proxy_order.update_attributes!(order_cycle_id: order_cycle2.id)
expect(proxy_orders).to_not include proxy_order
end
it "ignores cancelled proxy orders" do
proxy_order.update_attributes!(canceled_at: 5.minutes.ago)
expect(proxy_orders).to_not include proxy_order
end
it "ignores proxy orders without a completed order" do
proxy_order.order.completed_at = nil
proxy_order.order.save!
expect(proxy_orders).to_not include proxy_order
end
it "ignores proxy orders without an associated order" do
proxy_order.update_attributes!(order_id: nil)
expect(proxy_orders).to_not include proxy_order
end
it "ignores proxy orders that haven't been placed yet" do
proxy_order.update_attributes!(placed_at: nil)
expect(proxy_orders).to_not include proxy_order
end
it "ignores proxy orders that have already been confirmed" do
proxy_order.update_attributes!(confirmed_at: 1.second.ago)
expect(proxy_orders).to_not include proxy_order
end
end
# #perform: stamps confirmed_at, processes each order, sends the summary.
describe "performing the job" do
context "when unconfirmed proxy_orders exist" do
let!(:proxy_order) { create(:proxy_order) }
before do
proxy_order.initialise_order!
# Stub the scope and the side-effecting collaborators so #perform's own
# behaviour (stamping + delegation) can be observed in isolation.
allow(job).to receive(:proxy_orders) { ProxyOrder.where(id: proxy_order.id) }
allow(job).to receive(:process!)
allow(job).to receive(:send_confirmation_summary_emails)
end
it "marks confirmable proxy_orders as processed by setting confirmed_at" do
expect{ job.perform }.to change{ proxy_order.reload.confirmed_at }
expect(proxy_order.confirmed_at).to be_within(5.seconds).of Time.zone.now
end
it "processes confirmable proxy_orders" do
job.perform
expect(job).to have_received(:process!)
expect(job.instance_variable_get(:@order)).to eq proxy_order.reload.order
end
it "sends a summary email" do
job.perform
expect(job).to have_received(:send_confirmation_summary_emails)
end
end
end
# Private scope: order cycles that closed within the last hour.
describe "finding recently closed order cycles" do
let!(:order_cycle1) { create(:simple_order_cycle, orders_close_at: 61.minutes.ago, updated_at: 61.minutes.ago) }
let!(:order_cycle2) { create(:simple_order_cycle, orders_close_at: nil, updated_at: 59.minutes.ago) }
let!(:order_cycle3) { create(:simple_order_cycle, orders_close_at: 61.minutes.ago, updated_at: 59.minutes.ago) }
let!(:order_cycle4) { create(:simple_order_cycle, orders_close_at: 59.minutes.ago, updated_at: 61.minutes.ago) }
let!(:order_cycle5) { create(:simple_order_cycle, orders_close_at: 1.minute.from_now) }
it "returns closed order cycles whose orders_close_at or updated_at date is within the last hour" do
order_cycles = job.send(:recently_closed_order_cycles)
expect(order_cycles).to include order_cycle3, order_cycle4
expect(order_cycles).to_not include order_cycle1, order_cycle2, order_cycle5
end
end
# #process!: payment updating/processing and which email gets sent.
describe "processing an order" do
let(:shop) { create(:distributor_enterprise) }
let(:order_cycle1) { create(:simple_order_cycle, coordinator: shop) }
let(:order_cycle2) { create(:simple_order_cycle, coordinator: shop) }
let(:schedule1) { create(:schedule, order_cycles: [order_cycle1, order_cycle2]) }
let(:subscription1) { create(:subscription, shop: shop, schedule: schedule1, with_items: true) }
let(:proxy_order) { create(:proxy_order, subscription: subscription1) }
let(:order) { proxy_order.initialise_order! }
before do
# Advance the order through its checkout states until it completes, or
# stop early if no further transition is possible.
while !order.completed? do break unless order.next! end
allow(job).to receive(:send_confirm_email).and_call_original
job.instance_variable_set(:@order, order)
Spree::MailMethod.create!(
environment: Rails.env,
preferred_mails_from: 'spree@example.com'
)
expect(job).to receive(:record_order).with(order)
end
context "when payments need to be processed" do
let(:payment_method) { create(:payment_method) }
let(:payment) { double(:payment, amount: 10) }
before do
# Force an outstanding balance so the job attempts payment processing.
allow(order).to receive(:payment_total) { 0 }
allow(order).to receive(:total) { 10 }
allow(order).to receive(:pending_payments) { [payment] }
end
context "and an error is added to the order when updating payments" do
before { expect(job).to receive(:update_payment!) { order.errors.add(:base, "a payment error") } }
it "sends a failed payment email" do
expect(job).to receive(:send_failed_payment_email)
expect(job).to_not receive(:send_confirm_email)
job.send(:process!)
end
end
context "and no errors are added when updating payments" do
before { expect(job).to receive(:update_payment!) { true } }
context "when an error occurs while processing the payment" do
before do
expect(payment).to receive(:process!).and_raise Spree::Core::GatewayError, "payment failure error"
end
it "sends a failed payment email" do
expect(job).to receive(:send_failed_payment_email)
expect(job).to_not receive(:send_confirm_email)
job.send(:process!)
end
end
context "when payments are processed without error" do
before do
expect(payment).to receive(:process!) { true }
expect(payment).to receive(:completed?) { true }
end
it "sends only a subscription confirm email, no regular confirmation emails" do
ActionMailer::Base.deliveries.clear
expect{ job.send(:process!) }.to_not enqueue_job ConfirmOrderJob
expect(job).to have_received(:send_confirm_email).once
expect(ActionMailer::Base.deliveries.count).to be 1
end
end
end
end
end
# Confirmation email: records a success and delivers via SubscriptionMailer.
describe "#send_confirm_email" do
let(:order) { instance_double(Spree::Order) }
let(:mail_mock) { double(:mailer_mock, deliver: true) }
before do
job.instance_variable_set(:@order, order)
allow(SubscriptionMailer).to receive(:confirmation_email) { mail_mock }
end
it "records a success and sends the email" do
expect(order).to receive(:update!)
expect(job).to receive(:record_success).with(order).once
job.send(:send_confirm_email)
expect(SubscriptionMailer).to have_received(:confirmation_email).with(order)
expect(mail_mock).to have_received(:deliver)
end
end
# Failed-payment email: records/logs the error and delivers via SubscriptionMailer.
describe "#send_failed_payment_email" do
let(:order) { instance_double(Spree::Order) }
let(:mail_mock) { double(:mailer_mock, deliver: true) }
before do
job.instance_variable_set(:@order, order)
allow(SubscriptionMailer).to receive(:failed_payment_email) { mail_mock }
end
it "records and logs an error and sends the email" do
expect(order).to receive(:update!)
expect(job).to receive(:record_and_log_error).with(:failed_payment, order).once
job.send(:send_failed_payment_email)
expect(SubscriptionMailer).to have_received(:failed_payment_email).with(order)
expect(mail_mock).to have_received(:deliver)
end
end
end
Move all specs in subscription_confirm_job_spec temporarily to pending
# Spec for SubscriptionConfirmJob — temporarily marked pending.
#
# NOTE: `xdescribe` disables the whole group; every example below is
# reported as pending/skipped rather than run. The content mirrors the
# active spec: #proxy_orders scope, #perform, #recently_closed_order_cycles,
# #process! payment handling, and the confirm / failed-payment emails.
require 'spec_helper'
xdescribe SubscriptionConfirmJob do
let(:job) { SubscriptionConfirmJob.new }
# Which proxy orders the job considers ready for confirmation.
describe "finding proxy_orders that are ready to be confirmed" do
let(:shop) { create(:distributor_enterprise) }
# order_cycle1 closed within the last hour; order_cycle2 closed just outside it.
let(:order_cycle1) { create(:simple_order_cycle, coordinator: shop, orders_close_at: 59.minutes.ago, updated_at: 1.day.ago) }
let(:order_cycle2) { create(:simple_order_cycle, coordinator: shop, orders_close_at: 61.minutes.ago, updated_at: 1.day.ago) }
let(:schedule) { create(:schedule, order_cycles: [order_cycle1, order_cycle2]) }
let(:subscription) { create(:subscription, shop: shop, schedule: schedule) }
let!(:proxy_order) { create(:proxy_order, subscription: subscription, order_cycle: order_cycle1, placed_at: 5.minutes.ago, order: create(:order, completed_at: 1.minute.ago)) }
let(:proxy_orders) { job.send(:proxy_orders) }
it "returns proxy orders that meet all of the criteria" do
expect(proxy_orders).to include proxy_order
end
it "returns proxy orders for paused subscriptions" do
subscription.update_attributes!(paused_at: 1.minute.ago)
expect(proxy_orders).to include proxy_order
end
it "returns proxy orders for cancelled subscriptions" do
subscription.update_attributes!(canceled_at: 1.minute.ago)
expect(proxy_orders).to include proxy_order
end
it "ignores proxy orders where the OC closed more than 1 hour ago" do
proxy_order.update_attributes!(order_cycle_id: order_cycle2.id)
expect(proxy_orders).to_not include proxy_order
end
it "ignores cancelled proxy orders" do
proxy_order.update_attributes!(canceled_at: 5.minutes.ago)
expect(proxy_orders).to_not include proxy_order
end
it "ignores proxy orders without a completed order" do
proxy_order.order.completed_at = nil
proxy_order.order.save!
expect(proxy_orders).to_not include proxy_order
end
it "ignores proxy orders without an associated order" do
proxy_order.update_attributes!(order_id: nil)
expect(proxy_orders).to_not include proxy_order
end
it "ignores proxy orders that haven't been placed yet" do
proxy_order.update_attributes!(placed_at: nil)
expect(proxy_orders).to_not include proxy_order
end
it "ignores proxy orders that have already been confirmed" do
proxy_order.update_attributes!(confirmed_at: 1.second.ago)
expect(proxy_orders).to_not include proxy_order
end
end
# #perform: stamps confirmed_at, processes each order, sends the summary.
describe "performing the job" do
context "when unconfirmed proxy_orders exist" do
let!(:proxy_order) { create(:proxy_order) }
before do
proxy_order.initialise_order!
# Stub the scope and side-effecting collaborators so #perform's own
# behaviour can be observed in isolation.
allow(job).to receive(:proxy_orders) { ProxyOrder.where(id: proxy_order.id) }
allow(job).to receive(:process!)
allow(job).to receive(:send_confirmation_summary_emails)
end
it "marks confirmable proxy_orders as processed by setting confirmed_at" do
expect{ job.perform }.to change{ proxy_order.reload.confirmed_at }
expect(proxy_order.confirmed_at).to be_within(5.seconds).of Time.zone.now
end
it "processes confirmable proxy_orders" do
job.perform
expect(job).to have_received(:process!)
expect(job.instance_variable_get(:@order)).to eq proxy_order.reload.order
end
it "sends a summary email" do
job.perform
expect(job).to have_received(:send_confirmation_summary_emails)
end
end
end
# Private scope: order cycles that closed within the last hour.
describe "finding recently closed order cycles" do
let!(:order_cycle1) { create(:simple_order_cycle, orders_close_at: 61.minutes.ago, updated_at: 61.minutes.ago) }
let!(:order_cycle2) { create(:simple_order_cycle, orders_close_at: nil, updated_at: 59.minutes.ago) }
let!(:order_cycle3) { create(:simple_order_cycle, orders_close_at: 61.minutes.ago, updated_at: 59.minutes.ago) }
let!(:order_cycle4) { create(:simple_order_cycle, orders_close_at: 59.minutes.ago, updated_at: 61.minutes.ago) }
let!(:order_cycle5) { create(:simple_order_cycle, orders_close_at: 1.minute.from_now) }
it "returns closed order cycles whose orders_close_at or updated_at date is within the last hour" do
order_cycles = job.send(:recently_closed_order_cycles)
expect(order_cycles).to include order_cycle3, order_cycle4
expect(order_cycles).to_not include order_cycle1, order_cycle2, order_cycle5
end
end
# #process!: payment updating/processing and which email gets sent.
describe "processing an order" do
let(:shop) { create(:distributor_enterprise) }
let(:order_cycle1) { create(:simple_order_cycle, coordinator: shop) }
let(:order_cycle2) { create(:simple_order_cycle, coordinator: shop) }
let(:schedule1) { create(:schedule, order_cycles: [order_cycle1, order_cycle2]) }
let(:subscription1) { create(:subscription, shop: shop, schedule: schedule1, with_items: true) }
let(:proxy_order) { create(:proxy_order, subscription: subscription1) }
let(:order) { proxy_order.initialise_order! }
before do
# Advance the order through its checkout states until it completes, or
# stop early if no further transition is possible.
while !order.completed? do break unless order.next! end
allow(job).to receive(:send_confirm_email).and_call_original
job.instance_variable_set(:@order, order)
Spree::MailMethod.create!(
environment: Rails.env,
preferred_mails_from: 'spree@example.com'
)
expect(job).to receive(:record_order).with(order)
end
context "when payments need to be processed" do
let(:payment_method) { create(:payment_method) }
let(:payment) { double(:payment, amount: 10) }
before do
# Force an outstanding balance so the job attempts payment processing.
allow(order).to receive(:payment_total) { 0 }
allow(order).to receive(:total) { 10 }
allow(order).to receive(:pending_payments) { [payment] }
end
context "and an error is added to the order when updating payments" do
before { expect(job).to receive(:update_payment!) { order.errors.add(:base, "a payment error") } }
it "sends a failed payment email" do
expect(job).to receive(:send_failed_payment_email)
expect(job).to_not receive(:send_confirm_email)
job.send(:process!)
end
end
context "and no errors are added when updating payments" do
before { expect(job).to receive(:update_payment!) { true } }
context "when an error occurs while processing the payment" do
before do
expect(payment).to receive(:process!).and_raise Spree::Core::GatewayError, "payment failure error"
end
it "sends a failed payment email" do
expect(job).to receive(:send_failed_payment_email)
expect(job).to_not receive(:send_confirm_email)
job.send(:process!)
end
end
context "when payments are processed without error" do
before do
expect(payment).to receive(:process!) { true }
expect(payment).to receive(:completed?) { true }
end
it "sends only a subscription confirm email, no regular confirmation emails" do
ActionMailer::Base.deliveries.clear
expect{ job.send(:process!) }.to_not enqueue_job ConfirmOrderJob
expect(job).to have_received(:send_confirm_email).once
expect(ActionMailer::Base.deliveries.count).to be 1
end
end
end
end
end
# Confirmation email: records a success and delivers via SubscriptionMailer.
describe "#send_confirm_email" do
let(:order) { instance_double(Spree::Order) }
let(:mail_mock) { double(:mailer_mock, deliver: true) }
before do
job.instance_variable_set(:@order, order)
allow(SubscriptionMailer).to receive(:confirmation_email) { mail_mock }
end
it "records a success and sends the email" do
expect(order).to receive(:update!)
expect(job).to receive(:record_success).with(order).once
job.send(:send_confirm_email)
expect(SubscriptionMailer).to have_received(:confirmation_email).with(order)
expect(mail_mock).to have_received(:deliver)
end
end
# Failed-payment email: records/logs the error and delivers via SubscriptionMailer.
describe "#send_failed_payment_email" do
let(:order) { instance_double(Spree::Order) }
let(:mail_mock) { double(:mailer_mock, deliver: true) }
before do
job.instance_variable_set(:@order, order)
allow(SubscriptionMailer).to receive(:failed_payment_email) { mail_mock }
end
it "records and logs an error and sends the email" do
expect(order).to receive(:update!)
expect(job).to receive(:record_and_log_error).with(:failed_payment, order).once
job.send(:send_failed_payment_email)
expect(SubscriptionMailer).to have_received(:failed_payment_email).with(order)
expect(mail_mock).to have_received(:deliver)
end
end
end
|
require 'spec_helper'

# Stub error classes used to exercise ClientWrap's method_missing retry
# handling without depending on the real AWS SDK error hierarchy.
module RSpec::Mocks
  module Errors
    class RequestLimitExceeded < RuntimeError
    end

    class Throttling < RuntimeError
    end
  end
end

# rubocop:disable Metrics/BlockLength
describe Awspec::Helper::ClientWrap do
  let(:subj) { Awspec::Helper::ClientWrap }

  # Fixed leftover debug text in the description ('wtf .new' -> '.new')
  # and the 'arrtibute' misspelling below.
  describe '.new' do
    let(:client) { subj.new('client') }

    context 'given the correct required arguments' do
      it 'should create a new instance' do
        expect { subj.new("I'm a client") }.to_not raise_error
      end

      it 'should set the client attribute' do
        expect(client.client).to eq 'client'
      end

      it 'should set the backoff attribute to the default' do
        expect(client.backoff).to eq 0.0
      end

      it 'should set the iteration attribute to the default' do
        expect(client.iteration).to eq 1
      end

      it 'should set the backoff_limit attribute to the default' do
        expect(client.backoff_limit).to eq 30.0
      end
    end

    context 'given optional args' do
      # Override the singleton config for this context, restoring the
      # library defaults afterwards so other contexts are unaffected.
      before(:all) do
        config = Awspec::Config.instance
        config.client_backoff(1.0)
        config.client_backoff_limit(10)
        config.client_iteration(2)
      end

      after(:all) do
        config = Awspec::Config.instance
        config.client_backoff(0.0)
        config.client_backoff_limit(30.0)
        config.client_iteration(1)
      end

      it 'should set the backoff attribute' do
        expect(client.backoff).to eq 1.0
      end

      it 'should set the iteration attribute' do
        expect(client.iteration).to eq 2
      end

      it 'should set the backoff_limit attribute' do
        expect(client.backoff_limit).to eq 10.0
      end
    end

    context 'given no client' do
      it 'should raise an ArgumentError' do
        expect { subj.new }.to raise_error(ArgumentError, 'Client can not be nil')
      end
    end

    context 'given a nil client' do
      it 'should raise an ArgumentError' do
        expect { subj.new(nil) }.to raise_error(ArgumentError, 'Client can not be nil')
      end
    end
  end

  describe '#method_missing (protected)' do
    context 'given the client does not raise ::RequestLimitExceeded error' do
      let(:client) { subj.new({}) }

      it 'should pass the method through to the client' do
        expect(client.empty?).to be true
      end

      it 'should raise exceptions not caught' do
        expect { client.foo }.to raise_error(NoMethodError)
      end
    end

    context 'given the client raises ::RequestLimitExceeded error' do
      let(:client) { double 'fake' }

      # Shrink the backoff limit so the retry loop gives up quickly.
      before(:all) { Awspec::Config.instance.client_backoff_limit(1) }
      after(:all) { Awspec::Config.instance.client_backoff_limit(30.0) }

      it 'should be called multiple times in a sleep loop, and re-raise the exception if it is not cleared' do
        calls = 0
        allow(client).to receive(:test_me) do
          calls += 1
          raise RSpec::Mocks::Errors::RequestLimitExceeded
        end
        foo = subj.new(client)
        expect { foo.test_me }.to raise_error(RSpec::Mocks::Errors::RequestLimitExceeded)
        expect(foo.backoff).to eq(2.5)
        expect(foo.iteration).to eq(3)
      end

      it 'return as expected once the error is cleared' do
        calls = 0
        res = ''
        allow(client).to receive(:test_me) do
          calls += 1
          # Fail only on the first attempt; the wrapper should retry and succeed.
          raise RSpec::Mocks::Errors::RequestLimitExceeded, 'Fail' if calls == 1
          'done'
        end
        foo = subj.new(client)
        expect { res = foo.test_me }.to_not raise_error
        expect(res).to eq 'done'
      end
    end

    context 'given the client raises ::Throttling error' do
      let(:client) { double 'fake' }

      before(:all) { Awspec::Config.instance.client_backoff_limit(1) }
      after(:all) { Awspec::Config.instance.client_backoff_limit(30.0) }

      it 'should be called multiple times in a sleep loop, and re-raise the exception if it is not cleared' do
        calls = 0
        allow(client).to receive(:test_me) do
          calls += 1
          raise RSpec::Mocks::Errors::Throttling
        end
        foo = subj.new(client)
        expect { foo.test_me }.to raise_error(RSpec::Mocks::Errors::Throttling)
        expect(foo.backoff).to eq(2.5)
        expect(foo.iteration).to eq(3)
      end

      it 'return as expected once the error is cleared' do
        calls = 0
        res = ''
        allow(client).to receive(:test_me) do
          calls += 1
          raise RSpec::Mocks::Errors::Throttling, 'Fail' if calls == 1
          'done'
        end
        foo = subj.new(client)
        expect { res = foo.test_me }.to_not raise_error
        expect(res).to eq 'done'
      end
    end
  end
end
# rubocop:enable Metrics/BlockLength
Removed an extraneous line flagged by RuboCop.
# Spec for Awspec::Helper::ClientWrap: verifies constructor defaults and
# config-driven overrides, argument validation, and the method_missing
# retry/backoff loop for RequestLimitExceeded and Throttling errors.
#
# NOTE(review): the description 'wtf .new' and the spelling 'arrtibute'
# below look like leftovers/typos — consider cleaning them up.
require 'spec_helper'
# used to test the method_missing functionality #
# Stub error classes standing in for the AWS SDK's throttling errors.
module RSpec::Mocks
module Errors
class RequestLimitExceeded < RuntimeError
end
class Throttling < RuntimeError
end
end
end
# rubocop:disable Metrics/BlockLength
describe Awspec::Helper::ClientWrap do
let(:subj) { Awspec::Helper::ClientWrap }
describe 'wtf .new' do
let(:client) { subj.new('client') }
context 'given the correct required arguments' do
it 'should create a new instance' do
expect { subj.new("I'm a client") }.to_not raise_error
end
it 'should set the client arrtibute' do
expect(client.client).to eq 'client'
end
it 'should set the backoff attribute to the default' do
expect(client.backoff).to eq 0.0
end
it 'should set the iteration attribute to the default' do
expect(client.iteration).to eq 1
end
it 'should set the backoff_limit attribute to the default' do
expect(client.backoff_limit).to eq 30.0
end
end
context 'given optional args' do
# Override the singleton config for this context; restored in after(:all).
before(:all) do
config = Awspec::Config.instance
config.client_backoff(1.0)
config.client_backoff_limit(10)
config.client_iteration(2)
end
after(:all) do
config = Awspec::Config.instance
config.client_backoff(0.0)
config.client_backoff_limit(30.0)
config.client_iteration(1)
end
it 'should set the backoff attribute' do
expect(client.backoff).to eq 1.0
end
it 'should set the iteration attribute' do
expect(client.iteration).to eq 2
end
it 'should set the backoff_limit attribute' do
expect(client.backoff_limit).to eq 10.0
end
end
context 'given no client' do
it 'should raise an ArgumentError' do
expect { subj.new }.to raise_error(ArgumentError, 'Client can not be nil')
end
end
context 'given a nil client' do
it 'should raise an ArgumentError' do
expect { subj.new(nil) }.to raise_error(ArgumentError, 'Client can not be nil')
end
end
end
describe '#method_missing (protected)' do
context 'given the client does not raise ::RequestLimitExceeded error' do
let(:client) { subj.new({}) }
it 'should pass the method through to the client' do
expect(client.empty?).to be true
end
it 'should raise exceptions not caught' do
expect { client.foo }.to raise_error(NoMethodError)
end
end
context 'given the client raises ::RequestLimitExceeded error' do
let(:client) { double 'fake' }
# Shrink the backoff limit so the retry loop gives up quickly.
before(:all) { Awspec::Config.instance.client_backoff_limit(1) }
after(:all) { Awspec::Config.instance.client_backoff_limit(30.0) }
it 'should be called multiple times in a sleep loop, and re-raise the exception if it is not cleared' do
calls = 0
allow(client).to receive(:test_me) do
calls += 1
raise RSpec::Mocks::Errors::RequestLimitExceeded
end
foo = subj.new(client)
expect { foo.test_me }.to raise_error(RSpec::Mocks::Errors::RequestLimitExceeded)
# Expected backoff/iteration after the retries exhaust the limit of 1;
# exact values presumably follow ClientWrap's backoff progression —
# confirm against the implementation if they change.
expect(foo.backoff).to eq(2.5)
expect(foo.iteration).to eq(3)
end
it 'return as expected once the error is cleared' do
calls = 0
res = ''
allow(client).to receive(:test_me) do
calls += 1
# Fail only on the first attempt; the wrapper should retry and succeed.
raise RSpec::Mocks::Errors::RequestLimitExceeded, 'Fail' if calls == 1
'done'
end
foo = subj.new(client)
expect { res = foo.test_me }.to_not raise_error
expect(res).to eq 'done'
end
end
context 'given the client raises ::Throttling error' do
let(:client) { double 'fake' }
before(:all) { Awspec::Config.instance.client_backoff_limit(1) }
after(:all) { Awspec::Config.instance.client_backoff_limit(30.0) }
it 'should be called multiple times in a sleep loop, and re-raise the exception if it is not cleared' do
calls = 0
allow(client).to receive(:test_me) do
calls += 1
raise RSpec::Mocks::Errors::Throttling
end
foo = subj.new(client)
expect { foo.test_me }.to raise_error(RSpec::Mocks::Errors::Throttling)
expect(foo.backoff).to eq(2.5)
expect(foo.iteration).to eq(3)
end
it 'return as expected once the error is cleared' do
calls = 0
res = ''
allow(client).to receive(:test_me) do
calls += 1
raise RSpec::Mocks::Errors::Throttling, 'Fail' if calls == 1
'done'
end
foo = subj.new(client)
expect { res = foo.test_me }.to_not raise_error
expect(res).to eq 'done'
end
end
end
end
|
enable consistent reads for shared tables in DynamoDB.
|
require "spec_helper"

module SecureHeaders
  # Specs for the Rack middleware: verifies that security headers are set on
  # responses, that per-request and named configuration overrides are
  # honoured, and that Set-Cookie headers are flagged (Secure/HttpOnly/
  # SameSite) according to configuration.
  describe Middleware do
    let(:app) { lambda { |env| [200, env, "app"] } }
    # A downstream app that always sets a cookie, for the cookie-flagging specs.
    let(:cookie_app) { lambda { |env| [200, env.merge("Set-Cookie" => "foo=bar"), "app"] } }
    let(:middleware) { Middleware.new(app) }
    let(:cookie_middleware) { Middleware.new(cookie_app) }

    before(:each) do
      reset_config
      Configuration.default do |config|
        # use all defaults provided by the library
      end
    end

    it "sets the headers" do
      _, env = middleware.call(Rack::MockRequest.env_for("https://looocalhost", {}))
      expect_default_values(env)
    end

    it "respects overrides" do
      request = Rack::Request.new("HTTP_X_FORWARDED_SSL" => "on")
      SecureHeaders.override_x_frame_options(request, "DENY")
      _, env = middleware.call request.env
      expect(env[XFrameOptions::HEADER_NAME]).to eq("DENY")
    end

    it "uses named overrides" do
      Configuration.override("my_custom_config") do |config|
        config.csp[:script_src] = %w(example.org)
      end
      request = Rack::Request.new({})
      SecureHeaders.use_secure_headers_override(request, "my_custom_config")
      expect(request.env[SECURE_HEADERS_CONFIG]).to be(Configuration.get("my_custom_config"))
      _, env = middleware.call request.env
      expect(env[CSP::HEADER_NAME]).to match("example.org")
    end

    context "secure_cookies" do
      context "cookies should be flagged" do
        it "flags cookies as secure" do
          # secure_cookies= is deprecated; swallow the warning it emits.
          capture_warning do
            Configuration.default { |config| config.secure_cookies = true }
          end
          request = Rack::Request.new("HTTPS" => "on")
          _, env = cookie_middleware.call request.env
          expect(env['Set-Cookie']).to match(SecureHeaders::Cookie::SECURE_REGEXP)
        end
      end

      context "cookies should not be flagged" do
        # Fixed description typo: "does not flags" -> "does not flag".
        it "does not flag cookies as secure" do
          capture_warning do
            Configuration.default { |config| config.secure_cookies = false }
          end
          request = Rack::Request.new("HTTPS" => "on")
          _, env = cookie_middleware.call request.env
          expect(env['Set-Cookie']).not_to match(SecureHeaders::Cookie::SECURE_REGEXP)
        end
      end
    end

    context "cookies" do
      it "flags cookies from configuration" do
        Configuration.default { |config| config.cookies = { secure: true, httponly: true, samesite: true } }
        request = Rack::Request.new("HTTPS" => "on")
        _, env = cookie_middleware.call request.env
        expect(env['Set-Cookie']).to match(SecureHeaders::Cookie::SECURE_REGEXP)
        expect(env['Set-Cookie']).to match(SecureHeaders::Cookie::HTTPONLY_REGEXP)
        expect(env['Set-Cookie']).to match(SecureHeaders::Cookie::SAMESITE_REGEXP)
      end

      it "disables secure cookies for non-https requests" do
        Configuration.default { |config| config.cookies = { secure: true } }
        request = Rack::Request.new("HTTPS" => "off")
        _, env = cookie_middleware.call request.env
        expect(env['Set-Cookie']).not_to match(SecureHeaders::Cookie::SECURE_REGEXP)
      end
    end
  end
end
add combination test of Strict and Lax SameSite cookies
# Specs for SecureHeaders::Middleware: verifies that security headers are
# injected into Rack responses, that per-request and named configuration
# overrides are honored, and that Set-Cookie values are flagged
# (Secure / HttpOnly / SameSite) according to configuration.
require "spec_helper"

module SecureHeaders
  describe Middleware do
    # Minimal Rack apps: a plain one, and one that emits a cookie so the
    # cookie-flagging behavior can be observed on the response.
    let(:app) { lambda { |env| [200, env, "app"] } }
    let(:cookie_app) { lambda { |env| [200, env.merge("Set-Cookie" => "foo=bar"), "app"] } }
    let(:middleware) { Middleware.new(app) }
    let(:cookie_middleware) { Middleware.new(cookie_app) }

    before(:each) do
      reset_config
      Configuration.default do |config|
        # use all default provided by the library
      end
    end

    it "sets the headers" do
      _, env = middleware.call(Rack::MockRequest.env_for("https://looocalhost", {}))
      expect_default_values(env)
    end

    it "respects overrides" do
      request = Rack::Request.new("HTTP_X_FORWARDED_SSL" => "on")
      SecureHeaders.override_x_frame_options(request, "DENY")
      _, env = middleware.call request.env
      expect(env[XFrameOptions::HEADER_NAME]).to eq("DENY")
    end

    it "uses named overrides" do
      Configuration.override("my_custom_config") do |config|
        config.csp[:script_src] = %w(example.org)
      end
      request = Rack::Request.new({})
      SecureHeaders.use_secure_headers_override(request, "my_custom_config")
      # The named override is stashed on the request env and applied on call.
      expect(request.env[SECURE_HEADERS_CONFIG]).to be(Configuration.get("my_custom_config"))
      _, env = middleware.call request.env
      expect(env[CSP::HEADER_NAME]).to match("example.org")
    end

    context "secure_cookies" do
      context "cookies should be flagged" do
        it "flags cookies as secure" do
          # secure_cookies emits a deprecation warning; capture it so the
          # spec output stays clean.
          capture_warning do
            Configuration.default { |config| config.secure_cookies = true }
          end
          request = Rack::Request.new("HTTPS" => "on")
          _, env = cookie_middleware.call request.env
          expect(env['Set-Cookie']).to match(SecureHeaders::Cookie::SECURE_REGEXP)
        end
      end

      context "cookies should not be flagged" do
        it "does not flags cookies as secure" do
          capture_warning do
            Configuration.default { |config| config.secure_cookies = false }
          end
          request = Rack::Request.new("HTTPS" => "on")
          _, env = cookie_middleware.call request.env
          expect(env['Set-Cookie']).not_to match(SecureHeaders::Cookie::SECURE_REGEXP)
        end
      end
    end

    context "cookies" do
      it "flags cookies from configuration" do
        Configuration.default { |config| config.cookies = { secure: true, httponly: true, samesite: true } }
        request = Rack::Request.new("HTTPS" => "on")
        _, env = cookie_middleware.call request.env
        expect(env['Set-Cookie']).to match(SecureHeaders::Cookie::SECURE_REGEXP)
        expect(env['Set-Cookie']).to match(SecureHeaders::Cookie::HTTPONLY_REGEXP)
        expect(env['Set-Cookie']).to match(SecureHeaders::Cookie::SAMESITE_REGEXP)
      end

      it "flags cookies with a combination of SameSite configurations" do
        # Two cookies on one response: "_session" is opted into Strict via
        # `only`, while "_guest" falls through to Lax via `except`.
        cookie_middleware = Middleware.new(lambda { |env| [200, env.merge("Set-Cookie" => ["_session=foobar", "_guest=true"]), "app"] })
        Configuration.default { |config| config.cookies = { samesite: { lax: { except: ["_session"] }, strict: { only: ["_session"] } } } }
        request = Rack::Request.new("HTTPS" => "on")
        _, env = cookie_middleware.call request.env
        expect(env['Set-Cookie']).to match("_session=foobar; SameSite=Strict")
        expect(env['Set-Cookie']).to match("_guest=true; SameSite=Lax")
      end

      it "disables secure cookies for non-https requests" do
        Configuration.default { |config| config.cookies = { secure: true } }
        request = Rack::Request.new("HTTPS" => "off")
        _, env = cookie_middleware.call request.env
        expect(env['Set-Cookie']).not_to match(SecureHeaders::Cookie::SECURE_REGEXP)
      end
    end
  end
end
|
# Specs for TaskHelpers::Exports::Tags: exporting tag categories
# (classifications) and their entries as YAML files into a directory.
#
# FIX: the file-content expectations previously used `eq`, which is
# order-sensitive; the exporter does not guarantee element order, causing
# sporadic failures. `contain_exactly` compares ignoring order.
describe TaskHelpers::Exports::Tags do
  # One user-defined category and two default (system) categories; default
  # categories are only exported when :all => true is passed.
  let(:parent) { FactoryGirl.create(:classification, :name => "export_test_category", :description => "Export Test") }
  let(:def_parent) { FactoryGirl.create(:classification, :name => "default_test_category", :description => "Default Export Test", :default => true) }
  let(:def_parent2) { FactoryGirl.create(:classification, :name => "default_test2_category", :description => "Default Export Test 2", :default => true) }
  let(:export_dir) { Dir.mktmpdir('miq_exp_dir') }

  # Expected YAML payload for the user-defined category and its two entries.
  let(:tag_export_test) do
    [{"description" => "Export Test",
      "icon" => nil,
      "read_only" => false,
      "syntax" => "string",
      "single_value" => false,
      "example_text" => nil,
      "parent_id" => 0,
      "show" => true,
      "default" => nil,
      "perf_by_tag" => nil,
      "name" => "export_test_category",
      "entries" => [{"description" => "Test Entry",
                     "icon" => nil,
                     "read_only" => false,
                     "syntax" => "string",
                     "single_value" => false,
                     "example_text" => nil,
                     "show" => true,
                     "default" => nil,
                     "perf_by_tag" => nil,
                     "name" => "test_entry"},
                    {"description" => "Another Test Entry",
                     "icon" => nil,
                     "read_only" => false,
                     "syntax" => "string",
                     "single_value" => false,
                     "example_text" => nil,
                     "show" => true,
                     "default" => nil,
                     "perf_by_tag" => nil,
                     "name" => "another_test_entry"}]}]
  end

  # Expected YAML payload for the first default category.
  let(:tag_default_export_test) do
    [{"description" => "Default Export Test",
      "icon" => nil,
      "read_only" => false,
      "syntax" => "string",
      "single_value" => false,
      "example_text" => nil,
      "parent_id" => 0,
      "show" => true,
      "default" => true,
      "perf_by_tag" => nil,
      "name" => "default_test_category",
      "entries" => [{"description" => "Default Test Entry",
                     "icon" => nil,
                     "read_only" => false,
                     "syntax" => "string",
                     "single_value" => false,
                     "example_text" => nil,
                     "show" => true,
                     "default" => true,
                     "perf_by_tag" => nil,
                     "name" => "def_test_entry"}]}]
  end

  # Expected YAML payload for the second default category and its two entries.
  let(:tag_default_export_test_2) do
    [{"description" => "Default Export Test 2",
      "icon" => nil,
      "read_only" => false,
      "syntax" => "string",
      "single_value" => false,
      "example_text" => nil,
      "parent_id" => 0,
      "show" => true,
      "default" => true,
      "perf_by_tag" => nil,
      "name" => "default_test2_category",
      "entries" => [{"description" => "Default Test Entry 2",
                     "icon" => nil,
                     "read_only" => false,
                     "syntax" => "string",
                     "single_value" => false,
                     "example_text" => nil,
                     "show" => true,
                     "default" => true,
                     "perf_by_tag" => nil,
                     "name" => "def_test_entry_2"},
                    {"description" => "Default Test Entry 3",
                     "icon" => nil,
                     "read_only" => false,
                     "syntax" => "string",
                     "single_value" => false,
                     "example_text" => nil,
                     "show" => true,
                     "default" => nil,
                     "perf_by_tag" => nil,
                     "name" => "def_test_entry_3"}]}]
  end

  before do
    FactoryGirl.create(:classification_tag, :name => "test_entry", :description => "Test Entry", :parent => parent)
    FactoryGirl.create(:classification_tag, :name => "another_test_entry", :description => "Another Test Entry", :parent => parent)
    FactoryGirl.create(:classification_tag, :name => "def_test_entry", :description => "Default Test Entry", :parent => def_parent, :default => true)
    FactoryGirl.create(:classification_tag, :name => "def_test_entry_2", :description => "Default Test Entry 2", :parent => def_parent2, :default => true)
    FactoryGirl.create(:classification_tag, :name => "def_test_entry_3", :description => "Default Test Entry 3", :parent => def_parent2)
  end

  after do
    FileUtils.remove_entry export_dir
  end

  it 'exports user tags to a given directory' do
    TaskHelpers::Exports::Tags.new.export(:directory => export_dir)
    file_contents = File.read("#{export_dir}/Export_Test.yaml")
    file_contents2 = File.read("#{export_dir}/Default_Export_Test_2.yaml")
    # Order-independent comparison: exporter output ordering is not stable.
    expect(YAML.safe_load(file_contents)).to contain_exactly(*tag_export_test)
    expect(YAML.safe_load(file_contents2)).to contain_exactly(*tag_default_export_test_2)
    expect(Dir[File.join(export_dir, '**', '*')].count { |file| File.file?(file) }).to eq(2)
  end

  it 'exports all tags to a given directory' do
    TaskHelpers::Exports::Tags.new.export(:directory => export_dir, :all => true)
    file_contents = File.read("#{export_dir}/Export_Test.yaml")
    file_contents2 = File.read("#{export_dir}/Default_Export_Test.yaml")
    file_contents3 = File.read("#{export_dir}/Default_Export_Test_2.yaml")
    expect(YAML.safe_load(file_contents)).to contain_exactly(*tag_export_test)
    expect(YAML.safe_load(file_contents2)).to contain_exactly(*tag_default_export_test)
    expect(YAML.safe_load(file_contents3)).to contain_exactly(*tag_default_export_test_2)
    expect(Dir[File.join(export_dir, '**', '*')].count { |file| File.file?(file) }).to eq(3)
  end
end
Fix sporadic spec failure caused by using the order-sensitive `eq` matcher on arrays; use order-independent matching instead
# Specs for TaskHelpers::Exports::Tags: exporting tag categories
# (classifications) and their entries as YAML files into a directory.
# File-content expectations use `contain_exactly` because the exporter does
# not guarantee element order.
describe TaskHelpers::Exports::Tags do
  # One user-defined category and two default (system) categories; default
  # categories are only exported when :all => true is passed.
  let(:parent) { FactoryGirl.create(:classification, :name => "export_test_category", :description => "Export Test") }
  let(:def_parent) { FactoryGirl.create(:classification, :name => "default_test_category", :description => "Default Export Test", :default => true) }
  let(:def_parent2) { FactoryGirl.create(:classification, :name => "default_test2_category", :description => "Default Export Test 2", :default => true) }
  let(:export_dir) { Dir.mktmpdir('miq_exp_dir') }

  # Expected YAML payload for the user-defined category and its two entries.
  let(:tag_export_test) do
    [{"description" => "Export Test",
      "icon" => nil,
      "read_only" => false,
      "syntax" => "string",
      "single_value" => false,
      "example_text" => nil,
      "parent_id" => 0,
      "show" => true,
      "default" => nil,
      "perf_by_tag" => nil,
      "name" => "export_test_category",
      "entries" => [{"description" => "Test Entry",
                     "icon" => nil,
                     "read_only" => false,
                     "syntax" => "string",
                     "single_value" => false,
                     "example_text" => nil,
                     "show" => true,
                     "default" => nil,
                     "perf_by_tag" => nil,
                     "name" => "test_entry"},
                    {"description" => "Another Test Entry",
                     "icon" => nil,
                     "read_only" => false,
                     "syntax" => "string",
                     "single_value" => false,
                     "example_text" => nil,
                     "show" => true,
                     "default" => nil,
                     "perf_by_tag" => nil,
                     "name" => "another_test_entry"}]}]
  end

  # Expected YAML payload for the first default category.
  let(:tag_default_export_test) do
    [{"description" => "Default Export Test",
      "icon" => nil,
      "read_only" => false,
      "syntax" => "string",
      "single_value" => false,
      "example_text" => nil,
      "parent_id" => 0,
      "show" => true,
      "default" => true,
      "perf_by_tag" => nil,
      "name" => "default_test_category",
      "entries" => [{"description" => "Default Test Entry",
                     "icon" => nil,
                     "read_only" => false,
                     "syntax" => "string",
                     "single_value" => false,
                     "example_text" => nil,
                     "show" => true,
                     "default" => true,
                     "perf_by_tag" => nil,
                     "name" => "def_test_entry"}]}]
  end

  # Expected YAML payload for the second default category and its two entries.
  let(:tag_default_export_test_2) do
    [{"description" => "Default Export Test 2",
      "icon" => nil,
      "read_only" => false,
      "syntax" => "string",
      "single_value" => false,
      "example_text" => nil,
      "parent_id" => 0,
      "show" => true,
      "default" => true,
      "perf_by_tag" => nil,
      "name" => "default_test2_category",
      "entries" => [{"description" => "Default Test Entry 2",
                     "icon" => nil,
                     "read_only" => false,
                     "syntax" => "string",
                     "single_value" => false,
                     "example_text" => nil,
                     "show" => true,
                     "default" => true,
                     "perf_by_tag" => nil,
                     "name" => "def_test_entry_2"},
                    {"description" => "Default Test Entry 3",
                     "icon" => nil,
                     "read_only" => false,
                     "syntax" => "string",
                     "single_value" => false,
                     "example_text" => nil,
                     "show" => true,
                     "default" => nil,
                     "perf_by_tag" => nil,
                     "name" => "def_test_entry_3"}]}]
  end

  before do
    FactoryGirl.create(:classification_tag, :name => "test_entry", :description => "Test Entry", :parent => parent)
    FactoryGirl.create(:classification_tag, :name => "another_test_entry", :description => "Another Test Entry", :parent => parent)
    FactoryGirl.create(:classification_tag, :name => "def_test_entry", :description => "Default Test Entry", :parent => def_parent, :default => true)
    FactoryGirl.create(:classification_tag, :name => "def_test_entry_2", :description => "Default Test Entry 2", :parent => def_parent2, :default => true)
    FactoryGirl.create(:classification_tag, :name => "def_test_entry_3", :description => "Default Test Entry 3", :parent => def_parent2)
  end

  after do
    FileUtils.remove_entry export_dir
  end

  it 'exports user tags to a given directory' do
    TaskHelpers::Exports::Tags.new.export(:directory => export_dir)
    file_contents = File.read("#{export_dir}/Export_Test.yaml")
    file_contents2 = File.read("#{export_dir}/Default_Export_Test_2.yaml")
    expect(YAML.safe_load(file_contents)).to contain_exactly(*tag_export_test)
    expect(YAML.safe_load(file_contents2)).to contain_exactly(*tag_default_export_test_2)
    expect(Dir[File.join(export_dir, '**', '*')].count { |file| File.file?(file) }).to eq(2)
  end

  it 'exports all tags to a given directory' do
    TaskHelpers::Exports::Tags.new.export(:directory => export_dir, :all => true)
    file_contents = File.read("#{export_dir}/Export_Test.yaml")
    file_contents2 = File.read("#{export_dir}/Default_Export_Test.yaml")
    file_contents3 = File.read("#{export_dir}/Default_Export_Test_2.yaml")
    expect(YAML.safe_load(file_contents)).to contain_exactly(*tag_export_test)
    expect(YAML.safe_load(file_contents2)).to contain_exactly(*tag_default_export_test)
    expect(YAML.safe_load(file_contents3)).to contain_exactly(*tag_default_export_test_2)
    expect(Dir[File.join(export_dir, '**', '*')].count { |file| File.file?(file) }).to eq(3)
  end
end
|
require 'rails_helper'

# Specs for JsonDocumentImporter: parses exported CMS claim JSON documents,
# validates them against the schema below, and posts claims (plus nested
# defendants, representation orders, fees, expenses and dates attended)
# to the API creation endpoints.
describe JsonDocumentImporter do
  let(:schema) { json_schema }
  let(:importer) { JsonDocumentImporter.new('./spec/examples/cms_exported_claim.json', schema) }
  let(:invalid_importer) { JsonDocumentImporter.new('./spec/examples/invalid_cms_exported_claim.json', schema) }
  let(:multiple_claim_importer) { JsonDocumentImporter.new('./spec/examples/multiple_cms_exported_claims.json', schema) }
  # Parameter hashes expected to be posted to each creation endpoint.
  # NOTE(review): date_attended_params uses a Regexp for attended_item_type,
  # matching either "Fee" or "Expense" since both post dates attended.
  let(:claim_params) { {"advocate_email"=>"advocate@example.com", "case_number"=>"12345678", "case_type_id"=>1, "indictment_number"=>"12345678", "first_day_of_trial"=>"2015/06/01", "estimated_trial_length"=>1, "actual_trial_length"=>1, "trial_concluded_at"=>"2015/06/02", "advocate_category"=>"QC", "prosecuting_authority"=>"cps", "offence_id"=>1, "court_id"=>1, "cms_number"=>"12345678", "additional_information"=>"string", "apply_vat"=>true, "trial_fixed_notice_at"=>"2015-06-01", "trial_fixed_at"=>"2015-06-01", "trial_cracked_at"=>"2015-06-01"} }
  let(:defendant_params) { {"first_name"=>"case", "middle_name"=>"management", "last_name"=>"system", "date_of_birth"=>"1979/12/10", "order_for_judicial_apportionment"=>true, "claim_id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"} }
  let(:rep_order_params) { {"granting_body"=>"Crown Court", "maat_reference"=>"12345678-3", "representation_order_date"=>"2015/05/01", "defendant_id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"} }
  let(:fee_params) { {"fee_type_id"=>1, "quantity"=>1, "amount"=>1.1, "claim_id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"} }
  let(:expense_params) { {"expense_type_id"=>1, "quantity"=>1, "rate"=>1.1, "location"=>"London", "claim_id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"} }
  let(:date_attended_params) { {"attended_item_type"=>/Fee|Expense/, "date"=>"2015/06/01", "date_to"=>"2015/06/01", "attended_item_id"=>"1234"} }

  context 'parses a json document and' do
    context 'calls API endpoints for' do
      it 'claims, defendants, representation_orders, fees, expenses' do
        # Each POST is stubbed; created-record ids are returned as JSON so the
        # importer can thread them into the child objects' params.
        expect(JsonDocumentImporter::CLAIM_CREATION).to receive(:post).with(claim_params).and_return({"id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"}.to_json)
        expect(JsonDocumentImporter::DEFENDANT_CREATION).to receive(:post).with(defendant_params).and_return({"id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"}.to_json)
        expect(JsonDocumentImporter::REPRESENTATION_ORDER_CREATION).to receive(:post).with(rep_order_params)
        expect(JsonDocumentImporter::FEE_CREATION).to receive(:post).with(fee_params).and_return({"id"=> "1234"}.to_json)
        expect(JsonDocumentImporter::EXPENSE_CREATION).to receive(:post).with(expense_params).and_return({"id"=> "1234"}.to_json)
        # Once for the fee's dates attended, once for the expense's.
        expect(JsonDocumentImporter::DATE_ATTENDED_CREATION).to receive(:post).with(date_attended_params).exactly(2).times
        importer.import!
      end
    end

    context 'each claim is processed as an atomic transaction' do
      it 'and errors are stored' do
        expect(invalid_importer.errors.blank?).to be true
        invalid_importer.import!
        expect(invalid_importer.errors.blank?).to be false
      end
    end

    context 'can validate the json document against our schema' do
      it 'returning true if valid' do
        expect(importer.validate!).to eq true
      end
    end

    context 'iterates through multiple claim hashes' do
      it 'to validate' do
        expect(JSON::Validator).to receive(:fully_validate).exactly(2).times
        multiple_claim_importer.validate!
      end
      it 'to create claims' do
        expect(multiple_claim_importer).to receive(:create_claim).exactly(2).times
        multiple_claim_importer.import!
      end
    end
  end

  # JSON Schema (draft-04) describing a single exported claim document,
  # including nested defendants (with representation orders), fees and
  # expenses (each with optional dates attended).
  def json_schema
    {"$schema"=>"http://json-schema.org/draft-04/schema#",
     "description"=>"Generated from Advocate Defense Payments - Claim Import with shasum 827ad7ec32160abdc3cd7075c8050812a21a64e4",
     "type"=>"object",
     "required"=>["claim"],
     "properties"=>
      {"claim"=>
        {"type"=>"object",
         "required"=>
          ["advocate_email",
           "case_number",
           "case_type_id",
           "indictment_number",
           "first_day_of_trial",
           "estimated_trial_length",
           "actual_trial_length",
           "trial_concluded_at",
           "advocate_category",
           "prosecuting_authority",
           "offence_id",
           "court_id",
           "cms_number",
           "apply_vat",
           "defendants",
           "fees",
           "expenses"],
         "properties"=>
          {"advocate_email"=>{"type"=>"string"},
           "case_number"=>{"type"=>"string"},
           "case_type_id"=>{"type"=>"integer"},
           "indictment_number"=>{"type"=>"string"},
           "first_day_of_trial"=>{"type"=>"string"},
           "estimated_trial_length"=>{"type"=>"integer"},
           "actual_trial_length"=>{"type"=>"integer"},
           "trial_concluded_at"=>{"type"=>"string"},
           "advocate_category"=>{"type"=>"string"},
           "prosecuting_authority"=>{"type"=>"string"},
           "offence_id"=>{"type"=>"integer"},
           "court_id"=>{"type"=>"integer"},
           "cms_number"=>{"type"=>"string"},
           "additional_information"=>{"type"=>"string"},
           "apply_vat"=>{"type"=>"boolean"},
           "trial_fixed_notice_at"=>{"type"=>"string"},
           "trial_fixed_at"=>{"type"=>"string"},
           "trial_cracked_at"=>{"type"=>"string"},
           "trial_cracked_at_third"=>{"type"=>"string"},
           "defendants"=>
            {"type"=>"array",
             "minItems"=>1,
             "uniqueItems"=>true,
             "items"=>
              {"type"=>"object",
               "required"=>["first_name", "middle_name", "last_name", "date_of_birth", "order_for_judicial_apportionment", "representation_orders"],
               "properties"=>
                {"claim_id"=>{"type"=>"integer"},
                 "first_name"=>{"type"=>"string"},
                 "middle_name"=>{"type"=>"string"},
                 "last_name"=>{"type"=>"string"},
                 "date_of_birth"=>{"type"=>"string"},
                 "order_for_judicial_apportionment"=>{"type"=>"boolean"},
                 "representation_orders"=>
                  {"type"=>"array",
                   "minItems"=>1,
                   "uniqueItems"=>true,
                   "items"=>
                    {"type"=>"object",
                     "required"=>["granting_body", "maat_reference", "representation_order_date"],
                     "properties"=>
                      {"defendant_id"=>{"type"=>"integer"},
                       "granting_body"=>{"type"=>"string"},
                       "maat_reference"=>{"type"=>"string"},
                       "representation_order_date"=>{"type"=>"string"}}}}}}},
           "fees"=>
            {"type"=>"array",
             "minItems"=>1,
             "uniqueItems"=>true,
             "items"=>
              {"type"=>"object",
               "required"=>["fee_type_id", "quantity", "amount"],
               "properties"=>
                {"claim_id"=>{"type"=>"integer"},
                 "fee_type_id"=>{"type"=>"integer"},
                 "quantity"=>{"type"=>"integer"},
                 "amount"=>{"type"=>"number"},
                 "dates_attended"=>
                  {"type"=>"array",
                   "minItems"=>1,
                   "uniqueItems"=>true,
                   "items"=>
                    {"type"=>"object",
                     "required"=>["attended_item_type", "date", "date_to"],
                     "properties"=>{"attended_item_id"=>{"type"=>"integer"}, "attended_item_type"=>{"type"=>"string"}, "date"=>{"type"=>"string"}, "date_to"=>{"type"=>"string"}}}}}}},
           "expenses"=>
            {"type"=>"array",
             "minItems"=>1,
             "uniqueItems"=>true,
             "items"=>
              {"type"=>"object",
               "required"=>["expense_type_id", "quantity", "rate", "location"],
               "properties"=>
                {"claim_id"=>{"type"=>"integer"},
                 "expense_type_id"=>{"type"=>"integer"},
                 "quantity"=>{"type"=>"integer"},
                 "rate"=>{"type"=>"number"},
                 "location"=>{"type"=>"string"},
                 "dates_attended"=>
                  {"type"=>"array",
                   "minItems"=>1,
                   "uniqueItems"=>true,
                   "items"=>
                    {"type"=>"object",
                     "required"=>["attended_item_type", "date", "date_to"],
                     "properties"=>{"attended_item_id"=>{"type"=>"integer"}, "attended_item_type"=>{"type"=>"string"}, "date"=>{"type"=>"string"}, "date_to"=>{"type"=>"string"}}}}}}}}}}}
  end
end
Skip the API test that creates claims on the development database
require 'rails_helper'

# Specs for JsonDocumentImporter: parses exported CMS claim JSON documents,
# validates them against the schema below, and posts claims (plus nested
# defendants, representation orders, fees, expenses and dates attended)
# to the API creation endpoints.
describe JsonDocumentImporter do
  let(:schema) { json_schema }
  let(:importer) { JsonDocumentImporter.new('./spec/examples/cms_exported_claim.json', schema) }
  let(:invalid_importer) { JsonDocumentImporter.new('./spec/examples/invalid_cms_exported_claim.json', schema) }
  let(:multiple_claim_importer) { JsonDocumentImporter.new('./spec/examples/multiple_cms_exported_claims.json', schema) }
  # Parameter hashes expected to be posted to each creation endpoint.
  let(:claim_params) { {"advocate_email"=>"advocate@example.com", "case_number"=>"12345678", "case_type_id"=>1, "indictment_number"=>"12345678", "first_day_of_trial"=>"2015/06/01", "estimated_trial_length"=>1, "actual_trial_length"=>1, "trial_concluded_at"=>"2015/06/02", "advocate_category"=>"QC", "prosecuting_authority"=>"cps", "offence_id"=>1, "court_id"=>1, "cms_number"=>"12345678", "additional_information"=>"string", "apply_vat"=>true, "trial_fixed_notice_at"=>"2015-06-01", "trial_fixed_at"=>"2015-06-01", "trial_cracked_at"=>"2015-06-01"} }
  let(:defendant_params) { {"first_name"=>"case", "middle_name"=>"management", "last_name"=>"system", "date_of_birth"=>"1979/12/10", "order_for_judicial_apportionment"=>true, "claim_id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"} }
  let(:rep_order_params) { {"granting_body"=>"Crown Court", "maat_reference"=>"12345678-3", "representation_order_date"=>"2015/05/01", "defendant_id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"} }
  let(:fee_params) { {"fee_type_id"=>1, "quantity"=>1, "amount"=>1.1, "claim_id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"} }
  let(:expense_params) { {"expense_type_id"=>1, "quantity"=>1, "rate"=>1.1, "location"=>"London", "claim_id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"} }
  let(:date_attended_params) { {"attended_item_type"=>/Fee|Expense/, "date"=>"2015/06/01", "date_to"=>"2015/06/01", "attended_item_id"=>"1234"} }

  context 'parses a json document and' do
    context 'calls API endpoints for' do
      it 'claims, defendants, representation_orders, fees, expenses' do
        # Each POST is stubbed; created-record ids are returned as JSON so the
        # importer can thread them into the child objects' params.
        expect(JsonDocumentImporter::CLAIM_CREATION).to receive(:post).with(claim_params).and_return({"id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"}.to_json)
        expect(JsonDocumentImporter::DEFENDANT_CREATION).to receive(:post).with(defendant_params).and_return({"id"=>"642ec639-5037-4d64-a3aa-27c377e51ea7"}.to_json)
        expect(JsonDocumentImporter::REPRESENTATION_ORDER_CREATION).to receive(:post).with(rep_order_params)
        expect(JsonDocumentImporter::FEE_CREATION).to receive(:post).with(fee_params).and_return({"id"=> "1234"}.to_json)
        expect(JsonDocumentImporter::EXPENSE_CREATION).to receive(:post).with(expense_params).and_return({"id"=> "1234"}.to_json)
        # Once for the fee's dates attended, once for the expense's.
        expect(JsonDocumentImporter::DATE_ATTENDED_CREATION).to receive(:post).with(date_attended_params).exactly(2).times
        importer.import!
      end
    end

    context 'each claim is processed as an atomic transaction' do
      # FIX: previously this wrapped the example in `skip '...' do ... end` at
      # group level, which defines a skipped *example* whose body itself calls
      # `it` — invalid nesting that would raise if the block ever ran. The
      # idiomatic way to skip with a reason is the :skip metadata on the example.
      it 'and errors are stored', :skip => 'This test needs to be rewritten so that it doesnt create claims on the development database if a server is running' do
        expect(invalid_importer.errors.blank?).to be true
        invalid_importer.import!
        expect(invalid_importer.errors.blank?).to be false
      end
    end

    context 'can validate the json document against our schema' do
      it 'returning true if valid' do
        expect(importer.validate!).to eq true
      end
    end

    context 'iterates through multiple claim hashes' do
      it 'to validate' do
        expect(JSON::Validator).to receive(:fully_validate).exactly(2).times
        multiple_claim_importer.validate!
      end
      it 'to create claims' do
        expect(multiple_claim_importer).to receive(:create_claim).exactly(2).times
        multiple_claim_importer.import!
      end
    end
  end

  # JSON Schema (draft-04) describing a single exported claim document,
  # including nested defendants (with representation orders), fees and
  # expenses (each with optional dates attended).
  def json_schema
    {"$schema"=>"http://json-schema.org/draft-04/schema#",
     "description"=>"Generated from Advocate Defense Payments - Claim Import with shasum 827ad7ec32160abdc3cd7075c8050812a21a64e4",
     "type"=>"object",
     "required"=>["claim"],
     "properties"=>
      {"claim"=>
        {"type"=>"object",
         "required"=>
          ["advocate_email",
           "case_number",
           "case_type_id",
           "indictment_number",
           "first_day_of_trial",
           "estimated_trial_length",
           "actual_trial_length",
           "trial_concluded_at",
           "advocate_category",
           "prosecuting_authority",
           "offence_id",
           "court_id",
           "cms_number",
           "apply_vat",
           "defendants",
           "fees",
           "expenses"],
         "properties"=>
          {"advocate_email"=>{"type"=>"string"},
           "case_number"=>{"type"=>"string"},
           "case_type_id"=>{"type"=>"integer"},
           "indictment_number"=>{"type"=>"string"},
           "first_day_of_trial"=>{"type"=>"string"},
           "estimated_trial_length"=>{"type"=>"integer"},
           "actual_trial_length"=>{"type"=>"integer"},
           "trial_concluded_at"=>{"type"=>"string"},
           "advocate_category"=>{"type"=>"string"},
           "prosecuting_authority"=>{"type"=>"string"},
           "offence_id"=>{"type"=>"integer"},
           "court_id"=>{"type"=>"integer"},
           "cms_number"=>{"type"=>"string"},
           "additional_information"=>{"type"=>"string"},
           "apply_vat"=>{"type"=>"boolean"},
           "trial_fixed_notice_at"=>{"type"=>"string"},
           "trial_fixed_at"=>{"type"=>"string"},
           "trial_cracked_at"=>{"type"=>"string"},
           "trial_cracked_at_third"=>{"type"=>"string"},
           "defendants"=>
            {"type"=>"array",
             "minItems"=>1,
             "uniqueItems"=>true,
             "items"=>
              {"type"=>"object",
               "required"=>["first_name", "middle_name", "last_name", "date_of_birth", "order_for_judicial_apportionment", "representation_orders"],
               "properties"=>
                {"claim_id"=>{"type"=>"integer"},
                 "first_name"=>{"type"=>"string"},
                 "middle_name"=>{"type"=>"string"},
                 "last_name"=>{"type"=>"string"},
                 "date_of_birth"=>{"type"=>"string"},
                 "order_for_judicial_apportionment"=>{"type"=>"boolean"},
                 "representation_orders"=>
                  {"type"=>"array",
                   "minItems"=>1,
                   "uniqueItems"=>true,
                   "items"=>
                    {"type"=>"object",
                     "required"=>["granting_body", "maat_reference", "representation_order_date"],
                     "properties"=>
                      {"defendant_id"=>{"type"=>"integer"},
                       "granting_body"=>{"type"=>"string"},
                       "maat_reference"=>{"type"=>"string"},
                       "representation_order_date"=>{"type"=>"string"}}}}}}},
           "fees"=>
            {"type"=>"array",
             "minItems"=>1,
             "uniqueItems"=>true,
             "items"=>
              {"type"=>"object",
               "required"=>["fee_type_id", "quantity", "amount"],
               "properties"=>
                {"claim_id"=>{"type"=>"integer"},
                 "fee_type_id"=>{"type"=>"integer"},
                 "quantity"=>{"type"=>"integer"},
                 "amount"=>{"type"=>"number"},
                 "dates_attended"=>
                  {"type"=>"array",
                   "minItems"=>1,
                   "uniqueItems"=>true,
                   "items"=>
                    {"type"=>"object",
                     "required"=>["attended_item_type", "date", "date_to"],
                     "properties"=>{"attended_item_id"=>{"type"=>"integer"}, "attended_item_type"=>{"type"=>"string"}, "date"=>{"type"=>"string"}, "date_to"=>{"type"=>"string"}}}}}}},
           "expenses"=>
            {"type"=>"array",
             "minItems"=>1,
             "uniqueItems"=>true,
             "items"=>
              {"type"=>"object",
               "required"=>["expense_type_id", "quantity", "rate", "location"],
               "properties"=>
                {"claim_id"=>{"type"=>"integer"},
                 "expense_type_id"=>{"type"=>"integer"},
                 "quantity"=>{"type"=>"integer"},
                 "rate"=>{"type"=>"number"},
                 "location"=>{"type"=>"string"},
                 "dates_attended"=>
                  {"type"=>"array",
                   "minItems"=>1,
                   "uniqueItems"=>true,
                   "items"=>
                    {"type"=>"object",
                     "required"=>["attended_item_type", "date", "date_to"],
                     "properties"=>{"attended_item_id"=>{"type"=>"integer"}, "attended_item_type"=>{"type"=>"string"}, "date"=>{"type"=>"string"}, "date_to"=>{"type"=>"string"}}}}}}}}}}}
  end
end
|
# coding: utf-8
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

# Specs for MutuallyExclusiveCollection: probability ordering of fixed-odds
# outcomes, plus arbitrage detection and stake/profit calculations.
#
# FIXES (descriptions only; no assertion changed):
#  - "#profit_from_total_stake" example said £4.64 but asserts £4.63.
#  - the bet_amounts_for_profit examples were grouped under the wrong
#    method name "#bet_amounts_for_winnings".
describe "MutuallyExclusiveCollection" do
  context "non-arbitrage methods" do
    before(:each) do
      @good_team = FixedOdds.from_s '-275'
      @draw = FixedOdds.from_s '+429'
      @bad_team = FixedOdds.from_s '+915'
      @events = MutuallyExclusiveCollection.new [@draw, @bad_team, @good_team]
    end
    subject { @events }
    its(:most_likely) { should == @good_team }
    its(:least_likely) { should == @bad_team }
    its(:in_descending_probability) { should == [@good_team, @draw, @bad_team] }
    its(:in_ascending_probability) { should == [@bad_team, @draw, @good_team] }
  end

  context "decimal odds arbitrage" do
    before(:each) do
      @bookmaker1outcome1 = FixedOdds.from_s '2.25'
      @bookmaker1outcome2 = FixedOdds.from_s '4.9'
      @bookmaker2outcome1 = FixedOdds.from_s '2.43'
      @bookmaker2outcome2 = FixedOdds.from_s '3.85'
      @bookmaker1 = MutuallyExclusiveCollection.new [@bookmaker1outcome1, @bookmaker1outcome2]
      @bookmaker2 = MutuallyExclusiveCollection.new [@bookmaker2outcome1, @bookmaker2outcome2]
      # Best odds cherry-picked across both bookmakers: arbitrage opportunity.
      @bookmaker_vulnerable_to_arbitrage = MutuallyExclusiveCollection.new [@bookmaker2outcome1, @bookmaker1outcome2]
    end

    describe "#sum_inverse_outcome" do
      it "is 1.056 for bookmaker 1" do
        @bookmaker1.sum_inverse_outcome.should be_within(0.001).of(1.056)
      end
      it "is 1.051 for bookmaker 2" do
        @bookmaker2.sum_inverse_outcome.should be_within(0.001).of(1.051)
      end
    end

    describe "#rational_bookmaker?" do
      it "is true for bookmaker 1" do
        @bookmaker1.rational_bookmaker?.should be
      end
      it "is true for bookmaker 2" do
        @bookmaker2.rational_bookmaker?.should be
      end
      it "is false for vulnerable bookmaker" do
        @bookmaker_vulnerable_to_arbitrage.rational_bookmaker?.should be_false
      end
    end

    describe "#bookmakers_return_rate" do
      it "is 5.34% for bookmaker 1" do
        @bookmaker1.bookmakers_return_rate.should be_within(0.0001).of(0.0534)
      end
      it "is 4.78% for bookmaker 2" do
        @bookmaker2.bookmakers_return_rate.should be_within(0.0001).of(0.0478)
      end
    end

    describe "#profit_from_total_stake" do
      # Description corrected from £4.64 to £4.63 to match the assertion.
      it "is £4.63 with a £100.00 stake" do
        @bookmaker_vulnerable_to_arbitrage.profit_from_total_stake(Money.from_fixnum(100, :GBP)).should == Money.from_fixnum(4.63, :GBP)
      end
    end

    describe "#profit_percentage" do
      it "is 4.6%" do
        @bookmaker_vulnerable_to_arbitrage.profit_percentage.should be_within(0.001).of(0.046)
      end
    end
  end

  context "fractional odds arbitrage" do
    before(:each) do
      @odds1 = FixedOdds.from_s('2/1')
      @odds2 = FixedOdds.from_s('3/1')
      @bookmaker_vulnerable_to_arbitrage = MutuallyExclusiveCollection.new [@odds1, @odds2]
    end
    subject { @bookmaker_vulnerable_to_arbitrage }

    it "is vulnerable to arbitrage" do
      @bookmaker_vulnerable_to_arbitrage.should_not be_rational_bookmaker
    end
    its(:rational_bookmaker?) { should be_false }
    its(:profit_percentage) { should be_within(0.001).of(0.2) }

    describe "#profit_from_total_stake" do
      it "is £100.00 with a £500.00 stake" do
        @bookmaker_vulnerable_to_arbitrage.profit_from_total_stake(Money.from_fixnum(500, :GBP)).should == Money.from_fixnum(100.00, :GBP)
      end
    end
  end

  context "more than two mutually exclusive events" do
    before(:each) do
      @odds1 = FixedOdds.from_s('2.3')
      @odds2 = FixedOdds.from_s('8.0')
      @odds3 = FixedOdds.from_s('18.0')
      @bookmaker_vulnerable_to_arbitrage = MutuallyExclusiveCollection.new [@odds1, @odds2, @odds3]
    end
    subject { @bookmaker_vulnerable_to_arbitrage }
    its(:rational_bookmaker?) { should be_false }
    its(:sum_inverse_outcome) { should be_within(0.0001).of(0.9709) }
    its(:profit_percentage) { should be_within(0.0000001).of(0.02996) }

    describe "#percentages" do
      it "gives the percentages to put on each bet" do
        percentages = @bookmaker_vulnerable_to_arbitrage.percentages
        percentages.should have(3).items
        percentages[@odds1].should be_within(0.0001).of(0.7922)
        percentages[@odds2].should be_within(0.0001).of(0.1472)
        percentages[@odds3].should be_within(0.0001).of(0.0606)
      end
    end

    describe "#bet_amounts_for_total" do
      it "gives the right amounts" do
        total = Money.from_fixnum(500, :GBP)
        amounts = @bookmaker_vulnerable_to_arbitrage.bet_amounts_for_total total
        amounts.should have(3).items
        amounts[@odds1].should == Money.from_fixnum(396.14, :GBP)
        amounts[@odds2].should == Money.from_fixnum(73.57, :GBP)
        amounts[@odds3].should == Money.from_fixnum(30.29, :GBP)
        amounts.values.reduce(:+).should == total
      end
    end

    describe "#profit_from_total_stake" do
      it "gives the right amount" do
        @bookmaker_vulnerable_to_arbitrage.profit_from_total_stake(Money.from_fixnum(500, :GBP)).should == Money.from_fixnum(14.98, :GBP)
      end
    end

    # Description corrected: these examples exercise #bet_amounts_for_profit.
    describe "#bet_amounts_for_profit" do
      it "gives the right amounts" do
        amounts = @bookmaker_vulnerable_to_arbitrage.bet_amounts_for_profit Money.from_fixnum(750, :GBP)
        amounts.should have(3).items
        amounts[@odds1].should == Money.from_fixnum(19833.33, :GBP)
        amounts[@odds2].should == Money.from_fixnum(3683.33, :GBP)
        amounts[@odds3].should == Money.from_fixnum(1516.67, :GBP)
        amounts.values.reduce(:+).should == Money.from_fixnum(25033.33, :GBP)
      end
    end

    describe "#total_stake_for_profit" do
      it "gives the right amounts" do
        @bookmaker_vulnerable_to_arbitrage.total_stake_for_profit(Money.from_fixnum(750, :GBP)).should == Money.from_fixnum(25033.33, :GBP)
      end
    end
  end
end
Change specs to construct amounts with the Money.parse method
# coding: utf-8
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe "MutuallyExclusiveCollection" do
before :all do
Money.assume_from_symbol = true
end
context "non-arbitrage methods" do
before(:each) do
@good_team = FixedOdds.from_s '-275'
@draw = FixedOdds.from_s '+429'
@bad_team = FixedOdds.from_s '+915'
@events = MutuallyExclusiveCollection.new [@draw, @bad_team, @good_team]
end
subject { @events }
its(:most_likely) { should == @good_team }
its(:least_likely) { should == @bad_team }
its(:in_descending_probability) { should == [@good_team, @draw, @bad_team] }
its(:in_ascending_probability) { should == [@bad_team, @draw, @good_team] }
end
context "decimal odds arbitrage" do
  before(:each) do
    # Two bookmakers quoting the same two-outcome event in decimal odds.
    @first_bookie_outcome1  = FixedOdds.from_s('2.25')
    @first_bookie_outcome2  = FixedOdds.from_s('4.9')
    @second_bookie_outcome1 = FixedOdds.from_s('2.43')
    @second_bookie_outcome2 = FixedOdds.from_s('3.85')

    @bookmaker1 = MutuallyExclusiveCollection.new([@first_bookie_outcome1, @first_bookie_outcome2])
    @bookmaker2 = MutuallyExclusiveCollection.new([@second_bookie_outcome1, @second_bookie_outcome2])
    # Cherry-picking the best price on each outcome opens an arbitrage window.
    @bookmaker_vulnerable_to_arbitrage = MutuallyExclusiveCollection.new([@second_bookie_outcome1, @first_bookie_outcome2])
  end

  describe "#sum_inverse_outcome" do
    it "is 1.056 for bookmaker 1" do
      @bookmaker1.sum_inverse_outcome.should be_within(0.001).of(1.056)
    end

    it "is 1.051 for bookmaker 2" do
      @bookmaker2.sum_inverse_outcome.should be_within(0.001).of(1.051)
    end
  end

  describe "#rational_bookmaker?" do
    it "is true for bookmaker 1" do
      @bookmaker1.rational_bookmaker?.should be
    end

    it "is true for bookmaker 2" do
      @bookmaker2.rational_bookmaker?.should be
    end

    it "is false for vulnerable bookmaker" do
      @bookmaker_vulnerable_to_arbitrage.rational_bookmaker?.should be_false
    end
  end

  describe "#bookmakers_return_rate" do
    it "is 5.34% for bookmaker 1" do
      @bookmaker1.bookmakers_return_rate.should be_within(0.0001).of(0.0534)
    end

    it "is 4.78% for bookmaker 2" do
      @bookmaker2.bookmakers_return_rate.should be_within(0.0001).of(0.0478)
    end
  end

  describe "#profit_from_total_stake" do
    it "is £4.63 with a £100.00 stake" do
      @bookmaker_vulnerable_to_arbitrage.profit_from_total_stake(Money.parse('£100')).should == Money.parse('£4.63')
    end
  end

  describe "#profit_percentage" do
    it "is 4.6%" do
      @bookmaker_vulnerable_to_arbitrage.profit_percentage.should be_within(0.001).of(0.046)
    end
  end
end
context "fractional odds arbitrage" do
  before(:each) do
    # 2/1 and 3/1 on complementary outcomes add up to less than a full book.
    @shorter_price = FixedOdds.from_s('2/1')
    @longer_price  = FixedOdds.from_s('3/1')
    @bookmaker_vulnerable_to_arbitrage = MutuallyExclusiveCollection.new([@shorter_price, @longer_price])
  end

  subject { @bookmaker_vulnerable_to_arbitrage }

  it "is vulnerable to arbitrage" do
    @bookmaker_vulnerable_to_arbitrage.should_not be_rational_bookmaker
  end

  its(:rational_bookmaker?) { should be_false }
  its(:profit_percentage)   { should be_within(0.001).of(0.2) }

  describe "#profit_from_total_stake" do
    it "is £100.00 with a £500.00 stake" do
      @bookmaker_vulnerable_to_arbitrage.profit_from_total_stake(Money.parse('£500')).should == Money.parse('£100')
    end
  end
end
context "more than two mutually exclusive events" do
  before(:each) do
    @short_odds = FixedOdds.from_s('2.3')
    @mid_odds   = FixedOdds.from_s('8.0')
    @long_odds  = FixedOdds.from_s('18.0')
    @bookmaker_vulnerable_to_arbitrage =
      MutuallyExclusiveCollection.new([@short_odds, @mid_odds, @long_odds])
  end

  subject { @bookmaker_vulnerable_to_arbitrage }

  its(:rational_bookmaker?) { should be_false }
  its(:sum_inverse_outcome) { should be_within(0.0001).of(0.9709) }
  its(:profit_percentage)   { should be_within(0.0000001).of(0.02996) }

  describe "#percentages" do
    it "gives the percentages to put on each bet" do
      percentages = @bookmaker_vulnerable_to_arbitrage.percentages
      percentages.should have(3).items
      percentages[@short_odds].should be_within(0.0001).of(0.7922)
      percentages[@mid_odds].should be_within(0.0001).of(0.1472)
      percentages[@long_odds].should be_within(0.0001).of(0.0606)
    end
  end

  describe "#bet_amounts_for_total" do
    it "gives the right amounts" do
      total = Money.parse('£500')
      amounts = @bookmaker_vulnerable_to_arbitrage.bet_amounts_for_total(total)
      amounts.should have(3).items
      amounts[@short_odds].should == Money.parse('£396.14')
      amounts[@mid_odds].should == Money.parse('£73.57')
      amounts[@long_odds].should == Money.parse('£30.29')
      # The individual stakes must spend exactly the requested total.
      amounts.values.reduce(:+).should == total
    end
  end

  describe "#profit_from_total_stake" do
    it "gives the right amount" do
      @bookmaker_vulnerable_to_arbitrage.profit_from_total_stake(Money.parse('£500')).should == Money.parse('£14.98')
    end
  end

  describe "#bet_amounts_for_winnings" do
    it "gives the right amounts" do
      amounts = @bookmaker_vulnerable_to_arbitrage.bet_amounts_for_profit(Money.parse('£750'))
      amounts.should have(3).items
      amounts[@short_odds].should == Money.parse('£19833.33')
      amounts[@mid_odds].should == Money.parse('£3683.33')
      amounts[@long_odds].should == Money.parse('£1516.67')
      amounts.values.reduce(:+).should == Money.parse('£25033.33')
    end
  end

  describe "#total_stake_for_profit" do
    it "gives the right amounts" do
      @bookmaker_vulnerable_to_arbitrage.total_stake_for_profit(Money.parse('£750')).should == Money.parse('£25033.33')
    end
  end
end
end |
Spec for ObjectifiedEnvironments::Base.
require 'objectified_environments/base'
describe ObjectifiedEnvironments::Base do
  it "should hold environment data" do
    b = ObjectifiedEnvironments::Base.new(:rails_env => 'foo', :user_name => 'bar', :host_name => 'baz')
    b.send(:rails_env).should == 'foo'
    b.send(:user_name).should == 'bar'
    b.send(:host_name).should == 'baz'
  end

  it "should not require any data but rails_env" do
    b = ObjectifiedEnvironments::Base.new(:rails_env => 'foo')
    b.send(:rails_env).should == 'foo'
    b.send(:user_name).should == nil
    b.send(:host_name).should == nil
  end

  it "should not expose this data by default" do
    # BUG FIX: 'b' was never instantiated in this example, so each lambda
    # raised NameError on the bare 'b' and the example passed vacuously.
    # Build the instance first so the assertions actually exercise the
    # (non-public) accessors.
    b = ObjectifiedEnvironments::Base.new(:rails_env => 'foo', :user_name => 'bar', :host_name => 'baz')
    lambda { b.rails_env }.should raise_error
    lambda { b.user_name }.should raise_error
    lambda { b.host_name }.should raise_error
  end

  it "should implement #must_implement as something that raises" do
    b = ObjectifiedEnvironments::Base.new(:rails_env => 'foo')
    # Define a singleton method that delegates to must_implement so we can
    # observe that calling it raises.
    class << b
      def foo
        must_implement
      end
    end
    lambda { b.foo }.should raise_error
  end
end
|
require 'spec_helper'
describe Oga::XPath::Evaluator do
  # Exercises the XPath "self" axis: the explicit form (self::name), the
  # abbreviated form ("."), and "." used inside a predicate.
  context 'self axis' do
    before do
      @document = parse('<a><b>foo</b><b>bar</b></a>')
      @evaluator = described_class.new(@document)
    end

    context 'matching the context node itself' do
      before do
        @set = @evaluator.evaluate('a/self::a')
      end

      # :node_set / :empty_node_set shared examples are defined outside this
      # file — presumably they assert on @set; verify in spec_helper.
      it_behaves_like :node_set, :length => 1

      example 'return the <a> node' do
        @set[0].should == @document.children[0]
      end
    end

    context 'matching non existing nodes' do
      before do
        @set = @evaluator.evaluate('a/self::b')
      end

      it_behaves_like :empty_node_set
    end

    context 'matching the context node itself using the short form' do
      before do
        @set = @evaluator.evaluate('a/.')
      end

      it_behaves_like :node_set, :length => 1

      example 'return the <a> node' do
        @set[0].should == @document.children[0]
      end
    end

    context 'matching nodes inside predicates' do
      before do
        @set = @evaluator.evaluate('a/b[.="foo"]')
      end

      it_behaves_like :node_set, :length => 1

      example 'return the first <b> node' do
        @set[0].should == @document.children[0].children[0]
      end
    end
  end
end
Specs for nested predicates & self axis.
require 'spec_helper'
describe Oga::XPath::Evaluator do
  # Exercises the XPath "self" axis: the explicit form (self::name), the
  # abbreviated form ("."), and "." used inside plain and nested predicates.
  context 'self axis' do
    before do
      @document = parse('<a><b>foo</b><b>bar<c>test</c></b></a>')
      @evaluator = described_class.new(@document)
    end

    context 'matching the context node itself' do
      before do
        @set = @evaluator.evaluate('a/self::a')
      end

      # :node_set / :empty_node_set shared examples are defined outside this
      # file — presumably they assert on @set; verify in spec_helper.
      it_behaves_like :node_set, :length => 1

      example 'return the <a> node' do
        @set[0].should == @document.children[0]
      end
    end

    context 'matching non existing nodes' do
      before do
        @set = @evaluator.evaluate('a/self::b')
      end

      it_behaves_like :empty_node_set
    end

    context 'matching the context node itself using the short form' do
      before do
        @set = @evaluator.evaluate('a/.')
      end

      it_behaves_like :node_set, :length => 1

      example 'return the <a> node' do
        @set[0].should == @document.children[0]
      end
    end

    context 'matching nodes inside predicates' do
      before do
        @set = @evaluator.evaluate('a/b[. = "foo"]')
      end

      it_behaves_like :node_set, :length => 1

      example 'return the first <b> node' do
        @set[0].should == @document.children[0].children[0]
      end
    end

    context 'using self inside a path inside a predicate' do
      before do
        @set = @evaluator.evaluate('a/b[c/. = "test"]')
      end

      it_behaves_like :node_set, :length => 1

      example 'return the second <b> node' do
        @set[0].should == @document.children[0].children[1]
      end
    end

    context 'using self inside a nested predicate' do
      before do
        @set = @evaluator.evaluate('a/b[c[. = "test"]]')
      end

      it_behaves_like :node_set, :length => 1

      example 'return the second <b> node' do
        @set[0].should == @document.children[0].children[1]
      end
    end
  end
end
|
Added spec for PartGet request. [ci skip]
require 'spec_helper'
describe Fishbowl::Requests do
  describe "#get_part_information" do
    before :each do
      # Stub the TCP socket and a successful login so no real Fishbowl
      # server is needed.
      mock_tcp_connection
      mock_login_response
      Fishbowl::Connection.connect(host: 'localhost')
      Fishbowl::Connection.login(username: 'johndoe', password: 'secret')
    end

    let(:connection) { FakeTCPSocket.instance }

    it "sends proper request" do
      mock_the_response(expected_response)
      Fishbowl::Requests.get_part_information("B203")
      connection.last_write.should be_equivalent_to(expected_request)
    end

    # XML the client is expected to write to the socket for part "B203".
    # NOTE(review): the options argument is currently unused — confirm
    # whether it is meant to customise the generated request.
    def expected_request(options = {})
      request = Nokogiri::XML::Builder.new do |xml|
        xml.FbiXml {
          xml.Ticket
          xml.FbiMsgsRq {
            xml.PartGetRq {
              xml.PartNum "B203"
            }
          }
        }
      end
      request.to_xml
    end

    # Canned PartGetRs reply handed to mock_the_response.
    # NOTE(review): unlike expected_request this returns the Builder object
    # rather than a serialized string — verify mock_the_response handles
    # (or serializes) a Builder.
    def expected_response
      Nokogiri::XML::Builder.new do |xml|
        xml.response {
          xml.PartGetRs(statusCode: '1000', statusMessage: "Success!") {
            #TODO figure out what goes here!
          }
        }
      end
    end
  end
end
|
require_relative 'class_builder'
module UnitTests
  # Helpers for specs that need throwaway ActiveRecord tables and model
  # classes. Mix into an example group via configure_example_group; all
  # tables/models created during an example are torn down afterwards.
  module ModelBuilder
    def create_table(*args, &block)
      ModelBuilder.create_table(*args, &block)
    end

    def define_model(*args, &block)
      ModelBuilder.define_model(*args, &block)
    end

    def define_model_class(*args, &block)
      ModelBuilder.define_model_class(*args, &block)
    end

    def define_active_model_class(*args, &block)
      ModelBuilder.define_active_model_class(*args, &block)
    end

    class << self
      # Includes the helpers in the given example group and registers an
      # after-hook that resets everything created by each example.
      def configure_example_group(example_group)
        example_group.include(self)
        example_group.after do
          ModelBuilder.reset
        end
      end

      # Drops all created tables and forgets all defined models.
      def reset
        clear_column_caches
        drop_created_tables
        created_tables.clear
        defined_models.clear
      end

      # Creates (or re-creates) a table, tracking it for later cleanup.
      # Returns the connection. On failure the half-created table is
      # dropped and the original exception is re-raised.
      def create_table(table_name, options = {}, &block)
        connection = ::ActiveRecord::Base.connection

        begin
          connection.execute("DROP TABLE IF EXISTS #{table_name}")
          connection.create_table(table_name, options, &block)
          created_tables << table_name
          connection
        rescue Exception # rubocop:disable Lint/RescueException -- cleanup only; always re-raised
          connection.execute("DROP TABLE IF EXISTS #{table_name}")
          # BUG FIX: bare `raise` re-raises the current exception with its
          # original backtrace intact (previously `raise e`).
          raise
        end
      end

      # Defines an ActiveRecord::Base subclass with the given name.
      def define_model_class(class_name, &block)
        ClassBuilder.define_class(class_name, ::ActiveRecord::Base, &block)
      end

      # Defines an ActiveModel class; options[:accessors] lists attribute
      # names, which are forwarded as nil-valued columns.
      def define_active_model_class(class_name, options = {}, &block)
        attribute_names = options.delete(:accessors) { [] }

        columns = attribute_names.reduce({}) do |hash, attribute_name|
          hash.merge(attribute_name => nil)
        end

        UnitTests::ModelCreationStrategies::ActiveModel.call(
          class_name,
          columns,
          options,
          &block
        )
      end

      # Defines an ActiveRecord model (table + class) and tracks it so
      # reset can clear its column cache later.
      def define_model(name, columns = {}, options = {}, &block)
        model = UnitTests::ModelCreationStrategies::ActiveRecord.call(
          name,
          columns,
          options,
          &block
        )
        defined_models << model
        model
      end

      private

      def clear_column_caches
        # Rails 4.x
        if ::ActiveRecord::Base.connection.respond_to?(:schema_cache)
          ::ActiveRecord::Base.connection.schema_cache.clear!
        # Rails 3.1 - 4.0
        elsif ::ActiveRecord::Base.connection_pool.respond_to?(:clear_cache!)
          ::ActiveRecord::Base.connection_pool.clear_cache!
        end

        defined_models.each do |model|
          model.reset_column_information
        end
      end

      def drop_created_tables
        connection = ::ActiveRecord::Base.connection

        created_tables.each do |table_name|
          connection.execute("DROP TABLE IF EXISTS #{table_name}")
        end
      end

      def created_tables
        @_created_tables ||= []
      end

      def defined_models
        @_defined_models ||= []
      end
    end
  end
end
Changes to ModelBuilder
* create_table accepts an optional `connection' option, and define_model_class an optional `parent_class' keyword, for custom db connections
* clear_column_caches and drop_created_tables now also cover ProductionRecord
require_relative 'class_builder'
module UnitTests
  # Helpers for specs that need throwaway tables and model classes across
  # the two test databases (DevelopmentRecord and ProductionRecord).
  # Mix into an example group via configure_example_group; everything
  # created during an example is torn down afterwards.
  module ModelBuilder
    def create_table(*args, &block)
      ModelBuilder.create_table(*args, &block)
    end

    def define_model(*args, &block)
      ModelBuilder.define_model(*args, &block)
    end

    def define_model_class(*args, &block)
      ModelBuilder.define_model_class(*args, &block)
    end

    def define_active_model_class(*args, &block)
      ModelBuilder.define_active_model_class(*args, &block)
    end

    class << self
      # Includes the helpers in the given example group and registers an
      # after-hook that resets everything created by each example.
      def configure_example_group(example_group)
        example_group.include(self)
        example_group.after do
          ModelBuilder.reset
        end
      end

      # Drops all created tables and forgets all defined models.
      def reset
        clear_column_caches
        drop_created_tables
        created_tables.clear
        defined_models.clear
      end

      # Creates (or re-creates) a table, tracking it for later cleanup.
      # Pass options[:connection] to target a custom connection; defaults
      # to the DevelopmentRecord database. Returns the connection.
      def create_table(table_name, options = {}, &block)
        connection =
          options.delete(:connection) || DevelopmentRecord.connection

        begin
          connection.execute("DROP TABLE IF EXISTS #{table_name}")
          connection.create_table(table_name, options, &block)
          created_tables << table_name
          connection
        rescue Exception # rubocop:disable Lint/RescueException -- cleanup only; always re-raised
          connection.execute("DROP TABLE IF EXISTS #{table_name}")
          # BUG FIX: bare `raise` re-raises the current exception with its
          # original backtrace intact (previously `raise e`).
          raise
        end
      end

      # Defines a model class; parent_class selects the database
      # (DevelopmentRecord by default).
      def define_model_class(class_name, parent_class: DevelopmentRecord, &block)
        ClassBuilder.define_class(class_name, parent_class, &block)
      end

      # Defines an ActiveModel class; options[:accessors] lists attribute
      # names, which are forwarded as nil-valued columns.
      def define_active_model_class(class_name, options = {}, &block)
        attribute_names = options.delete(:accessors) { [] }

        columns = attribute_names.reduce({}) do |hash, attribute_name|
          hash.merge(attribute_name => nil)
        end

        UnitTests::ModelCreationStrategies::ActiveModel.call(
          class_name,
          columns,
          options,
          &block
        )
      end

      # Defines an ActiveRecord model (table + class) and tracks it so
      # reset can clear its column cache later.
      def define_model(name, columns = {}, options = {}, &block)
        model = UnitTests::ModelCreationStrategies::ActiveRecord.call(
          name,
          columns,
          options,
          &block
        )
        defined_models << model
        model
      end

      private

      # Clears schema caches on both databases so re-created tables are
      # re-inspected.
      def clear_column_caches
        # Rails 4.x
        if ::ActiveRecord::Base.connection.respond_to?(:schema_cache)
          DevelopmentRecord.connection.schema_cache.clear!
          ProductionRecord.connection.schema_cache.clear!
        # Rails 3.1 - 4.0
        elsif ::ActiveRecord::Base.connection_pool.respond_to?(:clear_cache!)
          DevelopmentRecord.connection_pool.clear_cache!
          ProductionRecord.connection_pool.clear_cache!
        end

        defined_models.each do |model|
          model.reset_column_information
        end
      end

      # Drops every tracked table from both databases.
      def drop_created_tables
        created_tables.each do |table_name|
          DevelopmentRecord.connection.
            execute("DROP TABLE IF EXISTS #{table_name}")
          ProductionRecord.connection.
            execute("DROP TABLE IF EXISTS #{table_name}")
        end
      end

      def created_tables
        @_created_tables ||= []
      end

      def defined_models
        @_defined_models ||= []
      end
    end
  end
end
|
#
# Cookbook Name:: rackspace_cloudbackup
#
# Copyright 2014, Rackspace, US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
require 'webmock/rspec'
require_relative '../../../libraries/RcbuApiWrapper.rb'
#include Opscode::Rackspace::CloudBackup
include WebMock::API
module RcbuApiWrapperTestHelpers
# Canonical fixture shared by every RcbuApiWrapper example.
def test_data
  {
    api_username: 'Test API Username',
    api_key:      'Test API Key',
    region:       'TESTREGION', # Needs to be UPCASE
    agent_id:     'TestAgentID', # I believe in the real API this needs to be an int, but our code doesn't care
    api_url:      'http://mockidentity.local/',

    # For Mocking
    api_tenant_id: 'TestAPITenantID', # Used in URL
    api_token:     'Test API Token',

    # For write tests
    dummy_write_data: { 'name' => 'dataW', 'key1' => 'dataW-1', 'key2' => 'dataW-2' },
    dummy_config_id:  'TestConfigurationID'
  }
end
module_function :test_data
# Mimics a Rackspace Identity token response just closely enough for the
# wrapper under test: a token plus a serviceCatalog containing cloudBackup.
def identity_API_data(data = test_data)
  tenant = { 'id' => data[:api_tenant_id], 'name' => data[:api_tenant_id] }

  endpoints = [
    # Our dummy region
    # WARNING: tests below assume this entry first
    {
      'region'    => data[:region],
      'tenantId'  => data[:api_tenant_id],
      'publicURL' => "https://#{data[:region]}.mockrcbu.local/v1.0/#{data[:api_tenant_id]}"
    },
    {
      'tenantId'  => data[:api_tenant_id],
      'publicURL' => "https://mockrcbu.local/v1.0/#{data[:api_tenant_id]}"
      # Note no region key: important case for testing. (The API does this.)
    },
    # A few regions just to puff up the searched data
    {
      'region'    => 'IAD',
      'tenantId'  => data[:api_tenant_id],
      'publicURL' => "https://iad.mockrcbu.local/v1.0/#{data[:api_tenant_id]}"
    },
    {
      'region'    => 'DFW',
      'tenantId'  => data[:api_tenant_id],
      'publicURL' => "https://dfw.mockrcbu.local/v1.0/#{data[:api_tenant_id]}"
    }
  ]

  {
    'access' => {
      'token' => {
        'id'      => data[:api_token],
        'expires' => '2014-02-19T01:20:15.305Z',
        'tenant'  => tenant,
        'RAX-AUTH:authenticatedBy' => ['APIKEY']
      },
      # WARNING: failure case tests below assume cloudBackup is ['serviceCatalog'][0]
      # The rest of the catalog is omitted to keep this dataset to a reasonable size.
      'serviceCatalog' => [
        {
          'name'      => 'cloudBackup',
          'endpoints' => endpoints,
          'type'      => 'rax:backup'
        }
      ]
    }
  }
end
module_function :identity_API_data
# Two successive "API responses" of dummy backup configurations. The second
# set drops data0 and adds data3, so callers can tell which response a
# lookup came from. (We only test API calls here, not use of the data.)
def rcbu_API_configurations_data
  build_entry = lambda do |index|
    {
      'name' => "data#{index}",
      'key1' => "data#{index}-1",
      'key2' => "data#{index}-2",
      'BackupConfigurationName' => "data#{index}"
    }
  end

  [
    (0..2).map { |i| build_entry.call(i) },
    (1..3).map { |i| build_entry.call(i) }
  ]
end
module_function :rcbu_API_configurations_data
# Registers a WebMock stub for the Rackspace Identity POST endpoint and
# disables all real network access. The stub answers with identity_data as
# JSON, so the wrapper under test can "authenticate" during specs.
def mock_identity_API(data = test_data, identity_data = identity_API_data)
  # Set up API mocks
  # Disallow any real connections, all connections should be mocked
  WebMock.disable_net_connect!

  # Mock the identity service
  stub_request(:post, data[:api_url]).with(:body => {
      'auth' =>
        { 'RAX-KSKEY:apiKeyCredentials' =>
          { 'username' => data[:api_username],
            'apiKey' => data[:api_key]
          }
        }
    }.to_json,
    :headers => {
      # Headers with values we care about
      'Accept' => 'application/json',
      'Content-Type' => 'application/json',
      # Headers we don't care about, but need to specify for mocking
      # Near as I can tell you can't specify a subset of headers to care about
      # So if RestClient changes the headers it sends in the future this may break.
      'Accept-Encoding' => /.*/,
      'Content-Length' => /.*/,
      'User-Agent' => /.*/
    }).
    to_return(:status => 200, :body => identity_data.to_json, :headers => {'Content-Type' => 'application/json'})
end
module_function :mock_identity_API
# Stubs the three RCBU backup-configuration endpoints (GET list, POST
# create, PUT update) against the mocked region URL. Each stub returns
# success first and a 400 on a later call so specs can exercise both paths.
def mock_rcbu_backup_configuration_api(data = test_data, configurations_data = rcbu_API_configurations_data)
  # Mock get for lookup_configurations
  stub_request(:get, "https://#{data[:region]}.mockrcbu.local/v1.0/#{data[:api_tenant_id]}/backup-configuration/system/#{data[:agent_id]}").
    with(:headers => {
      # Headers with values we care about
      'Accept' => 'application/json',
      'X-Auth-Token' => data[:api_token],
      # Headers we don't care about, but need to specify for mocking
      'Accept-Encoding' => /.*/,
      'User-Agent' => /.*/
    }).
    # Overload the data response for subsequent call testing.
    # BUG FIX: previously this re-called rcbu_API_configurations_data
    # directly, silently ignoring the configurations_data argument callers
    # pass in.
    to_return({ :status => 200, :body => configurations_data[0].to_json, :headers => {'Content-Type' => 'application/json'} },
              { :status => 200, :body => configurations_data[1].to_json, :headers => {'Content-Type' => 'application/json'} },
              { :status => 400, :body => "", :headers => {}})

  # Mock post for create_config
  stub_request(:post, "https://#{data[:region]}.mockrcbu.local/v1.0/#{data[:api_tenant_id]}/backup-configuration/").
    with(:body => data[:dummy_write_data],
         :headers => {
           # Headers with values we care about
           'Content-Type' => 'application/json',
           'X-Auth-Token' => data[:api_token],
           # Headers we don't care about, but need to specify for mocking
           'Accept' => /.*/,
           'Accept-Encoding' => /.*/,
           'Content-Length' => /.*/,
           'User-Agent' => /.*/
         }).
    # Overload the data response for bad call testing
    to_return({:status => 200, :body => "", :headers => {}},
              {:status => 400, :body => "", :headers => {}})

  # Mock put for update_config
  stub_request(:put, "https://#{data[:region]}.mockrcbu.local/v1.0/#{data[:api_tenant_id]}/backup-configuration/#{data[:dummy_config_id]}").
    with(:body => data[:dummy_write_data],
         :headers => {
           # Headers with values we care about
           'Content-Type' => 'application/json',
           'X-Auth-Token' => data[:api_token],
           # Headers we don't care about, but need to specify for mocking
           'Accept' => /.*/,
           'Accept-Encoding' => /.*/,
           'Content-Length' => /.*/,
           'User-Agent' => /.*/
         }).
    # Overload the data response for bad call testing
    to_return({:status => 200, :body => "", :headers => {}},
              {:status => 400, :body => "", :headers => {}})
end
module_function :mock_rcbu_backup_configuration_api
end
describe 'RcbuApiWrapper' do
# Construction: authenticates against the mocked identity service and pulls
# the RCBU endpoint for the requested region out of the service catalog.
describe 'initialize' do
  before :each do
    @test_data = RcbuApiWrapperTestHelpers.test_data
    @identity_data = RcbuApiWrapperTestHelpers.identity_API_data
    RcbuApiWrapperTestHelpers.mock_identity_API(@test_data, @identity_data)
  end

  it 'sets the agent_id class instance variable' do
    @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
    @test_obj.agent_id.should eql @test_data[:agent_id]
  end

  it 'sets the api_url class instance variable' do
    @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
    @test_obj.api_url.should eql @test_data[:api_url]
  end

  it 'sets the api token class instance variable' do
    @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
    @test_obj.token.should eql @test_data[:api_token]
  end

  it 'fails if "cloudBackup" is not in the catalog' do
    # Guard against fixture drift: this example edits catalog entry 0 in place.
    fail 'Assert error on test data: serviceCatalog order' if @identity_data['access']['serviceCatalog'][0]['name'] != 'cloudBackup'
    @identity_data['access']['serviceCatalog'][0]['name'] = 'notCloudBackup'
    RcbuApiWrapperTestHelpers.mock_identity_API(@test_data, @identity_data)
    expect { Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username],
                                                                 @test_data[:api_key],
                                                                 @test_data[:region],
                                                                 @test_data[:agent_id],
                                                                 @test_data[:api_url]) }.to raise_exception
  end

  it 'fails if the region is not in the cloudBackup service catalog' do
    expect { Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username],
                                                                 @test_data[:api_key],
                                                                 'Atlantis',
                                                                 @test_data[:agent_id],
                                                                 @test_data[:api_url]) }.to raise_exception
  end

  it 'sets the rcbu API URL class instance variable' do
    fail 'Assert error on test data: serviceCatalog order' if @identity_data['access']['serviceCatalog'][0]['name'] != 'cloudBackup'
    fail 'Assert error on test data: endpoint order' if @identity_data['access']['serviceCatalog'][0]['endpoints'][0]['region'] != @test_data[:region]
    @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
    @test_obj.rcbu_api_url.should eql @identity_data['access']['serviceCatalog'][0]['endpoints'][0]['publicURL']
  end
end
# The mocked GET endpoint returns different data on successive calls (then a
# 400), which these examples use to prove each call hits the API afresh.
describe 'lookup_configurations' do
  before :each do
    @test_data = RcbuApiWrapperTestHelpers.test_data
    @identity_data = RcbuApiWrapperTestHelpers.identity_API_data
    @configurations_data = RcbuApiWrapperTestHelpers.rcbu_API_configurations_data
    RcbuApiWrapperTestHelpers.mock_identity_API(@test_data, @identity_data)
    RcbuApiWrapperTestHelpers.mock_rcbu_backup_configuration_api(@test_data, @configurations_data)
    @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
  end

  it 'sets the configurations class instance variable' do
    @test_obj.configurations.should eql nil
    @test_obj.lookup_configurations
    @test_obj.configurations.should eql @configurations_data[0]
  end

  # This is really testing the test, but it is important to verify as locate_existing_config() tests depend on this behavior.
  it 'updates the configurations class instance variable' do
    # Rehash of the last test to get to proper state
    @test_obj.configurations.should eql nil
    @test_obj.lookup_configurations
    @test_obj.configurations.should eql @configurations_data[0]
    # Content of the new test
    @test_obj.lookup_configurations
    @test_obj.configurations.should eql @configurations_data[1]
  end

  it 'fails on bad response code' do
    # The third GET response from the mock is a 400.
    @test_obj.configurations.should eql nil
    @test_obj.lookup_configurations
    @test_obj.configurations.should eql @configurations_data[0]
    @test_obj.lookup_configurations
    @test_obj.configurations.should eql @configurations_data[1]
    expect { @test_obj.lookup_configurations }.to raise_error
  end
end
# locate_existing_config should serve from the cached configurations when it
# can, and only re-query the API when the name is not already cached.
describe 'locate_existing_config' do
  before :each do
    @test_data = RcbuApiWrapperTestHelpers.test_data
    @identity_data = RcbuApiWrapperTestHelpers.identity_API_data
    @configurations_data = RcbuApiWrapperTestHelpers.rcbu_API_configurations_data
    RcbuApiWrapperTestHelpers.mock_identity_API(@test_data, @identity_data)
    RcbuApiWrapperTestHelpers.mock_rcbu_backup_configuration_api(@test_data, @configurations_data)
    @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
  end

  it 'looks up configurations when configurations class instance variable is nil' do
    @test_obj.configurations.should eql nil
    @test_obj.locate_existing_config('data0').should eql @configurations_data[0][0]
  end

  it 'only looks up configurations once when configurations class instance variable is nil' do
    @test_obj.configurations.should eql nil
    # This relies on the mock returning more data on the second call: data3 shouldn't be present in the first lookup
    @test_obj.locate_existing_config('data3').should eql nil
  end

  it 'returns data from configurations class instance variable when configurations is not nil' do
    @test_obj.configurations.should eql nil
    @test_obj.lookup_configurations
    @test_obj.configurations.should eql @configurations_data[0]
    # This relies on the mock returning different data on the second call: data0 shouldn't be present in the second lookup
    # so this should expose unnecessary lookups
    @test_obj.locate_existing_config('data0').should eql @configurations_data[0][0]
  end

  it 'performs a fresh lookup if desired value is not in configurations class instance variable' do
    @test_obj.configurations.should eql nil
    @test_obj.lookup_configurations
    @test_obj.configurations.should eql @configurations_data[0]
    @test_obj.locate_existing_config('data3').should eql @configurations_data[1][2]
  end

  it 'returns nil on no match' do
    @test_obj.locate_existing_config('bogus').should eql nil
  end
end
# create_config / update_config: the mocked POST and PUT endpoints return
# 200 on the first call and 400 on the second, so each pair of examples
# covers the success path and the failure path.
describe 'config writer' do
  before :each do
    @test_data = RcbuApiWrapperTestHelpers.test_data
    @identity_data = RcbuApiWrapperTestHelpers.identity_API_data
    @configurations_data = RcbuApiWrapperTestHelpers.rcbu_API_configurations_data
    RcbuApiWrapperTestHelpers.mock_identity_API(@test_data, @identity_data)
    RcbuApiWrapperTestHelpers.mock_rcbu_backup_configuration_api(@test_data, @configurations_data)
    @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
  end

  it 'create_config posts the configuration to the API' do
    @test_obj.create_config(@test_data[:dummy_write_data])
  end

  it 'create_config fails on non-200 status code' do
    # Like the get above we're relying on differing data for subsequent calls
    @test_obj.create_config(@test_data[:dummy_write_data])
    expect { @test_obj.create_config(@test_data[:dummy_write_data]) }.to raise_exception
  end

  it 'update_config puts the configuration to the API' do
    @test_obj.update_config(@test_data[:dummy_config_id], @test_data[:dummy_write_data])
  end

  it 'update_config fails on non-200 status code' do
    # Like the get above we're relying on differing data for subsequent calls
    @test_obj.update_config(@test_data[:dummy_config_id], @test_data[:dummy_write_data])
    expect { @test_obj.update_config(@test_data[:dummy_config_id], @test_data[:dummy_write_data]) }.to raise_exception
  end
end
end
Remove unnecessary include
#
# Cookbook Name:: rackspace_cloudbackup
#
# Copyright 2014, Rackspace, US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
require 'webmock/rspec'
require_relative '../../../libraries/RcbuApiWrapper.rb'
include WebMock::API
module RcbuApiWrapperTestHelpers
# Canonical fixture shared by every RcbuApiWrapper example.
def test_data
  {
    api_username: 'Test API Username',
    api_key:      'Test API Key',
    region:       'TESTREGION', # Needs to be UPCASE
    agent_id:     'TestAgentID', # I believe in the real API this needs to be an int, but our code doesn't care
    api_url:      'http://mockidentity.local/',

    # For Mocking
    api_tenant_id: 'TestAPITenantID', # Used in URL
    api_token:     'Test API Token',

    # For write tests
    dummy_write_data: { 'name' => 'dataW', 'key1' => 'dataW-1', 'key2' => 'dataW-2' },
    dummy_config_id:  'TestConfigurationID'
  }
end
module_function :test_data
# Mimics a Rackspace Identity token response just closely enough for the
# wrapper under test: a token plus a serviceCatalog containing cloudBackup.
def identity_API_data(data = test_data)
  tenant = { 'id' => data[:api_tenant_id], 'name' => data[:api_tenant_id] }

  endpoints = [
    # Our dummy region
    # WARNING: tests below assume this entry first
    {
      'region'    => data[:region],
      'tenantId'  => data[:api_tenant_id],
      'publicURL' => "https://#{data[:region]}.mockrcbu.local/v1.0/#{data[:api_tenant_id]}"
    },
    {
      'tenantId'  => data[:api_tenant_id],
      'publicURL' => "https://mockrcbu.local/v1.0/#{data[:api_tenant_id]}"
      # Note no region key: important case for testing. (The API does this.)
    },
    # A few regions just to puff up the searched data
    {
      'region'    => 'IAD',
      'tenantId'  => data[:api_tenant_id],
      'publicURL' => "https://iad.mockrcbu.local/v1.0/#{data[:api_tenant_id]}"
    },
    {
      'region'    => 'DFW',
      'tenantId'  => data[:api_tenant_id],
      'publicURL' => "https://dfw.mockrcbu.local/v1.0/#{data[:api_tenant_id]}"
    }
  ]

  {
    'access' => {
      'token' => {
        'id'      => data[:api_token],
        'expires' => '2014-02-19T01:20:15.305Z',
        'tenant'  => tenant,
        'RAX-AUTH:authenticatedBy' => ['APIKEY']
      },
      # WARNING: failure case tests below assume cloudBackup is ['serviceCatalog'][0]
      # The rest of the catalog is omitted to keep this dataset to a reasonable size.
      'serviceCatalog' => [
        {
          'name'      => 'cloudBackup',
          'endpoints' => endpoints,
          'type'      => 'rax:backup'
        }
      ]
    }
  }
end
module_function :identity_API_data
# Two successive "API responses" of dummy backup configurations. The second
# set drops data0 and adds data3, so callers can tell which response a
# lookup came from. (We only test API calls here, not use of the data.)
def rcbu_API_configurations_data
  build_entry = lambda do |index|
    {
      'name' => "data#{index}",
      'key1' => "data#{index}-1",
      'key2' => "data#{index}-2",
      'BackupConfigurationName' => "data#{index}"
    }
  end

  [
    (0..2).map { |i| build_entry.call(i) },
    (1..3).map { |i| build_entry.call(i) }
  ]
end
module_function :rcbu_API_configurations_data
# Registers a WebMock stub for the Rackspace Identity POST endpoint and
# disables all real network access. The stub answers with identity_data as
# JSON, so the wrapper under test can "authenticate" during specs.
def mock_identity_API(data = test_data, identity_data = identity_API_data)
  # Set up API mocks
  # Disallow any real connections, all connections should be mocked
  WebMock.disable_net_connect!

  # Mock the identity service
  stub_request(:post, data[:api_url]).with(:body => {
      'auth' =>
        { 'RAX-KSKEY:apiKeyCredentials' =>
          { 'username' => data[:api_username],
            'apiKey' => data[:api_key]
          }
        }
    }.to_json,
    :headers => {
      # Headers with values we care about
      'Accept' => 'application/json',
      'Content-Type' => 'application/json',
      # Headers we don't care about, but need to specify for mocking
      # Near as I can tell you can't specify a subset of headers to care about
      # So if RestClient changes the headers it sends in the future this may break.
      'Accept-Encoding' => /.*/,
      'Content-Length' => /.*/,
      'User-Agent' => /.*/
    }).
    to_return(:status => 200, :body => identity_data.to_json, :headers => {'Content-Type' => 'application/json'})
end
module_function :mock_identity_API
# Stubs the RCBU backup-configuration endpoints with WebMock:
# GET (lookup_configurations), POST (create_config) and PUT (update_config).
# The GET stub returns configurations_data[0] on the first call,
# configurations_data[1] on the second, then HTTP 400; the POST/PUT stubs
# return 200 once and then 400, enabling the failure-path specs.
#
# data                - test fixture hash (region, tenant id, token, dummy payloads)
# configurations_data - pair of configuration datasets for successive GET calls
def mock_rcbu_backup_configuration_api(data = test_data, configurations_data = rcbu_API_configurations_data)
  # Mock get for lookup_configurations
  stub_request(:get, "https://#{data[:region]}.mockrcbu.local/v1.0/#{data[:api_tenant_id]}/backup-configuration/system/#{data[:agent_id]}").
    with(:headers => {
      # Headers with values we care about
      'Accept' => 'application/json',
      'X-Auth-Token' => data[:api_token],
      # Headers we don't care about, but need to specify for mocking
      'Accept-Encoding' => /.*/,
      'User-Agent' => /.*/
    }).
    # Overload the data response for subsequent call testing.
    # Fix: use the configurations_data argument here.  Previously this
    # re-invoked rcbu_API_configurations_data, silently ignoring any
    # dataset the caller passed in.
    to_return({ :status => 200, :body => configurations_data[0].to_json, :headers => {'Content-Type' => 'application/json'} },
              { :status => 200, :body => configurations_data[1].to_json, :headers => {'Content-Type' => 'application/json'} },
              { :status => 400, :body => "", :headers => {}})
  # Mock post for create_config
  stub_request(:post, "https://#{data[:region]}.mockrcbu.local/v1.0/#{data[:api_tenant_id]}/backup-configuration/").
    with(:body => data[:dummy_write_data],
         :headers => {
           # Headers with values we care about
           'Content-Type' => 'application/json',
           'X-Auth-Token' => data[:api_token],
           # Headers we don't care about, but need to specify for mocking
           'Accept' => /.*/,
           'Accept-Encoding' => /.*/,
           'Content-Length' => /.*/,
           'User-Agent' => /.*/
         }).
    # Overload the data response for bad call testing
    to_return({:status => 200, :body => "", :headers => {}},
              {:status => 400, :body => "", :headers => {}})
  # Mock put for update_config
  stub_request(:put, "https://#{data[:region]}.mockrcbu.local/v1.0/#{data[:api_tenant_id]}/backup-configuration/#{data[:dummy_config_id]}").
    with(:body => data[:dummy_write_data],
         :headers => {
           # Headers with values we care about
           'Content-Type' => 'application/json',
           'X-Auth-Token' => data[:api_token],
           # Headers we don't care about, but need to specify for mocking
           'Accept' => /.*/,
           'Accept-Encoding' => /.*/,
           'Content-Length' => /.*/,
           'User-Agent' => /.*/
         }).
    # Overload the data response for bad call testing
    to_return({:status => 200, :body => "", :headers => {}},
              {:status => 400, :body => "", :headers => {}})
end
module_function :mock_rcbu_backup_configuration_api
end
# Specs for Opscode::Rackspace::CloudBackup::RcbuApiWrapper.  All HTTP
# traffic is intercepted by the WebMock stubs in RcbuApiWrapperTestHelpers;
# several specs rely on the stubs returning different payloads on
# successive calls (first dataset, second dataset, then HTTP 400).
describe 'RcbuApiWrapper' do
  describe 'initialize' do
    before :each do
      @test_data = RcbuApiWrapperTestHelpers.test_data
      @identity_data = RcbuApiWrapperTestHelpers.identity_API_data
      RcbuApiWrapperTestHelpers.mock_identity_API(@test_data, @identity_data)
    end
    it 'sets the agent_id class instance variable' do
      @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
      @test_obj.agent_id.should eql @test_data[:agent_id]
    end
    it 'sets the api_url class instance variable' do
      @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
      @test_obj.api_url.should eql @test_data[:api_url]
    end
    it 'sets the api token class instance variable' do
      @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
      @test_obj.token.should eql @test_data[:api_token]
    end
    it 'fails if "cloudBackup" is not in the catalog' do
      # Guard against fixture drift: this spec assumes cloudBackup is the
      # first serviceCatalog entry before mutating it.
      fail 'Assert error on test data: serviceCatalog order' if @identity_data['access']['serviceCatalog'][0]['name'] != 'cloudBackup'
      @identity_data['access']['serviceCatalog'][0]['name'] = 'notCloudBackup'
      RcbuApiWrapperTestHelpers.mock_identity_API(@test_data, @identity_data)
      expect { Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username],
                                                                   @test_data[:api_key],
                                                                   @test_data[:region],
                                                                   @test_data[:agent_id],
                                                                   @test_data[:api_url]) }.to raise_exception
    end
    it 'fails if the region is not in the cloudBackup service catalog' do
      expect { Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username],
                                                                   @test_data[:api_key],
                                                                   'Atlantis',
                                                                   @test_data[:agent_id],
                                                                   @test_data[:api_url]) }.to raise_exception
    end
    it 'sets the rcbu API URL class instance variable' do
      # Guard against fixture drift before reading catalog entries by index.
      fail 'Assert error on test data: serviceCatalog order' if @identity_data['access']['serviceCatalog'][0]['name'] != 'cloudBackup'
      fail 'Assert error on test data: endpoint order' if @identity_data['access']['serviceCatalog'][0]['endpoints'][0]['region'] != @test_data[:region]
      @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
      @test_obj.rcbu_api_url.should eql @identity_data['access']['serviceCatalog'][0]['endpoints'][0]['publicURL']
    end
  end
  describe 'lookup_configurations' do
    before :each do
      @test_data = RcbuApiWrapperTestHelpers.test_data
      @identity_data = RcbuApiWrapperTestHelpers.identity_API_data
      @configurations_data = RcbuApiWrapperTestHelpers.rcbu_API_configurations_data
      RcbuApiWrapperTestHelpers.mock_identity_API(@test_data, @identity_data)
      RcbuApiWrapperTestHelpers.mock_rcbu_backup_configuration_api(@test_data, @configurations_data)
      @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
    end
    it 'sets the configurations class instance variable' do
      @test_obj.configurations.should eql nil
      @test_obj.lookup_configurations
      @test_obj.configurations.should eql @configurations_data[0]
    end
    # This is really testing the test, but it is important to verify as locate_existing_config() tests depend on this behavior.
    it 'updates the configurations class instance variable' do
      # Rehash of the last test to get to proper state
      @test_obj.configurations.should eql nil
      @test_obj.lookup_configurations
      @test_obj.configurations.should eql @configurations_data[0]
      # Content of the new test: the mock returns the second dataset on the
      # second GET, so a fresh lookup must replace the cached configurations.
      @test_obj.lookup_configurations
      @test_obj.configurations.should eql @configurations_data[1]
    end
    it 'fails on bad response code' do
      # The mock's third GET response is HTTP 400.
      @test_obj.configurations.should eql nil
      @test_obj.lookup_configurations
      @test_obj.configurations.should eql @configurations_data[0]
      @test_obj.lookup_configurations
      @test_obj.configurations.should eql @configurations_data[1]
      expect { @test_obj.lookup_configurations }.to raise_error
    end
  end
  describe 'locate_existing_config' do
    before :each do
      @test_data = RcbuApiWrapperTestHelpers.test_data
      @identity_data = RcbuApiWrapperTestHelpers.identity_API_data
      @configurations_data = RcbuApiWrapperTestHelpers.rcbu_API_configurations_data
      RcbuApiWrapperTestHelpers.mock_identity_API(@test_data, @identity_data)
      RcbuApiWrapperTestHelpers.mock_rcbu_backup_configuration_api(@test_data, @configurations_data)
      @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
    end
    it 'looks up configurations when configurations class instance variable is nil' do
      @test_obj.configurations.should eql nil
      @test_obj.locate_existing_config('data0').should eql @configurations_data[0][0]
    end
    it 'only looks up configurations once when configurations class instance variable is nil' do
      @test_obj.configurations.should eql nil
      # This relies on the mock returning more data on the second call: data3 shouldn't be present in the first lookup
      @test_obj.locate_existing_config('data3').should eql nil
    end
    it 'returns data from configurations class instance variable when configurations is not nil' do
      @test_obj.configurations.should eql nil
      @test_obj.lookup_configurations
      @test_obj.configurations.should eql @configurations_data[0]
      # This relies on the mock returning different data on the second call: data0 shouldn't be present in the second lookup
      # so this should expose unnecessary lookups
      @test_obj.locate_existing_config('data0').should eql @configurations_data[0][0]
    end
    it 'performs a fresh lookup if desired value is not in configurations class instance variable' do
      @test_obj.configurations.should eql nil
      @test_obj.lookup_configurations
      @test_obj.configurations.should eql @configurations_data[0]
      # data3 only exists in the second dataset, so a cache miss must trigger a re-lookup.
      @test_obj.locate_existing_config('data3').should eql @configurations_data[1][2]
    end
    it 'returns nil on no match' do
      @test_obj.locate_existing_config('bogus').should eql nil
    end
  end
  describe 'config writer' do
    before :each do
      @test_data = RcbuApiWrapperTestHelpers.test_data
      @identity_data = RcbuApiWrapperTestHelpers.identity_API_data
      @configurations_data = RcbuApiWrapperTestHelpers.rcbu_API_configurations_data
      RcbuApiWrapperTestHelpers.mock_identity_API(@test_data, @identity_data)
      RcbuApiWrapperTestHelpers.mock_rcbu_backup_configuration_api(@test_data, @configurations_data)
      @test_obj = Opscode::Rackspace::CloudBackup::RcbuApiWrapper.new(@test_data[:api_username], @test_data[:api_key], @test_data[:region], @test_data[:agent_id], @test_data[:api_url])
    end
    it 'create_config posts the configuration to the API' do
      @test_obj.create_config(@test_data[:dummy_write_data])
    end
    it 'create_config fails on non-200 status code' do
      # Like the get above we're relying on differing responses for subsequent calls (200 then 400)
      @test_obj.create_config(@test_data[:dummy_write_data])
      expect { @test_obj.create_config(@test_data[:dummy_write_data]) }.to raise_exception
    end
    it 'update_config puts the configuration to the API' do
      @test_obj.update_config(@test_data[:dummy_config_id], @test_data[:dummy_write_data])
    end
    it 'update_config fails on non-200 status code' do
      # Like the get above we're relying on differing responses for subsequent calls (200 then 400)
      @test_obj.update_config(@test_data[:dummy_config_id], @test_data[:dummy_write_data])
      expect { @test_obj.update_config(@test_data[:dummy_config_id], @test_data[:dummy_write_data]) }.to raise_exception
    end
  end
end
|
require 'zlib'
# TrashRecord archives a destroyed ActiveRecord model (together with its
# dependent associations) as a zlib-compressed Marshal dump, keyed by
# trashable_type/trashable_id, so the record can later be restored.
class TrashRecord < ActiveRecord::Base
  # Create a new trash record for the provided record.
  def initialize (record)
    super({})
    # Store the base class name so STI subclasses share one trash namespace.
    self.trashable_type = record.class.base_class.name
    self.trashable_id = record.id
    self.data = Zlib::Deflate.deflate(Marshal.dump(serialize_attributes(record)))
  end
  # Restore a trashed record into an object. The record will not be saved.
  def restore
    restore_class = self.trashable_type.constantize
    # Check if we have a type field, if yes, assume single table inheritance and restore the actual class instead of the stored base class
    sti_type = self.trashable_attributes[restore_class.inheritance_column]
    if sti_type
      begin
        restore_class = self.trashable_type.send(:type_name_with_module, sti_type).constantize
      rescue NameError
        # Seems our assumption was wrong and we have no STI
      end
    end
    attrs, association_attrs = attributes_and_associations(restore_class, self.trashable_attributes)
    record = restore_class.new
    # Assign plain attributes one by one, then rebuild associations.
    attrs.each_pair do |key, value|
      record.send("#{key}=", value)
    end
    association_attrs.each_pair do |association, attribute_values|
      restore_association(record, association, attribute_values)
    end
    return record
  end
  # Restore a trashed record into an object, save it, and delete the trash entry.
  def restore!
    record = self.restore
    record.save!
    self.destroy
    return record
  end
  # Attributes of the trashed record as a hash (nil when no data is stored).
  def trashable_attributes
    return nil unless self.data
    # Inflate may fail for rows written before compression was introduced;
    # fall back to treating the raw column as the Marshal payload.
    uncompressed = Zlib::Inflate.inflate(self.data) rescue uncompressed = self.data # backward compatibility with uncompressed data
    Marshal.load(uncompressed)
  end
  # Find a trash entry by class and id.
  def self.find_trash (klass, id)
    find(:all, :conditions => {:trashable_type => klass.base_class.name, :trashable_id => id}).last
  end
  # Empty the trash by deleting records older than the specified maximum age. You can optionally specify
  # :only or :except in the options hash with a class or array of classes as the value to limit the trashed
  # classes which should be cleared. This is useful if you want to keep different classes for different
  # lengths of time.
  def self.empty_trash (max_age, options = {})
    sql = 'created_at <= ?'
    args = [max_age.ago]
    vals = options[:only] || options[:except]
    if vals
      vals = [vals] unless vals.kind_of?(Array)
      # Build an IN (or NOT IN, for :except) clause with one placeholder per class.
      sql << ' AND trashable_type'
      sql << ' NOT' unless options[:only]
      sql << " IN (#{vals.collect{|v| '?'}.join(', ')})"
      args.concat(vals.collect{|v| v.kind_of?(Class) ? v.base_class.name : v.to_s.camelize})
    end
    delete_all([sql] + args)
  end
  private
  # Build the attribute hash to be dumped, recursively inlining dependent
  # has_many/has_one associations and habtm id lists.  already_serialized
  # guards against cycles between mutually dependent records.
  def serialize_attributes (record, already_serialized = {})
    return if already_serialized["#{record.class}.#{record.id}"]
    attrs = record.attributes.dup
    already_serialized["#{record.class}.#{record.id}"] = true
    record.class.reflections.values.each do |association|
      if association.macro == :has_many and [:destroy, :delete_all].include?(association.options[:dependent])
        attrs[association.name] = record.send(association.name).collect{|r| serialize_attributes(r, already_serialized)}
      elsif association.macro == :has_one and [:destroy, :delete_all].include?(association.options[:dependent])
        associated = record.send(association.name)
        attrs[association.name] = serialize_attributes(associated, already_serialized) unless associated.nil?
      elsif association.macro == :has_and_belongs_to_many
        attrs[association.name] = record.send("#{association.name.to_s.singularize}_ids".to_sym)
      end
    end
    return attrs
  end
  # Split a serialized hash into [plain attributes, association attributes]
  # based on klass's reflections.
  def attributes_and_associations (klass, hash)
    attrs = {}
    association_attrs = {}
    hash.each_pair do |key, value|
      if klass.reflections.include?(key)
        association_attrs[key] = value
      else
        attrs[key] = value
      end
    end
    return [attrs, association_attrs]
  end
  # Recursively rebuild one association on record from its serialized
  # attributes.  A has_many serialized as an array recurses one element
  # at a time; habtm is restored by assigning its id list.
  def restore_association (record, association, attributes)
    reflection = record.class.reflections[association]
    associated_record = nil
    if reflection.macro == :has_many
      if attributes.kind_of?(Array)
        attributes.each do |association_attributes|
          restore_association(record, association, association_attributes)
        end
      else
        associated_record = record.send(association).build
      end
    elsif reflection.macro == :has_one
      associated_record = reflection.klass.new
      record.send("#{association}=", associated_record)
    elsif reflection.macro == :has_and_belongs_to_many
      record.send("#{association.to_s.singularize}_ids=", attributes)
      return
    end
    return unless associated_record
    attrs, association_attrs = attributes_and_associations(associated_record.class, attributes)
    attrs.each_pair do |key, value|
      associated_record.send("#{key}=", value)
    end
    association_attrs.each_pair do |key, values|
      restore_association(associated_record, key, values)
    end
  end
end
Changed how TrashRecord#restore instantiates the restore_class so that attributes are set before after_initialize callbacks run.
require 'zlib'
# TrashRecord archives a destroyed ActiveRecord model (together with its
# dependent associations) as a zlib-compressed Marshal dump, keyed by
# trashable_type/trashable_id, so the record can later be restored.
class TrashRecord < ActiveRecord::Base
  # Create a new trash record for the provided record.
  def initialize (record)
    super({})
    # Store the base class name so STI subclasses share one trash namespace.
    self.trashable_type = record.class.base_class.name
    self.trashable_id = record.id
    self.data = Zlib::Deflate.deflate(Marshal.dump(serialize_attributes(record)))
  end
  # Restore a trashed record into an object. The record will not be saved.
  def restore
    restore_class = self.trashable_type.constantize
    # Check if we have a type field, if yes, assume single table inheritance and restore the actual class instead of the stored base class
    sti_type = self.trashable_attributes[restore_class.inheritance_column]
    if sti_type
      begin
        restore_class = self.trashable_type.send(:type_name_with_module, sti_type).constantize
      rescue NameError
        # Seems our assumption was wrong and we have no STI
      end
    end
    attrs, association_attrs = attributes_and_associations(restore_class, self.trashable_attributes)
    # Pass the plain attributes to the constructor so they are already set
    # when after_initialize callbacks fire, then rebuild the associations.
    record = restore_class.new attrs
    association_attrs.each_pair do |association, attribute_values|
      restore_association(record, association, attribute_values)
    end
    return record
  end
  # Restore a trashed record into an object, save it, and delete the trash entry.
  def restore!
    record = self.restore
    record.save!
    self.destroy
    return record
  end
  # Attributes of the trashed record as a hash (nil when no data is stored).
  def trashable_attributes
    return nil unless self.data
    # Inflate may fail for rows written before compression was introduced;
    # fall back to treating the raw column as the Marshal payload.
    uncompressed = Zlib::Inflate.inflate(self.data) rescue uncompressed = self.data # backward compatibility with uncompressed data
    Marshal.load(uncompressed)
  end
  # Find a trash entry by class and id.
  def self.find_trash (klass, id)
    find(:all, :conditions => {:trashable_type => klass.base_class.name, :trashable_id => id}).last
  end
  # Empty the trash by deleting records older than the specified maximum age. You can optionally specify
  # :only or :except in the options hash with a class or array of classes as the value to limit the trashed
  # classes which should be cleared. This is useful if you want to keep different classes for different
  # lengths of time.
  def self.empty_trash (max_age, options = {})
    sql = 'created_at <= ?'
    args = [max_age.ago]
    vals = options[:only] || options[:except]
    if vals
      vals = [vals] unless vals.kind_of?(Array)
      # Build an IN (or NOT IN, for :except) clause with one placeholder per class.
      sql << ' AND trashable_type'
      sql << ' NOT' unless options[:only]
      sql << " IN (#{vals.collect{|v| '?'}.join(', ')})"
      args.concat(vals.collect{|v| v.kind_of?(Class) ? v.base_class.name : v.to_s.camelize})
    end
    delete_all([sql] + args)
  end
  private
  # Build the attribute hash to be dumped, recursively inlining dependent
  # has_many/has_one associations and habtm id lists.  already_serialized
  # guards against cycles between mutually dependent records.
  def serialize_attributes (record, already_serialized = {})
    return if already_serialized["#{record.class}.#{record.id}"]
    attrs = record.attributes.dup
    already_serialized["#{record.class}.#{record.id}"] = true
    record.class.reflections.values.each do |association|
      if association.macro == :has_many and [:destroy, :delete_all].include?(association.options[:dependent])
        attrs[association.name] = record.send(association.name).collect{|r| serialize_attributes(r, already_serialized)}
      elsif association.macro == :has_one and [:destroy, :delete_all].include?(association.options[:dependent])
        associated = record.send(association.name)
        attrs[association.name] = serialize_attributes(associated, already_serialized) unless associated.nil?
      elsif association.macro == :has_and_belongs_to_many
        attrs[association.name] = record.send("#{association.name.to_s.singularize}_ids".to_sym)
      end
    end
    return attrs
  end
  # Split a serialized hash into [plain attributes, association attributes]
  # based on klass's reflections.
  def attributes_and_associations (klass, hash)
    attrs = {}
    association_attrs = {}
    hash.each_pair do |key, value|
      if klass.reflections.include?(key)
        association_attrs[key] = value
      else
        attrs[key] = value
      end
    end
    return [attrs, association_attrs]
  end
  # Recursively rebuild one association on record from its serialized
  # attributes.  A has_many serialized as an array recurses one element
  # at a time; habtm is restored by assigning its id list.
  def restore_association (record, association, attributes)
    reflection = record.class.reflections[association]
    associated_record = nil
    if reflection.macro == :has_many
      if attributes.kind_of?(Array)
        attributes.each do |association_attributes|
          restore_association(record, association, association_attributes)
        end
      else
        associated_record = record.send(association).build
      end
    elsif reflection.macro == :has_one
      associated_record = reflection.klass.new
      record.send("#{association}=", associated_record)
    elsif reflection.macro == :has_and_belongs_to_many
      record.send("#{association.to_s.singularize}_ids=", attributes)
      return
    end
    return unless associated_record
    attrs, association_attrs = attributes_and_associations(associated_record.class, attributes)
    attrs.each_pair do |key, value|
      associated_record.send("#{key}=", value)
    end
    association_attrs.each_pair do |key, values|
      restore_association(associated_record, key, values)
    end
  end
end
|
require "spec_helper"
# Specs for the ApplicationHelper::Dialogs tag-option helpers.  Each helper
# builds the HTML options hash for one dialog field type: read-only fields
# are rendered disabled with an explanatory title, while editable fields
# carry data-miq_* attributes that wire up the client-side observers.
describe ApplicationHelper do
  context "::Dialogs" do
    describe "#dialog_dropdown_select_values" do
      before do
        val_array = [["cat", "Cat"], ["dog", "Dog"]]
        # The helper returns [label, value] pairs, i.e. the reverse of the
        # [value, label] pairs the field is configured with.
        @val_array_reversed = val_array.collect(&:reverse)
        @field = DialogFieldDropDownList.new(:values => val_array)
      end
      it "not required" do
        @field.required = false
        values = helper.dialog_dropdown_select_values(@field, nil)
        values.should == [["<None>", nil]] + @val_array_reversed
      end
      it "required, nil selected" do
        @field.required = true
        values = helper.dialog_dropdown_select_values(@field, nil)
        values.should == [["<Choose>", nil]] + @val_array_reversed
      end
      it "required, non-nil selected" do
        @field.required = true
        values = helper.dialog_dropdown_select_values(@field, "cat")
        values.should == @val_array_reversed
      end
    end
    describe "#textbox_tag_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.textbox_tag_options(dialog_field, "url")).to eq(
            :maxlength => 50,
            :class => "dynamic-text-box-100",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a data-miq-observe" do
          expect(helper.textbox_tag_options(dialog_field, "url")).to eq(
            :maxlength => 50,
            :class => "dynamic-text-box-100",
            "data-miq_observe" => "{\"interval\":\".5\",\"url\":\"url\",\"auto_refresh\":true,\"field_id\":\"100\"}"
          )
        end
      end
    end
    describe "#textarea_tag_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.textarea_tag_options(dialog_field, "url")).to eq(
            :class => "dynamic-text-area-100",
            :maxlength => 8192,
            :size => "50x6",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a data-miq-observe" do
          expect(helper.textarea_tag_options(dialog_field, "url")).to eq(
            :class => "dynamic-text-area-100",
            :maxlength => 8192,
            :size => "50x6",
            "data-miq_observe" => "{\"interval\":\".5\",\"url\":\"url\"}"
          )
        end
      end
    end
    describe "#checkbox_tag_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.checkbox_tag_options(dialog_field, "url")).to eq(
            :class => "dynamic-checkbox-100",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a few data-miq attributes" do
          expect(helper.checkbox_tag_options(dialog_field, "url")).to eq(
            :class => "dynamic-checkbox-100",
            "data-miq_sparkle_on" => true,
            "data-miq_sparkle_off" => true,
            "data-miq_observe_checkbox" => "{\"url\":\"url\"}"
          )
        end
      end
    end
    describe "#date_tag_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.date_tag_options(dialog_field, "url")).to eq(
            :class => "css1 dynamic-date-100",
            :readonly => "true",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a few data-miq attributes" do
          expect(helper.date_tag_options(dialog_field, "url")).to eq(
            :class => "css1 dynamic-date-100",
            :readonly => "true",
            "data-miq_observe_date" => "{\"url\":\"url\"}"
          )
        end
      end
    end
    describe "#time_tag_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.time_tag_options(dialog_field, "url", "hour_or_min")).to eq(
            :class => "dynamic-date-hour_or_min-100",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a few data-miq attributes" do
          expect(helper.time_tag_options(dialog_field, "url", "hour_or_min")).to eq(
            :class => "dynamic-date-hour_or_min-100",
            "data-miq_observe" => "{\"url\":\"url\"}"
          )
        end
      end
    end
    describe "#drop_down_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.drop_down_options(dialog_field, "url")).to eq(
            :class => "dynamic-drop-down-100",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a few data-miq attributes" do
          expect(helper.drop_down_options(dialog_field, "url")).to eq(
            :class => "dynamic-drop-down-100",
            "data-miq_sparkle_on" => true,
            "data-miq_sparkle_off" => true,
            "data-miq_observe" => "{\"url\":\"url\"}"
          )
        end
      end
    end
    describe "#radio_options" do
      let(:dialog_field) do
        active_record_instance_double(
          "DialogField",
          :default_value => "some_value",
          :name => "field_name",
          :id => "100",
          :read_only => read_only,
          :value => value
        )
      end
      context "when the field is read_only" do
        let(:read_only) { true }
        context "when the current value is equal to the default value" do
          let(:value) { "some_value" }
          it "returns the tag options with a disabled true and checked" do
            expect(helper.radio_options(dialog_field, "url", value)).to eq(
              :type => "radio",
              :id => "100",
              :value => "some_value",
              :name => "field_name",
              :checked => '',
              :disabled => true,
              :title => "This element is disabled because it is read only"
            )
          end
        end
        context "when the current value is not equal to the default value" do
          let(:value) { "bogus" }
          it "returns the tag options with a disabled true and checked" do
            expect(helper.radio_options(dialog_field, "url", value)).to eq(
              :type => "radio",
              :id => "100",
              :value => "bogus",
              :name => "field_name",
              :checked => nil,
              :disabled => true,
              :title => "This element is disabled because it is read only"
            )
          end
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        context "when the current value is equal to the default value" do
          let(:value) { "some_value" }
          it "returns the tag options with a disabled true and checked" do
            expect(helper.radio_options(dialog_field, "url", value)).to eq(
              :type => "radio",
              :id => "100",
              :value => "some_value",
              :name => "field_name",
              :checked => '',
              :onclick => "$.ajax({beforeSend:function(request){miqSparkle(true);}, complete:function(request){miqSparkle(false);}, data:miqSerializeForm('dynamic-radio-100'), dataType:'script', type:'post', url:'url'})"
            )
          end
        end
        context "when the current value is not equal to the default value" do
          let(:value) { "bogus" }
          it "returns the tag options with a disabled true and checked" do
            expect(helper.radio_options(dialog_field, "url", value)).to eq(
              :type => "radio",
              :id => "100",
              :value => "bogus",
              :name => "field_name",
              :checked => nil,
              :onclick => "$.ajax({beforeSend:function(request){miqSparkle(true);}, complete:function(request){miqSparkle(false);}, data:miqSerializeForm('dynamic-radio-100'), dataType:'script', type:'post', url:'url'})"
            )
          end
        end
      end
    end
  end
end
Set up auto_refresh for text area boxes
https://trello.com/c/wgsyusq2
(transferred from ManageIQ/manageiq@6f6aed8c40a33418afdceb8e6ce3f5f57e09811c)
require "spec_helper"
# Specs for the ApplicationHelper::Dialogs tag-option helpers.  Each helper
# builds the HTML options hash for one dialog field type: read-only fields
# are rendered disabled with an explanatory title, while editable fields
# carry data-miq_* attributes that wire up the client-side observers.
# In this revision the textarea observe payload also includes
# auto_refresh/field_id, matching the textbox helper.
describe ApplicationHelper do
  context "::Dialogs" do
    describe "#dialog_dropdown_select_values" do
      before do
        val_array = [["cat", "Cat"], ["dog", "Dog"]]
        # The helper returns [label, value] pairs, i.e. the reverse of the
        # [value, label] pairs the field is configured with.
        @val_array_reversed = val_array.collect(&:reverse)
        @field = DialogFieldDropDownList.new(:values => val_array)
      end
      it "not required" do
        @field.required = false
        values = helper.dialog_dropdown_select_values(@field, nil)
        values.should == [["<None>", nil]] + @val_array_reversed
      end
      it "required, nil selected" do
        @field.required = true
        values = helper.dialog_dropdown_select_values(@field, nil)
        values.should == [["<Choose>", nil]] + @val_array_reversed
      end
      it "required, non-nil selected" do
        @field.required = true
        values = helper.dialog_dropdown_select_values(@field, "cat")
        values.should == @val_array_reversed
      end
    end
    describe "#textbox_tag_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.textbox_tag_options(dialog_field, "url")).to eq(
            :maxlength => 50,
            :class => "dynamic-text-box-100",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a data-miq-observe" do
          expect(helper.textbox_tag_options(dialog_field, "url")).to eq(
            :maxlength => 50,
            :class => "dynamic-text-box-100",
            "data-miq_observe" => "{\"interval\":\".5\",\"url\":\"url\",\"auto_refresh\":true,\"field_id\":\"100\"}"
          )
        end
      end
    end
    describe "#textarea_tag_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.textarea_tag_options(dialog_field, "url")).to eq(
            :class => "dynamic-text-area-100",
            :maxlength => 8192,
            :size => "50x6",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a data-miq-observe" do
          # Textareas now carry auto_refresh/field_id like textboxes.
          expect(helper.textarea_tag_options(dialog_field, "url")).to eq(
            :class => "dynamic-text-area-100",
            :maxlength => 8192,
            :size => "50x6",
            "data-miq_observe" => "{\"interval\":\".5\",\"url\":\"url\",\"auto_refresh\":true,\"field_id\":\"100\"}"
          )
        end
      end
    end
    describe "#checkbox_tag_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.checkbox_tag_options(dialog_field, "url")).to eq(
            :class => "dynamic-checkbox-100",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a few data-miq attributes" do
          expect(helper.checkbox_tag_options(dialog_field, "url")).to eq(
            :class => "dynamic-checkbox-100",
            "data-miq_sparkle_on" => true,
            "data-miq_sparkle_off" => true,
            "data-miq_observe_checkbox" => "{\"url\":\"url\"}"
          )
        end
      end
    end
    describe "#date_tag_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.date_tag_options(dialog_field, "url")).to eq(
            :class => "css1 dynamic-date-100",
            :readonly => "true",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a few data-miq attributes" do
          expect(helper.date_tag_options(dialog_field, "url")).to eq(
            :class => "css1 dynamic-date-100",
            :readonly => "true",
            "data-miq_observe_date" => "{\"url\":\"url\"}"
          )
        end
      end
    end
    describe "#time_tag_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.time_tag_options(dialog_field, "url", "hour_or_min")).to eq(
            :class => "dynamic-date-hour_or_min-100",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a few data-miq attributes" do
          expect(helper.time_tag_options(dialog_field, "url", "hour_or_min")).to eq(
            :class => "dynamic-date-hour_or_min-100",
            "data-miq_observe" => "{\"url\":\"url\"}"
          )
        end
      end
    end
    describe "#drop_down_options" do
      let(:dialog_field) { active_record_instance_double("DialogField", :id => "100", :read_only => read_only) }
      context "when the field is read_only" do
        let(:read_only) { true }
        it "returns the tag options with a disabled true" do
          expect(helper.drop_down_options(dialog_field, "url")).to eq(
            :class => "dynamic-drop-down-100",
            :disabled => true,
            :title => "This element is disabled because it is read only"
          )
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        it "returns the tag options with a few data-miq attributes" do
          expect(helper.drop_down_options(dialog_field, "url")).to eq(
            :class => "dynamic-drop-down-100",
            "data-miq_sparkle_on" => true,
            "data-miq_sparkle_off" => true,
            "data-miq_observe" => "{\"url\":\"url\"}"
          )
        end
      end
    end
    describe "#radio_options" do
      let(:dialog_field) do
        active_record_instance_double(
          "DialogField",
          :default_value => "some_value",
          :name => "field_name",
          :id => "100",
          :read_only => read_only,
          :value => value
        )
      end
      context "when the field is read_only" do
        let(:read_only) { true }
        context "when the current value is equal to the default value" do
          let(:value) { "some_value" }
          it "returns the tag options with a disabled true and checked" do
            expect(helper.radio_options(dialog_field, "url", value)).to eq(
              :type => "radio",
              :id => "100",
              :value => "some_value",
              :name => "field_name",
              :checked => '',
              :disabled => true,
              :title => "This element is disabled because it is read only"
            )
          end
        end
        context "when the current value is not equal to the default value" do
          let(:value) { "bogus" }
          it "returns the tag options with a disabled true and checked" do
            expect(helper.radio_options(dialog_field, "url", value)).to eq(
              :type => "radio",
              :id => "100",
              :value => "bogus",
              :name => "field_name",
              :checked => nil,
              :disabled => true,
              :title => "This element is disabled because it is read only"
            )
          end
        end
      end
      context "when the dialog field is not read only" do
        let(:read_only) { false }
        context "when the current value is equal to the default value" do
          let(:value) { "some_value" }
          it "returns the tag options with a disabled true and checked" do
            expect(helper.radio_options(dialog_field, "url", value)).to eq(
              :type => "radio",
              :id => "100",
              :value => "some_value",
              :name => "field_name",
              :checked => '',
              :onclick => "$.ajax({beforeSend:function(request){miqSparkle(true);}, complete:function(request){miqSparkle(false);}, data:miqSerializeForm('dynamic-radio-100'), dataType:'script', type:'post', url:'url'})"
            )
          end
        end
        context "when the current value is not equal to the default value" do
          let(:value) { "bogus" }
          it "returns the tag options with a disabled true and checked" do
            expect(helper.radio_options(dialog_field, "url", value)).to eq(
              :type => "radio",
              :id => "100",
              :value => "bogus",
              :name => "field_name",
              :checked => nil,
              :onclick => "$.ajax({beforeSend:function(request){miqSparkle(true);}, complete:function(request){miqSparkle(false);}, data:miqSerializeForm('dynamic-radio-100'), dataType:'script', type:'post', url:'url'})"
            )
          end
        end
      end
    end
  end
end
|
require_relative '../spec_helper'
require 'rubygems'
require 'rubygems/command_manager'
# Regression specs for CVE-2019-8325: RubyGems must sanitise terminal
# escape sequences out of error messages before printing them.  Each spec
# injects the payload "\e]2;nyan\a" (which would otherwise set the
# terminal title) and expects it to arrive with the escape characters
# replaced by '.'.
describe "CVE-2019-8325 is resisted by" do
  describe "sanitising error message components" do
    it "for the 'while executing' message" do
      manager = Gem::CommandManager.new
      # Force command processing to raise with an escape-sequence payload.
      def manager.process_args(args, build_args)
        raise StandardError, "\e]2;nyan\a"
      end
      # Stub out exit handling so the spec does not terminate the process.
      def manager.terminate_interaction(n)
      end
      manager.should_receive(:alert_error).with("While executing gem ... (StandardError)\n .]2;nyan.")
      manager.run nil, nil
    end
    it "for the 'invalid option' message" do
      manager = Gem::CommandManager.new
      # Stub out exit handling so the spec does not terminate the process.
      def manager.terminate_interaction(n)
      end
      manager.should_receive(:alert_error).with("Invalid option: --.]2;nyan.. See 'gem --help'.")
      manager.process_args ["--\e]2;nyan\a"], nil
    end
    it "for the 'loading command' message" do
      manager = Gem::CommandManager.new
      # Make the require for the command file fail so the command name
      # itself is interpolated into the error message.
      def manager.require(x)
        raise 'foo'
      end
      manager.should_receive(:alert_error).with("Loading command: .]2;nyan. (RuntimeError)\n\tfoo")
      manager.send :load_and_instantiate, "\e]2;nyan\a"
    end
  end
end
Silence backtraces from cve_2019_8325_spec.rb
Since the change at f310ac1cb2964f635f582862763b2155aacf2c12 to show
backtraces by default, this test started to print them.
As the backtraces are not the subject of this test, silence them by
using Gem::SilentUI.
require_relative '../spec_helper'
require 'rubygems'
require 'rubygems/command_manager'

# Regression specs for CVE-2019-8325: RubyGems must sanitise terminal escape
# sequences (here the OSC title-set sequence "\e]2;nyan\a") out of
# attacker-influenced text before printing error messages. Each expectation
# asserts that the escape bytes have been replaced with '.'.
describe "CVE-2019-8325 is resisted by" do
  describe "sanitising error message components" do
    # Mixin that installs Gem::SilentUI on the manager so these examples
    # do not print backtraces to the spec output.
    silent_ui = Module.new do
      attr_accessor :ui
      def self.extended(obj)
        obj.ui = Gem::SilentUI.new
      end
    end

    it "for the 'while executing' message" do
      manager = Gem::CommandManager.new
      manager.extend(silent_ui)
      # Force command execution to raise with an escape sequence in the message.
      def manager.process_args(args, build_args)
        raise StandardError, "\e]2;nyan\a"
      end
      # No-op so the error path does not terminate the spec process.
      def manager.terminate_interaction(n)
      end
      manager.should_receive(:alert_error).with("While executing gem ... (StandardError)\n .]2;nyan.")
      manager.run nil, nil
    end

    it "for the 'invalid option' message" do
      manager = Gem::CommandManager.new
      def manager.terminate_interaction(n)
      end
      manager.should_receive(:alert_error).with("Invalid option: --.]2;nyan.. See 'gem --help'.")
      manager.process_args ["--\e]2;nyan\a"], nil
    end

    it "for the 'loading command' message" do
      manager = Gem::CommandManager.new
      manager.extend(silent_ui)
      # Make command loading fail so the "Loading command" error is produced.
      def manager.require(x)
        raise 'foo'
      end
      manager.should_receive(:alert_error).with("Loading command: .]2;nyan. (RuntimeError)\n\tfoo")
      manager.send :load_and_instantiate, "\e]2;nyan\a"
    end
  end
end
|
require 'rails_helper'

# View specs for the all-members harvests RSS feed.
describe 'harvests/index.rss.haml' do
  before(:each) do
    controller.stub(:current_user) { nil }
    @member = FactoryBot.create(:member)
    @tomato = FactoryBot.create(:tomato)
    @maize = FactoryBot.create(:maize)
    @pp = FactoryBot.create(:plant_part)
    page = 1
    per_page = 2
    total_entries = 2
    # The view expects a paginated list, so wrap the two harvests in a
    # WillPaginate collection.
    harvests = WillPaginate::Collection.create(page, per_page, total_entries) do |pager|
      pager.replace([
                      FactoryBot.create(:harvest,
                                        crop: @tomato,
                                        owner: @member),
                      FactoryBot.create(:harvest,
                                        crop: @maize,
                                        plant_part: @pp,
                                        owner: @member,
                                        quantity: 2)
                    ])
    end
    assign(:harvests, harvests)
    render
  end

  it 'shows RSS feed title' do
    # Modern expect syntax instead of the deprecated `.should`.
    expect(rendered).to have_content "Recent harvests from all members"
  end

  it "displays crop's name in title" do
    assign(:crop, @tomato)
    render
    # @tomato.name is already a String; the old "#{...to_s}" wrapping was
    # redundant interpolation.
    expect(rendered).to have_content @tomato.name
  end

  it 'shows formatted content of harvest posts' do
    expect(rendered).to have_content "<p>Quantity: "
  end
end
Fix linter issues
require 'rails_helper'

# View specs for the all-members harvests RSS feed.
describe 'harvests/index.rss.haml' do
  before(:each) do
    controller.stub(:current_user) { nil }
    @member = FactoryBot.create(:member)
    @tomato = FactoryBot.create(:tomato)
    @maize = FactoryBot.create(:maize)
    @pp = FactoryBot.create(:plant_part)
    page = 1
    per_page = 2
    total_entries = 2
    # The view expects a paginated list, so wrap the two harvests in a
    # WillPaginate collection.
    harvests = WillPaginate::Collection.create(page, per_page, total_entries) do |pager|
      pager.replace([
                      FactoryBot.create(:harvest,
                                        crop: @tomato,
                                        owner: @member),
                      FactoryBot.create(:harvest,
                                        crop: @maize,
                                        plant_part: @pp,
                                        owner: @member,
                                        quantity: 2)
                    ])
    end
    assign(:harvests, harvests)
    render
  end

  it 'shows RSS feed title' do
    # Converted from `rendered.should` to `expect` for consistency with the
    # other examples in this file.
    expect(rendered).to have_content "Recent harvests from all members"
  end

  it "displays crop's name in title" do
    assign(:crop, @tomato)
    render
    expect(rendered).to have_content @tomato.name
  end

  it 'shows formatted content of harvest posts' do
    expect(rendered).to have_content "<p>Quantity: "
  end
end
|
require 'rails_helper'

# View spec for the projects index page. Note: the second example uses
# Capybara's `visit`/`page`, so it exercises the full request path rather
# than rendering the template in isolation.
RSpec.describe 'projects/index.html.erb', type: :view do
  before(:each) do
    assign(:projects, [
      FactoryGirl.create(:project),
      FactoryGirl.create(:project)
    ])
    @user = FactoryGirl.create(:user)
    login_as @user
  end

  it 'renders a list of projects' do
    render
  end

  it 'shows all details about a project' do
    chair = FactoryGirl.create(:chair)
    project = FactoryGirl.create(:project, chair_id: chair.id)
    # An active, public project should show the "active"/"public" labels...
    project.update(status:true)
    project.update(public:true)
    visit projects_path
    expect(page).to have_content(project.title)
    expect(page).to have_content(l(project.created_at))
    expect(page).to have_content(chair.name)
    expect(page).to have_content(I18n.t('.public', default:'public'))
    expect(page).to have_content(I18n.t('.active', default:'active'))
    # ...and an inactive, private project the opposite labels.
    project.update(status:false)
    project.update(public:false)
    visit projects_path
    expect(page).to have_content(I18n.t('.inactive', default:'inactive'))
    expect(page).to have_content(I18n.t('.private', default:'private'))
  end
end
Remove unnecessary changes
require 'rails_helper'

# View spec for the projects index page. Note: the second example uses
# Capybara's `visit`/`page`, so it exercises the full request path rather
# than rendering the template in isolation.
RSpec.describe 'projects/index', type: :view do
  before(:each) do
    assign(:projects, [
      FactoryGirl.create(:project),
      FactoryGirl.create(:project)
    ])
    @user = FactoryGirl.create(:user)
    login_as @user
  end

  it 'renders a list of projects' do
    render
  end

  it 'shows all details about a project' do
    chair = FactoryGirl.create(:chair)
    project = FactoryGirl.create(:project, chair_id: chair.id)
    # An active, public project should show the "active"/"public" labels...
    project.update(status:true)
    project.update(public:true)
    visit projects_path
    expect(page).to have_content(project.title)
    expect(page).to have_content(l(project.created_at))
    expect(page).to have_content(chair.name)
    expect(page).to have_content(I18n.t('.public', default:'public'))
    expect(page).to have_content(I18n.t('.active', default:'active'))
    # ...and an inactive, private project the opposite labels.
    project.update(status:false)
    project.update(public:false)
    visit projects_path
    expect(page).to have_content(I18n.t('.inactive', default:'inactive'))
    expect(page).to have_content(I18n.t('.private', default:'private'))
  end
end
require "spec_helper"
require "sidekiq/testing"

# Specs for PublishManualWorker, the Sidekiq job that publishes a manual and
# records failures on its ManualPublishTask.
RSpec.describe PublishManualWorker do
  after do
    # Reset request headers captured by the GDS API middleware between examples.
    GdsApi::GovukHeaders.clear_headers
  end

  it "places job in the default queue" do
    Sidekiq::Testing.fake! do
      PublishManualWorker.perform_async("1")
      expect(PublishManualWorker.jobs.size).to eq(1)
      expect(PublishManualWorker.jobs.first.fetch("queue")).to eq("default")
    end
  end

  it "repopulates worker request headers" do
    task = double(:task, start!: nil, finish!: nil, manual_id: 1, version_number: 2)
    expect(ManualPublishTask).to receive(:find).with("1").and_return(task)
    expect(Manual::PublishService).to receive(:new).and_return(double(:publish, call: nil))
    Sidekiq::Testing.inline! do
      PublishManualWorker.perform_async("1", request_id: "12345", authenticated_user: "abc123")
      expect(GdsApi::GovukHeaders.headers[:govuk_request_id]).to eq("12345")
      expect(GdsApi::GovukHeaders.headers[:x_govuk_authenticated_user]).to eq("abc123")
    end
  end

  # HTTP 5xx from the GDS API: the worker re-raises as FailedToPublishError so
  # Sidekiq will retry the job.
  context 'when encountering an HTTP server error connecting to the GDS API' do
    let(:publish_service) { double(:publish_service) }
    let(:task) { ManualPublishTask.create! }
    let(:worker) { PublishManualWorker.new }
    let(:http_error) { GdsApi::HTTPServerError.new(500) }
    let(:logger) { double(:logger, error: nil) }

    before do
      allow(Manual::PublishService).to receive(:new).and_return(publish_service)
      allow(publish_service).to receive(:call).and_raise(http_error)
      allow(Rails).to receive(:logger).and_return(logger)
    end

    it 'raises a failed to publish error so that Sidekiq can retry the job' do
      expect { worker.perform(task.id) }
        .to raise_error(PublishManualWorker::FailedToPublishError)
    end

    it 'notifies Airbrake of the error' do
      expect(Airbrake).to receive(:notify).with(http_error)
      # NOTE(review): modifier `rescue` ignores the constant after it and
      # swallows any StandardError — consider `expect { ... }.to raise_error`.
      worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
    end

    it 'logs the error to the Rails log' do
      expect(logger).to receive(:error).with(/#{http_error}/)
      worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
    end
  end

  # HTTP 4xx from the GDS API: treated as permanent — the task is aborted with
  # the error message rather than retried.
  context 'when encountering an HTTP error connecting to the GDS API' do
    let(:publish_service) { double(:publish_service) }
    let(:task) { ManualPublishTask.create! }
    let(:worker) { PublishManualWorker.new }
    let(:http_error) { GdsApi::HTTPErrorResponse.new(400) }
    let(:logger) { double(:logger, error: nil) }

    before do
      allow(Manual::PublishService).to receive(:new).and_return(publish_service)
      allow(publish_service).to receive(:call).and_raise(http_error)
      allow(Rails).to receive(:logger).and_return(logger)
    end

    it 'stores the error message on the task' do
      allow(http_error).to receive(:message).and_return('http-error-message')
      worker.perform(task.id)
      task.reload
      expect(task.error).to eql('http-error-message')
    end

    it 'marks the task as aborted' do
      worker.perform(task.id)
      task.reload
      expect(task).to be_aborted
    end

    it 'notifies Airbrake of the error' do
      expect(Airbrake).to receive(:notify).with(http_error)
      worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
    end

    it 'logs the error to the Rails log' do
      expect(logger).to receive(:error).with(/#{http_error}/)
      worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
    end
  end

  # Version mismatch between the manual and the publish request: also aborts
  # the task with the error message.
  context 'when encountering a version mismatch error' do
    let(:publish_service) { double(:publish_service) }
    let(:task) { ManualPublishTask.create! }
    let(:worker) { PublishManualWorker.new }
    let(:version_error) { Manual::PublishService::VersionMismatchError.new }
    let(:logger) { double(:logger, error: nil) }

    before do
      allow(Manual::PublishService).to receive(:new).and_return(publish_service)
      allow(publish_service).to receive(:call).and_raise(version_error)
      allow(Rails).to receive(:logger).and_return(logger)
    end

    it 'stores the error message on the task' do
      allow(version_error).to receive(:message).and_return('version-mismatch-message')
      worker.perform(task.id)
      task.reload
      expect(task.error).to eql('version-mismatch-message')
    end

    it 'marks the task as aborted' do
      worker.perform(task.id)
      task.reload
      expect(task).to be_aborted
    end

    it 'notifies Airbrake of the error' do
      expect(Airbrake).to receive(:notify).with(version_error)
      worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
    end

    it 'logs the error to the Rails log' do
      expect(logger).to receive(:error).with(/#{version_error}/)
      worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
    end
  end
end
Introduce outer context in preparation for removing duplication
This is best viewed with --ignore-all-space.
require "spec_helper"
require "sidekiq/testing"

# Specs for PublishManualWorker, the Sidekiq job that publishes a manual and
# records failures on its ManualPublishTask.
RSpec.describe PublishManualWorker do
  after do
    # Reset request headers captured by the GDS API middleware between examples.
    GdsApi::GovukHeaders.clear_headers
  end

  it "places job in the default queue" do
    Sidekiq::Testing.fake! do
      PublishManualWorker.perform_async("1")
      expect(PublishManualWorker.jobs.size).to eq(1)
      expect(PublishManualWorker.jobs.first.fetch("queue")).to eq("default")
    end
  end

  it "repopulates worker request headers" do
    task = double(:task, start!: nil, finish!: nil, manual_id: 1, version_number: 2)
    expect(ManualPublishTask).to receive(:find).with("1").and_return(task)
    expect(Manual::PublishService).to receive(:new).and_return(double(:publish, call: nil))
    Sidekiq::Testing.inline! do
      PublishManualWorker.perform_async("1", request_id: "12345", authenticated_user: "abc123")
      expect(GdsApi::GovukHeaders.headers[:govuk_request_id]).to eq("12345")
      expect(GdsApi::GovukHeaders.headers[:x_govuk_authenticated_user]).to eq("abc123")
    end
  end

  # Error-handling behaviour, grouped by the kind of failure raised by
  # Manual::PublishService#call.
  context 'when publishing and encountering' do
    # HTTP 5xx: re-raised as FailedToPublishError so Sidekiq will retry.
    context 'an HTTP server error connecting to the GDS API' do
      let(:publish_service) { double(:publish_service) }
      let(:task) { ManualPublishTask.create! }
      let(:worker) { PublishManualWorker.new }
      let(:http_error) { GdsApi::HTTPServerError.new(500) }
      let(:logger) { double(:logger, error: nil) }

      before do
        allow(Manual::PublishService).to receive(:new).and_return(publish_service)
        allow(publish_service).to receive(:call).and_raise(http_error)
        allow(Rails).to receive(:logger).and_return(logger)
      end

      it 'raises a failed to publish error so that Sidekiq can retry the job' do
        expect { worker.perform(task.id) }
          .to raise_error(PublishManualWorker::FailedToPublishError)
      end

      it 'notifies Airbrake of the error' do
        expect(Airbrake).to receive(:notify).with(http_error)
        # NOTE(review): modifier `rescue` ignores the constant after it and
        # swallows any StandardError — consider `expect { ... }.to raise_error`.
        worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
      end

      it 'logs the error to the Rails log' do
        expect(logger).to receive(:error).with(/#{http_error}/)
        worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
      end
    end

    # HTTP 4xx: treated as permanent — the task is aborted, not retried.
    context 'an HTTP error connecting to the GDS API' do
      let(:publish_service) { double(:publish_service) }
      let(:task) { ManualPublishTask.create! }
      let(:worker) { PublishManualWorker.new }
      let(:http_error) { GdsApi::HTTPErrorResponse.new(400) }
      let(:logger) { double(:logger, error: nil) }

      before do
        allow(Manual::PublishService).to receive(:new).and_return(publish_service)
        allow(publish_service).to receive(:call).and_raise(http_error)
        allow(Rails).to receive(:logger).and_return(logger)
      end

      it 'stores the error message on the task' do
        allow(http_error).to receive(:message).and_return('http-error-message')
        worker.perform(task.id)
        task.reload
        expect(task.error).to eql('http-error-message')
      end

      it 'marks the task as aborted' do
        worker.perform(task.id)
        task.reload
        expect(task).to be_aborted
      end

      it 'notifies Airbrake of the error' do
        expect(Airbrake).to receive(:notify).with(http_error)
        worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
      end

      it 'logs the error to the Rails log' do
        expect(logger).to receive(:error).with(/#{http_error}/)
        worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
      end
    end

    # Version mismatch: also aborts the task with the error message.
    context 'a version mismatch error' do
      let(:publish_service) { double(:publish_service) }
      let(:task) { ManualPublishTask.create! }
      let(:worker) { PublishManualWorker.new }
      let(:version_error) { Manual::PublishService::VersionMismatchError.new }
      let(:logger) { double(:logger, error: nil) }

      before do
        allow(Manual::PublishService).to receive(:new).and_return(publish_service)
        allow(publish_service).to receive(:call).and_raise(version_error)
        allow(Rails).to receive(:logger).and_return(logger)
      end

      it 'stores the error message on the task' do
        allow(version_error).to receive(:message).and_return('version-mismatch-message')
        worker.perform(task.id)
        task.reload
        expect(task.error).to eql('version-mismatch-message')
      end

      it 'marks the task as aborted' do
        worker.perform(task.id)
        task.reload
        expect(task).to be_aborted
      end

      it 'notifies Airbrake of the error' do
        expect(Airbrake).to receive(:notify).with(version_error)
        worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
      end

      it 'logs the error to the Rails log' do
        expect(logger).to receive(:error).with(/#{version_error}/)
        worker.perform(task.id) rescue PublishManualWorker::FailedToPublishError
      end
    end
  end
end
|
# Gemspec for sensu-plugins-logstash.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'date'

# Ruby < 2.0 has no require_relative support for this layout, so fall back to
# the $LOAD_PATH require.
if RUBY_VERSION < '2.0.0'
  require 'sensu-plugins-logstash'
else
  require_relative 'lib/sensu-plugins-logstash'
end

pvt_key = '~/.ssh/gem-private_key.pem'

Gem::Specification.new do |s|
  s.authors                = ['Sensu Plugins and contributors']
  s.cert_chain             = ['certs/sensu-plugins.pem']
  # NOTE(review): Date.today makes builds non-reproducible; consider dropping.
  s.date                   = Date.today.to_s
  s.description            = 'Sensu plugins for working with logstash'
  s.email                  = '<sensu-users@googlegroups.com>'
  # Single quotes for consistency with the rest of this file's string style.
  s.executables            = Dir.glob('bin/**/*').map { |file| File.basename(file) }
  s.files                  = Dir.glob('{bin,lib}/**/*') + %w(LICENSE README.md CHANGELOG.md)
  s.homepage               = 'https://github.com/sensu-plugins/sensu-plugins-logstash'
  s.license                = 'MIT'
  s.metadata               = { 'maintainer'         => '@mattyjones',
                               'development_status' => 'active',
                               'production_status'  => 'unstable - testing recommended',
                               'release_draft'      => 'false',
                               'release_prerelease' => 'false'
                             }
  s.name                   = 'sensu-plugins-logstash'
  s.platform               = Gem::Platform::RUBY
  s.post_install_message   = 'You can use the embedded Ruby by setting EMBEDDED_RUBY=true in /etc/default/sensu'
  s.require_paths          = ['lib']
  s.required_ruby_version  = '>= 1.9.3'
  # Only sign when building via the `gem` executable and the key is present.
  s.signing_key            = File.expand_path(pvt_key) if $PROGRAM_NAME =~ /gem\z/
  s.summary                = 'Sensu plugins for working with logstash'
  s.test_files             = s.files.grep(%r{^(test|spec|features)/})
  s.version                = SensuPluginsLogstash::Version::VER_STRING

  s.add_runtime_dependency 'sensu-plugin', '1.1.0'
  s.add_runtime_dependency 'redis', '3.2.1'
  s.add_runtime_dependency 'json', '1.8.2'

  s.add_development_dependency 'codeclimate-test-reporter', '~> 0.4'
  s.add_development_dependency 'rubocop', '~> 0.30'
  s.add_development_dependency 'rspec', '~> 3.1'
  s.add_development_dependency 'bundler', '~> 1.7'
  s.add_development_dependency 'rake', '~> 10.0'
  s.add_development_dependency 'github-markup', '~> 1.3'
  s.add_development_dependency 'redcarpet', '~> 3.2'
  s.add_development_dependency 'yard', '~> 0.8'
  s.add_development_dependency 'pry', '~> 0.10'
end
update repo
# Gemspec for sensu-plugins-logstash.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'date'

# Ruby < 2.0 has no require_relative support for this layout, so fall back to
# the $LOAD_PATH require.
if RUBY_VERSION < '2.0.0'
  require 'sensu-plugins-logstash'
else
  require_relative 'lib/sensu-plugins-logstash'
end

pvt_key = '~/.ssh/gem-private_key.pem'

Gem::Specification.new do |s|
  s.authors                = ['Sensu Plugins and contributors']
  s.cert_chain             = ['certs/sensu-plugins.pem']
  # NOTE(review): Date.today makes builds non-reproducible; consider dropping.
  s.date                   = Date.today.to_s
  s.description            = 'Sensu plugins for working with logstash'
  s.email                  = '<sensu-users@googlegroups.com>'
  s.executables            = Dir.glob('bin/**/*').map { |file| File.basename(file) }
  s.files                  = Dir.glob('{bin,lib}/**/*') + %w(LICENSE README.md CHANGELOG.md)
  s.homepage               = 'https://github.com/sensu-plugins/sensu-plugins-logstash'
  s.license                = 'MIT'
  s.metadata               = { 'maintainer'         => '@mattyjones',
                               'development_status' => 'active',
                               'production_status'  => 'unstable - testing recommended',
                               'release_draft'      => 'false',
                               'release_prerelease' => 'false'
                             }
  s.name                   = 'sensu-plugins-logstash'
  s.platform               = Gem::Platform::RUBY
  s.post_install_message   = 'You can use the embedded Ruby by setting EMBEDDED_RUBY=true in /etc/default/sensu'
  s.require_paths          = ['lib']
  s.required_ruby_version  = '>= 1.9.3'
  # Only sign when building via the `gem` executable and the key is present.
  s.signing_key            = File.expand_path(pvt_key) if $PROGRAM_NAME =~ /gem\z/
  s.summary                = 'Sensu plugins for working with logstash'
  s.test_files             = s.files.grep(%r{^(test|spec|features)/})
  s.version                = SensuPluginsLogstash::Version::VER_STRING

  s.add_runtime_dependency 'sensu-plugin', '1.1.0'
  s.add_runtime_dependency 'redis', '3.2.1'
  s.add_runtime_dependency 'json', '1.8.2'

  s.add_development_dependency 'codeclimate-test-reporter', '~> 0.4'
  s.add_development_dependency 'rubocop', '~> 0.30'
  s.add_development_dependency 'rspec', '~> 3.1'
  s.add_development_dependency 'bundler', '~> 1.7'
  s.add_development_dependency 'rake', '~> 10.0'
  s.add_development_dependency 'github-markup', '~> 1.3'
  s.add_development_dependency 'redcarpet', '~> 3.2'
  s.add_development_dependency 'yard', '~> 0.8'
  s.add_development_dependency 'pry', '~> 0.10'
end
|
# Gem metadata for spaceship.
module Spaceship
  # One-line gem summary, used by the gemspec.
  DESCRIPTION = 'Ruby library to access the Apple Dev Center and iTunes Connect'.freeze
  # Current release version; bump on each release.
  VERSION = '0.31.7'.freeze
end
Version bump
# Gem metadata for spaceship.
module Spaceship
  # One-line gem summary, used by the gemspec.
  DESCRIPTION = 'Ruby library to access the Apple Dev Center and iTunes Connect'.freeze
  # Current release version; bump on each release.
  VERSION = '0.31.8'.freeze
end
|
# encoding: utf-8
# NOTE(review): the concatenation below lacks a '/' before '..' — verify the
# path resolves, or switch to require_relative.
require File.dirname(__FILE__) + '../../../spec_helper'

# Specs for APNS device tokens: accessors, value equality, validation and
# binary packaging.
describe BMO::APNS::Notification::DeviceToken do
  describe '#token' do
    it 'returns the token' do
      device_token = described_class.new('abc')
      expect(device_token.token).to eq('abc')
    end
  end

  describe '#==' do
    it 'returns true for equal device token' do
      device_token = described_class.new('abc')
      device_token_bis = described_class.new('abc')
      expect(device_token == device_token_bis).to be_true
    end

    it 'returns true for equal device token' do
      device_token = described_class.new('abc')
      device_token_bis = described_class.new('abc')
      expect(device_token).to eq(device_token_bis)
    end

    it 'returns false for equal device token' do
      device_token = described_class.new('abc')
      device_token_bis = described_class.new('def')
      expect(device_token).to_not eq(device_token_bis)
    end

    it 'returns true for equal device token' do
      device_token = described_class.new('abc')
      device_token_bis = described_class.new('def')
      expect(device_token == device_token_bis).to be_false
    end
  end

  describe '#validate!' do
    it 'returns true if the token is 64 chars' do
      device_token = described_class.new('a' * 64)
      expect(device_token.validate!).to be_true
    end

    it 'returns false if the token is not 64 chars' do
      device_token = described_class.new('a' * 63)
      # Fix: the expected error class must be an argument to raise_error.
      # Previously it dangled on its own line, so ANY error satisfied the spec.
      expect { device_token.validate! }.to raise_error(
        BMO::APNS::Notification::DeviceToken::MalformedDeviceToken
      )
    end

    it 'returns false if the token contains a special char' do
      device_token = described_class.new(('a' * 63) + '"')
      expect { device_token.validate! }.to raise_error(
        BMO::APNS::Notification::DeviceToken::MalformedDeviceToken
      )
    end
  end

  describe '#to_package' do
    it 'returns the packaged token' do
      # 64 hex chars pack down to 32 bytes.
      device_token = described_class.new('0' * 64)
      expect(device_token.to_package).to eq("\x00" * 32)
    end
  end
end

# Specs for the notification payload's key coercion and equality.
describe BMO::APNS::Notification::Payload do
  it 'coerce hash keys to symbols' do
    payload = described_class.new('Finn' => 'The Human')
    expect(payload.data).to eq(Finn: 'The Human')
  end

  it "doesn't coerce incompatible types" do
    payload = described_class.new(1 => 'For Money')
    expect(payload.data).to eq(1 => 'For Money')
  end

  it 'returns true for equality between coerced hash and symbolized hash ' do
    payload = described_class.new('Jake' => 'The Dog')
    expect(payload).to eq(described_class.new(Jake: 'The Dog'))
  end
end
Fix specs
# encoding: utf-8
# NOTE(review): the concatenation below lacks a '/' before '..' — verify the
# path resolves, or switch to require_relative.
require File.dirname(__FILE__) + '../../../spec_helper'

# Specs for APNS device tokens: accessors, value equality, validation and
# binary packaging.
describe BMO::APNS::Notification::DeviceToken do
  describe '#token' do
    it 'returns the token' do
      device_token = described_class.new('abc')
      expect(device_token.token).to eq('abc')
    end
  end

  describe '#==' do
    it 'returns true for equal device token' do
      device_token = described_class.new('abc')
      device_token_bis = described_class.new('abc')
      expect(device_token == device_token_bis).to be_true
    end

    it 'returns true for equal device token' do
      device_token = described_class.new('abc')
      device_token_bis = described_class.new('abc')
      expect(device_token).to eq(device_token_bis)
    end

    it 'returns false for equal device token' do
      device_token = described_class.new('abc')
      device_token_bis = described_class.new('def')
      expect(device_token).to_not eq(device_token_bis)
    end

    it 'returns true for equal device token' do
      device_token = described_class.new('abc')
      device_token_bis = described_class.new('def')
      expect(device_token == device_token_bis).to be_false
    end
  end

  describe '#validate!' do
    it 'returns true if the token is 64 chars' do
      device_token = described_class.new('a' * 64)
      expect(device_token.validate!).to be_true
    end

    it 'returns false if the token is not 64 chars' do
      device_token = described_class.new('a' * 63)
      expect { device_token.validate! }.to raise_error(
        BMO::APNS::Notification::DeviceToken::MalformedDeviceToken)
    end

    it 'returns false if the token contains a special char' do
      device_token = described_class.new(('a' * 63) + '"')
      expect { device_token.validate! }.to raise_error(
        BMO::APNS::Notification::DeviceToken::MalformedDeviceToken)
    end
  end

  describe '#to_package' do
    it 'returns the packaged token' do
      # 64 hex chars pack down to 32 bytes.
      device_token = described_class.new('0' * 64)
      expect(device_token.to_package).to eq("\x00" * 32)
    end
  end
end

# Specs for the notification payload's key coercion and equality.
describe BMO::APNS::Notification::Payload do
  it 'coerce hash keys to symbols' do
    payload = described_class.new('Finn' => 'The Human')
    expect(payload.data).to eq(Finn: 'The Human')
  end

  it "doesn't coerce incompatible types" do
    payload = described_class.new(1 => 'For Money')
    expect(payload.data).to eq(1 => 'For Money')
  end

  it 'returns true for equality between coerced hash and symbolized hash ' do
    payload = described_class.new('Jake' => 'The Dog')
    expect(payload).to eq(described_class.new(Jake: 'The Dog'))
  end
end
|
require "rspec"
require "brainguy/subscription"

module Brainguy
  # Value-object semantics for Subscription: equality, hashing and frozenness.
  RSpec.describe Subscription do
    it "is equal to another subscription with the same owner and listener" do
      listener = double("listener")
      owner = double("owner")
      sub1 = Subscription.new(owner, listener)
      sub2 = Subscription.new(owner, listener)
      expect(sub1).to eq(sub2)
      # hash and eql? must agree with == so subscriptions behave correctly
      # as Set members / Hash keys.
      expect(sub1.hash).to eq(sub2.hash)
      expect(sub1).to eql(sub2)
      # NOTE(review): relies on Set being loaded by another file; consider an
      # explicit require "set".
      set = Set.new([sub1])
      expect(set.add?(sub2)).to be_nil
      expect(set.size).to eq(1)
      set.delete(sub2)
      expect(set.size).to eq(0)
    end

    it "is not equal to another subscription with a different owner" do
      listener = double("listener")
      sub1 = Subscription.new(double, listener)
      sub2 = Subscription.new(double, listener)
      expect(sub1).not_to eq(sub2)
      expect(sub1.hash).not_to eq(sub2.hash)
      expect(sub1).not_to eql(sub2)
    end

    it "is not equal to another subscription with a different listener" do
      owner = double("owner")
      sub1 = Subscription.new(owner, double)
      sub2 = Subscription.new(owner, double)
      expect(sub1).not_to eq(sub2)
      expect(sub1.hash).not_to eq(sub2.hash)
      expect(sub1).not_to eql(sub2)
    end

    it "is frozen" do
      s = Subscription.new(double, double)
      expect(s).to be_frozen
    end
  end
end
Alignment.
require "rspec"
require "brainguy/subscription"

module Brainguy
  # Value-object semantics for Subscription: equality, hashing and frozenness.
  RSpec.describe Subscription do
    it "is equal to another subscription with the same owner and listener" do
      listener = double("listener")
      owner = double("owner")
      sub1 = Subscription.new(owner, listener)
      sub2 = Subscription.new(owner, listener)
      expect(sub1).to eq(sub2)
      # hash and eql? must agree with == so subscriptions behave correctly
      # as Set members / Hash keys.
      expect(sub1.hash).to eq(sub2.hash)
      expect(sub1).to eql(sub2)
      # NOTE(review): relies on Set being loaded by another file; consider an
      # explicit require "set".
      set = Set.new([sub1])
      expect(set.add?(sub2)).to be_nil
      expect(set.size).to eq(1)
      set.delete(sub2)
      expect(set.size).to eq(0)
    end

    it "is not equal to another subscription with a different owner" do
      listener = double("listener")
      sub1 = Subscription.new(double, listener)
      sub2 = Subscription.new(double, listener)
      expect(sub1).not_to eq(sub2)
      expect(sub1.hash).not_to eq(sub2.hash)
      expect(sub1).not_to eql(sub2)
    end

    it "is not equal to another subscription with a different listener" do
      owner = double("owner")
      sub1 = Subscription.new(owner, double)
      sub2 = Subscription.new(owner, double)
      expect(sub1).not_to eq(sub2)
      expect(sub1.hash).not_to eq(sub2.hash)
      expect(sub1).not_to eql(sub2)
    end

    it "is frozen" do
      s = Subscription.new(double, double)
      expect(s).to be_frozen
    end
  end
end
|
require_relative "helpers/paypal_helper"
require_relative "helpers/drop_in_helper"
require_relative "helpers/skip_browser_helper"

# Where the demo app under test is served.
HOSTNAME = `hostname`.chomp
PORT = 4567

# End-to-end browser specs for the Braintree Drop-in UI.
describe "Drop-in" do
  include SkipBrowser
  include DropIn
  include PayPal

  describe "tokenizes" do
    it "a card" do
      visit "http://#{HOSTNAME}:#{PORT}"

      click_option("card")
      hosted_field_send_input("number", "4111111111111111")
      hosted_field_send_input("expirationDate", "1019")
      hosted_field_send_input("cvv", "123")

      submit_pay

      expect(find(".braintree-heading")).to have_content("Paying with")

      # Drop-in Details
      expect(page).to have_content("Ending in ••11")

      # Nonce Details
      expect(page).to have_content("CreditCard")
      expect(page).to have_content("ending in 11")
      expect(page).to have_content("Visa")
    end

    it "PayPal", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}"

      click_option("paypal")
      open_popup_and_complete_login

      submit_pay

      expect(find(".braintree-heading")).to have_content("Paying with PayPal")
      expect(page).to have_content("PayPalAccount")
      expect(page).to have_content(ENV["PAYPAL_USERNAME"])
    end

    it "PayPal Credit", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}"

      click_option("paypalCredit")
      open_popup_and_complete_login do
        expect(page).to have_content("PayPal Credit")
      end

      submit_pay

      expect(find(".braintree-heading")).to have_content("Paying with PayPal")
      expect(page).to have_content("PayPalAccount")
      expect(page).to have_content(ENV["PAYPAL_USERNAME"])
    end
  end

  describe "updateConfiguration" do
    # Fix: these examples drive the PayPal login popup, so they are tagged
    # :paypal like every other PayPal-dependent example, letting them be
    # filtered/skipped together. Stray trailing semicolons removed.
    it "updates PayPal configuration", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}?showUpdatePayPalMenu=true"

      find("#paypal-config-checkout").click
      click_option("paypal")
      open_popup_and_complete_login do
        expect(page).to_not have_content("future payments")
      end

      find("#paypal-config-vault").click
      click_option("paypal")
      complete_iframe_flow do
        expect(page).to have_content("future payments")
      end
    end

    it "updates PayPal Credit configuration", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}?showUpdatePayPalMenu=true"

      find("#paypal-config-checkout").click
      click_option("paypalCredit")
      open_popup_and_complete_login do
        expect(page).to_not have_content("future payments")
      end

      find("#paypal-config-vault").click
      click_option("paypalCredit")
      complete_iframe_flow do
        expect(page).to have_content("future payments")
      end
    end

    it "removes authorized PayPal account when configuration is updated", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}?showUpdatePayPalMenu=true"

      find("#paypal-config-checkout").click
      click_option("paypal")
      open_popup_and_complete_login do
        expect(page).to_not have_content("future payments")
      end

      expect(page).to have_content(ENV["PAYPAL_USERNAME"])

      find("#paypal-config-vault").click

      expect(page).to_not have_content(ENV["PAYPAL_USERNAME"])
    end
  end

  describe "events" do
    it "disable and enable submit button on credit card validity" do
      visit "http://#{HOSTNAME}:#{PORT}"

      click_option("card")
      expect(page).to have_button('Pay', disabled: true)

      # Put in valid state
      hosted_field_send_input("number", "4111111111111111")
      hosted_field_send_input("expirationDate", "1019")
      hosted_field_send_input("cvv", "123")
      expect(page).to have_button('Pay', disabled: false)

      # Put in invalid state
      hosted_field_send_input("expirationDate", :backspace)
      hosted_field_send_input("expirationDate", "2")
      expect(page).to have_button('Pay', disabled: true)

      # Put in valid state again
      hosted_field_send_input("expirationDate", :backspace)
      hosted_field_send_input("expirationDate", "9")
      expect(page).to have_button('Pay', disabled: false)
    end

    it "enable submit button on PayPal authorization", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}"

      click_option("paypal")
      expect(page).to have_button('Pay', disabled: true)

      open_popup_and_complete_login
      expect(page).to have_button('Pay', disabled: false)

      find('.braintree-toggle').click
      expect(page).to have_button('Pay', disabled: false)

      click_option("paypal")
      expect(page).to have_button('Pay', disabled: true)

      find('.braintree-toggle').click
      expect(page).to have_button('Pay', disabled: false)
    end
  end

  describe "setup" do
    it "requires a selector or container" do
      visit "http://#{HOSTNAME}:#{PORT}?container=null&selector=null"
      expect(find("#error")).to have_content("options.container is required.")
    end

    it "requires authorization" do
      visit "http://#{HOSTNAME}:#{PORT}?authorization=null"
      expect(find("#error")).to have_content("options.authorization is required.")
    end

    it "does not setup paypal when not configured" do
      visit "http://#{HOSTNAME}:#{PORT}?paypal=null&paypalCredit=null"
      expect(page).not_to have_selector(".braintree-option__paypal")
      expect(page).to have_content("Card Number")
      expect(page).to have_content("Expiration Date")
    end

    it "supports locale" do
      visit "http://#{HOSTNAME}:#{PORT}?locale=es_ES"
      expect(page).to have_content("Tarjeta")
    end
  end

  describe "payment option priority" do
    it "uses default priority of card, paypal, paypalCredit" do
      visit "http://#{HOSTNAME}:#{PORT}"

      find(".braintree-heading")
      payment_options = all(:css, ".braintree-option__label")
      expect(payment_options[0]).to have_content("Card")
      expect(payment_options[1]).to have_content("PayPal")
      expect(payment_options[2]).to have_content("PayPal Credit")
    end

    it "uses custom priority of paypal, card, paypalCredit" do
      options = '["paypal","card","paypalCredit"]'
      # NOTE(review): URI.encode is deprecated and removed in Ruby 3 —
      # consider CGI.escape on the query value instead.
      visit URI.encode("http://#{HOSTNAME}:#{PORT}?paymentOptionPriority=#{options}")

      find(".braintree-heading")
      payment_options = all(:css, ".braintree-option__label")
      expect(payment_options[0]).to have_content("PayPal")
      expect(payment_options[1]).to have_content("Card")
      expect(payment_options[2]).to have_content("PayPal Credit")
    end

    it "shows an error when an unrecognized payment option is specified" do
      options = '["dummy","card"]'
      visit URI.encode("http://#{HOSTNAME}:#{PORT}?paymentOptionPriority=#{options}")

      expect(find("#error")).to have_content("paymentOptionPriority: Invalid payment option specified.")
    end
  end
end
Increase retries around PayPal updateConfiguration specs
require_relative "helpers/paypal_helper"
require_relative "helpers/drop_in_helper"
require_relative "helpers/skip_browser_helper"
require "uri"

# Host and port of the locally served Drop-in fixture app under test.
HOSTNAME = `hostname`.chomp.freeze
PORT = 4567

# End-to-end browser specs for the Braintree Drop-in UI. Query-string
# parameters on the fixture URL override the Drop-in creation options.
describe "Drop-in" do
  include SkipBrowser
  include DropIn
  include PayPal

  describe "tokenizes" do
    it "a card" do
      visit "http://#{HOSTNAME}:#{PORT}"

      click_option("card")
      hosted_field_send_input("number", "4111111111111111")
      hosted_field_send_input("expirationDate", "1019")
      hosted_field_send_input("cvv", "123")

      submit_pay

      expect(find(".braintree-heading")).to have_content("Paying with")

      # Drop-in Details
      expect(page).to have_content("Ending in ••11")

      # Nonce Details
      expect(page).to have_content("CreditCard")
      expect(page).to have_content("ending in 11")
      expect(page).to have_content("Visa")
    end

    it "PayPal", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}"

      click_option("paypal")
      open_popup_and_complete_login

      submit_pay

      expect(find(".braintree-heading")).to have_content("Paying with PayPal")
      expect(page).to have_content("PayPalAccount")
      expect(page).to have_content(ENV["PAYPAL_USERNAME"])
    end

    it "PayPal Credit", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}"

      click_option("paypalCredit")
      open_popup_and_complete_login do
        expect(page).to have_content("PayPal Credit")
      end

      submit_pay

      expect(find(".braintree-heading")).to have_content("Paying with PayPal")
      expect(page).to have_content("PayPalAccount")
      expect(page).to have_content(ENV["PAYPAL_USERNAME"])
    end
  end

  describe "updateConfiguration" do
    it "updates PayPal configuration", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}?showUpdatePayPalMenu=true"

      # Checkout (one-time payment) flow: no vaulting language in the popup.
      find("#paypal-config-checkout").click
      click_option("paypal")
      open_popup_and_complete_login do
        expect(page).to_not have_content("future payments")
      end

      # Vault flow: the consent popup now mentions future payments.
      find("#paypal-config-vault").click
      click_option("paypal")
      complete_iframe_flow do
        expect(page).to have_content("future payments")
      end
    end

    it "updates PayPal Credit configuration", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}?showUpdatePayPalMenu=true"

      find("#paypal-config-checkout").click
      click_option("paypalCredit")
      open_popup_and_complete_login do
        expect(page).to_not have_content("future payments")
      end

      find("#paypal-config-vault").click
      click_option("paypalCredit")
      complete_iframe_flow do
        expect(page).to have_content("future payments")
      end
    end

    it "removes authorized PayPal account when configuration is updated", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}?showUpdatePayPalMenu=true"

      find("#paypal-config-checkout").click
      click_option("paypal")
      open_popup_and_complete_login do
        expect(page).to_not have_content("future payments")
      end

      expect(page).to have_content(ENV["PAYPAL_USERNAME"])

      # Switching the configuration must drop the previously authorized account.
      find("#paypal-config-vault").click

      expect(page).to_not have_content(ENV["PAYPAL_USERNAME"])
    end
  end

  describe "events" do
    it "disable and enable submit button on credit card validity" do
      visit "http://#{HOSTNAME}:#{PORT}"

      click_option("card")
      expect(page).to have_button('Pay', disabled: true)

      # Put in valid state
      hosted_field_send_input("number", "4111111111111111")
      hosted_field_send_input("expirationDate", "1019")
      hosted_field_send_input("cvv", "123")
      expect(page).to have_button('Pay', disabled: false)

      # Put in invalid state
      hosted_field_send_input("expirationDate", :backspace)
      hosted_field_send_input("expirationDate", "2")
      expect(page).to have_button('Pay', disabled: true)

      # Put in valid state again
      hosted_field_send_input("expirationDate", :backspace)
      hosted_field_send_input("expirationDate", "9")
      expect(page).to have_button('Pay', disabled: false)
    end

    it "enable submit button on PayPal authorization", :paypal do
      visit "http://#{HOSTNAME}:#{PORT}"

      click_option("paypal")
      expect(page).to have_button('Pay', disabled: true)

      open_popup_and_complete_login
      expect(page).to have_button('Pay', disabled: false)

      # Toggling back to the saved method keeps the button enabled...
      find('.braintree-toggle').click
      expect(page).to have_button('Pay', disabled: false)

      # ...while selecting PayPal afresh requires re-authorization.
      click_option("paypal")
      expect(page).to have_button('Pay', disabled: true)

      find('.braintree-toggle').click
      expect(page).to have_button('Pay', disabled: false)
    end
  end

  describe "setup" do
    it "requires a selector or container" do
      visit "http://#{HOSTNAME}:#{PORT}?container=null&selector=null"

      expect(find("#error")).to have_content("options.container is required.")
    end

    it "requires authorization" do
      visit "http://#{HOSTNAME}:#{PORT}?authorization=null"

      expect(find("#error")).to have_content("options.authorization is required.")
    end

    it "does not setup paypal when not configured" do
      visit "http://#{HOSTNAME}:#{PORT}?paypal=null&paypalCredit=null"

      expect(page).not_to have_selector(".braintree-option__paypal")
      expect(page).to have_content("Card Number")
      expect(page).to have_content("Expiration Date")
    end

    it "supports locale" do
      visit "http://#{HOSTNAME}:#{PORT}?locale=es_ES"

      expect(page).to have_content("Tarjeta")
    end
  end

  describe "payment option priority" do
    it "uses default priority of card, paypal, paypalCredit" do
      visit "http://#{HOSTNAME}:#{PORT}"

      find(".braintree-heading")

      payment_options = all(:css, ".braintree-option__label")
      expect(payment_options[0]).to have_content("Card")
      expect(payment_options[1]).to have_content("PayPal")
      expect(payment_options[2]).to have_content("PayPal Credit")
    end

    it "uses custom priority of paypal, card, paypalCredit" do
      options = '["paypal","card","paypalCredit"]'
      # URI.encode was deprecated and removed in Ruby 3.0; the default
      # RFC 2396 parser's escape is a drop-in behavioral replacement.
      visit URI::DEFAULT_PARSER.escape("http://#{HOSTNAME}:#{PORT}?paymentOptionPriority=#{options}")

      find(".braintree-heading")

      payment_options = all(:css, ".braintree-option__label")
      expect(payment_options[0]).to have_content("PayPal")
      expect(payment_options[1]).to have_content("Card")
      expect(payment_options[2]).to have_content("PayPal Credit")
    end

    it "shows an error when an unrecognized payment option is specified" do
      options = '["dummy","card"]'
      visit URI::DEFAULT_PARSER.escape("http://#{HOSTNAME}:#{PORT}?paymentOptionPriority=#{options}")

      expect(find("#error")).to have_content("paymentOptionPriority: Invalid payment option specified.")
    end
  end
end
|
debian-bullseye: Added spec
require 'spec_helper'
# Explicit require: JSON.parse is used below and spec_helper is not
# guaranteed to load it.
require 'json'

# Serverspec checks for the minimum2scp/debian-bullseye Docker image:
# the image must be fully upgraded and contain the pinned package set.
describe 'minimum2scp/debian-bullseye' do
  describe 'apt' do
    before(:all) do
      # Run these examples against the Docker image rather than over SSH.
      set :backend, :docker
      set :docker_image, ENV['DOCKER_IMAGE'] || "minimum2scp/#{File.basename(__dir__)}:latest"
    end

    after(:all) do
      # Restore the default backend for any spec files that run afterwards.
      set :backend, :ssh
      set :docker_image, nil
    end

    describe command('apt list --upgradable') do
      let(:pre_command) { 'apt-get update -qq' }

      # No package may have a pending upgrade. Parenthesize the regex so it
      # cannot be misparsed as a division when passed as a bare argument.
      its(:stdout) {
        should_not match(/\[upgradable from: /)
      }
      its(:exit_status) { should eq 0 }
    end

    # Every package recorded in the manifest must be installed at the exact
    # pinned version. The block variable is named `pkg` so it does not shadow
    # serverspec's package() resource helper.
    packages = JSON.parse(File.read("#{File.basename(__dir__)}/debian-packages.json"))
    packages.each do |pkg|
      name = pkg["Package"]
      version = pkg["Version"]

      describe package(name) do
        it {
          should be_installed.with_version(version)
        }
      end
    end
  end
end
|
# coding: utf-8
require 'spec_helper'
# This file is special, given what we're checking here is the output of our
# generated PowerShell script, we want to
# rubocop:disable Style/TrailingWhitespace
describe 'download_file', type: :define do
describe 'when downloading a file without a proxy' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp'
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }",
)
end
end
describe 'when downloading a file with a empty string proxy' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: ''
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }",
)
end
end
describe 'when downloading a file without a proxy we want to check that the erb gets evaluated correctly' do
let(:title) { 'Download DotNet 4.0' }
let(:params) { { url: 'http://myserver.com/test.exe', destination_directory: 'c:\temp' } }
ps1 = <<-PS1.gsub(%r{^ {6}}, '')
$webclient = New-Object System.Net.WebClient
$proxyAddress = ''
$proxyUser = ''
$proxyPassword = ''
if ($proxyAddress -ne '') {
if (!($proxyAddress.StartsWith('http://') -or $proxyAddress.StartsWith('https://'))) {
$proxyAddress = 'http://' + $proxyAddress
}
$proxy = new-object System.Net.WebProxy
$proxy.Address = $proxyAddress
if (($proxyPassword -ne '') -and ($proxyUser -ne '')) {
$password = ConvertTo-SecureString -string $proxyPassword
$proxy.Credentials = New-Object System.Management.Automation.PSCredential($proxyUser, $password)
$webclient.UseDefaultCredentials = $true
}
$webclient.proxy = $proxy
}
try {
$webclient.DownloadFile('http://myserver.com/test.exe', 'c:\\temp\\test.exe')
}
catch [Exception] {
write-host $_.Exception.GetType().FullName
write-host $_.Exception.Message
write-host $_.Exception.InnerException.Message
throw $_.Exception
}
PS1
it { should contain_file('download-test.exe.ps1').with_content(ps1) }
end
describe 'when downloading a file using a proxy server without credentials' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888'
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }",
)
end
end
describe 'when downloading a file using a proxy server without credentials we want to check that the erb gets evaluated correctly' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888'
}
end
ps1 = <<-PS1.gsub(%r{^ {6}}, '')
$webclient = New-Object System.Net.WebClient
$proxyAddress = 'test-proxy-01:8888'
$proxyUser = ''
$proxyPassword = ''
if ($proxyAddress -ne '') {
if (!($proxyAddress.StartsWith('http://') -or $proxyAddress.StartsWith('https://'))) {
$proxyAddress = 'http://' + $proxyAddress
}
$proxy = new-object System.Net.WebProxy
$proxy.Address = $proxyAddress
if (($proxyPassword -ne '') -and ($proxyUser -ne '')) {
$password = ConvertTo-SecureString -string $proxyPassword
$proxy.Credentials = New-Object System.Management.Automation.PSCredential($proxyUser, $password)
$webclient.UseDefaultCredentials = $true
}
$webclient.proxy = $proxy
}
try {
$webclient.DownloadFile('http://myserver.com/test.exe', 'c:\\temp\\test.exe')
}
catch [Exception] {
write-host $_.Exception.GetType().FullName
write-host $_.Exception.Message
write-host $_.Exception.InnerException.Message
throw $_.Exception
}
PS1
it { should contain_file('download-test.exe.ps1').with_content(ps1) }
end
describe 'when downloading a file using a proxy server with credentials' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888',
proxy_user: 'test-user',
proxy_password: 'test-secure'
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }"
)
end
end
describe 'when downloading a file using a proxy server with secure credentials we want to check that the erb gets evaluated correctly' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888',
proxy_user: 'test-user',
proxy_password: 'test-secure'
}
end
ps1 = <<-PS1.gsub(%r{^ {6}}, '')
$webclient = New-Object System.Net.WebClient
$proxyAddress = 'test-proxy-01:8888'
$proxyUser = 'test-user'
$proxyPassword = 'test-secure'
if ($proxyAddress -ne '') {
if (!($proxyAddress.StartsWith('http://') -or $proxyAddress.StartsWith('https://'))) {
$proxyAddress = 'http://' + $proxyAddress
}
$proxy = new-object System.Net.WebProxy
$proxy.Address = $proxyAddress
if (($proxyPassword -ne '') -and ($proxyUser -ne '')) {
$password = ConvertTo-SecureString -string $proxyPassword
$proxy.Credentials = New-Object System.Management.Automation.PSCredential($proxyUser, $password)
$webclient.UseDefaultCredentials = $true
}
$webclient.proxy = $proxy
}
try {
$webclient.DownloadFile('http://myserver.com/test.exe', 'c:\\temp\\test.exe')
}
catch [Exception] {
write-host $_.Exception.GetType().FullName
write-host $_.Exception.Message
write-host $_.Exception.InnerException.Message
throw $_.Exception
}
PS1
it { should contain_file('download-test.exe.ps1').with_content(ps1) }
end
describe 'when downloading a file using a proxy server with insecure credentials' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888',
proxy_user: 'test-user',
proxy_password: 'test',
is_password_secure: false
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }"
)
end
end
describe 'when downloading a file using a proxy server with insecure credentials we want to check that the erb gets evaluated correctly' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888',
proxy_user: 'test-user',
proxy_password: 'test',
is_password_secure: false
}
end
ps1 = <<-PS1.gsub(%r{^ {6}}, '')
$webclient = New-Object System.Net.WebClient
$proxyAddress = 'test-proxy-01:8888'
$proxyUser = 'test-user'
$proxyPassword = 'test'
if ($proxyAddress -ne '') {
if (!($proxyAddress.StartsWith('http://') -or $proxyAddress.StartsWith('https://'))) {
$proxyAddress = 'http://' + $proxyAddress
}
$proxy = new-object System.Net.WebProxy
$proxy.Address = $proxyAddress
if (($proxyPassword -ne '') -and ($proxyUser -ne '')) {
$password = ConvertTo-SecureString "$proxyPassword" -AsPlainText -Force
$proxy.Credentials = New-Object System.Management.Automation.PSCredential($proxyUser, $password)
$webclient.UseDefaultCredentials = $true
}
$webclient.proxy = $proxy
}
try {
$webclient.DownloadFile('http://myserver.com/test.exe', 'c:\\temp\\test.exe')
}
catch [Exception] {
write-host $_.Exception.GetType().FullName
write-host $_.Exception.Message
write-host $_.Exception.InnerException.Message
throw $_.Exception
}
PS1
it { should contain_file('download-test.exe.ps1').with_content(ps1) }
end
describe 'when not passing a destination url to the download define' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe'
}
end
it do
expect do
should contain_exec('download-test.exe')
end.to raise_error(Puppet::Error)
end
end
describe 'when not passing a URL to the file to download to the define' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
destination_directory: 'c:\temp'
}
end
it do
expect do
should contain_exec('download-test.exe')
end.to raise_error(Puppet::Error)
end
end
describe 'when downloading a non-exe file' do
let(:title) { 'Download MSI' }
let(:params) do {
url: 'http://myserver.com/test.msi',
destination_directory: 'c:\temp'
}
end
it do should contain_exec('download-test.msi').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.msi') { exit 1 } else { exit 0 }",
)
end
end
describe 'when downloading the nodejs installer' do
let(:title) { 'Download nodejs installer' }
let(:params) do {
url: 'http://artifactory.otsql.opentable.com:8081/artifactory/simple/puppet/windows/nodejs/0.10.15/nodejs-0.10.15-x64.msi',
destination_directory: 'c:\temp'
}
end
it do should contain_exec('download-nodejs-0.10.15-x64.msi').with(
'command' => 'c:\\temp\\download-nodejs-0.10.15-x64.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\nodejs-0.10.15-x64.msi') { exit 1 } else { exit 0 }",
)
end
end
describe 'when the destination is a folder' do
let(:title) { 'Download nodejs installer' }
let(:params) do
{
url: 'http://my.server/test.exe',
destination_directory: 'c:\temp'
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }",
)
end
end
describe 'when the filename is different to the filename in the url' do
let(:title) { 'Download nodejs installer' }
let :params do
{
url: 'http://my.server/test.exe',
destination_directory: 'c:\temp',
destination_file: 'foo.exe'
}
end
it do should contain_exec('download-foo.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\foo.exe') { exit 1 } else { exit 0 }",
)
end
end
describe 'the timeout parameter' do
context 'when not specified' do
let(:title) { 'Download nodejs installer' }
let(:params) do {
url: 'http://my.server/test.exe',
destination_directory: 'c:\temp',
destination_file: 'foo.exe'
}
end
it { should contain_exec('download-foo.exe').with('timeout' => nil) }
end
context 'when given an integer value' do
let(:title) { 'Download nodejs installer' }
let(:params) do {
url: 'http://my.server/test.exe',
destination_directory: 'c:\temp',
destination_file: 'foo.exe',
timeout: '30000'
}
end
it { should contain_exec('download-foo.exe').with('timeout' => '30000') }
end
context 'when given a non-integer value' do
let(:title) { 'Download nodejs installer' }
let(:params) do {
url: 'http://my.server/test.exe',
destination_directory: 'c:\temp',
destination_file: 'foo.exe',
timeout: 'this-cannot-work'
}
end
it do
expect do
should contain_exec('download-foo.exe')
end.to raise_error(Puppet::Error, %r{Integer})
end
end
end
# The legacy camelCase proxyAddress parameter is no longer accepted by the
# download_file define; passing it must fail catalog compilation.
describe 'the proxyAddress parameter' do
  let(:title) { 'Download nodejs installer' }
  let(:params) do
    {
      url: 'http://my.server/test.exe',
      destination_directory: 'c:\temp',
      destination_file: 'foo.exe',
      proxyAddress: 'http://localhost:9090'
    }
  end

  describe 'is not supported any more' do
    it do
      # A bare raise_error matcher is deprecated in RSpec 3 (it passes on
      # any error, masking unrelated failures); pin the expected class.
      expect { should contain_exec('download-foo.exe') }.to raise_error(Puppet::Error)
    end
  end
end
end
rubocop: fix Style/TrailingCommaInArguments
# coding: utf-8
require 'spec_helper'
# This file is special, given what we're checking here is the output of our
# generated PowerShell script, we want to
# rubocop:disable Style/TrailingWhitespace
describe 'download_file', type: :define do
describe 'when downloading a file without a proxy' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp'
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }"
)
end
end
describe 'when downloading a file with a empty string proxy' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: ''
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }"
)
end
end
describe 'when downloading a file without a proxy we want to check that the erb gets evaluated correctly' do
let(:title) { 'Download DotNet 4.0' }
let(:params) { { url: 'http://myserver.com/test.exe', destination_directory: 'c:\temp' } }
ps1 = <<-PS1.gsub(%r{^ {6}}, '')
$webclient = New-Object System.Net.WebClient
$proxyAddress = ''
$proxyUser = ''
$proxyPassword = ''
if ($proxyAddress -ne '') {
if (!($proxyAddress.StartsWith('http://') -or $proxyAddress.StartsWith('https://'))) {
$proxyAddress = 'http://' + $proxyAddress
}
$proxy = new-object System.Net.WebProxy
$proxy.Address = $proxyAddress
if (($proxyPassword -ne '') -and ($proxyUser -ne '')) {
$password = ConvertTo-SecureString -string $proxyPassword
$proxy.Credentials = New-Object System.Management.Automation.PSCredential($proxyUser, $password)
$webclient.UseDefaultCredentials = $true
}
$webclient.proxy = $proxy
}
try {
$webclient.DownloadFile('http://myserver.com/test.exe', 'c:\\temp\\test.exe')
}
catch [Exception] {
write-host $_.Exception.GetType().FullName
write-host $_.Exception.Message
write-host $_.Exception.InnerException.Message
throw $_.Exception
}
PS1
it { should contain_file('download-test.exe.ps1').with_content(ps1) }
end
describe 'when downloading a file using a proxy server without credentials' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888'
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }"
)
end
end
describe 'when downloading a file using a proxy server without credentials we want to check that the erb gets evaluated correctly' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888'
}
end
ps1 = <<-PS1.gsub(%r{^ {6}}, '')
$webclient = New-Object System.Net.WebClient
$proxyAddress = 'test-proxy-01:8888'
$proxyUser = ''
$proxyPassword = ''
if ($proxyAddress -ne '') {
if (!($proxyAddress.StartsWith('http://') -or $proxyAddress.StartsWith('https://'))) {
$proxyAddress = 'http://' + $proxyAddress
}
$proxy = new-object System.Net.WebProxy
$proxy.Address = $proxyAddress
if (($proxyPassword -ne '') -and ($proxyUser -ne '')) {
$password = ConvertTo-SecureString -string $proxyPassword
$proxy.Credentials = New-Object System.Management.Automation.PSCredential($proxyUser, $password)
$webclient.UseDefaultCredentials = $true
}
$webclient.proxy = $proxy
}
try {
$webclient.DownloadFile('http://myserver.com/test.exe', 'c:\\temp\\test.exe')
}
catch [Exception] {
write-host $_.Exception.GetType().FullName
write-host $_.Exception.Message
write-host $_.Exception.InnerException.Message
throw $_.Exception
}
PS1
it { should contain_file('download-test.exe.ps1').with_content(ps1) }
end
describe 'when downloading a file using a proxy server with credentials' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888',
proxy_user: 'test-user',
proxy_password: 'test-secure'
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }"
)
end
end
describe 'when downloading a file using a proxy server with secure credentials we want to check that the erb gets evaluated correctly' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888',
proxy_user: 'test-user',
proxy_password: 'test-secure'
}
end
ps1 = <<-PS1.gsub(%r{^ {6}}, '')
$webclient = New-Object System.Net.WebClient
$proxyAddress = 'test-proxy-01:8888'
$proxyUser = 'test-user'
$proxyPassword = 'test-secure'
if ($proxyAddress -ne '') {
if (!($proxyAddress.StartsWith('http://') -or $proxyAddress.StartsWith('https://'))) {
$proxyAddress = 'http://' + $proxyAddress
}
$proxy = new-object System.Net.WebProxy
$proxy.Address = $proxyAddress
if (($proxyPassword -ne '') -and ($proxyUser -ne '')) {
$password = ConvertTo-SecureString -string $proxyPassword
$proxy.Credentials = New-Object System.Management.Automation.PSCredential($proxyUser, $password)
$webclient.UseDefaultCredentials = $true
}
$webclient.proxy = $proxy
}
try {
$webclient.DownloadFile('http://myserver.com/test.exe', 'c:\\temp\\test.exe')
}
catch [Exception] {
write-host $_.Exception.GetType().FullName
write-host $_.Exception.Message
write-host $_.Exception.InnerException.Message
throw $_.Exception
}
PS1
it { should contain_file('download-test.exe.ps1').with_content(ps1) }
end
describe 'when downloading a file using a proxy server with insecure credentials' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888',
proxy_user: 'test-user',
proxy_password: 'test',
is_password_secure: false
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }"
)
end
end
describe 'when downloading a file using a proxy server with insecure credentials we want to check that the erb gets evaluated correctly' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe',
destination_directory: 'c:\temp',
proxy_address: 'test-proxy-01:8888',
proxy_user: 'test-user',
proxy_password: 'test',
is_password_secure: false
}
end
ps1 = <<-PS1.gsub(%r{^ {6}}, '')
$webclient = New-Object System.Net.WebClient
$proxyAddress = 'test-proxy-01:8888'
$proxyUser = 'test-user'
$proxyPassword = 'test'
if ($proxyAddress -ne '') {
if (!($proxyAddress.StartsWith('http://') -or $proxyAddress.StartsWith('https://'))) {
$proxyAddress = 'http://' + $proxyAddress
}
$proxy = new-object System.Net.WebProxy
$proxy.Address = $proxyAddress
if (($proxyPassword -ne '') -and ($proxyUser -ne '')) {
$password = ConvertTo-SecureString "$proxyPassword" -AsPlainText -Force
$proxy.Credentials = New-Object System.Management.Automation.PSCredential($proxyUser, $password)
$webclient.UseDefaultCredentials = $true
}
$webclient.proxy = $proxy
}
try {
$webclient.DownloadFile('http://myserver.com/test.exe', 'c:\\temp\\test.exe')
}
catch [Exception] {
write-host $_.Exception.GetType().FullName
write-host $_.Exception.Message
write-host $_.Exception.InnerException.Message
throw $_.Exception
}
PS1
it { should contain_file('download-test.exe.ps1').with_content(ps1) }
end
describe 'when not passing a destination url to the download define' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
url: 'http://myserver.com/test.exe'
}
end
it do
expect do
should contain_exec('download-test.exe')
end.to raise_error(Puppet::Error)
end
end
describe 'when not passing a URL to the file to download to the define' do
let(:title) { 'Download DotNet 4.0' }
let(:params) do {
destination_directory: 'c:\temp'
}
end
it do
expect do
should contain_exec('download-test.exe')
end.to raise_error(Puppet::Error)
end
end
describe 'when downloading a non-exe file' do
let(:title) { 'Download MSI' }
let(:params) do {
url: 'http://myserver.com/test.msi',
destination_directory: 'c:\temp'
}
end
it do should contain_exec('download-test.msi').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.msi') { exit 1 } else { exit 0 }"
)
end
end
describe 'when downloading the nodejs installer' do
let(:title) { 'Download nodejs installer' }
let(:params) do {
url: 'http://artifactory.otsql.opentable.com:8081/artifactory/simple/puppet/windows/nodejs/0.10.15/nodejs-0.10.15-x64.msi',
destination_directory: 'c:\temp'
}
end
it do should contain_exec('download-nodejs-0.10.15-x64.msi').with(
'command' => 'c:\\temp\\download-nodejs-0.10.15-x64.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\nodejs-0.10.15-x64.msi') { exit 1 } else { exit 0 }"
)
end
end
describe 'when the destination is a folder' do
let(:title) { 'Download nodejs installer' }
let(:params) do
{
url: 'http://my.server/test.exe',
destination_directory: 'c:\temp'
}
end
it do should contain_exec('download-test.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\test.exe') { exit 1 } else { exit 0 }"
)
end
end
describe 'when the filename is different to the filename in the url' do
let(:title) { 'Download nodejs installer' }
let :params do
{
url: 'http://my.server/test.exe',
destination_directory: 'c:\temp',
destination_file: 'foo.exe'
}
end
it do should contain_exec('download-foo.exe').with(
'command' => 'c:\\temp\\download-test.ps1',
'onlyif' => "if(Test-Path -Path 'c:\\temp\\foo.exe') { exit 1 } else { exit 0 }"
)
end
end
describe 'the timeout parameter' do
context 'when not specified' do
let(:title) { 'Download nodejs installer' }
let(:params) do {
url: 'http://my.server/test.exe',
destination_directory: 'c:\temp',
destination_file: 'foo.exe'
}
end
it { should contain_exec('download-foo.exe').with('timeout' => nil) }
end
context 'when given an integer value' do
let(:title) { 'Download nodejs installer' }
let(:params) do {
url: 'http://my.server/test.exe',
destination_directory: 'c:\temp',
destination_file: 'foo.exe',
timeout: '30000'
}
end
it { should contain_exec('download-foo.exe').with('timeout' => '30000') }
end
context 'when given a non-integer value' do
let(:title) { 'Download nodejs installer' }
let(:params) do {
url: 'http://my.server/test.exe',
destination_directory: 'c:\temp',
destination_file: 'foo.exe',
timeout: 'this-cannot-work'
}
end
it do
expect do
should contain_exec('download-foo.exe')
end.to raise_error(Puppet::Error, %r{Integer})
end
end
end
# The legacy camelCase proxyAddress parameter is no longer accepted by the
# download_file define; passing it must fail catalog compilation.
describe 'the proxyAddress parameter' do
  let(:title) { 'Download nodejs installer' }
  let(:params) do
    {
      url: 'http://my.server/test.exe',
      destination_directory: 'c:\temp',
      destination_file: 'foo.exe',
      proxyAddress: 'http://localhost:9090'
    }
  end

  describe 'is not supported any more' do
    it do
      # A bare raise_error matcher is deprecated in RSpec 3 (it passes on
      # any error, masking unrelated failures); pin the expected class.
      expect { should contain_exec('download-foo.exe') }.to raise_error(Puppet::Error)
    end
  end
end
end
|
# Specs for the Everything::Piece::Find mixin.
describe Everything::Piece::Find do
  it 'has a version number' do
    expect(Everything::Piece::Find::VERSION).not_to be nil
  end

  let(:fake_class) do
    # Use an anonymous class as the mixin host: the original `class FakeClass`
    # inside a let leaked a global constant (redefined on every example) and
    # only returned the class because Module#include returns its receiver.
    Class.new do
      include Everything::Piece::Find
    end
  end

  let(:finder) do
    fake_class.new
  end

  shared_context 'with stubbed everything path' do
    let(:expected_everything_path) do
      '/everything/repo'
    end

    before do
      allow(Everything).to receive(:path).and_return(expected_everything_path)
    end
  end

  context '#find_by_name' do
    include_context 'with stubbed everything path'

    let(:given_piece_name) { 'super-duper' }

    it 'returns a piece' do
      # Use the let value rather than repeating the literal name.
      expect(finder.find_by_name(given_piece_name)).to be_a(Everything::Piece)
    end
  end

  context '#find_path_for_piece_name' do
    include_context 'with stubbed everything path'

    let(:given_piece_name) do
      'seconds-away-from-you'
    end

    let(:expected_piece_path) do
      '/everything/repo/seconds-away-from-you'
    end

    let(:actual_piece_path) do
      finder.find_path_for_piece_name(given_piece_name)
    end

    it 'returns path for the piece' do
      expect(actual_piece_path).to eq(expected_piece_path)
    end
  end
end
Add another spec for #find_by_name
# Specs for the Everything::Piece::Find mixin.
describe Everything::Piece::Find do
  it 'has a version number' do
    expect(Everything::Piece::Find::VERSION).not_to be nil
  end

  let(:fake_class) do
    # Use an anonymous class as the mixin host: the original `class FakeClass`
    # inside a let leaked a global constant (redefined on every example) and
    # only returned the class because Module#include returns its receiver.
    Class.new do
      include Everything::Piece::Find
    end
  end

  let(:finder) do
    fake_class.new
  end

  shared_context 'with stubbed everything path' do
    let(:expected_everything_path) do
      '/everything/repo'
    end

    before do
      allow(Everything).to receive(:path).and_return(expected_everything_path)
    end
  end

  context '#find_by_name' do
    include_context 'with stubbed everything path'

    let(:given_piece_name) { 'super-duper' }

    it 'returns a piece' do
      # Use the let value rather than repeating the literal name.
      expect(finder.find_by_name(given_piece_name)).to be_a(Everything::Piece)
    end

    it 'returns the piece with that name' do
      actual_piece = finder.find_by_name(given_piece_name)
      actual_piece_basename = File.basename(actual_piece.full_path)
      expect(actual_piece_basename).to eq(given_piece_name)
    end
  end

  context '#find_path_for_piece_name' do
    include_context 'with stubbed everything path'

    let(:given_piece_name) do
      'seconds-away-from-you'
    end

    let(:expected_piece_path) do
      '/everything/repo/seconds-away-from-you'
    end

    let(:actual_piece_path) do
      finder.find_path_for_piece_name(given_piece_name)
    end

    it 'returns path for the piece' do
      expect(actual_piece_path).to eq(expected_piece_path)
    end
  end
end
|
# Read about factories at https://github.com/thoughtbot/factory_girl
FactoryGirl.define do
factory :got_fixed_issue, :class => 'Issue' do
title "MyString"
closed false
end
end
Extend FactoryGirl model of Issue
# Read about factories at https://github.com/thoughtbot/factory_girl
FactoryGirl.define do
factory :got_fixed_issue, :class => 'Issue' do
title "A very bad issue..."
closed false
vendor_id "iWnj89"
number 7
end
end
|
require 'feature_spec_helper'
feature 'User goes to root URL' do
scenario 'they see the button for starting an application' do
visit '/'
expect(page).to have_button("Start your application")
end
end
feature 'User leaves required fields empty' do
pending
scenario 'the error message is shown to the user to fill in those fields before continuing' do
visit '/application/basic_info?'
fill_in('name', with: '')
fill_in('date_of_birth', with: '')
click_on('Next Step')
# This test is not complete and remains pending.
end
end
feature 'User goes through full application (up to review and submit)' do
# This test is pending DG's
pending
scenario 'it all works!' do
visit '/application/basic_info?'
fill_in 'name', with: 'Hot Snakes'
fill_in 'date_of_birth', with: "01/01/2000"
click_button 'Next Step'
# I don't like having tests dependent on copywriting, and I don't think they add value, except while writing.
expect(page).to have_content('Contact information')
fill_in 'home_phone_number', with: "5555555555"
fill_in 'email', with: "hotsnakes@gmail.com"
fill_in 'home_address', with: "2015 Market Street"
fill_in 'home_zip_code', with: "94122"
click_on('Next Step')
expect(page).to have_content('Personal information')
fill_in 'ssn', with: "000000000"
choose('no-answer')
click_on('Next Step')
expect(page).to have_content("Would you like to apply for Medi-Cal?")
choose('no')
click_on('Next Step')
expect(page).to have_content("When do you prefer your interview?")
check('monday')
check('friday')
check('mid-morning')
check('late-afternoon')
click_on('Next Step')
expect(page).to have_content("Do you buy and cook food with anyone?")
click_link('Yes')
expect(page).to have_content("Household member information")
fill_in 'their_name', with: 'Hot Snakes'
fill_in 'their_date_of_birth', with: "01/01/2000"
fill_in 'their_ssn', with: "000000000"
choose('male')
click_on('Next Step')
expect(page).to have_content("Do you buy and cook food with anyone?")
click_link('No')
expect(page).to have_content("Confirm and submit")
end
end
Integration test passing for up to but not including review and submit
require 'feature_spec_helper'
feature 'User goes to root URL' do
scenario 'they see the button for starting an application' do
visit '/'
expect(page).to have_button("Start your application")
end
end
feature 'User leaves required fields empty', :js => true do
scenario 'the error message is shown to the user to fill in those fields before continuing' do
visit '/application/basic_info?'
expect(page.current_path).to eq('/application/basic_info')
fill_in('name', with: '')
fill_in('date_of_birth', with: '')
click_on('Next Step')
expect(page.current_path).to eq('/application/basic_info')
expect(page).to have_content('Name is required to apply')
expect(page).to have_content('Please provide your date of birth in the format described')
end
end
feature 'User goes through full application (up to review and submit)' do
scenario 'with basic interactions' do
visit '/application/basic_info?'
fill_in 'name', with: 'Hot Snakes'
fill_in 'date_of_birth', with: "01/01/2000"
click_button 'Next Step'
expect(page.current_path).to eq('/application/contact_info')
fill_in 'home_phone_number', with: "5555555555"
fill_in 'email', with: "hotsnakes@gmail.com"
fill_in 'home_address', with: "2015 Market Street"
fill_in 'home_zip_code', with: "94122"
click_on('Next Step')
expect(page.current_path).to eq('/application/sex_and_ssn')
fill_in 'ssn', with: "000000000"
choose('no-answer')
click_on('Next Step')
expect(page.current_path).to eq('/application/medical')
choose('no')
click_on('Next Step')
expect(page.current_path).to eq('/application/interview')
check('monday')
check('friday')
check('mid-morning')
check('late-afternoon')
click_on('Next Step')
expect(page.current_path).to eq('/application/household_question')
click_link('Yes')
expect(page.current_path).to eq('/application/additional_household_member')
fill_in 'their_name', with: 'Hot Snakes'
fill_in 'their_date_of_birth', with: "01/01/2000"
fill_in 'their_ssn', with: "000000000"
choose('male')
click_on('Next Step')
expect(page).to have_content("Do you buy and cook food with anyone?")
expect(page.current_path).to eq('/application/household_question')
click_link('No')
expect(page.current_path).to eq('/application/review_and_submit')
end
end
|
require 'spec_helper'
describe "needs review", :order => :defined do
Capybara.javascript_driver = :webkit
before :all do
@owner = User.find_by(login: OWNER)
@user = User.find_by(login: USER)
@collection = Collection.second
@work = @collection.works.third
@page1 = @work.pages.first
@page2 = @work.pages.second
@page3 = @work.pages.third
@page4 = @work.pages.fourth
@page5 = @work.pages.fifth
@page6 = @work.pages.last
end
before :each do
login_as(@user, :scope => :user)
end
it "sets the work to translation" do
logout(@user)
login_as(@owner, :scope => :user)
visit "/work/edit?work_id=#{@work.id}"
expect(page).to have_content(@work.title)
page.check('work_supports_translation')
click_button('Save Changes')
expect(Work.find_by(id: @work.id).supports_translation).to be true
logout(@owner)
end
it "marks pages blank" do
visit collection_read_work_path(@work.collection.owner, @work.collection, @work)
expect(@page1.status).to be_nil
expect(@page2.status).to be_nil
expect(page).to have_content(@work.title)
page.find('.work-page_title', text: @page1.title).click_link(@page1.title)
page.check('page_mark_blank')
click_button('Save Changes')
expect(page).to have_content("This page is blank")
expect(Page.find_by(id: @page1.id).status).to eq ('blank')
expect(Page.find_by(id: @page1.id).translation_status).to eq ('blank')
page.find('.page-nav_next').click
expect(page).to have_content(@page2.title)
expect(page).to have_content("This page is not transcribed")
page.find('a', text: 'mark the page blank').click
expect(page).to have_content("This page is blank")
expect(page).to have_content("This page is blank")
expect(Page.find_by(id: @page2.id).status).to eq ('blank')
expect(Page.find_by(id: @page2.id).translation_status).to eq ('blank')
end
it "marks translated pages as blank" do
visit collection_read_work_path(@work.collection.owner, @work.collection, @work)
expect(page).to have_content(@work.title)
expect(@page3.translation_status).to be_nil
page.find('.work-page_title', text: @page3.title).click_link(@page3.title)
page.find('.tabs').click_link("Translate")
page.check('page_mark_blank')
click_button('Save Changes')
expect(page).to have_content("This page is blank")
expect(Page.find_by(id: @page3.id).translation_status).to eq ('blank')
end
it "marks pages as needing review" do
visit collection_path(@collection.owner, @collection)
expect(@page4.status).to be_nil
expect(@page5.status).to be_nil
expect(page).to have_content(@collection.title)
click_link @work.title
page.find('.work-page_title', text: @page4.title).click_link(@page4.title)
page.fill_in 'page_source_text', with: "Review Text"
page.check('page_needs_review')
click_button('Save Changes')
expect(page).to have_content("This page has been marked as \"needs review\"")
page.click_link("Overview")
expect(page).to have_content("Review Text")
expect(page).to have_content("Transcription")
expect(Page.find_by(id: @page4.id).status).to eq ('review')
page.find('.page-nav_next').click
expect(page).to have_content(@page5.title)
page.find('.tabs').click_link("Transcribe")
page.fill_in 'page_source_text', with: "Review Text 2"
page.check('page_needs_review')
click_button('Save Changes')
expect(page).to have_content("Review Text 2")
expect(page).to have_content("Transcription")
expect(Page.find_by(id: @page5.id).status).to eq ('review')
end
it "marks translated pages as needing review" do
visit "/display/display_page?page_id=#{@page6.id}"
expect(@page6.translation_status).to be_nil
page.find('.tabs').click_link("Translate")
page.fill_in 'page_source_translation', with: "Review Translate Text"
page.check('page_needs_review')
click_button('Save Changes')
page.click_link("Overview")
page.click_link('Show Translation')
expect(page).to have_content("Review Translate Text")
expect(page).to have_content("Translation")
expect(Page.find_by(id: @page6.id).translation_status).to eq ('review')
end
it "filters list of review pages" do
visit collection_read_work_path(@work.collection.owner, @work.collection, @work)
expect(page).to have_content(@work.title)
pages = @work.pages.limit(5)
pages.each do |p|
expect(page.find('.maincol')).to have_selector('a', text: p.title)
end
#look at review list
click_button('Pages That Need Review')
expect(page.find('.maincol')).to have_selector('a', text: @page4.title)
expect(page.find('.maincol')).to have_selector('a', text: @page5.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page1.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page2.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page3.title)
expect(page).to have_button('View All Pages')
expect(page.find('.pagination_info')).to have_content(@work.pages.review.count)
#return to original list
click_button('View All Pages')
pages = @work.pages.limit(5)
pages.each do |p|
expect(page.find('.maincol')).to have_selector('a', text: p.title)
end
expect(page).to have_button('Pages That Need Review')
expect(page.find('.pagination_info')).to have_content(@work.pages.count)
#look at translated review list
click_button('Translations That Need Review')
expect(page.find('.maincol')).to have_selector('a', text: @page6.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page3.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page4.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page5.title)
expect(page).to have_button('View All Pages')
expect(page.find('.pagination_info')).to have_content(@work.pages.translation_review.count)
end
it "checks collection overview stats view" do
visit collection_path(@collection.owner, @collection)
#show all works before checking for stats
page.click_link("Show Fully Transcribed Works")
@collection.works.each do |w|
if w.supports_translation
wording = "translated"
completed = w.work_statistic.pct_translation_completed.round
review = w.work_statistic.pct_translation_needs_review.round
indexed = w.work_statistic.pct_translation_annotated.round
else
if w.ocr_correction
wording = "corrected"
else
wording = "transcribed"
end
completed = w.work_statistic.pct_completed.round
review = w.work_statistic.pct_needs_review.round
indexed = w.work_statistic.pct_annotated.round
end
stats = page.find('.collection-work', text: w.title).find('.collection-work_stats')
expect(stats).to have_content("#{indexed}% indexed")
expect(stats).to have_content("#{completed}% #{wording}")
unless review == 0
expect(stats).to have_content("#{review}% needs review")
end
#check for the existence of the progress bar
stats.find('.progress')
end
end
it "checks statistics in works list" do
logout(@user)
login_as(@owner, :scope => :user)
visit collection_works_list_path(@collection.owner, @collection)
expect(page).to have_content(@collection.title)
@collection.works.each do |w|
if w.supports_translation
wording = "translated"
completed = w.work_statistic.pct_translation_completed.round
review = w.work_statistic.pct_translation_needs_review.round
else
if w.ocr_correction
wording = "corrected"
else
wording = "transcribed"
end
completed = w.work_statistic.pct_completed.round
review = w.work_statistic.pct_needs_review.round
end
stats = w.work_statistic
list = page.find('.collection-work-stats').find('li', text: w.title)
expect(list).to have_content(w.title)
expect(list).to have_content(w.pages.count)
expect(list.find('span', text: 'indexed')).to have_content(stats.pct_annotated.round)
expect(list).to have_content("#{completed}% #{wording}")
unless review == 0
expect(list.find('span', text: 'needs review')).to have_content(review)
end
end
end
it "marks pages as no longer needing review" do
@page4 = @work.pages.fourth
visit collection_path(@collection.owner, @collection)
expect(@page4.status).to eq ('review')
expect(page).to have_content(@collection.title)
page.find('.collection-work_title', text: @work.title).click_link
page.find('.work-page_title', text: @page4.title).click_link(@page4.title)
page.find('.tabs').click_link("Transcribe")
page.fill_in 'page_source_text', with: "Change Review Text"
page.uncheck('page_needs_review')
click_button('Save Changes')
expect(page).to have_content("Change Review Text")
expect(page).to have_content("Transcription")
expect(Page.find_by(id: @page4.id).status).to eq ('transcribed')
expect(Page.find_by(id: @page5.id).status).to eq ('review')
end
it "marks translated pages as no longer needing review" do
@page6 = @work.pages.last
visit "/display/display_page?page_id=#{@page6.id}"
expect(@page6.translation_status).to eq ('review')
page.find('.tabs').click_link("Translate")
page.fill_in 'page_source_translation', with: "Change Review Translate Text"
page.uncheck('page_needs_review')
click_button('Save Changes')
page.click_link("Overview")
page.click_link('Show Translation')
expect(page).to have_content("Change Review Translate Text")
expect(page).to have_content("Translation")
expect(Page.find_by(id: @page6.id).translation_status).to eq ('translated')
end
it "marks pages not blank" do
visit collection_read_work_path(@work.collection.owner, @work.collection, @work)
expect(page).to have_content("This page is blank")
@page1 = @work.pages.first
expect(@page1.status).to eq ('blank')
expect(page).to have_content(@work.title)
page.find('.work-page_title', text: @page1.title).click_link(@page1.title)
expect(page).to have_content("This page is blank")
page.find('.tabs').click_link("Transcribe")
page.uncheck('page_mark_blank')
click_button('Save Changes')
expect(page).not_to have_content("This page is blank")
expect(Page.find_by(id: @page1.id).status).to be_nil
expect(Page.find_by(id: @page1.id).translation_status).to be_nil
end
it "checks needs review/blank checkboxes", :js => true do
@page1 = @work.pages.first
expect(@page1.status).to be_nil
visit collection_transcribe_page_path(@work.collection.owner, @work.collection, @work, @page1.id)
expect(page.find('#page_needs_review')).not_to be_checked
expect(page.find('#page_mark_blank')).not_to be_checked
page.check('page_needs_review')
page.check('page_mark_blank')
expect(page.find('#page_needs_review')).not_to be_checked
expect(page.find('#page_mark_blank')).to be_checked
page.check('page_needs_review')
expect(page.find('#page_needs_review')).to be_checked
expect(page.find('#page_mark_blank')).not_to be_checked
end
it "sets a collection to needs review workflow" do
login_as(@owner, :scope => :user)
visit collection_path(@collection.owner, @collection)
page.find('.tabs').click_link("Settings")
page.check('collection_review_workflow')
click_button('Save Changes')
review_page = @work.pages.first
expect(review_page.status).to be_nil
expect(review_page.translation_status).to be_nil
visit collection_transcribe_page_path(@work.collection.owner, @work.collection, @work, review_page.id)
page.fill_in 'page_source_text', with: "Needs Review Workflow Text"
click_button('Save Changes')
expect(page).to have_content("Needs Review Workflow Text")
expect(Page.find_by(id: review_page.id).status).to eq ('review')
visit collection_translate_page_path(@work.collection.owner, @work.collection, @work, review_page.id)
page.fill_in 'page_source_translation', with: "Translation Needs Review Workflow Text"
click_button('Save Changes')
expect(page).to have_content("Translation Needs Review Workflow Text")
expect(Page.find_by(id: review_page.id).translation_status).to eq ('review')
end
end
spec test for needs review
require 'spec_helper'
describe "needs review", :order => :defined do
Capybara.javascript_driver = :webkit
before :all do
@owner = User.find_by(login: OWNER)
@user = User.find_by(login: USER)
@collection = Collection.second
@work = @collection.works.third
@page1 = @work.pages.first
@page2 = @work.pages.second
@page3 = @work.pages.third
@page4 = @work.pages.fourth
@page5 = @work.pages.fifth
@page6 = @work.pages.last
end
before :each do
login_as(@user, :scope => :user)
end
it "sets the work to translation" do
logout(@user)
login_as(@owner, :scope => :user)
visit "/work/edit?work_id=#{@work.id}"
expect(page).to have_content(@work.title)
page.check('work_supports_translation')
click_button('Save Changes')
expect(Work.find_by(id: @work.id).supports_translation).to be true
logout(@owner)
end
it "marks pages blank" do
visit collection_read_work_path(@work.collection.owner, @work.collection, @work)
expect(@page1.status).to be_nil
expect(@page2.status).to be_nil
expect(page).to have_content(@work.title)
page.find('.work-page_title', text: @page1.title).click_link(@page1.title)
page.check('page_mark_blank')
click_button('Save Changes')
expect(page).to have_content("This page is blank")
expect(Page.find_by(id: @page1.id).status).to eq ('blank')
expect(Page.find_by(id: @page1.id).translation_status).to eq ('blank')
page.find('.page-nav_next').click
expect(page).to have_content(@page2.title)
expect(page).to have_content("This page is not transcribed")
page.find('a', text: 'mark the page blank').click
expect(page).to have_content("This page is blank")
expect(page).to have_content("This page is blank")
expect(Page.find_by(id: @page2.id).status).to eq ('blank')
expect(Page.find_by(id: @page2.id).translation_status).to eq ('blank')
end
it "marks translated pages as blank" do
visit collection_read_work_path(@work.collection.owner, @work.collection, @work)
expect(page).to have_content(@work.title)
expect(@page3.translation_status).to be_nil
page.find('.work-page_title', text: @page3.title).click_link(@page3.title)
page.find('.tabs').click_link("Translate")
page.check('page_mark_blank')
click_button('Save Changes')
expect(page).to have_content("This page is blank")
expect(Page.find_by(id: @page3.id).translation_status).to eq ('blank')
end
it "marks pages as needing review" do
visit collection_path(@collection.owner, @collection)
expect(@page4.status).to be_nil
expect(@page5.status).to be_nil
expect(page).to have_content(@collection.title)
click_link @work.title
page.find('.work-page_title', text: @page4.title).click_link(@page4.title)
page.fill_in 'page_source_text', with: "Review Text"
page.check('page_needs_review')
click_button('Save Changes')
expect(page).to have_content("This page has been marked as \"needs review\"")
page.click_link("Overview")
expect(page).to have_content("Review Text")
expect(page).to have_content("Transcription")
expect(Page.find_by(id: @page4.id).status).to eq ('review')
page.find('.page-nav_next').click
expect(page).to have_content(@page5.title)
page.find('.tabs').click_link("Transcribe")
page.fill_in 'page_source_text', with: "Review Text 2"
page.check('page_needs_review')
click_button('Save Changes')
expect(page).to have_content("Review Text 2")
expect(page).to have_content("Transcription")
expect(Page.find_by(id: @page5.id).status).to eq ('review')
end
it "marks translated pages as needing review" do
visit "/display/display_page?page_id=#{@page6.id}"
expect(@page6.translation_status).to be_nil
page.find('.tabs').click_link("Translate")
page.fill_in 'page_source_translation', with: "Review Translate Text"
page.check('page_needs_review')
click_button('Save Changes')
expect(page).to have_content("This page has been marked as \"needs review\"")
page.click_link("Overview")
page.click_link('Show Translation')
expect(page).to have_content("Review Translate Text")
expect(page).to have_content("Translation")
expect(Page.find_by(id: @page6.id).translation_status).to eq ('review')
end
it "filters list of review pages" do
visit collection_read_work_path(@work.collection.owner, @work.collection, @work)
expect(page).to have_content(@work.title)
pages = @work.pages.limit(5)
pages.each do |p|
expect(page.find('.maincol')).to have_selector('a', text: p.title)
end
#look at review list
click_button('Pages That Need Review')
expect(page.find('.maincol')).to have_selector('a', text: @page4.title)
expect(page.find('.maincol')).to have_selector('a', text: @page5.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page1.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page2.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page3.title)
expect(page).to have_button('View All Pages')
expect(page.find('.pagination_info')).to have_content(@work.pages.review.count)
#return to original list
click_button('View All Pages')
pages = @work.pages.limit(5)
pages.each do |p|
expect(page.find('.maincol')).to have_selector('a', text: p.title)
end
expect(page).to have_button('Pages That Need Review')
expect(page.find('.pagination_info')).to have_content(@work.pages.count)
#look at translated review list
click_button('Translations That Need Review')
expect(page.find('.maincol')).to have_selector('a', text: @page6.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page3.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page4.title)
expect(page.find('.maincol')).not_to have_selector('a', text: @page5.title)
expect(page).to have_button('View All Pages')
expect(page.find('.pagination_info')).to have_content(@work.pages.translation_review.count)
end
it "checks collection overview stats view" do
visit collection_path(@collection.owner, @collection)
#show all works before checking for stats
page.click_link("Show Fully Transcribed Works")
@collection.works.each do |w|
if w.supports_translation
wording = "translated"
completed = w.work_statistic.pct_translation_completed.round
review = w.work_statistic.pct_translation_needs_review.round
indexed = w.work_statistic.pct_translation_annotated.round
else
if w.ocr_correction
wording = "corrected"
else
wording = "transcribed"
end
completed = w.work_statistic.pct_completed.round
review = w.work_statistic.pct_needs_review.round
indexed = w.work_statistic.pct_annotated.round
end
stats = page.find('.collection-work', text: w.title).find('.collection-work_stats')
expect(stats).to have_content("#{indexed}% indexed")
expect(stats).to have_content("#{completed}% #{wording}")
unless review == 0
expect(stats).to have_content("#{review}% needs review")
end
#check for the existence of the progress bar
stats.find('.progress')
end
end
it "checks statistics in works list" do
logout(@user)
login_as(@owner, :scope => :user)
visit collection_works_list_path(@collection.owner, @collection)
expect(page).to have_content(@collection.title)
@collection.works.each do |w|
if w.supports_translation
wording = "translated"
completed = w.work_statistic.pct_translation_completed.round
review = w.work_statistic.pct_translation_needs_review.round
else
if w.ocr_correction
wording = "corrected"
else
wording = "transcribed"
end
completed = w.work_statistic.pct_completed.round
review = w.work_statistic.pct_needs_review.round
end
stats = w.work_statistic
list = page.find('.collection-work-stats').find('li', text: w.title)
expect(list).to have_content(w.title)
expect(list).to have_content(w.pages.count)
expect(list.find('span', text: 'indexed')).to have_content(stats.pct_annotated.round)
expect(list).to have_content("#{completed}% #{wording}")
unless review == 0
expect(list.find('span', text: 'needs review')).to have_content(review)
end
end
end
it "marks pages as no longer needing review" do
@page4 = @work.pages.fourth
visit collection_path(@collection.owner, @collection)
expect(@page4.status).to eq ('review')
expect(page).to have_content(@collection.title)
page.find('.collection-work_title', text: @work.title).click_link
page.find('.work-page_title', text: @page4.title).click_link(@page4.title)
page.find('.tabs').click_link("Transcribe")
page.fill_in 'page_source_text', with: "Change Review Text"
page.uncheck('page_needs_review')
click_button('Save Changes')
expect(page).not_to have_content("This page has been marked as \"needs review\"")
expect(page).to have_content("Change Review Text")
expect(page).to have_content("Transcription")
expect(Page.find_by(id: @page4.id).status).to eq ('transcribed')
expect(Page.find_by(id: @page5.id).status).to eq ('review')
end
it "marks translated pages as no longer needing review" do
@page6 = @work.pages.last
visit "/display/display_page?page_id=#{@page6.id}"
expect(@page6.translation_status).to eq ('review')
page.find('.tabs').click_link("Translate")
page.fill_in 'page_source_translation', with: "Change Review Translate Text"
page.uncheck('page_needs_review')
click_button('Save Changes')
expect(page).not_to have_content("This page has been marked as \"needs review\"")
page.click_link("Overview")
page.click_link('Show Translation')
expect(page).to have_content("Change Review Translate Text")
expect(page).to have_content("Translation")
expect(Page.find_by(id: @page6.id).translation_status).to eq ('translated')
end
it "marks pages not blank" do
visit collection_read_work_path(@work.collection.owner, @work.collection, @work)
expect(page).to have_content("This page is blank")
@page1 = @work.pages.first
expect(@page1.status).to eq ('blank')
expect(page).to have_content(@work.title)
page.find('.work-page_title', text: @page1.title).click_link(@page1.title)
expect(page).to have_content("This page is blank")
page.find('.tabs').click_link("Transcribe")
page.uncheck('page_mark_blank')
click_button('Save Changes')
expect(page).not_to have_content("This page is blank")
expect(Page.find_by(id: @page1.id).status).to be_nil
expect(Page.find_by(id: @page1.id).translation_status).to be_nil
end
it "checks needs review/blank checkboxes", :js => true do
@page1 = @work.pages.first
expect(@page1.status).to be_nil
visit collection_transcribe_page_path(@work.collection.owner, @work.collection, @work, @page1.id)
expect(page.find('#page_needs_review')).not_to be_checked
expect(page.find('#page_mark_blank')).not_to be_checked
page.check('page_needs_review')
page.check('page_mark_blank')
expect(page.find('#page_needs_review')).not_to be_checked
expect(page.find('#page_mark_blank')).to be_checked
page.check('page_needs_review')
expect(page.find('#page_needs_review')).to be_checked
expect(page.find('#page_mark_blank')).not_to be_checked
end
it "sets a collection to needs review workflow" do
login_as(@owner, :scope => :user)
visit collection_path(@collection.owner, @collection)
page.find('.tabs').click_link("Settings")
page.check('collection_review_workflow')
click_button('Save Changes')
review_page = @work.pages.first
expect(review_page.status).to be_nil
expect(review_page.translation_status).to be_nil
visit collection_transcribe_page_path(@work.collection.owner, @work.collection, @work, review_page.id)
page.fill_in 'page_source_text', with: "Needs Review Workflow Text"
click_button('Save Changes')
expect(page).to have_content("Needs Review Workflow Text")
expect(Page.find_by(id: review_page.id).status).to eq ('review')
visit collection_translate_page_path(@work.collection.owner, @work.collection, @work, review_page.id)
page.fill_in 'page_source_translation', with: "Translation Needs Review Workflow Text"
click_button('Save Changes')
expect(page).to have_content("Translation Needs Review Workflow Text")
expect(Page.find_by(id: review_page.id).translation_status).to eq ('review')
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.