CombinedText stringlengths 4 3.42M |
|---|
module UI
  class View < CSSNode
    # Symbolic easing names mapped to UIKit's animation-curve option constants.
    ANIMATION_OPTIONS = {
      ease_out: UIViewAnimationOptionCurveEaseOut,
      ease_in: UIViewAnimationOptionCurveEaseIn,
      linear: UIViewAnimationOptionCurveLinear
    }

    # Dimensions captured when the view is hidden, restored when it is shown again.
    attr_accessor :_previous_width, :_previous_height

    # Animates a relayout of the entire view tree.
    #
    # options - :duration (seconds, default 0), :delay (seconds, default 0),
    #           :options (one easing symbol or an array of them, default :linear).
    # block   - optional; called once the animation completes.
    def animate(options = {}, &block)
      animation_options = options.fetch(:options, :linear)
      UIView.animateWithDuration(options.fetch(:duration, 0),
        delay: options.fetch(:delay, 0),
        # values_at accepts a single symbol or a list; OR the UIKit flags together.
        options: ANIMATION_OPTIONS.values_at(*animation_options).reduce(&:|),
        animations: lambda {
          self.root.update_layout
        },
        completion: lambda { |completion|
          block.call if block
        })
    end

    def border_color=(color)
      proxy.layer.borderColor = UI::Color(color).proxy.CGColor
    end

    def border_radius=(radius)
      proxy.layer.cornerRadius = radius
    end

    def border_width=(width)
      proxy.layer.borderWidth = width
    end

    def border_color
      proxy.layer.borderColor
    end

    def border_radius
      proxy.layer.cornerRadius
    end

    def border_width
      proxy.layer.borderWidth
    end

    def background_color
      UI::Color(proxy.backgroundColor)
    end

    def background_color=(background_color)
      proxy.backgroundColor = UI::Color(background_color).proxy
    end

    def hidden?
      proxy.hidden
    end

    # Hides or shows the view. Hiding collapses the layout box to 0x0 so
    # siblings reflow; showing restores the recorded dimensions.
    #
    # Fix: only record (and zero) a dimension when it is an actual number.
    # An unset dimension is NaN, and saving NaN here meant the view came
    # back with a NaN size the next time it was shown.
    def hidden=(hidden)
      if hidden
        if !self.width.nan?
          self._previous_width = self.width
          self.width = 0
        end
        if !self.height.nan?
          self._previous_height = self.height
          self.height = 0
        end
      else
        self.width = self._previous_width if self._previous_width
        self.height = self._previous_height if self._previous_height
      end
      proxy.hidden = hidden
      self.root.update_layout
    end

    def alpha
      proxy.alpha
    end

    def alpha=(value)
      proxy.alpha = value
    end

    # Adds the child to the CSS layout tree (super) and mirrors it into the
    # UIKit view hierarchy.
    def add_child(child)
      super
      proxy.addSubview(child.proxy)
    end

    # Detaches the child's UIView only when the CSS-tree removal succeeded.
    def delete_child(child)
      if super
        child.proxy.removeFromSuperview
      end
    end

    def update_layout
      super
      _apply_layout([0, 0], proxy.frame.origin)
    end

    # Lazily-created backing UIView.
    def proxy
      @proxy ||= begin
        ui_view = UIView.alloc.init
        ui_view.translatesAutoresizingMaskIntoConstraints = false
        ui_view
      end
    end

    # Recursively positions each backing UIView from the computed CSS layout.
    # NOTE: absolute_point is mutated in place as the recursion descends.
    def _apply_layout(absolute_point, origin_point)
      left, top, width, height = layout
      top_left = [absolute_point[0] + left, absolute_point[1] + top]
      bottom_right = [absolute_point[0] + left + width, absolute_point[1] + top + height]
      proxy.frame = [[left + origin_point[0], top + origin_point[1]], [bottom_right[0] - top_left[0], bottom_right[1] - top_left[1]]]
      absolute_point[0] += left
      absolute_point[1] += top
      children.each { |x| x._apply_layout(absolute_point, [0, 0]) }
    end
  end
end
When hiding a view, keep the previous width and height from being recorded if they aren't numbers
module UI
  class View < CSSNode
    # Symbolic easing names mapped to UIKit's animation-curve option constants.
    ANIMATION_OPTIONS = {
      ease_out: UIViewAnimationOptionCurveEaseOut,
      ease_in: UIViewAnimationOptionCurveEaseIn,
      linear: UIViewAnimationOptionCurveLinear
    }

    # Dimensions captured while the view is hidden, restored when shown again.
    attr_accessor :_previous_width, :_previous_height

    # Animates a relayout of the entire view tree.
    #
    # options - :duration (seconds, default 0), :delay (seconds, default 0),
    #           :options (one easing symbol or an array of them, default :linear).
    # block   - optional; called once the animation completes.
    def animate(options = {}, &block)
      animation_options = options.fetch(:options, :linear)
      UIView.animateWithDuration(options.fetch(:duration, 0),
        delay: options.fetch(:delay, 0),
        # values_at accepts a single symbol or a list; OR the UIKit flags together.
        options: ANIMATION_OPTIONS.values_at(*animation_options).reduce(&:|),
        animations: lambda {
          self.root.update_layout
        },
        completion: lambda {|completion|
          block.call if block
        })
    end

    def border_color=(color)
      proxy.layer.borderColor = UI::Color(color).proxy.CGColor
    end

    def border_radius=(radius)
      proxy.layer.cornerRadius = radius
    end

    def border_width=(width)
      proxy.layer.borderWidth = width
    end

    def border_color
      proxy.layer.borderColor
    end

    def border_radius
      proxy.layer.cornerRadius
    end

    def border_width
      proxy.layer.borderWidth
    end

    def background_color
      UI::Color(proxy.backgroundColor)
    end

    def background_color=(background_color)
      proxy.backgroundColor = UI::Color(background_color).proxy
    end

    def hidden?
      proxy.hidden
    end

    # Hides or shows the view. Hiding collapses the layout box to 0x0 so
    # siblings reflow; showing restores the recorded dimensions.
    # A NaN dimension (presumably meaning "unset" in the CSS layout engine —
    # TODO confirm against CSSNode) is neither recorded nor zeroed.
    def hidden=(hidden)
      if hidden
        if !self.width.nan?
          self._previous_width = self.width
          self.width = 0
        end
        if !self.height.nan?
          self._previous_height = self.height
          self.height = 0
        end
      else
        # Restore only what was actually recorded.
        self.width = self._previous_width if self._previous_width
        self.height = self._previous_height if self._previous_height
      end
      proxy.hidden = hidden
      self.root.update_layout
    end

    def alpha
      proxy.alpha
    end

    def alpha=(value)
      proxy.alpha = value
    end

    # Adds the child to the CSS layout tree (super) and mirrors it into the
    # UIKit view hierarchy.
    def add_child(child)
      super
      proxy.addSubview(child.proxy)
    end

    # Detaches the child's UIView only when the CSS-tree removal succeeded.
    def delete_child(child)
      if super
        child.proxy.removeFromSuperview
      end
    end

    def update_layout
      super
      _apply_layout([0, 0], proxy.frame.origin)
    end

    # Lazily-created backing UIView.
    def proxy
      @proxy ||= begin
        ui_view = UIView.alloc.init
        ui_view.translatesAutoresizingMaskIntoConstraints = false
        ui_view
      end
    end

    # Recursively positions each backing UIView from the computed CSS layout.
    # NOTE: absolute_point is mutated in place as the recursion descends, so
    # statement order here matters.
    def _apply_layout(absolute_point, origin_point)
      left, top, width, height = layout
      top_left = [absolute_point[0] + left, absolute_point[1] + top]
      bottom_right = [absolute_point[0] + left + width, absolute_point[1] + top + height]
      proxy.frame = [[left + origin_point[0], top + origin_point[1]], [bottom_right[0] - top_left[0], bottom_right[1] - top_left[1]]]
      absolute_point[0] += left
      absolute_point[1] += top
      children.each { |x| x._apply_layout(absolute_point, [0, 0]) }
    end
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = "seattle_weather_gem"
  s.version = "0.1.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Josh Rubinstein"]
  s.date = "2012-12-11"
  # NOTE(review): fixed "Fehrenheit" -> "Fahrenheit" and "OpenWeatherMapAPI"
  # -> "OpenWeatherMap API". Because this file is generated, the same fix
  # must also be applied to the description in the Rakefile (Jeweler::Tasks).
  s.description = "The Seattle Weather Gem uses data from the OpenWeatherMap API to retrieve the current temperature for Seattle and convert it from Kelvin to Fahrenheit."
  s.email = "josh@estately.com"
  s.executables = ["current_temp"]
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    ".rspec",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "bin/current_temp",
    "lib/seattle_weather_gem.rb",
    "seattle_weather_gem.gemspec",
    "spec/seattle_weather_gem_spec.rb",
    "spec/spec_helper.rb"
  ]
  s.homepage = "http://github.com/hungrysquirrel/seattle_weather_gem"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.24"
  s.summary = "The Seattle Weather Gem will tell you the current temperature."

  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rspec>, ["~> 2.8.0"])
      s.add_development_dependency(%q<bundler>, [">= 0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
    else
      s.add_dependency(%q<rspec>, ["~> 2.8.0"])
      s.add_dependency(%q<bundler>, [">= 0"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
    end
  else
    s.add_dependency(%q<rspec>, ["~> 2.8.0"])
    s.add_dependency(%q<bundler>, [">= 0"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
  end
end
Regenerate gemspec for version 0.2.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = "seattle_weather_gem"
  s.version = "0.2.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Josh Rubinstein"]
  s.date = "2012-12-11"
  # NOTE(review): fixed "Fehrenheit" -> "Fahrenheit" and "OpenWeatherMapAPI"
  # -> "OpenWeatherMap API". Because this file is generated, the same fix
  # must also be applied to the description in the Rakefile (Jeweler::Tasks).
  s.description = "The Seattle Weather Gem uses data from the OpenWeatherMap API to retrieve the current temperature for Seattle and convert it from Kelvin to Fahrenheit."
  s.email = "josh@estately.com"
  s.executables = ["current_temp"]
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    ".rspec",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "bin/current_temp",
    "lib/seattle_weather_gem.rb",
    "seattle_weather_gem.gemspec",
    "spec/seattle_weather_gem_spec.rb",
    "spec/spec_helper.rb"
  ]
  s.homepage = "http://github.com/hungrysquirrel/seattle_weather_gem"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.24"
  s.summary = "The Seattle Weather Gem will tell you the current temperature."

  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rspec>, ["~> 2.8.0"])
      s.add_development_dependency(%q<bundler>, [">= 0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
    else
      s.add_dependency(%q<rspec>, ["~> 2.8.0"])
      s.add_dependency(%q<bundler>, [">= 0"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
    end
  else
    s.add_dependency(%q<rspec>, ["~> 2.8.0"])
    s.add_dependency(%q<bundler>, [">= 0"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
  end
end
|
require 'set'
module Rails
module Paths
# This object is an extended hash that behaves as root of the <tt>Rails::Paths</tt> system.
# It allows you to collect information about how you want to structure your application
# paths by a Hash like API. It requires you to give a physical path on initialization.
#
# root = Root.new "/rails"
# root.add "app/controllers", :eager_load => true
#
# The command above creates a new root object and add "app/controllers" as a path.
# This means we can get a +Rails::Paths::Path+ object back like below:
#
# path = root["app/controllers"]
# path.eager_load? # => true
# path.is_a?(Rails::Paths::Path) # => true
#
# The +Path+ object is simply an array and allows you to easily add extra paths:
#
# path.is_a?(Array) # => true
# path.inspect # => ["app/controllers"]
#
# path << "lib/controllers"
# path.inspect # => ["app/controllers", "lib/controllers"]
#
# Notice that when you add a path using +add+, the path object created already
# contains the path with the same path value given to +add+. In some situations,
# you may not want this behavior, so you can give :with as option.
#
# root.add "config/routes", :with => "config/routes.rb"
# root["config/routes"].inspect # => ["config/routes.rb"]
#
# The +add+ method accepts the following options as arguments:
# eager_load, autoload, autoload_once and glob.
#
# Finally, the +Path+ object also provides a few helpers:
#
# root = Root.new "/rails"
# root.add "app/controllers"
#
# root["app/controllers"].expanded # => ["/rails/app/controllers"]
# root["app/controllers"].existent # => ["/rails/app/controllers"]
#
# Check the <tt>Rails::Paths::Path</tt> documentation for more information.
class Root < ::Hash
  attr_accessor :path

  # Builds an empty root anchored at the given physical path.
  # Raises when given an Array, since the root must be a single location.
  def initialize(path)
    raise "Argument should be a String of the physical root path" if path.is_a?(Array)
    @current = nil
    @path = path
    @root = self
    super()
  end

  # Stores +value+ under +path+, wrapping plain values in a Path object first.
  def []=(path, value)
    wrapped = value.is_a?(Path) ? value : Path.new(self, path, value)
    super(path, wrapped)
  end

  # Registers +path+; the :with option overrides the physical location.
  def add(path, options = {})
    physical = options[:with] || path
    self[path] = Path.new(self, path, physical, options)
  end

  # Every registered Path object, de-duplicated in place.
  def all_paths
    values.tap(&:uniq!)
  end

  def autoload_once
    filter_by(:autoload_once?)
  end

  def eager_load
    filter_by(:eager_load?)
  end

  def autoload_paths
    filter_by(:autoload?)
  end

  def load_paths
    filter_by(:load_path?)
  end

  protected

  # Collects the existent locations of every path matching +constraint+,
  # excluding entries owned by a child path that does not itself match.
  def filter_by(constraint)
    matching = all_paths.select { |candidate| candidate.send(constraint) }
    collected = matching.flat_map do |candidate|
      shadowed = candidate.children.flat_map { |child| child.send(constraint) ? [] : child.existent }
      candidate.existent - shadowed
    end
    collected.uniq
  end
end
class Path < Array
  attr_reader :path
  attr_accessor :glob

  # root    - the Root this path belongs to.
  # current - the key this path is registered under.
  # paths   - one or more physical paths; a trailing Hash is treated as
  #           options (:glob, :autoload_once, :eager_load, :autoload, :load_path).
  def initialize(root, current, *paths)
    options = paths.last.is_a?(::Hash) ? paths.pop : {}
    super(paths.flatten)
    @current = current
    @root    = root
    @glob    = options[:glob]

    options[:autoload_once] ? autoload_once! : skip_autoload_once!
    options[:eager_load]    ? eager_load!    : skip_eager_load!
    options[:autoload]      ? autoload!      : skip_autoload!
    options[:load_path]     ? load_path!     : skip_load_path!
  end

  # Paths registered under a key that nests inside this one (excluding self).
  def children
    keys = @root.keys.select { |k| k.include?(@current) }
    keys.delete(@current)
    @root.values_at(*keys.sort)
  end

  def first
    expanded.first
  end

  def last
    expanded.last
  end

  # Define eager_load!/skip_eager_load!/eager_load? and friends.
  %w(autoload_once eager_load autoload load_path).each do |m|
    class_eval <<-RUBY, __FILE__, __LINE__ + 1
      def #{m}!
        @#{m} = true
      end

      def skip_#{m}!
        @#{m} = false
      end

      def #{m}?
        @#{m}
      end
    RUBY
  end

  # Expands all paths against the root and return all unique values.
  def expanded
    raise "You need to set a path root" unless @root.path
    result = []
    each do |p|
      path = File.expand_path(p, @root.path)
      if @glob
        if File.directory?(path)
          # Fix (Rails issue #5521): chdir into the directory before globbing
          # instead of joining the directory into the pattern, so glob
          # metacharacters in directory names ([], {}, *) need no escaping.
          result.concat Dir.chdir(path) { Dir.glob(@glob).map { |file| File.join(path, file) }.sort }
        else
          # Non-directory entry: keep the old join-based behavior.
          result.concat Dir[File.join(path, @glob)].sort
        end
      else
        result << path
      end
    end
    result.uniq!
    result
  end

  # Returns all expanded paths but only if they exist in the filesystem.
  def existent
    # File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
    expanded.select { |f| File.exist?(f) }
  end

  def existent_directories
    expanded.select { |d| File.directory?(d) }
  end

  alias to_a expanded
end
end
end
chdir before globbing so that we don't need to escape directory names.
fixes #5521
require 'set'
module Rails
module Paths
# This object is an extended hash that behaves as root of the <tt>Rails::Paths</tt> system.
# It allows you to collect information about how you want to structure your application
# paths by a Hash like API. It requires you to give a physical path on initialization.
#
# root = Root.new "/rails"
# root.add "app/controllers", :eager_load => true
#
# The command above creates a new root object and add "app/controllers" as a path.
# This means we can get a +Rails::Paths::Path+ object back like below:
#
# path = root["app/controllers"]
# path.eager_load? # => true
# path.is_a?(Rails::Paths::Path) # => true
#
# The +Path+ object is simply an array and allows you to easily add extra paths:
#
# path.is_a?(Array) # => true
# path.inspect # => ["app/controllers"]
#
# path << "lib/controllers"
# path.inspect # => ["app/controllers", "lib/controllers"]
#
# Notice that when you add a path using +add+, the path object created already
# contains the path with the same path value given to +add+. In some situations,
# you may not want this behavior, so you can give :with as option.
#
# root.add "config/routes", :with => "config/routes.rb"
# root["config/routes"].inspect # => ["config/routes.rb"]
#
# The +add+ method accepts the following options as arguments:
# eager_load, autoload, autoload_once and glob.
#
# Finally, the +Path+ object also provides a few helpers:
#
# root = Root.new "/rails"
# root.add "app/controllers"
#
# root["app/controllers"].expanded # => ["/rails/app/controllers"]
# root["app/controllers"].existent # => ["/rails/app/controllers"]
#
# Check the <tt>Rails::Paths::Path</tt> documentation for more information.
class Root < ::Hash
  attr_accessor :path

  # path - the physical root path (a String; an Array is rejected).
  def initialize(path)
    raise "Argument should be a String of the physical root path" if path.is_a?(Array)
    @current = nil
    @path = path
    @root = self
    super()
  end

  # Wraps non-Path values in a Path object before storing them.
  def []=(path, value)
    value = Path.new(self, path, value) unless value.is_a?(Path)
    super(path, value)
  end

  # Registers +path+; the :with option overrides the physical location.
  def add(path, options={})
    with = options[:with] || path
    self[path] = Path.new(self, path, with, options)
  end

  # Every registered Path object, de-duplicated in place.
  def all_paths
    values.tap { |v| v.uniq! }
  end

  def autoload_once
    filter_by(:autoload_once?)
  end

  def eager_load
    filter_by(:eager_load?)
  end

  def autoload_paths
    filter_by(:autoload?)
  end

  def load_paths
    filter_by(:load_path?)
  end

  protected

  # Collects the existent locations of every path matching +constraint+,
  # excluding entries owned by a child path that does not itself match.
  def filter_by(constraint)
    all = []
    all_paths.each do |path|
      if path.send(constraint)
        paths = path.existent
        # Subtract locations belonging to non-matching children so they are
        # not picked up through their parent.
        paths -= path.children.map { |p| p.send(constraint) ? [] : p.existent }.flatten
        all.concat(paths)
      end
    end
    all.uniq!
    all
  end
end
class Path < Array
  attr_reader :path
  attr_accessor :glob

  # root    - the Root this path belongs to.
  # current - the key this path is registered under.
  # paths   - one or more physical paths; a trailing Hash is treated as
  #           options (:glob, :autoload_once, :eager_load, :autoload, :load_path).
  def initialize(root, current, *paths)
    options = paths.last.is_a?(::Hash) ? paths.pop : {}
    super(paths.flatten)
    @current = current
    @root    = root
    @glob    = options[:glob]

    options[:autoload_once] ? autoload_once! : skip_autoload_once!
    options[:eager_load]    ? eager_load!    : skip_eager_load!
    options[:autoload]      ? autoload!      : skip_autoload!
    options[:load_path]     ? load_path!     : skip_load_path!
  end

  # Paths registered under a key that nests inside this one (excluding self).
  def children
    keys = @root.keys.select { |k| k.include?(@current) }
    keys.delete(@current)
    @root.values_at(*keys.sort)
  end

  def first
    expanded.first
  end

  def last
    expanded.last
  end

  # Define eager_load!/skip_eager_load!/eager_load? and friends.
  %w(autoload_once eager_load autoload load_path).each do |m|
    class_eval <<-RUBY, __FILE__, __LINE__ + 1
      def #{m}!
        @#{m} = true
      end

      def skip_#{m}!
        @#{m} = false
      end

      def #{m}?
        @#{m}
      end
    RUBY
  end

  # Expands all paths against the root and return all unique values.
  def expanded
    raise "You need to set a path root" unless @root.path
    result = []
    each do |p|
      path = File.expand_path(p, @root.path)
      if @glob
        if File.directory? path
          result.concat expand_dir(path, @glob)
        else
          # FIXME: possibly removable — with a file /tmp/foobar, adding the
          # path /tmp/foo with glob "*" returns /tmp/foobar here. We need to
          # figure out if that is desired behavior.
          result.concat expand_file(path, @glob)
        end
      else
        result << path
      end
    end
    result.uniq!
    result
  end

  # Returns all expanded paths but only if they exist in the filesystem.
  def existent
    # Fix: File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
    expanded.select { |f| File.exist?(f) }
  end

  def existent_directories
    expanded.select { |d| File.directory?(d) }
  end

  alias to_a expanded

  private

  # Globs by joining the pattern onto the path (used for non-directories).
  def expand_file(path, glob)
    Dir[File.join(path, glob)].sort
  end

  # Globs from inside the directory so metacharacters in the directory name
  # need no escaping (Rails issue #5521).
  # Fix: use the +glob+ parameter instead of silently reading @glob.
  def expand_dir(path, glob)
    Dir.chdir(path) do
      Dir.glob(glob).map { |file| File.join path, file }.sort
    end
  end
end
end
end
|
require_relative '../lib/carmen'

RSpec.describe '#carmen' do
  # Fix: the description previously read '...the element '\carmen\' in...',
  # which terminates the string early and is a syntax error; the inner
  # quotes must be escaped.
  it 'returns the position of the element \'carmen\' in the array' do
    expect(carmen(['chris', 'kim', 'carmen'])).to eq 2
    expect(carmen(['linn', 'carmen', 'sandy', 'bob'])).to eq 1
    expect(carmen(['carmen', 'fae', 'mel', 'alice'])).to eq 0
  end
end
Fix typo
require_relative '../lib/carmen'

# Specs for the carmen helper: it should return the index of the first
# 'carmen' element in the given array.
RSpec.describe '#carmen' do
  it 'returns the position of the element \'carmen\' in the array' do
    expect(carmen(['chris', 'kim', 'carmen'])).to eq 2
    expect(carmen(['linn', 'carmen', 'sandy', 'bob'])).to eq 1
    expect(carmen(['carmen', 'fae', 'mel', 'alice'])).to eq 0
  end
end
|
require 'spec_helper'

# Placeholder spec for RubyJawbone::Client — no examples yet.
describe RubyJawbone::Client do
  #
end
Add specs for initializing a client and processing a file.
require 'spec_helper'

describe RubyJawbone::Client do
  let(:client) { RubyJawbone::Client.new }
  let(:jawbone_file_reader) { double "Jawbone File Reader" }
  let(:file) { double "File" }

  describe "#initialize" do
    it "starts off with empty collections for both activity and sleep" do
      new_client = RubyJawbone::Client.new
      expect(new_client.activity).to eq []
      expect(new_client.sleep).to eq []
    end
  end

  describe "#process_file" do
    before do
      expect(RubyJawbone::FileReaders::Jawbone).to receive(:new).and_return(jawbone_file_reader)
    end

    let(:mock_activity_1) { double "DataSet::Activity #1" }
    let(:mock_activity_2) { double "DataSet::Activity #2" }
    let(:mock_activity_3) { double "DataSet::Activity #3" }
    # Fix: these two doubles stand in for Sleep records but were labelled
    # "DataSet::Activity #1/#2" (copy-paste), which made failure output
    # misleading.
    let(:mock_sleep_1) { double "DataSet::Sleep #1" }
    let(:mock_sleep_2) { double "DataSet::Sleep #2" }

    # Fix: "actvity" -> "activity" in the example description.
    it "gets the data parsed from the file, instantiates activity and sleep objects from that data, and builds up collections of those activity and sleep objects" do
      dates = [(Date.today - 2), (Date.today - 1), Date.today]
      expect(jawbone_file_reader).to receive(:parse_file).and_return({
        :activity => [
          {:date => dates[0], :steps => 000, :distance => 111},
          {:date => dates[1], :steps => 222, :distance => 333},
          {:date => dates[2], :steps => 444, :distance => 555},
        ],
        :sleep => [
          {:date => dates[0], :total_time_asleep => 666, :total_time_awake => 777},
          {:date => dates[1], :total_time_asleep => 888, :total_time_awake => 999}
        ]
      })
      expect(RubyJawbone::DataSet::Activity).to receive(:new).with(dates[0], 000, 111).and_return(mock_activity_1)
      expect(RubyJawbone::DataSet::Activity).to receive(:new).with(dates[1], 222, 333).and_return(mock_activity_2)
      expect(RubyJawbone::DataSet::Activity).to receive(:new).with(dates[2], 444, 555).and_return(mock_activity_3)
      expect(RubyJawbone::DataSet::Sleep).to receive(:new).with(dates[0], 666, 777).and_return(mock_sleep_1)
      expect(RubyJawbone::DataSet::Sleep).to receive(:new).with(dates[1], 888, 999).and_return(mock_sleep_2)
      client.process_file(file)
      expect(client.activity).to eq [mock_activity_1, mock_activity_2, mock_activity_3]
      expect(client.sleep).to eq [mock_sleep_1, mock_sleep_2]
    end
  end
end
|
require 'moolah'

# Fix: the post_path query strings contained the mojibake "¤cy=USD" — a
# mis-decoded "&currency=USD" (the "&curren" run was rendered as the HTML
# entity ¤). Restored the literal "&currency=USD" everywhere.
describe Moolah::Client do
  describe ".initialize" do
    it "complains when API key is not configured" do
      expect { Moolah::Client.new }.to raise_error(ArgumentError)
    end

    context "with API key" do
      before do
        allow(Moolah).to receive(:api_key).and_return("1234567890")
      end

      it "should not complain if API key is given" do
        expect(Moolah::api_key).to eq("1234567890")
        expect { Moolah::Client.new }.not_to raise_error
      end

      it "can take api_secret and ipn as optional parameters" do
        client = Moolah::Client.new({ ipn: "http://www.example.com", api_secret: "a_secret_key" })
        expect(client.ipn).to eq("http://www.example.com")
        expect(client.api_secret).to eq("a_secret_key")
      end

      it "allows passing of optional fields" do
        client = Moolah::Client.new({ api_secret: "secret", ipn: "www.example.com/processed_payment" })
        expect(client.api_secret).to eq("secret")
        expect(client.ipn).to eq("www.example.com/processed_payment")
      end
    end
  end

  describe ".create_transaction" do
    let(:action_path) { "/private/merchant/create" }
    let(:transaction_params) { { coin: "dogecoin", amount: "20", currency: "USD", product: "Coingecko Pro" } }
    let(:request_stubs) { Faraday::Adapter::Test::Stubs.new }
    let(:test_connection) do
      Faraday.new do |builder|
        builder.adapter :test, request_stubs
      end
    end

    # Provide API Key first
    before do
      allow(Moolah).to receive(:api_key).and_return("1234567890")
    end

    shared_examples :success_transaction do
      it { expect(transaction.response).to be_an_instance_of(Moolah::TransactionResponse) }
      it { expect(transaction.response.status).to eq("success") }
      it { expect(transaction.response.amount).to eq("121526.39285714") }
      it { expect(transaction.response.coin).to eq("dogecoin") }
      it { expect(transaction.response.guid).to eq("a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-") }
      it { expect(transaction.response.address).to eq("DS6frMZR5jFVEf9V6pBi9qtcVJa2JX5ewR") }
      it { expect(transaction.response.timestamp).to eq(1407579569) }
      it { expect(transaction.response.url).to eq("https://pay.moolah.io/a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-") }
    end

    shared_examples :failure_transaction do
      it { expect(transaction.response).to be_an_instance_of(Moolah::TransactionResponse) }
      it { expect(transaction.response.status).to eq("failure") }
      it { expect(transaction.response.amount).to eq(nil) }
      it { expect(transaction.response.coin).to eq(nil) }
      it { expect(transaction.response.guid).to eq(nil) }
      it { expect(transaction.response.address).to eq(nil) }
      it { expect(transaction.response.timestamp).to eq(nil) }
      it { expect(transaction.response.url).to eq(nil) }
    end

    context "successful transaction" do
      context "without optional parameters (ipn, api_secret, ipn_extra)" do
        let(:client) { Moolah::Client.new }
        let(:post_path) { "#{action_path}?amount=20&apiKey=1234567890&coin=dogecoin&currency=USD&product=Coingecko+Pro" }
        let(:json_response) { '{"status":"success","guid":"a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-","url":"https:\/\/pay.moolah.io\/a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-","coin":"dogecoin","amount":"121526.39285714","address":"DS6frMZR5jFVEf9V6pBi9qtcVJa2JX5ewR","timestamp":1407579569}' }

        before do
          allow(client).to receive(:connection).and_return(test_connection)
          request_stubs.post(post_path) { |env| [ 200, {}, json_response ] }
        end

        context "allows transaction params to be given as argument" do
          let(:transaction) { client.create_transaction transaction_params }
          it_behaves_like :success_transaction
        end

        context "allows transaction params to be given in the block" do
          let(:transaction) do
            client.create_transaction do |t|
              t.coin = "dogecoin"
              t.currency = "USD"
              t.amount = "20"
              t.product = "Coingecko Pro"
            end
          end
          it_behaves_like :success_transaction
        end
      end

      context "with optional parameters" do
        before do
          allow(client).to receive(:connection).and_return(test_connection)
          request_stubs.post(post_path) { |env| [ 200, {}, json_response ] }
        end

        let(:client) { Moolah::Client.new({ api_secret: "secret", ipn: "www.example.com/processed_payment" }) }
        let(:post_path) { "#{action_path}?amount=20&apiKey=1234567890&apiSecret=secret&coin=dogecoin&currency=USD&ipn=www.example.com%2Fprocessed_payment&product=Coingecko+Pro" }
        let(:json_response) { '{"status":"success","guid":"a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-","url":"https:\/\/pay.moolah.io\/a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-","coin":"dogecoin","amount":"121526.39285714","address":"DS6frMZR5jFVEf9V6pBi9qtcVJa2JX5ewR","timestamp":1407579569}' }
        let(:transaction) { client.create_transaction transaction_params }

        it_behaves_like :success_transaction
      end
    end

    context "failure transaction" do
      let(:client) { Moolah::Client.new }
      let(:post_path) { "#{action_path}?amount=20&apiKey=1234567890&coin=dogecoin&currency=USD&product=Coingecko+Pro" }
      let(:json_response) { '{"status":"failure"}' }
      let(:transaction) { client.create_transaction transaction_params }

      before do
        allow(client).to receive(:connection).and_return(test_connection)
        request_stubs.post(post_path) { |env| [ 200, {}, json_response ] }
      end

      it_behaves_like :failure_transaction
    end
  end
end
Add spec: create_transaction raises ArgumentError when required transaction parameters are incomplete
require 'moolah'

# Fix: the post_path query strings contained the mojibake "¤cy=USD" — a
# mis-decoded "&currency=USD" (the "&curren" run was rendered as the HTML
# entity ¤). Restored the literal "&currency=USD" everywhere.
describe Moolah::Client do
  describe ".initialize" do
    it "complains when API key is not configured" do
      expect { Moolah::Client.new }.to raise_error(ArgumentError)
    end

    context "with API key" do
      before do
        allow(Moolah).to receive(:api_key).and_return("1234567890")
      end

      it "should not complain if API key is given" do
        expect(Moolah::api_key).to eq("1234567890")
        expect { Moolah::Client.new }.not_to raise_error
      end

      it "can take api_secret and ipn as optional parameters" do
        client = Moolah::Client.new({ ipn: "http://www.example.com", api_secret: "a_secret_key" })
        expect(client.ipn).to eq("http://www.example.com")
        expect(client.api_secret).to eq("a_secret_key")
      end

      it "allows passing of optional fields" do
        client = Moolah::Client.new({ api_secret: "secret", ipn: "www.example.com/processed_payment" })
        expect(client.api_secret).to eq("secret")
        expect(client.ipn).to eq("www.example.com/processed_payment")
      end
    end
  end

  describe ".create_transaction" do
    let(:action_path) { "/private/merchant/create" }
    let(:transaction_params) { { coin: "dogecoin", amount: "20", currency: "USD", product: "Coingecko Pro" } }
    let(:request_stubs) { Faraday::Adapter::Test::Stubs.new }
    let(:test_connection) do
      Faraday.new do |builder|
        builder.adapter :test, request_stubs
      end
    end

    # Provide API Key first
    before do
      allow(Moolah).to receive(:api_key).and_return("1234567890")
    end

    shared_examples :success_transaction do
      it { expect(transaction.response).to be_an_instance_of(Moolah::TransactionResponse) }
      it { expect(transaction.response.status).to eq("success") }
      it { expect(transaction.response.amount).to eq("121526.39285714") }
      it { expect(transaction.response.coin).to eq("dogecoin") }
      it { expect(transaction.response.guid).to eq("a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-") }
      it { expect(transaction.response.address).to eq("DS6frMZR5jFVEf9V6pBi9qtcVJa2JX5ewR") }
      it { expect(transaction.response.timestamp).to eq(1407579569) }
      it { expect(transaction.response.url).to eq("https://pay.moolah.io/a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-") }
    end

    shared_examples :failure_transaction do
      it { expect(transaction.response).to be_an_instance_of(Moolah::TransactionResponse) }
      it { expect(transaction.response.status).to eq("failure") }
      it { expect(transaction.response.amount).to eq(nil) }
      it { expect(transaction.response.coin).to eq(nil) }
      it { expect(transaction.response.guid).to eq(nil) }
      it { expect(transaction.response.address).to eq(nil) }
      it { expect(transaction.response.timestamp).to eq(nil) }
      it { expect(transaction.response.url).to eq(nil) }
    end

    context "incomplete transaction parameters" do
      let(:client) { Moolah::Client.new }
      # :product is required but missing here.
      let(:incomplete_transaction_params) { { coin: "dogecoin", amount: "20", currency: "USD" } }

      it "throws ArgumentError" do
        expect { client.create_transaction(incomplete_transaction_params) }.to raise_error(ArgumentError)
      end
    end

    context "successful transaction" do
      context "without optional parameters (ipn, api_secret, ipn_extra)" do
        let(:client) { Moolah::Client.new }
        let(:post_path) { "#{action_path}?amount=20&apiKey=1234567890&coin=dogecoin&currency=USD&product=Coingecko+Pro" }
        let(:json_response) { '{"status":"success","guid":"a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-","url":"https:\/\/pay.moolah.io\/a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-","coin":"dogecoin","amount":"121526.39285714","address":"DS6frMZR5jFVEf9V6pBi9qtcVJa2JX5ewR","timestamp":1407579569}' }

        before do
          allow(client).to receive(:connection).and_return(test_connection)
          request_stubs.post(post_path) { |env| [ 200, {}, json_response ] }
        end

        context "allows transaction params to be given as argument" do
          let(:transaction) { client.create_transaction transaction_params }
          it_behaves_like :success_transaction
        end

        context "allows transaction params to be given in the block" do
          let(:transaction) do
            client.create_transaction do |t|
              t.coin = "dogecoin"
              t.currency = "USD"
              t.amount = "20"
              t.product = "Coingecko Pro"
            end
          end
          it_behaves_like :success_transaction
        end
      end

      context "with optional parameters" do
        before do
          allow(client).to receive(:connection).and_return(test_connection)
          request_stubs.post(post_path) { |env| [ 200, {}, json_response ] }
        end

        let(:client) { Moolah::Client.new({ api_secret: "secret", ipn: "www.example.com/processed_payment" }) }
        let(:post_path) { "#{action_path}?amount=20&apiKey=1234567890&apiSecret=secret&coin=dogecoin&currency=USD&ipn=www.example.com%2Fprocessed_payment&product=Coingecko+Pro" }
        let(:json_response) { '{"status":"success","guid":"a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-","url":"https:\/\/pay.moolah.io\/a4dc89fcc-8ad-3f4c1bf529-6396c1acc4-","coin":"dogecoin","amount":"121526.39285714","address":"DS6frMZR5jFVEf9V6pBi9qtcVJa2JX5ewR","timestamp":1407579569}' }
        let(:transaction) { client.create_transaction transaction_params }

        it_behaves_like :success_transaction
      end
    end

    context "failure transaction" do
      let(:client) { Moolah::Client.new }
      let(:post_path) { "#{action_path}?amount=20&apiKey=1234567890&coin=dogecoin&currency=USD&product=Coingecko+Pro" }
      let(:json_response) { '{"status":"failure"}' }
      let(:transaction) { client.create_transaction transaction_params }

      before do
        allow(client).to receive(:connection).and_return(test_connection)
        request_stubs.post(post_path) { |env| [ 200, {}, json_response ] }
      end

      it_behaves_like :failure_transaction
    end
  end
end
|
require File.expand_path(File.join(File.dirname(__FILE__), "spec_helper"))
# Specs for Jasmine::Config: YAML-driven configuration defaults, directory
# mappings, and browser/driver selection. Written against legacy RSpec 1.x
# (`stub!`, `should_receive`, `should ==`, `mock`).
describe Jasmine::Config do
  describe "configuration" do
    before(:each) do
      # The generator's template tree doubles as the fixture project.
      @template_dir = File.expand_path(File.join(File.dirname(__FILE__), "../generators/jasmine/templates"))
      @config = Jasmine::Config.new
    end
    describe "defaults" do
      it "src_dir uses root when src dir is blank" do
        @config.stub!(:project_root).and_return('some_project_root')
        @config.stub!(:simple_config_file).and_return(File.join(@template_dir, 'spec/javascripts/support/jasmine.yml'))
        # Force a parsed config whose src_dir is nil so the fallback to
        # project_root is exercised.
        YAML.stub!(:load).and_return({'src_dir' => nil})
        @config.src_dir.should == 'some_project_root'
      end
      it "should use correct default yaml config" do
        @config.stub!(:project_root).and_return('some_project_root')
        @config.simple_config_file.should == (File.join('some_project_root', 'spec/javascripts/support/jasmine.yml'))
      end
      it "should provide dir mappings" do
        # URL prefixes the server maps onto local directories.
        @config.mappings.should == {
          '/__root__' => @config.project_root,
          '/__spec__' => @config.spec_dir
        }
      end
    end
    describe "simple_config" do
      before(:each) do
        @config.stub!(:src_dir).and_return(File.join(@template_dir, "public"))
        @config.stub!(:spec_dir).and_return(File.join(@template_dir, "spec"))
      end
      it "if sources.yaml not found" do
        # With no config file on disk, src/stylesheet lists are empty but
        # spec files are still discovered from spec_dir.
        File.stub!(:exist?).and_return(false)
        @config.src_files.should == []
        @config.stylesheets.should == []
        @config.spec_files.should == ['javascripts/ExampleSpec.js', 'javascripts/SpecHelper.js']
        @config.mappings.should == {
          '/__root__' => @config.project_root,
          '/__spec__' => @config.spec_dir
        }
      end
      it "if jasmine.yml is empty" do
        @config.stub!(:simple_config_file).and_return(File.join(@template_dir, 'spec/javascripts/support/jasmine.yml'))
        # YAML.load returns false for an empty document.
        YAML.stub!(:load).and_return(false)
        @config.src_files.should == []
        @config.stylesheets.should == []
        @config.spec_files.should == ['javascripts/ExampleSpec.js', 'javascripts/SpecHelper.js']
        @config.mappings.should == {
          '/__root__' => @config.project_root,
          '/__spec__' => @config.spec_dir
        }
      end
      it "using default jasmine.yml" do
        @config.stub!(:simple_config_file).and_return(File.join(@template_dir, 'spec/javascripts/support/jasmine.yml'))
        @config.src_files.should == []
        @config.spec_files.should == ['javascripts/ExampleSpec.js', 'javascripts/SpecHelper.js']
        @config.mappings.should == {
          '/__root__' => @config.project_root,
          '/__spec__' => @config.spec_dir
        }
      end
      it "simple_config stylesheets" do
        @config.stub!(:simple_config_file).and_return(File.join(@template_dir, 'spec/javascripts/support/jasmine.yml'))
        YAML.stub!(:load).and_return({'stylesheets' => ['foo.css', 'bar.css']})
        # RSpec 1.x: and_return with a block computes the return value from
        # the call's arguments — here the glob pattern is echoed back.
        Dir.stub!(:glob).and_return do |glob_string|
          glob_string
        end
        @config.stylesheets.should == ['foo.css', 'bar.css']
      end
      it "using rails jasmine.yml" do
        # Echo back globs under public/ (fixture paths), delegate everything
        # else to the real Dir.glob.
        original_glob = Dir.method(:glob)
        Dir.stub!(:glob).and_return do |glob_string|
          if glob_string =~ /public/
            glob_string
          else
            original_glob.call(glob_string)
          end
        end
        @config.stub!(:simple_config_file).and_return(File.join(@template_dir, 'spec/javascripts/support/jasmine-rails.yml'))
        @config.spec_files.should == ['javascripts/ExampleSpec.js', 'javascripts/SpecHelper.js']
        # jasmine-rails.yml pins the Prototype load order explicitly.
        @config.src_files.should == ['javascripts/prototype.js',
          'javascripts/effects.js',
          'javascripts/controls.js',
          'javascripts/dragdrop.js',
          'javascripts/application.js']
        @config.js_files.should == [
          '/javascripts/prototype.js',
          '/javascripts/effects.js',
          '/javascripts/controls.js',
          '/javascripts/dragdrop.js',
          '/javascripts/application.js',
          '/__spec__/javascripts/ExampleSpec.js',
          '/__spec__/javascripts/SpecHelper.js',
        ]
      end
      it "should provide a list of all spec files with full paths" do
        @config.spec_files_full_paths.should == [
          File.join(@template_dir, 'spec/javascripts/ExampleSpec.js'),
          File.join(@template_dir, 'spec/javascripts/SpecHelper.js')
        ]
      end
    end
  end
  describe "browsers" do
    it "should use firefox by default" do
      # NOTE(review): should_receive takes only the message name; the second
      # argument here is not an argument matcher — the constraint presumably
      # belongs in .with("JASMINE_BROWSER"). Confirm against the RSpec
      # version in use before changing.
      ENV.should_receive(:[], "JASMINE_BROWSER").and_return(nil)
      config = Jasmine::Config.new
      config.stub!(:start_servers)
      Jasmine::SeleniumDriver.should_receive(:new).
        with(anything(), anything(), "*firefox", anything()).
        and_return(mock(Jasmine::SeleniumDriver, :connect => true))
      config.start
    end
    it "should use ENV['JASMINE_BROWSER'] if set" do
      # NOTE(review): same should_receive signature concern as above.
      ENV.should_receive(:[], "JASMINE_BROWSER").and_return("mosaic")
      config = Jasmine::Config.new
      config.stub!(:start_servers)
      Jasmine::SeleniumDriver.should_receive(:new).
        with(anything(), anything(), "*mosaic", anything()).
        and_return(mock(Jasmine::SeleniumDriver, :connect => true))
      config.start
    end
  end
end
Added minimal test coverage for reusing an existing Selenium server via the SELENIUM_SERVER_PORT environment variable.
require File.expand_path(File.join(File.dirname(__FILE__), "spec_helper"))
# Specs for Jasmine::Config (revision adding #start_selenium_server
# coverage): YAML-driven configuration defaults, directory mappings, and
# browser/driver selection. Legacy RSpec 1.x syntax throughout.
describe Jasmine::Config do
  describe "configuration" do
    before(:each) do
      # The generator's template tree doubles as the fixture project.
      @template_dir = File.expand_path(File.join(File.dirname(__FILE__), "../generators/jasmine/templates"))
      @config = Jasmine::Config.new
    end
    describe "defaults" do
      it "src_dir uses root when src dir is blank" do
        @config.stub!(:project_root).and_return('some_project_root')
        @config.stub!(:simple_config_file).and_return(File.join(@template_dir, 'spec/javascripts/support/jasmine.yml'))
        # Force a parsed config whose src_dir is nil so the fallback to
        # project_root is exercised.
        YAML.stub!(:load).and_return({'src_dir' => nil})
        @config.src_dir.should == 'some_project_root'
      end
      it "should use correct default yaml config" do
        @config.stub!(:project_root).and_return('some_project_root')
        @config.simple_config_file.should == (File.join('some_project_root', 'spec/javascripts/support/jasmine.yml'))
      end
      it "should provide dir mappings" do
        # URL prefixes the server maps onto local directories.
        @config.mappings.should == {
          '/__root__' => @config.project_root,
          '/__spec__' => @config.spec_dir
        }
      end
    end
    describe "simple_config" do
      before(:each) do
        @config.stub!(:src_dir).and_return(File.join(@template_dir, "public"))
        @config.stub!(:spec_dir).and_return(File.join(@template_dir, "spec"))
      end
      it "if sources.yaml not found" do
        # With no config file on disk, src/stylesheet lists are empty but
        # spec files are still discovered from spec_dir.
        File.stub!(:exist?).and_return(false)
        @config.src_files.should == []
        @config.stylesheets.should == []
        @config.spec_files.should == ['javascripts/ExampleSpec.js', 'javascripts/SpecHelper.js']
        @config.mappings.should == {
          '/__root__' => @config.project_root,
          '/__spec__' => @config.spec_dir
        }
      end
      it "if jasmine.yml is empty" do
        @config.stub!(:simple_config_file).and_return(File.join(@template_dir, 'spec/javascripts/support/jasmine.yml'))
        # YAML.load returns false for an empty document.
        YAML.stub!(:load).and_return(false)
        @config.src_files.should == []
        @config.stylesheets.should == []
        @config.spec_files.should == ['javascripts/ExampleSpec.js', 'javascripts/SpecHelper.js']
        @config.mappings.should == {
          '/__root__' => @config.project_root,
          '/__spec__' => @config.spec_dir
        }
      end
      it "using default jasmine.yml" do
        @config.stub!(:simple_config_file).and_return(File.join(@template_dir, 'spec/javascripts/support/jasmine.yml'))
        @config.src_files.should == []
        @config.spec_files.should == ['javascripts/ExampleSpec.js', 'javascripts/SpecHelper.js']
        @config.mappings.should == {
          '/__root__' => @config.project_root,
          '/__spec__' => @config.spec_dir
        }
      end
      it "simple_config stylesheets" do
        @config.stub!(:simple_config_file).and_return(File.join(@template_dir, 'spec/javascripts/support/jasmine.yml'))
        YAML.stub!(:load).and_return({'stylesheets' => ['foo.css', 'bar.css']})
        # RSpec 1.x: and_return with a block computes the return value from
        # the call's arguments — here the glob pattern is echoed back.
        Dir.stub!(:glob).and_return do |glob_string|
          glob_string
        end
        @config.stylesheets.should == ['foo.css', 'bar.css']
      end
      it "using rails jasmine.yml" do
        # Echo back globs under public/ (fixture paths), delegate everything
        # else to the real Dir.glob.
        original_glob = Dir.method(:glob)
        Dir.stub!(:glob).and_return do |glob_string|
          if glob_string =~ /public/
            glob_string
          else
            original_glob.call(glob_string)
          end
        end
        @config.stub!(:simple_config_file).and_return(File.join(@template_dir, 'spec/javascripts/support/jasmine-rails.yml'))
        @config.spec_files.should == ['javascripts/ExampleSpec.js', 'javascripts/SpecHelper.js']
        # jasmine-rails.yml pins the Prototype load order explicitly.
        @config.src_files.should == ['javascripts/prototype.js',
          'javascripts/effects.js',
          'javascripts/controls.js',
          'javascripts/dragdrop.js',
          'javascripts/application.js']
        @config.js_files.should == [
          '/javascripts/prototype.js',
          '/javascripts/effects.js',
          '/javascripts/controls.js',
          '/javascripts/dragdrop.js',
          '/javascripts/application.js',
          '/__spec__/javascripts/ExampleSpec.js',
          '/__spec__/javascripts/SpecHelper.js',
        ]
      end
      it "should provide a list of all spec files with full paths" do
        @config.spec_files_full_paths.should == [
          File.join(@template_dir, 'spec/javascripts/ExampleSpec.js'),
          File.join(@template_dir, 'spec/javascripts/SpecHelper.js')
        ]
      end
    end
  end
  describe "browsers" do
    it "should use firefox by default" do
      # NOTE(review): should_receive takes only the message name; the second
      # argument here is not an argument matcher — the constraint presumably
      # belongs in .with("JASMINE_BROWSER"). Confirm against the RSpec
      # version in use before changing.
      ENV.should_receive(:[], "JASMINE_BROWSER").and_return(nil)
      config = Jasmine::Config.new
      config.stub!(:start_servers)
      Jasmine::SeleniumDriver.should_receive(:new).
        with(anything(), anything(), "*firefox", anything()).
        and_return(mock(Jasmine::SeleniumDriver, :connect => true))
      config.start
    end
    it "should use ENV['JASMINE_BROWSER'] if set" do
      # NOTE(review): same should_receive signature concern as above.
      ENV.should_receive(:[], "JASMINE_BROWSER").and_return("mosaic")
      config = Jasmine::Config.new
      config.stub!(:start_servers)
      Jasmine::SeleniumDriver.should_receive(:new).
        with(anything(), anything(), "*mosaic", anything()).
        and_return(mock(Jasmine::SeleniumDriver, :connect => true))
      config.start
    end
  end
  describe "#start_selenium_server" do
    it "should use an existing selenium server if SELENIUM_SERVER_PORT is set" do
      config = Jasmine::Config.new
      # NOTE(review): stub!(:[], ...) — as with should_receive above, the
      # second argument is likely meant to be a .with constraint.
      ENV.stub!(:[], "SELENIUM_SERVER_PORT").and_return(1234)
      # When a port is provided, no new server is spawned — we only wait for
      # the existing listener.
      Jasmine.should_receive(:wait_for_listener).with(1234, "selenium server")
      config.start_selenium_server
    end
  end
end
|
require 'spec_helper'
require 'mysql2'
# Minimal entity schema fixture: a mandatory indexed string attribute and an
# optional indexed integer attribute.
SIMPLE_SCHEMA = {
  'attr' => [
    {'name' => 'name', 'type' => 'str', 'len' => 100, 'mand' => true, 'ind' => true},
    {'name' => 'yr', 'type' => 'int', 'mand' => false, 'ind' => true},
  ]
}
# Integration specs for Engine against a live MySQL database. Each context
# resets the database (sqlclear), builds entities, and verifies the resulting
# schema/data through an XML dump of the test database (sqldump).
describe Engine do
  context 'starting from empty database' do
    before(:all) do
      sqlclear
      @e = Engine.new(CREDENTIALS)
    end
    it 'should connect to an empty MySQL database' do
      expect(@e).not_to be_nil
    end
    it 'should be able to create simple entity by given scheme' do
      book = @e.entity_create(Entity.new('book', SIMPLE_SCHEMA))
      expect(book).to be_kind_of(Entity)
      expect(@e.entity('book')).to eq(book)
      r = sqldump.root.elements
      # Every entity table carries implicit _id/_data columns plus one SQL
      # column per indexable attribute.
      expect(r.to_a('//table_structure[@name="book"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='auto_increment' Field='_id' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='_data' Key='' Null='YES' Type='mediumtext'/>",
        "<field Comment='' Extra='' Field='name' Key='' Null='YES' Type='varchar(100)'/>",
        "<field Comment='' Extra='' Field='yr' Key='' Null='YES' Type='int(11)'/>"
      ])
    end
    it 'should be able to see newly created entity' do
      expect(@e.entity('book')).not_to be_nil
      # Round-tripping through JSON must reproduce the original schema.
      expect(JSON.load(@e.entity('book').to_json)).to eq(SIMPLE_SCHEMA)
      expect(@e.entity!('book')).not_to be_nil
    end
    it 'fails when requested non-existent entity' do
      expect { @e.entity!('does_not_exist') }.to raise_error(NotFound)
    end
  end
  # Re-opens the same database; each example builds its own Engine to prove
  # that previously created entities are rediscovered from persistent state.
  context 'starting from existing database' do
    it 'should be able to initialize' do
      @e = Engine.new(CREDENTIALS)
    end
    it 'should be able to see newly created entity' do
      @e = Engine.new(CREDENTIALS)
      ent = @e.entity('book')
      expect(ent).not_to be_nil
      expect(JSON.load(ent.to_json)).to eq(SIMPLE_SCHEMA)
    end
    it 'should raise an error trying to delete non-existing entity' do
      @e = Engine.new(CREDENTIALS)
      expect { @e.entity_delete('foo') }.to raise_error(NotFound)
    end
    it 'should be able to delete entity' do
      @e = Engine.new(CREDENTIALS)
      @e.entity_delete('book')
      cnt = 0
      @e.each_entity { cnt += 1 }
      expect(cnt).to eq(0)
    end
  end
  context 'creation of entity with reserved field names' do
    RESERVED_SCHEMA = {
      'attr' => [
        {'name' => 'name', 'type' => 'str', 'len' => 100, 'mand' => true, 'ind' => true},
        {'name' => 'group', 'type' => 'int', 'mand' => false, 'ind' => true},
      ]
    }
    it 'can create entity with SQL reserved name' do
      # FIX: this context never initialized @e. Instance variables set in a
      # sibling context's before(:all) are not shared across sibling example
      # groups, so entity_create was previously invoked on nil.
      @e = Engine.new(CREDENTIALS)
      @e.entity_create(Entity.new('reserved', RESERVED_SCHEMA))
      r = sqldump.root.elements
      # 'group' is a reserved word in SQL; creation must still succeed.
      expect(r.to_a('//table_structure[@name="reserved"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='auto_increment' Field='_id' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='_data' Key='' Null='YES' Type='mediumtext'/>",
        "<field Comment='' Extra='' Field='name' Key='' Null='YES' Type='varchar(100)'/>",
        "<field Comment='' Extra='' Field='group' Key='' Null='YES' Type='int(11)'/>"
      ])
    end
  end
  context 'creation of two multi-related entities' do
    SERIES_SCHEMA = {
      'attr' => [
        {'name' => 'name', 'type' => 'str', 'len' => 100, 'mand' => true, 'ind' => true},
      ],
      'rel' => [
        {'name' => 'series_book', 'target' => 'book', 'type' => '0n'},
      ],
    }
    before(:all) do
      sqlclear
      @e = Engine.new(CREDENTIALS)
    end
    it 'should be able to create series with multiple relation to book' do
      book = @e.entity_create(Entity.new('book', SIMPLE_SCHEMA))
      series = @e.entity_create(Entity.new('series', SERIES_SCHEMA))
      expect(series).to be_kind_of(Entity)
      expect(@e.entity('series')).to eq(series)
      r = sqldump.root.elements
      expect(r.to_a('//table_structure[@name="series"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='auto_increment' Field='_id' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='_data' Key='' Null='YES' Type='mediumtext'/>",
        "<field Comment='' Extra='' Field='name' Key='' Null='YES' Type='varchar(100)'/>",
      ])
      # The 0..n relation materializes as a join table keyed by both sides.
      expect(r.to_a('//table_structure[@name="series_book"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='' Field='series' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='book' Key='PRI' Null='NO' Type='int(11)'/>",
      ])
    end
    it 'should maintain link table when inserting data into entity with relation' do
      book = @e.entity('book')
      id1 = book.insert({'name' => 'Foo'})
      id2 = book.insert({'name' => 'Bar'})
      series = @e.entity('series')
      id_series = series.insert({'name' => 'Series', 'series_book' => [id1, id2]})
      # get() expands related rows into {_id, _header} stubs.
      expect(series.get(id_series)).to eq({
        '_header' => 'Series',
        'name' => 'Series',
        'series_book' => [
          {'_id' => id1, '_header' => 'Foo'},
          {'_id' => id2, '_header' => 'Bar'},
        ],
      })
      r = sqldump.root.elements
      expect(r.to_a('//table_data[@name="series_book"]/row').map { |x| x.to_s }).to eq([
        "<row><field name='series'>1</field><field name='book'>1</field></row>",
        "<row><field name='series'>1</field><field name='book'>2</field></row>",
      ])
    end
    it 'can delete both entities' do
      @e.entity_delete('book')
      @e.entity_delete('series')
    end
    it 'leaves a clean database' do
      # Deleting both entities must also drop the join table.
      r = sqldump.root.elements
      expect(r.to_a('//table_data[@name="book"]')).to be_empty
      expect(r.to_a('//table_data[@name="series"]')).to be_empty
      expect(r.to_a('//table_data[@name="series_book"]')).to be_empty
    end
  end
  context 'creation of entity linked to itself' do
    RECURSIVE_SCHEMA = {
      'attr' => [
        {'name' => 'name', 'type' => 'str', 'len' => 100, 'mand' => true, 'ind' => true},
      ],
      'rel' => [
        {'name' => 'linked', 'target' => 'node', 'type' => '0n'},
      ],
    }
    before(:all) do
      sqlclear
      @e = Engine.new(CREDENTIALS)
    end
    it 'can create entity with self-reference relationship' do
      ent = @e.entity_create(Entity.new('node', RECURSIVE_SCHEMA))
      ent2 = @e.entity('node')
      expect(ent2).not_to be_nil
    end
    it 'can insert root node' do
      node = @e.entity('node')
      node.insert({'name' => 'root'})
    end
    it 'can insert linked nodes' do
      # IDs are auto-incremented, so 1 = root, 2 = child 1.
      node = @e.entity('node')
      node.insert({'name' => 'child 1', 'linked' => [1]})
      node.insert({'name' => 'child 2', 'linked' => [1]})
      node.insert({'name' => 'child 1.1', 'linked' => [2]})
    end
  end
  context 'creation of mixed indexable and non-indexable columns' do
    MIXED_SCHEMA = {
      'attr' => [
        {
          'name' => 'name',
          'type' => 'str',
          'len' => 100,
          'mand' => true,
          'ind' => true,
        },
        {
          'name' => 'int_non_ind',
          'type' => 'int',
          'mand' => false,
          'ind' => false,
        },
        {
          'name' => 'str_ind',
          'type' => 'str',
          'len' => 500,
          'mand' => false,
          'ind' => true,
        },
        {
          'name' => 'str_non_ind',
          'type' => 'str',
          'len' => 500,
          'mand' => false,
          'ind' => false,
        },
      ]
    }
    before(:all) do
      sqlclear
      @e = Engine.new(CREDENTIALS)
    end
    it 'should be able to create indexable and non-indexable columns' do
      ent = @e.entity_create(Entity.new('ent', MIXED_SCHEMA))
      ent2 = @e.entity('ent')
      expect(ent2).not_to be_nil
      r = sqldump.root.elements
      # Only indexable attributes become SQL columns; the rest live in _data.
      expect(r.to_a('//table_structure[@name="ent"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='auto_increment' Field='_id' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='_data' Key='' Null='YES' Type='mediumtext'/>",
        "<field Comment='' Extra='' Field='name' Key='' Null='YES' Type='varchar(100)'/>",
        "<field Comment='' Extra='' Field='str_ind' Key='' Null='YES' Type='varchar(500)'/>"
      ])
    end
    it 'should be able to insert data in all columns' do
      ent = @e.entity('ent')
      expect(ent).not_to be_nil
      ent.insert({
        'name' => 'Foo',
        'int_non_ind' => 42,
        'str_ind' => 'Bar',
        'str_non_ind' => 'Baz',
      })
    end
    it 'should be able to insert data in all columns' do
      # Non-indexed values must round-trip through _data unchanged.
      ent = @e.entity('ent')
      expect(ent).not_to be_nil
      r = ent.get(1)
      expect(r).to eq({
        '_header' => 'Foo',
        'name' => 'Foo',
        'int_non_ind' => 42,
        'str_ind' => 'Bar',
        'str_non_ind' => 'Baz',
      })
    end
  end
  context 'unique indexed column' do
    UNIQ_SCHEMA = {
      'attr' => [
        {
          'name' => 'name',
          'type' => 'str',
          'ind' => true,
          'len' => 100,
        },
        {
          'name' => 'uid',
          'type' => 'str',
          'ind' => true,
          'uniq' => true,
          'len' => 16,
        },
      ]
    }
    before(:all) do
      sqlclear
      @e = Engine.new(CREDENTIALS)
    end
    it 'can create unique indexed column' do
      ent = @e.entity_create(Entity.new('ent', UNIQ_SCHEMA))
      ent2 = @e.entity('ent')
      expect(ent2).not_to be_nil
      r = sqldump.root.elements
      # 'uniq' => true maps onto a UNIQUE key (Key='UNI').
      expect(r.to_a('//table_structure[@name="ent"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='auto_increment' Field='_id' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='_data' Key='' Null='YES' Type='mediumtext'/>",
        "<field Comment='' Extra='' Field='name' Key='' Null='YES' Type='varchar(100)'/>",
        "<field Comment='' Extra='' Field='uid' Key='UNI' Null='YES' Type='varchar(16)'/>",
      ])
    end
    it 'can insert different data' do
      ent = @e.entity('ent')
      expect(ent).not_to be_nil
      ent.insert({'name' => 'Foo', 'uid' => 'A-1'})
      ent.insert({'name' => 'Bar', 'uid' => 'B-2'})
    end
    it 'cannot insert data with same unique column value' do
      # The uniqueness violation surfaces as a raw driver error.
      ent = @e.entity('ent')
      expect(ent).not_to be_nil
      expect {
        ent.insert({'name' => 'Baz', 'uid' => 'A-1'})
      }.to raise_error(Mysql2::Error)
    end
  end
  context 'starting from empty database, connect by URL' do
    before(:all) do
      sqlclear
    end
    it 'should be able to connect' do
      # Engine.new also accepts a DSN-style URL instead of a credentials hash.
      @e = Engine.new(DATABASE_URL)
      expect(@e).not_to be_nil
    end
  end
  context 'two engines connected to one DB' do
    before(:all) do
      sqlclear
      @e1 = Engine.new(DATABASE_URL)
      @e2 = Engine.new(DATABASE_URL)
    end
    it 'first engine can create new entity' do
      @e1.entity_create(Entity.new('ent', MIXED_SCHEMA))
    end
    it 'first engine can operate this entity' do
      ent = @e1.entity('ent')
      expect(ent).not_to be_nil
      ent.insert({'name' => 'foo'})
      expect(ent.count).to eq(1)
    end
    it 'second engine does not see entity' do
      # Entity metadata is cached per engine instance until refresh!.
      ent = @e2.entity('ent')
      expect(ent).to be_nil
    end
    it 'second engine sees entity after refresh' do
      @e2.refresh!
      ent = @e2.entity('ent')
      expect(ent).not_to be_nil
      expect(ent.count).to eq(1)
    end
  end
end
Fixed missing Engine initialization in the reserved-field-names spec context (instance variables from a sibling context's before(:all) are not shared, so @e was nil).
require 'spec_helper'
require 'mysql2'
# Minimal entity schema fixture: a mandatory indexed string attribute and an
# optional indexed integer attribute.
SIMPLE_SCHEMA = {
  'attr' => [
    {'name' => 'name', 'type' => 'str', 'len' => 100, 'mand' => true, 'ind' => true},
    {'name' => 'yr', 'type' => 'int', 'mand' => false, 'ind' => true},
  ]
}
# Integration specs for Engine against a live MySQL database (revision with
# the reserved-field-names context fixed to build its own Engine). Each
# context resets the database (sqlclear), builds entities, and verifies the
# resulting schema/data through an XML dump of the test database (sqldump).
describe Engine do
  context 'starting from empty database' do
    before(:all) do
      sqlclear
      @e = Engine.new(CREDENTIALS)
    end
    it 'should connect to an empty MySQL database' do
      expect(@e).not_to be_nil
    end
    it 'should be able to create simple entity by given scheme' do
      book = @e.entity_create(Entity.new('book', SIMPLE_SCHEMA))
      expect(book).to be_kind_of(Entity)
      expect(@e.entity('book')).to eq(book)
      r = sqldump.root.elements
      # Every entity table carries implicit _id/_data columns plus one SQL
      # column per indexable attribute.
      expect(r.to_a('//table_structure[@name="book"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='auto_increment' Field='_id' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='_data' Key='' Null='YES' Type='mediumtext'/>",
        "<field Comment='' Extra='' Field='name' Key='' Null='YES' Type='varchar(100)'/>",
        "<field Comment='' Extra='' Field='yr' Key='' Null='YES' Type='int(11)'/>"
      ])
    end
    it 'should be able to see newly created entity' do
      expect(@e.entity('book')).not_to be_nil
      # Round-tripping through JSON must reproduce the original schema.
      expect(JSON.load(@e.entity('book').to_json)).to eq(SIMPLE_SCHEMA)
      expect(@e.entity!('book')).not_to be_nil
    end
    it 'fails when requested non-existent entity' do
      expect { @e.entity!('does_not_exist') }.to raise_error(NotFound)
    end
  end
  # Re-opens the same database; each example builds its own Engine to prove
  # that previously created entities are rediscovered from persistent state.
  context 'starting from existing database' do
    it 'should be able to initialize' do
      @e = Engine.new(CREDENTIALS)
    end
    it 'should be able to see newly created entity' do
      @e = Engine.new(CREDENTIALS)
      ent = @e.entity('book')
      expect(ent).not_to be_nil
      expect(JSON.load(ent.to_json)).to eq(SIMPLE_SCHEMA)
    end
    it 'should raise an error trying to delete non-existing entity' do
      @e = Engine.new(CREDENTIALS)
      expect { @e.entity_delete('foo') }.to raise_error(NotFound)
    end
    it 'should be able to delete entity' do
      @e = Engine.new(CREDENTIALS)
      @e.entity_delete('book')
      cnt = 0
      @e.each_entity { cnt += 1 }
      expect(cnt).to eq(0)
    end
  end
  context 'creation of entity with reserved field names' do
    RESERVED_SCHEMA = {
      'attr' => [
        {'name' => 'name', 'type' => 'str', 'len' => 100, 'mand' => true, 'ind' => true},
        {'name' => 'group', 'type' => 'int', 'mand' => false, 'ind' => true},
      ]
    }
    it 'can create entity with SQL reserved name' do
      # This context has no before(:all), so the example creates its own
      # Engine; ivars from sibling contexts are not shared.
      @e = Engine.new(CREDENTIALS)
      @e.entity_create(Entity.new('reserved', RESERVED_SCHEMA))
      r = sqldump.root.elements
      # 'group' is a reserved word in SQL; creation must still succeed.
      expect(r.to_a('//table_structure[@name="reserved"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='auto_increment' Field='_id' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='_data' Key='' Null='YES' Type='mediumtext'/>",
        "<field Comment='' Extra='' Field='name' Key='' Null='YES' Type='varchar(100)'/>",
        "<field Comment='' Extra='' Field='group' Key='' Null='YES' Type='int(11)'/>"
      ])
    end
  end
  context 'creation of two multi-related entities' do
    SERIES_SCHEMA = {
      'attr' => [
        {'name' => 'name', 'type' => 'str', 'len' => 100, 'mand' => true, 'ind' => true},
      ],
      'rel' => [
        {'name' => 'series_book', 'target' => 'book', 'type' => '0n'},
      ],
    }
    before(:all) do
      sqlclear
      @e = Engine.new(CREDENTIALS)
    end
    it 'should be able to create series with multiple relation to book' do
      book = @e.entity_create(Entity.new('book', SIMPLE_SCHEMA))
      series = @e.entity_create(Entity.new('series', SERIES_SCHEMA))
      expect(series).to be_kind_of(Entity)
      expect(@e.entity('series')).to eq(series)
      r = sqldump.root.elements
      expect(r.to_a('//table_structure[@name="series"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='auto_increment' Field='_id' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='_data' Key='' Null='YES' Type='mediumtext'/>",
        "<field Comment='' Extra='' Field='name' Key='' Null='YES' Type='varchar(100)'/>",
      ])
      # The 0..n relation materializes as a join table keyed by both sides.
      expect(r.to_a('//table_structure[@name="series_book"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='' Field='series' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='book' Key='PRI' Null='NO' Type='int(11)'/>",
      ])
    end
    it 'should maintain link table when inserting data into entity with relation' do
      book = @e.entity('book')
      id1 = book.insert({'name' => 'Foo'})
      id2 = book.insert({'name' => 'Bar'})
      series = @e.entity('series')
      id_series = series.insert({'name' => 'Series', 'series_book' => [id1, id2]})
      # get() expands related rows into {_id, _header} stubs.
      expect(series.get(id_series)).to eq({
        '_header' => 'Series',
        'name' => 'Series',
        'series_book' => [
          {'_id' => id1, '_header' => 'Foo'},
          {'_id' => id2, '_header' => 'Bar'},
        ],
      })
      r = sqldump.root.elements
      expect(r.to_a('//table_data[@name="series_book"]/row').map { |x| x.to_s }).to eq([
        "<row><field name='series'>1</field><field name='book'>1</field></row>",
        "<row><field name='series'>1</field><field name='book'>2</field></row>",
      ])
    end
    it 'can delete both entities' do
      @e.entity_delete('book')
      @e.entity_delete('series')
    end
    it 'leaves a clean database' do
      # Deleting both entities must also drop the join table.
      r = sqldump.root.elements
      expect(r.to_a('//table_data[@name="book"]')).to be_empty
      expect(r.to_a('//table_data[@name="series"]')).to be_empty
      expect(r.to_a('//table_data[@name="series_book"]')).to be_empty
    end
  end
  context 'creation of entity linked to itself' do
    RECURSIVE_SCHEMA = {
      'attr' => [
        {'name' => 'name', 'type' => 'str', 'len' => 100, 'mand' => true, 'ind' => true},
      ],
      'rel' => [
        {'name' => 'linked', 'target' => 'node', 'type' => '0n'},
      ],
    }
    before(:all) do
      sqlclear
      @e = Engine.new(CREDENTIALS)
    end
    it 'can create entity with self-reference relationship' do
      ent = @e.entity_create(Entity.new('node', RECURSIVE_SCHEMA))
      ent2 = @e.entity('node')
      expect(ent2).not_to be_nil
    end
    it 'can insert root node' do
      node = @e.entity('node')
      node.insert({'name' => 'root'})
    end
    it 'can insert linked nodes' do
      # IDs are auto-incremented, so 1 = root, 2 = child 1.
      node = @e.entity('node')
      node.insert({'name' => 'child 1', 'linked' => [1]})
      node.insert({'name' => 'child 2', 'linked' => [1]})
      node.insert({'name' => 'child 1.1', 'linked' => [2]})
    end
  end
  context 'creation of mixed indexable and non-indexable columns' do
    MIXED_SCHEMA = {
      'attr' => [
        {
          'name' => 'name',
          'type' => 'str',
          'len' => 100,
          'mand' => true,
          'ind' => true,
        },
        {
          'name' => 'int_non_ind',
          'type' => 'int',
          'mand' => false,
          'ind' => false,
        },
        {
          'name' => 'str_ind',
          'type' => 'str',
          'len' => 500,
          'mand' => false,
          'ind' => true,
        },
        {
          'name' => 'str_non_ind',
          'type' => 'str',
          'len' => 500,
          'mand' => false,
          'ind' => false,
        },
      ]
    }
    before(:all) do
      sqlclear
      @e = Engine.new(CREDENTIALS)
    end
    it 'should be able to create indexable and non-indexable columns' do
      ent = @e.entity_create(Entity.new('ent', MIXED_SCHEMA))
      ent2 = @e.entity('ent')
      expect(ent2).not_to be_nil
      r = sqldump.root.elements
      # Only indexable attributes become SQL columns; the rest live in _data.
      expect(r.to_a('//table_structure[@name="ent"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='auto_increment' Field='_id' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='_data' Key='' Null='YES' Type='mediumtext'/>",
        "<field Comment='' Extra='' Field='name' Key='' Null='YES' Type='varchar(100)'/>",
        "<field Comment='' Extra='' Field='str_ind' Key='' Null='YES' Type='varchar(500)'/>"
      ])
    end
    it 'should be able to insert data in all columns' do
      ent = @e.entity('ent')
      expect(ent).not_to be_nil
      ent.insert({
        'name' => 'Foo',
        'int_non_ind' => 42,
        'str_ind' => 'Bar',
        'str_non_ind' => 'Baz',
      })
    end
    it 'should be able to insert data in all columns' do
      # Non-indexed values must round-trip through _data unchanged.
      ent = @e.entity('ent')
      expect(ent).not_to be_nil
      r = ent.get(1)
      expect(r).to eq({
        '_header' => 'Foo',
        'name' => 'Foo',
        'int_non_ind' => 42,
        'str_ind' => 'Bar',
        'str_non_ind' => 'Baz',
      })
    end
  end
  context 'unique indexed column' do
    UNIQ_SCHEMA = {
      'attr' => [
        {
          'name' => 'name',
          'type' => 'str',
          'ind' => true,
          'len' => 100,
        },
        {
          'name' => 'uid',
          'type' => 'str',
          'ind' => true,
          'uniq' => true,
          'len' => 16,
        },
      ]
    }
    before(:all) do
      sqlclear
      @e = Engine.new(CREDENTIALS)
    end
    it 'can create unique indexed column' do
      ent = @e.entity_create(Entity.new('ent', UNIQ_SCHEMA))
      ent2 = @e.entity('ent')
      expect(ent2).not_to be_nil
      r = sqldump.root.elements
      # 'uniq' => true maps onto a UNIQUE key (Key='UNI').
      expect(r.to_a('//table_structure[@name="ent"]/field').map { |x| x.to_s }).to eq([
        "<field Comment='' Extra='auto_increment' Field='_id' Key='PRI' Null='NO' Type='int(11)'/>",
        "<field Comment='' Extra='' Field='_data' Key='' Null='YES' Type='mediumtext'/>",
        "<field Comment='' Extra='' Field='name' Key='' Null='YES' Type='varchar(100)'/>",
        "<field Comment='' Extra='' Field='uid' Key='UNI' Null='YES' Type='varchar(16)'/>",
      ])
    end
    it 'can insert different data' do
      ent = @e.entity('ent')
      expect(ent).not_to be_nil
      ent.insert({'name' => 'Foo', 'uid' => 'A-1'})
      ent.insert({'name' => 'Bar', 'uid' => 'B-2'})
    end
    it 'cannot insert data with same unique column value' do
      # The uniqueness violation surfaces as a raw driver error.
      ent = @e.entity('ent')
      expect(ent).not_to be_nil
      expect {
        ent.insert({'name' => 'Baz', 'uid' => 'A-1'})
      }.to raise_error(Mysql2::Error)
    end
  end
  context 'starting from empty database, connect by URL' do
    before(:all) do
      sqlclear
    end
    it 'should be able to connect' do
      # Engine.new also accepts a DSN-style URL instead of a credentials hash.
      @e = Engine.new(DATABASE_URL)
      expect(@e).not_to be_nil
    end
  end
  context 'two engines connected to one DB' do
    before(:all) do
      sqlclear
      @e1 = Engine.new(DATABASE_URL)
      @e2 = Engine.new(DATABASE_URL)
    end
    it 'first engine can create new entity' do
      @e1.entity_create(Entity.new('ent', MIXED_SCHEMA))
    end
    it 'first engine can operate this entity' do
      ent = @e1.entity('ent')
      expect(ent).not_to be_nil
      ent.insert({'name' => 'foo'})
      expect(ent.count).to eq(1)
    end
    it 'second engine does not see entity' do
      # Entity metadata is cached per engine instance until refresh!.
      ent = @e2.entity('ent')
      expect(ent).to be_nil
    end
    it 'second engine sees entity after refresh' do
      @e2.refresh!
      ent = @e2.entity('ent')
      expect(ent).not_to be_nil
      expect(ent.count).to eq(1)
    end
  end
end
|
# encoding: UTF-8
require 'examen'
require 'spec_helper'
# Specs for Pregunta: a multiple-choice question with a statement
# (enunciado), four answers (respuestas), and a difficulty used by
# Comparable.
describe Pregunta do
  before :each do
    # Last constructor argument is the difficulty level.
    @p1 = Pregunta.new("Enunciado", "a", "b", "c", "d", 3)
    @p2 = Pregunta.new("Enunciado", "a", "b", "c", "d", 2)
  end
  describe "La pregunta es coherente" do
    it "Existe enunciado" do
      expect(@p1.enunciado).not_to eq(nil)
    end
    it "Existen respuestas" do
      # Idiomatic iteration: replaces the previous manual while/index loop
      # (same elements visited, same expectations).
      @p1.respuestas.each do |respuesta|
        expect(respuesta).not_to eq(nil)
      end
    end
  end
  describe "Getter de enunciado" do
    it "Devuelve el valor de p1.enunciado" do
      expect(@p1.enunciado).to eq("Enunciado")
    end
  end
  describe "Getter de respuestas" do
    it "Devuelve el valor de p1.respuestas" do
      expect(@p1.respuestas).to eq(["a", "b", "c", "d"])
    end
  end
  describe "to_s" do
    it "Se debe mostrar por consola las pregunta y las respuestas" do
      expect(@p1.to_s).to eq("Enunciado \n a \n b \n c \n d")
    end
  end
  # Practice 8: Comparable support ordered by difficulty.
  describe "La clase es comparable" do
    it "La pregunta 1 es más dificil que la pregunta 2" do
      expect(@p1 > @p2).to eq(true)
    end
  end
end
# Specs for List: a doubly linked list with head (raiz) and tail, built from
# Node objects carrying value/next/previous.
describe List do
  before :each do
    @l1 = List.new("Raiz")
  end
  describe "Node" do
    it "Debe existir un Nodo de la lista con sus datos y su siguiente" do
      expect(@l1.raiz.value).not_to eq(nil)
      expect(@l1.raiz.next).to eq(nil)
    end
  end
  describe "List" do
    it "Se extrae el primer elemento de la lista" do
      # shift removes the current head ("Raiz"), promoting 8 to head.
      @l1.push(8)
      @l1.shift
      expect(@l1.raiz.value).to eq(8)
    end
    it "Se puede insertar un elemento" do
      @l1.push(8)
      expect(@l1.tail.value).to eq(8)
    end
    it "Se pueden insertar varios elementos" do
      # insert appends multiple values at the tail.
      @l1.insert(3,4,5)
      expect(@l1.tail.value).to eq(5)
    end
    it "Debe existir una lista con su cabeza" do
      expect(@l1.raiz.value).to eq("Raiz")
    end
    it "La lista está doblemente enlazada" do
      @l1.insert(8, 7, 6)
      expect(@l1.tail.previous.previous.value).to eq(8)
    end
    it "La lista está doblemente enlazada" do
      @l1.insert(8, 7, 6)
      expect(@l1.tail.previous.value).to eq(7)
    end
    it "La lista está doblemente enlazada" do
      @l1.insert(8, 7, 6)
      # The head has no predecessor.
      expect(@l1.raiz.previous).to eq(nil)
    end
    it "La lista está doblemente enlazada: pop" do
      # NOTE(review): pop(1) apparently inserts 1 at the head (raiz becomes
      # 1) — unconventional naming; confirm against the List implementation.
      @l1.pop(1)
      expect(@l1.raiz.value).to eq(1)
    end
    it "La lista está doblemente enlazada: pull" do
      # NOTE(review): after pop(1) then pull, the head is still 1 —
      # presumably pull removes from the tail. Confirm with the
      # implementation.
      @l1.pop(1)
      @l1.pull
      expect(@l1.raiz.value).to eq(1)
    end
  end
end
# Specs for Examen: a question list (List of Pregunta/Verdadero_Falso) whose
# Enumerable behavior (count/max/min/sort) is driven by question difficulty.
describe Examen do
  # Fixed from practice 7 :)
  describe "Relacion de preguntas" do
    before :each do
      # Difficulties: p1=5, p2=4, p3=3, p4=2, p5=1 (descending), so p1 is
      # the maximum and p5 the minimum.
      @p1 = Pregunta.new("1.-) ¿Cuál es la salida del siguiente código Ruby? \n\t class Xyz \n\t\t def pots \n\t\t\t @nice \n\t\t end \n\t end \n\n\t xyz = Xyz.new \n\t p xyz.pots", "a) #<Xyz:0xa000208>", "b) nil", "c) 0", "d) Ninguna de las anteriores", 5)
      @p2 = Verdadero_Falso.new("2.-) La sigiente definición de un hash en Ruby es válida: \n\t hash_raro = { \n\t\t [1,2,3] => Object.new(), \n\t\t Has.new => :toto \n\t }", 4)
      @p3 = Pregunta.new("3.-) ¿Cuál es la salida del siguiente código Ruby? \n\t class Array \n\t\t def say_hi \n\t\t\t \"HEY!\" \n\t\t end \n\t end \n\t p [1, \"bob\"].say_hi", "a) 1","b) bob","c) hey!","d) Ninguna de las anteriores", 3)
      @p4 = Pregunta.new("4.-) ¿Cuál es el tipo del objeto en el siguiente código Ruby? \n\t class Objeto \n\t end", "a) Una instancia de la clase Class","b) Una constante", "c) Un Objeto","d) Ninguna de las anteriores", 2)
      @p5 = Verdadero_Falso.new("5.-) Es apropiado que una clase Tablero herede de una clase Juego", 1)
      @lista_preguntas = List.new(@p1)
      @lista_preguntas.insert(@p2, @p3, @p4, @p5)
    end
    it "Estructura de la lista: la cabeza es correcta" do
      expect(@lista_preguntas.raiz.value).to eq(@p1)
    end
    it "Estructura de la lista: la cola es correcta" do
      expect(@lista_preguntas.tail.value).to eq(@p5)
    end
    it "Modulo ENUMERABLE" do
      # Enumerable methods operate over the questions, ordered by difficulty.
      expect(@lista_preguntas.count).to eq(5)
      expect(@lista_preguntas.max).to eq(@p1)
      expect(@lista_preguntas.min).to eq(@p5)
      expect(@lista_preguntas.sort).to eq([@p5,@p4,@p3,@p2,@p1])
    end
  end
end
Extendidas las expectativas de ENUMERABLE
# encoding: UTF-8
require 'examen'
require 'spec_helper'
# Specs for Pregunta (revision with extended Comparable expectations): a
# multiple-choice question with a statement (enunciado), four answers
# (respuestas), and a difficulty used by Comparable.
describe Pregunta do
  before :each do
    # Last constructor argument is the difficulty level.
    @p1 = Pregunta.new("Enunciado", "a", "b", "c", "d", 3)
    @p2 = Pregunta.new("Enunciado", "a", "b", "c", "d", 2)
    @p3 = Pregunta.new("Enunciado", "a", "b", "c", "d", 4)
  end
  describe "La pregunta es coherente" do
    it "Existe enunciado" do
      expect(@p1.enunciado).not_to eq(nil)
    end
    it "Existen respuestas" do
      # Idiomatic iteration: replaces the previous manual while/index loop
      # (same elements visited, same expectations).
      @p1.respuestas.each do |respuesta|
        expect(respuesta).not_to eq(nil)
      end
    end
  end
  describe "Getter de enunciado" do
    it "Devuelve el valor de p1.enunciado" do
      expect(@p1.enunciado).to eq("Enunciado")
    end
  end
  describe "Getter de respuestas" do
    it "Devuelve el valor de p1.respuestas" do
      expect(@p1.respuestas).to eq(["a", "b", "c", "d"])
    end
  end
  describe "to_s" do
    it "Se debe mostrar por consola las pregunta y las respuestas" do
      expect(@p1.to_s).to eq("Enunciado \n a \n b \n c \n d")
    end
  end
  ################### Practice 8 #########################
  # Comparable support ordered by difficulty.
  describe "La clase es comparable" do
    it "La pregunta 1 es más dificil que la pregunta 2" do
      expect(@p1 > @p2).to eq(true)
    end
    it "La pregunta 2 no es más dificil que la pregunta 1" do
      expect(@p1 < @p2).to eq(false)
    end
    it "La pregunta 2 no es igual de dificil que la pregunta 1" do
      expect(@p1 == @p2).to eq(false)
    end
    it "La pregunta 1 esta entre el nivel 1 y 3" do
      # Comparable#between? on difficulty: 2 <= 3 <= 4.
      expect(@p1.between?(@p2, @p3)).to eq(true)
    end
  end
end
# Specs for List (second revision, unchanged from v1): a doubly linked list
# with head (raiz) and tail, built from Node objects carrying
# value/next/previous.
describe List do
  before :each do
    @l1 = List.new("Raiz")
  end
  describe "Node" do
    it "Debe existir un Nodo de la lista con sus datos y su siguiente" do
      expect(@l1.raiz.value).not_to eq(nil)
      expect(@l1.raiz.next).to eq(nil)
    end
  end
  describe "List" do
    it "Se extrae el primer elemento de la lista" do
      # shift removes the current head ("Raiz"), promoting 8 to head.
      @l1.push(8)
      @l1.shift
      expect(@l1.raiz.value).to eq(8)
    end
    it "Se puede insertar un elemento" do
      @l1.push(8)
      expect(@l1.tail.value).to eq(8)
    end
    it "Se pueden insertar varios elementos" do
      # insert appends multiple values at the tail.
      @l1.insert(3,4,5)
      expect(@l1.tail.value).to eq(5)
    end
    it "Debe existir una lista con su cabeza" do
      expect(@l1.raiz.value).to eq("Raiz")
    end
    it "La lista está doblemente enlazada" do
      @l1.insert(8, 7, 6)
      expect(@l1.tail.previous.previous.value).to eq(8)
    end
    it "La lista está doblemente enlazada" do
      @l1.insert(8, 7, 6)
      expect(@l1.tail.previous.value).to eq(7)
    end
    it "La lista está doblemente enlazada" do
      @l1.insert(8, 7, 6)
      # The head has no predecessor.
      expect(@l1.raiz.previous).to eq(nil)
    end
    it "La lista está doblemente enlazada: pop" do
      # NOTE(review): pop(1) apparently inserts 1 at the head (raiz becomes
      # 1) — unconventional naming; confirm against the List implementation.
      @l1.pop(1)
      expect(@l1.raiz.value).to eq(1)
    end
    it "La lista está doblemente enlazada: pull" do
      # NOTE(review): after pop(1) then pull, the head is still 1 —
      # presumably pull removes from the tail. Confirm with the
      # implementation.
      @l1.pop(1)
      @l1.pull
      expect(@l1.raiz.value).to eq(1)
    end
  end
end
describe Examen do
  # Corrected from practice 7 :)
  describe "Relacion de preguntas" do
    # Builds the five exam questions (multiple choice and true/false) and a
    # doubly-linked List seeded with the first one.
    before :each do
      @p1 = Pregunta.new("1.-) ¿Cuál es la salida del siguiente código Ruby? \n\t class Xyz \n\t\t def pots \n\t\t\t @nice \n\t\t end \n\t end \n\n\t xyz = Xyz.new \n\t p xyz.pots", "a) #<Xyz:0xa000208>", "b) nil", "c) 0", "d) Ninguna de las anteriores", 5)
      @p2 = Verdadero_Falso.new("2.-) La sigiente definición de un hash en Ruby es válida: \n\t hash_raro = { \n\t\t [1,2,3] => Object.new(), \n\t\t Has.new => :toto \n\t }", 4)
      @p3 = Pregunta.new("3.-) ¿Cuál es la salida del siguiente código Ruby? \n\t class Array \n\t\t def say_hi \n\t\t\t \"HEY!\" \n\t\t end \n\t end \n\t p [1, \"bob\"].say_hi", "a) 1","b) bob","c) hey!","d) Ninguna de las anteriores", 3)
      @p4 = Pregunta.new("4.-) ¿Cuál es el tipo del objeto en el siguiente código Ruby? \n\t class Objeto \n\t end", "a) Una instancia de la clase Class","b) Una constante", "c) Un Objeto","d) Ninguna de las anteriores", 2)
      @p5 = Verdadero_Falso.new("5.-) Es apropiado que una clase Tablero herede de una clase Juego", 1)
      @lista_preguntas = List.new(@p1)
      @lista_preguntas.insert(@p2, @p3, @p4, @p5)
    end

    it "Estructura de la lista: la cabeza es correcta" do
      expect(@lista_preguntas.raiz.value).to eq(@p1)
    end

    it "Estructura de la lista: la cola es correcta" do
      expect(@lista_preguntas.tail.value).to eq(@p5)
    end

    ################### PRACTICA 8 #########################
    it "Modulo ENUMERABLE" do
      # Enumerable is mixed into List: ordering follows question difficulty,
      # so max is the hardest (@p1, level 5) and min the easiest (@p5).
      expect(@lista_preguntas.count).to eq(5)
      expect(@lista_preguntas.max).to eq(@p1)
      expect(@lista_preguntas.min).to eq(@p5)
      expect(@lista_preguntas.sort).to eq([@p5, @p4, @p3, @p2, @p1])
      expect(@lista_preguntas.drop(4)).to eq([@p5])
      expect(@lista_preguntas.first).to eq(@p1)
      expect(@lista_preguntas.take(2)).to eq([@p1, @p2])
    end
  end
end
require 'spec_helper'
require 'examen'
class Pregunta
  describe Examen do
    before :each do
      @q = Pregunta.new(:text => '2+2=', :right => 4, :distractors => [9,3,5])
    end
    context "Pregunta" do
      it "Debe tener texto y alguna pregunta" do
        # FIX: `expect(x) == y` builds an expectation object and compares it
        # with ==, discarding the result — the example could never fail.
        # Use the `.to eq` matcher so the assertion actually runs.
        expect(@q.text).to eq('2+2=')
        expect(@q.right).to eq(4)
      end
      it "debe tener 3 componentes" do
        # Missing :right/:distractors must be rejected by the constructor.
        expect { Pregunta.new(:text => '5*8=?') }.to raise_error(ArgumentError)
      end
      it "mostrar pregunta" do
        expect(@q).to respond_to :text
      end
      it "mostrar respuesta correcta" do
        expect(@q).to respond_to :right
      end
      it "mostrar opciones incorrectas" do
        expect(@q).to respond_to :distractors
      end
      it "mostrar por pantalla" do
        expect(@q).to respond_to :to_s
      end
      it "mostrarse correctamente" do
        # Rendered as the question text followed by numbered "N) option" lines.
        expect(@q.to_s).to match(/(\d|\w)+\n(\d\)\s+(\w|\d)+\n)+/)
      end
    end
  end
end
class Examen
  describe Examen do
    before :each do
      @q = Pregunta.new(:text => '2+2=', :right => 4, :distractors => [9,3,5])
      @n = Nodo.new(@q, nil, nil)
      @e = Examen.new(@q)
    end
    context "Nodo" do
      it "Debe existir un nodo de la lista con sus datos con su siguiente y su anterior" do
        # FIX: `expect(x) == y` is a no-op (the comparison result is
        # discarded); use the `.to eq` matcher so the checks actually run.
        expect(@n.value).to eq(@q)
        expect(@n.next).to eq(nil)
        expect(@n.prev).to eq(nil)
      end
    end
    context "List" do
      it "Se puede extraer el primer elemento de la lista" do
        expect(@e).to respond_to :pop
        # FIX: was `expect(@e.pop)== @q`, a no-op comparison.
        expect(@e.pop).to eq(@q)
      end
      it "Se puede insertar un elemento" do
        expect(@e).to respond_to :<<
        # Only Pregunta instances may be inserted.
        expect { @e << @n }.to raise_error(TypeError)
        expect { @e << @e }.to raise_error(TypeError)
        # FIX: was `expect {@e << @q}==@q`, which asserted nothing; inserting
        # a Pregunta must simply succeed without raising.
        expect { @e << @q }.not_to raise_error
      end
      it "Se puede insertar varios elementos" do
        expect(@e).to respond_to :push_back
        expect(@e.push_back(@q, @q)).to be_instance_of(Array)
      end
      it "Debe existir una lista con su cabeza" do
        expect(@e).to respond_to :cabeza
      end
      it "Debe inicializarse con una pregunta" do
        expect { Examen.new() }.to raise_error(ArgumentError)
        expect { Examen.new(Nodo.new(@q, nil, nil)) }.to raise_error(TypeError)
      end
      it "Debe mostrarse correctamente" do
        # Builds a five-question exam and checks the rendered numbering.
        text = "¿Cuál es la salida del siguiente código Ruby?\nclass Xyz\n\sdef pots\n\s\s@nice\n\send\nend\n\nxyz = Xyz.new\np xyz.pots"
        examen = Examen.new(Pregunta.new(:text => text, :right => "nil", :distractors => ["#<Xyz:0xa000208>","0","Ninguna de las anteriores"]))
        text = "La siguiente definición de un hash en Ruby es válida:\nhash_raro = {\n\s[1, 2, 3] => Object.new(),\nHash.new => :toto\n}"
        examen << PreguntaVerdaderoFalso.new(:text => text, :right => false)
        text = %Q{¿Cuál es la salida del siguiente código Ruby?\nclass Array\n\sdef say_hi\n\s\s"HEY!"\n\send\nend\n p [1, "bob"].say_hi}
        examen << Pregunta.new(:text => text, :right => "HEY!", :distractors => ["1","bob","Ninguna de las anteriores"])
        text = "¿Cuál es el tipo del objeto en el siguiente código Ruby?\nclass Objeto\nend"
        examen << Pregunta.new(:text => text, :right => "Una instancia de la clase Class", :distractors => ["Una Constante", "Un Objeto", "Ninguna de las anteriores"])
        text = "Es apropiado que una clase Tablero herede de una clase Juego"
        examen << PreguntaVerdaderoFalso.new(:text => text, :right => false)
        # Every question is rendered numbered as "N.-) ...".
        expect(examen.to_s).to match(/(\d+\.-\)(.|\s|\n)+)+/)
      end
    end
  end
end
class PreguntaVerdaderoFalso
  describe Examen do
    before :each do
      @q = PreguntaVerdaderoFalso.new(:text => '¿2+2=4?', :right => true)
    end
    context "Pregunta Verdadero y Falso" do
      it "Debe tener texto y alguna pregunta" do
        # FIX: `expect(x) == y` is a no-op; use the `.to eq` matcher.
        expect(@q.text).to eq('¿2+2=4?')
        # NOTE(review): the original (never-run) comparison expected 'Cierto'
        # although the fixture passes :right => true — confirm that #right
        # maps true to the string 'Cierto' before relying on this value.
        expect(@q.right).to eq('Cierto')
      end
      it "Debe heredar de Pregunta" do
        expect(@q).to be_a Pregunta
      end
      # FIX: leftover merge-conflict markers (<<<<<<< HEAD / ======= /
      # >>>>>>> documentacion) made this file unloadable. Resolved by keeping
      # the extra example from HEAD under a non-duplicate description.
      it "Debe no ser instancia de Pregunta" do
        expect(@q.instance_of?(Pregunta)).to eq(false)
      end
      it "debe tener 2 componentes" do
        expect { PreguntaVerdaderoFalso.new(:text => '5*8=?') }.to raise_error(ArgumentError)
      end
      it "mostrar pregunta" do
        expect(@q).to respond_to :text
      end
      it "mostrar respuesta correcta" do
        expect(@q).to respond_to :right
      end
      it "mostrar opciones incorrectas" do
        expect(@q).to respond_to :distractors
      end
      it "mostrar por pantalla" do
        expect(@q).to respond_to :to_s
      end
      it "mostrarse correctamente" do
        expect(@q.to_s).to match(/(\d|\w)+\n(\d\)\s+(\w|\d)+\n)+/)
      end
    end
  end
end
Actualizado el spec, ampliando el contexto de pruebas de PreguntaVerdaderoFalso
require 'spec_helper'
require 'examen'
class Pregunta
  describe Examen do
    before :each do
      @q = Pregunta.new(:text => '2+2=', :right => 4, :distractors => [9,3,5])
    end
    context "Pregunta" do
      it "Debe tener texto y alguna pregunta" do
        # FIX: `expect(x) == y` builds an expectation object and compares it
        # with ==, discarding the result — the example could never fail.
        # Use the `.to eq` matcher so the assertion actually runs.
        expect(@q.text).to eq('2+2=')
        expect(@q.right).to eq(4)
      end
      it "debe tener 3 componentes" do
        # Missing :right/:distractors must be rejected by the constructor.
        expect { Pregunta.new(:text => '5*8=?') }.to raise_error(ArgumentError)
      end
      it "mostrar pregunta" do
        expect(@q).to respond_to :text
      end
      it "mostrar respuesta correcta" do
        expect(@q).to respond_to :right
      end
      it "mostrar opciones incorrectas" do
        expect(@q).to respond_to :distractors
      end
      it "mostrar por pantalla" do
        expect(@q).to respond_to :to_s
      end
      it "mostrarse correctamente" do
        # Rendered as the question text followed by numbered "N) option" lines.
        expect(@q.to_s).to match(/(\d|\w)+\n(\d\)\s+(\w|\d)+\n)+/)
      end
    end
  end
end
class Examen
  describe Examen do
    before :each do
      @q = Pregunta.new(:text => '2+2=', :right => 4, :distractors => [9,3,5])
      @n = Nodo.new(@q, nil, nil)
      @e = Examen.new(@q)
    end
    context "Nodo" do
      it "Debe existir un nodo de la lista con sus datos con su siguiente y su anterior" do
        # FIX: `expect(x) == y` is a no-op (the comparison result is
        # discarded); use the `.to eq` matcher so the checks actually run.
        expect(@n.value).to eq(@q)
        expect(@n.next).to eq(nil)
        expect(@n.prev).to eq(nil)
      end
    end
    context "List" do
      it "Se puede extraer el primer elemento de la lista" do
        expect(@e).to respond_to :pop
        # FIX: was `expect(@e.pop)== @q`, a no-op comparison.
        expect(@e.pop).to eq(@q)
      end
      it "Se puede insertar un elemento" do
        expect(@e).to respond_to :<<
        # Only Pregunta instances may be inserted.
        expect { @e << @n }.to raise_error(TypeError)
        expect { @e << @e }.to raise_error(TypeError)
        # FIX: was `expect {@e << @q}==@q`, which asserted nothing; inserting
        # a Pregunta must simply succeed without raising.
        expect { @e << @q }.not_to raise_error
      end
      it "Se puede insertar varios elementos" do
        expect(@e).to respond_to :push_back
        expect(@e.push_back(@q, @q)).to be_instance_of(Array)
      end
      it "Debe existir una lista con su cabeza" do
        expect(@e).to respond_to :cabeza
      end
      it "Debe inicializarse con una pregunta" do
        expect { Examen.new() }.to raise_error(ArgumentError)
        expect { Examen.new(Nodo.new(@q, nil, nil)) }.to raise_error(TypeError)
      end
      it "Debe mostrarse correctamente" do
        # Builds a five-question exam and checks the rendered numbering.
        text = "¿Cuál es la salida del siguiente código Ruby?\nclass Xyz\n\sdef pots\n\s\s@nice\n\send\nend\n\nxyz = Xyz.new\np xyz.pots"
        examen = Examen.new(Pregunta.new(:text => text, :right => "nil", :distractors => ["#<Xyz:0xa000208>","0","Ninguna de las anteriores"]))
        text = "La siguiente definición de un hash en Ruby es válida:\nhash_raro = {\n\s[1, 2, 3] => Object.new(),\nHash.new => :toto\n}"
        examen << PreguntaVerdaderoFalso.new(:text => text, :right => false)
        text = %Q{¿Cuál es la salida del siguiente código Ruby?\nclass Array\n\sdef say_hi\n\s\s"HEY!"\n\send\nend\n p [1, "bob"].say_hi}
        examen << Pregunta.new(:text => text, :right => "HEY!", :distractors => ["1","bob","Ninguna de las anteriores"])
        text = "¿Cuál es el tipo del objeto en el siguiente código Ruby?\nclass Objeto\nend"
        examen << Pregunta.new(:text => text, :right => "Una instancia de la clase Class", :distractors => ["Una Constante", "Un Objeto", "Ninguna de las anteriores"])
        text = "Es apropiado que una clase Tablero herede de una clase Juego"
        examen << PreguntaVerdaderoFalso.new(:text => text, :right => false)
        # Every question is rendered numbered as "N.-) ...".
        expect(examen.to_s).to match(/(\d+\.-\)(.|\s|\n)+)+/)
      end
    end
  end
end
class PreguntaVerdaderoFalso
  describe Examen do
    before :each do
      @q = PreguntaVerdaderoFalso.new(:text => '¿2+2=4?', :right => true)
    end
    context "Pregunta Verdadero y Falso" do
      it "Debe tener texto y alguna pregunta" do
        # FIX: `expect(x) == y` is a no-op; use the `.to eq` matcher.
        expect(@q.text).to eq('¿2+2=4?')
        # NOTE(review): the original (never-run) comparison expected 'Cierto'
        # although the fixture passes :right => true — confirm that #right
        # maps true to the string 'Cierto' before relying on this value.
        expect(@q.right).to eq('Cierto')
      end
      it "Debe heredar de Pregunta" do
        expect(@q).to be_a Pregunta
      end
      it "Debe no ser instancia de Pregunta" do
        # Parenthesized for clarity; `@q.instance_of?Pregunta` parses the same.
        expect(@q.instance_of?(Pregunta)).to eq(false)
      end
      it "debe tener 2 componentes" do
        expect { PreguntaVerdaderoFalso.new(:text => '5*8=?') }.to raise_error(ArgumentError)
      end
      it "mostrar pregunta" do
        expect(@q).to respond_to :text
      end
      it "mostrar respuesta correcta" do
        expect(@q).to respond_to :right
      end
      it "mostrar opciones incorrectas" do
        expect(@q).to respond_to :distractors
      end
      it "mostrar por pantalla" do
        expect(@q).to respond_to :to_s
      end
      it "mostrarse correctamente" do
        expect(@q.to_s).to match(/(\d|\w)+\n(\d\)\s+(\w|\d)+\n)+/)
      end
    end
  end
end
# coding: utf-8
require_relative 'spec_helper'
describe JSON::LD::API do
let(:logger) {RDF::Spec.logger}
describe ".expand" do
# Basic expansion cases. Each title maps to :input/:output JSON-LD documents;
# run_expand (presumably defined in spec support — confirm) expands :input
# and compares against :output.
{
  "empty doc": {
    input: {},
    output: []
  },
  "@list coercion": {
    input: %({
      "@context": {
        "foo": {"@id": "http://example.com/foo", "@container": "@list"}
      },
      "foo": [{"@value": "bar"}]
    }),
    output: %([{
      "http://example.com/foo": [{"@list": [{"@value": "bar"}]}]
    }])
  },
  "native values in list": {
    input: %({
      "http://example.com/foo": {"@list": [1, 2]}
    }),
    output: %([{
      "http://example.com/foo": [{"@list": [{"@value": 1}, {"@value": 2}]}]
    }])
  },
  "@graph": {
    input: %({
      "@context": {"ex": "http://example.com/"},
      "@graph": [
        {"ex:foo": {"@value": "foo"}},
        {"ex:bar": {"@value": "bar"}}
      ]
    }),
    output: %([
      {"http://example.com/foo": [{"@value": "foo"}]},
      {"http://example.com/bar": [{"@value": "bar"}]}
    ])
  },
  "@graph value (expands to array form)": {
    input: %({
      "@context": {"ex": "http://example.com/"},
      "ex:p": {
        "@id": "ex:Sub1",
        "@graph": {
          "ex:q": "foo"
        }
      }
    }),
    output: %([{
      "http://example.com/p": [{
        "@id": "http://example.com/Sub1",
        "@graph": [{
          "http://example.com/q": [{"@value": "foo"}]
        }]
      }]
    }])
  },
  "@type with CURIE": {
    input: %({
      "@context": {"ex": "http://example.com/"},
      "@type": "ex:type"
    }),
    output: %([
      {"@type": ["http://example.com/type"]}
    ])
  },
  "@type with CURIE and muliple values": {
    input: %({
      "@context": {"ex": "http://example.com/"},
      "@type": ["ex:type1", "ex:type2"]
    }),
    output: %([
      {"@type": ["http://example.com/type1", "http://example.com/type2"]}
    ])
  },
  "@value with false": {
    input: %({"http://example.com/ex": {"@value": false}}),
    output: %([{"http://example.com/ex": [{"@value": false}]}])
  },
  "compact IRI": {
    input: %({
      "@context": {"ex": "http://example.com/"},
      "ex:p": {"@id": "ex:Sub1"}
    }),
    output: %([{
      "http://example.com/p": [{"@id": "http://example.com/Sub1"}]
    }])
  },
}.each_pair do |title, params|
  it(title) {run_expand params}
end
context "with relative IRIs" do
  # Document-relative @id values are resolved against the base IRI
  # (merged in as base: "http://example.org/").
  {
    "base": {
      input: %({
        "@id": "",
        "@type": "http://www.w3.org/2000/01/rdf-schema#Resource"
      }),
      output: %([{
        "@id": "http://example.org/",
        "@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"]
      }])
    },
    "relative": {
      input: %({
        "@id": "a/b",
        "@type": "http://www.w3.org/2000/01/rdf-schema#Resource"
      }),
      output: %([{
        "@id": "http://example.org/a/b",
        "@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"]
      }])
    },
    "hash": {
      input: %({
        "@id": "#a",
        "@type": "http://www.w3.org/2000/01/rdf-schema#Resource"
      }),
      output: %([{
        "@id": "http://example.org/#a",
        "@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"]
      }])
    },
    "unmapped @id": {
      input: %({
        "http://example.com/foo": {"@id": "bar"}
      }),
      output: %([{
        "http://example.com/foo": [{"@id": "http://example.org/bar"}]
      }])
    },
  }.each do |title, params|
    it(title) {run_expand params.merge(base: "http://example.org/")}
  end
end
context "with relative property IRIs" do
  # Property keys that are not absolute IRIs are dropped during expansion
  # unless a vocabulary mapping (@vocab) applies — see nested contexts.
  {
    "base": {
      input: %({
        "http://a/b": "foo"
      }),
      output: %([{
        "http://a/b": [{"@value": "foo"}]
      }])
    },
    "relative": {
      input: %({
        "a/b": "foo"
      }),
      output: %([])
    },
    "hash": {
      input: %({
        "#a": "foo"
      }),
      output: %([])
    },
    "dotseg": {
      input: %({
        "../a": "foo"
      }),
      output: %([])
    },
  }.each do |title, params|
    it(title) {run_expand params.merge(base: "http://example.org/")}
  end
  context "with @vocab" do
    # With a vocabulary IRI, relative keys are concatenated onto @vocab
    # verbatim (no dot-segment normalization: note "../a" below).
    {
      "base": {
        input: %({
          "@context": {"@vocab": "http:///vocab/"},
          "http://a/b": "foo"
        }),
        output: %([{
          "http://a/b": [{"@value": "foo"}]
        }])
      },
      "relative": {
        input: %({
          "@context": {"@vocab": "http://vocab/"},
          "a/b": "foo"
        }),
        output: %([{
          "http://vocab/a/b": [{"@value": "foo"}]
        }])
      },
      "hash": {
        input: %({
          "@context": {"@vocab": "http://vocab/"},
          "#a": "foo"
        }),
        output: %([{
          "http://vocab/#a": [{"@value": "foo"}]
        }])
      },
      "dotseg": {
        input: %({
          "@context": {"@vocab": "http://vocab/"},
          "../a": "foo"
        }),
        output: %([{
          "http://vocab/../a": [{"@value": "foo"}]
        }])
      },
    }.each do |title, params|
      it(title) {run_expand params.merge(base: "http://example.org/")}
    end
  end
  context "with @vocab: @base" do
    # "@vocab": "@base" resolves terms against the base IRI instead, with
    # full IRI resolution (dot segments are normalized: "../a" → "/a").
    {
      "base": {
        input: %({
          "@context": {"@vocab": "@base"},
          "http://a/b": "foo"
        }),
        output: %([{
          "http://a/b": [{"@value": "foo"}]
        }])
      },
      "relative": {
        input: %({
          "@context": {"@vocab": "@base"},
          "a/b": "foo"
        }),
        output: %([{
          "http://example.org/a/b": [{"@value": "foo"}]
        }])
      },
      "hash": {
        input: %({
          "@context": {"@vocab": "@base"},
          "#a": "foo"
        }),
        output: %([{
          "http://example.org/#a": [{"@value": "foo"}]
        }])
      },
      "dotseg": {
        input: %({
          "@context": {"@vocab": "@base"},
          "../a": "foo"
        }),
        output: %([{
          "http://example.org/a": [{"@value": "foo"}]
        }])
      },
    }.each do |title, params|
      it(title) {run_expand params.merge(base: "http://example.org/")}
    end
  end
end
context "keyword aliasing" do
  # Context terms may alias JSON-LD keywords (@id, @type, @language, @value,
  # @list); expansion replaces the alias with the keyword itself.
  {
    "@id": {
      input: %({
        "@context": {"id": "@id"},
        "id": "",
        "@type": "http://www.w3.org/2000/01/rdf-schema#Resource"
      }),
      output: %([{
        "@id": "",
        "@type":[ "http://www.w3.org/2000/01/rdf-schema#Resource"]
      }])
    },
    "@type": {
      input: %({
        "@context": {"type": "@type"},
        "type": "http://www.w3.org/2000/01/rdf-schema#Resource",
        "http://example.com/foo": {"@value": "bar", "type": "http://example.com/baz"}
      }),
      output: %([{
        "@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"],
        "http://example.com/foo": [{"@value": "bar", "@type": "http://example.com/baz"}]
      }])
    },
    "@language": {
      input: %({
        "@context": {"language": "@language"},
        "http://example.com/foo": {"@value": "bar", "language": "baz"}
      }),
      output: %([{
        "http://example.com/foo": [{"@value": "bar", "@language": "baz"}]
      }])
    },
    "@value": {
      input: %({
        "@context": {"literal": "@value"},
        "http://example.com/foo": {"literal": "bar"}
      }),
      output: %([{
        "http://example.com/foo": [{"@value": "bar"}]
      }])
    },
    "@list": {
      input: %({
        "@context": {"list": "@list"},
        "http://example.com/foo": {"list": ["bar"]}
      }),
      output: %([{
        "http://example.com/foo": [{"@list": [{"@value": "bar"}]}]
      }])
    },
  }.each do |title, params|
    it(title) {run_expand params}
  end
end
context "native types" do
  # Native JSON booleans/numbers expand to @value objects and keep their
  # native type (no string conversion, no @type added).
  {
    "true": {
      input: %({
        "@context": {"e": "http://example.org/vocab#"},
        "e:bool": true
      }),
      output: %([{
        "http://example.org/vocab#bool": [{"@value": true}]
      }])
    },
    "false": {
      input: %({
        "@context": {"e": "http://example.org/vocab#"},
        "e:bool": false
      }),
      output: %([{
        "http://example.org/vocab#bool": [{"@value": false}]
      }])
    },
    "double": {
      input: %({
        "@context": {"e": "http://example.org/vocab#"},
        "e:double": 1.23
      }),
      output: %([{
        "http://example.org/vocab#double": [{"@value": 1.23}]
      }])
    },
    "double-zero": {
      input: %({
        "@context": {"e": "http://example.org/vocab#"},
        "e:double-zero": 0.0e0
      }),
      output: %([{
        "http://example.org/vocab#double-zero": [{"@value": 0.0e0}]
      }])
    },
    "integer": {
      input: %({
        "@context": {"e": "http://example.org/vocab#"},
        "e:integer": 123
      }),
      output: %([{
        "http://example.org/vocab#integer": [{"@value": 123}]
      }])
    },
  }.each do |title, params|
    it(title) {run_expand params}
  end
  context "with @type: @id" do
    # An @type: @id coercion does not affect native (non-string) values.
    {
      "true": {
        input: %({
          "@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@id"}},
          "e": true
        }),
        output:%( [{
          "http://example.org/vocab#bool": [{"@value": true}]
        }])
      },
      "false": {
        input: %({
          "@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@id"}},
          "e": false
        }),
        output: %([{
          "http://example.org/vocab#bool": [{"@value": false}]
        }])
      },
      "double": {
        input: %({
          "@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@id"}},
          "e": 1.23
        }),
        output: %([{
          "http://example.org/vocab#double": [{"@value": 1.23}]
        }])
      },
      "double-zero": {
        input: %({
          "@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@id"}},
          "e": 0.0e0
        }),
        output: %([{
          "http://example.org/vocab#double": [{"@value": 0.0e0}]
        }])
      },
      "integer": {
        input: %({
          "@context": {"e": {"@id": "http://example.org/vocab#integer", "@type": "@id"}},
          "e": 123
        }),
        output: %([{
          "http://example.org/vocab#integer": [{"@value": 123}]
        }])
      },
    }.each do |title, params|
      it(title) {run_expand params}
    end
  end
  context "with @type: @vocab" do
    # Likewise, @type: @vocab coercion leaves native values untouched.
    {
      "true": {
        input: %({
          "@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@vocab"}},
          "e": true
        }),
        output:%( [{
          "http://example.org/vocab#bool": [{"@value": true}]
        }])
      },
      "false": {
        input: %({
          "@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@vocab"}},
          "e": false
        }),
        output: %([{
          "http://example.org/vocab#bool": [{"@value": false}]
        }])
      },
      "double": {
        input: %({
          "@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@vocab"}},
          "e": 1.23
        }),
        output: %([{
          "http://example.org/vocab#double": [{"@value": 1.23}]
        }])
      },
      "double-zero": {
        input: %({
          "@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@vocab"}},
          "e": 0.0e0
        }),
        output: %([{
          "http://example.org/vocab#double": [{"@value": 0.0e0}]
        }])
      },
      "integer": {
        input: %({
          "@context": {"e": {"@id": "http://example.org/vocab#integer", "@type": "@vocab"}},
          "e": 123
        }),
        output: %([{
          "http://example.org/vocab#integer": [{"@value": 123}]
        }])
      },
    }.each do |title, params|
      it(title) {run_expand params}
    end
  end
end
context "coerced typed values" do
  # Datatype coercion in the context: string values gain the coerced @type
  # during expansion.
  {
    "boolean" => {
      input: {
        "@context" => {"foo" => {"@id" => "http://example.org/foo", "@type" => "http://www.w3.org/2001/XMLSchema#boolean"}},
        "foo" => "true"
      },
      output: [{
        "http://example.org/foo" => [{"@value" => "true", "@type" => "http://www.w3.org/2001/XMLSchema#boolean"}]
      }]
    },
    "date" => {
      input: {
        "@context" => {"foo" => {"@id" => "http://example.org/foo", "@type" => "http://www.w3.org/2001/XMLSchema#date"}},
        "foo" => "2011-03-26"
      },
      output: [{
        "http://example.org/foo" => [{"@value" => "2011-03-26", "@type" => "http://www.w3.org/2001/XMLSchema#date"}]
      }]
    },
  }.each do |title, params|
    it(title) {run_expand params}
  end
end
context "null" do
  # null values (and @value objects whose @value is null) are dropped
  # entirely during expansion.
  {
    "value" => {
      input: {"http://example.com/foo" => nil},
      output: []
    },
    "@value" => {
      input: {"http://example.com/foo" => {"@value" => nil}},
      output: []
    },
    "@value and non-null @type" => {
      input: {"http://example.com/foo" => {"@value" => nil, "@type" => "http://type"}},
      output: []
    },
    "@value and non-null @language" => {
      input: {"http://example.com/foo" => {"@value" => nil, "@language" => "en"}},
      output: []
    },
    "array with null elements" => {
      input: {
        "http://example.com/foo" => [nil]
      },
      output: [{
        "http://example.com/foo" => []
      }]
    },
    "@set with null @value" => {
      input: {
        "http://example.com/foo" => [
          {"@value" => nil, "@type" => "http://example.org/Type"}
        ]
      },
      output: [{
        "http://example.com/foo" => []
      }]
    }
  }.each do |title, params|
    it(title) {run_expand params}
  end
end
context "default language" do
  # A term-level "@language" overrides the context default; an explicit
  # null language removes the default entirely.
  {
    "value with coerced null language" => {
      input: {
        "@context" => {
          "@language" => "en",
          "ex" => "http://example.org/vocab#",
          "ex:german" => { "@language" => "de" },
          "ex:nolang" => { "@language" => nil }
        },
        "ex:german" => "german",
        "ex:nolang" => "no language"
      },
      output: [
        {
          "http://example.org/vocab#german" => [{"@value" => "german", "@language" => "de"}],
          "http://example.org/vocab#nolang" => [{"@value" => "no language"}]
        }
      ]
    },
  }.each do |title, params|
    it(title) {run_expand params}
  end
end
context "default vocabulary" do
  # @vocab supplies the base for unprefixed property keys and datatype names.
  {
    "property" => {
      input: {
        "@context" => {"@vocab" => "http://example.com/"},
        "verb" => {"@value" => "foo"}
      },
      output: [{
        "http://example.com/verb" => [{"@value" => "foo"}]
      }]
    },
    "datatype" => {
      input: {
        "@context" => {"@vocab" => "http://example.com/"},
        "http://example.org/verb" => {"@value" => "foo", "@type" => "string"}
      },
      output: [
        "http://example.org/verb" => [{"@value" => "foo", "@type" => "http://example.com/string"}]
      ]
    },
    "expand-0028" => {
      # Mirrors official JSON-LD test suite case expand-0028: @vocab terms
      # combined with a relative @id resolved against base "http://foo/bar/".
      input: {
        "@context" => {
          "@vocab" => "http://example.org/vocab#",
          "date" => { "@type" => "dateTime" }
        },
        "@id" => "example1",
        "@type" => "test",
        "date" => "2011-01-25T00:00:00Z",
        "embed" => {
          "@id" => "example2",
          "expandedDate" => { "@value" => "2012-08-01T00:00:00Z", "@type" => "dateTime" }
        }
      },
      output: [
        {
          "@id" => "http://foo/bar/example1",
          "@type" => ["http://example.org/vocab#test"],
          "http://example.org/vocab#date" => [
            {
              "@value" => "2011-01-25T00:00:00Z",
              "@type" => "http://example.org/vocab#dateTime"
            }
          ],
          "http://example.org/vocab#embed" => [
            {
              "@id" => "http://foo/bar/example2",
              "http://example.org/vocab#expandedDate" => [
                {
                  "@value" => "2012-08-01T00:00:00Z",
                  "@type" => "http://example.org/vocab#dateTime"
                }
              ]
            }
          ]
        }
      ]
    }
  }.each do |title, params|
    it(title) {run_expand params.merge(base: "http://foo/bar/")}
  end
end
context "unmapped properties" do
{
"unmapped key" => {
input: {
"foo" => "bar"
},
output: []
},
"unmapped @type as datatype" => {
input: {
"http://example.com/foo" => {"@value" => "bar", "@type" => "baz"}
},
output: [{
"http://example.com/foo" => [{"@value" => "bar", "@type" => "http://example/baz"}]
}]
},
"unknown keyword" => {
input: {
"@foo" => "bar"
},
output: []
},
"value" => {
input: {
"@context" => {"ex" => {"@id" => "http://example.org/idrange", "@type" => "@id"}},
"@id" => "http://example.org/Subj",
"idrange" => "unmapped"
},
output: []
},
"context reset" => {
input: {
"@context" => {"ex" => "http://example.org/", "prop" => "ex:prop"},
"@id" => "http://example.org/id1",
"prop" => "prop",
"ex:chain" => {
"@context" => nil,
"@id" => "http://example.org/id2",
"prop" => "prop"
}
},
output: [{
"@id" => "http://example.org/id1",
"http://example.org/prop" => [{"@value" => "prop"}],
"http://example.org/chain" => [{"@id" => "http://example.org/id2"}]
}
]}
}.each do |title, params|
it(title) {run_expand params.merge(base: "http://example/")}
end
end
context "@container: @index" do
  # Index maps: map keys become @index annotations on the expanded values
  # (results ordered by index key).
  {
    "string annotation" => {
      input: {
        "@context" => {
          "container" => {
            "@id" => "http://example.com/container",
            "@container" => "@index"
          }
        },
        "@id" => "http://example.com/annotationsTest",
        "container" => {
          "en" => "The Queen",
          "de" => [ "Die Königin", "Ihre Majestät" ]
        }
      },
      output: [
        {
          "@id" => "http://example.com/annotationsTest",
          "http://example.com/container" => [
            {"@value" => "Die Königin", "@index" => "de"},
            {"@value" => "Ihre Majestät", "@index" => "de"},
            {"@value" => "The Queen", "@index" => "en"}
          ]
        }
      ]
    },
  }.each do |title, params|
    it(title) {run_expand params}
  end
end
context "@container: @list" do
  # List coercion and explicit @list objects; nested lists are illegal and
  # must raise JsonLdError::ListOfLists.
  {
    "empty" => {
      input: {"http://example.com/foo" => {"@list" => []}},
      output: [{"http://example.com/foo" => [{"@list" => []}]}]
    },
    "coerced empty" => {
      input: {
        "@context" => {"http://example.com/foo" => {"@container" => "@list"}},
        "http://example.com/foo" => []
      },
      output: [{"http://example.com/foo" => [{"@list" => []}]}]
    },
    "coerced single element" => {
      input: {
        "@context" => {"http://example.com/foo" => {"@container" => "@list"}},
        "http://example.com/foo" => [ "foo" ]
      },
      output: [{"http://example.com/foo" => [{"@list" => [{"@value" => "foo"}]}]}]
    },
    "coerced multiple elements" => {
      input: {
        "@context" => {"http://example.com/foo" => {"@container" => "@list"}},
        "http://example.com/foo" => [ "foo", "bar" ]
      },
      output: [{
        "http://example.com/foo" => [{"@list" => [ {"@value" => "foo"}, {"@value" => "bar"} ]}]
      }]
    },
    "native values in list" => {
      input: {
        "http://example.com/foo" => {"@list" => [1, 2]}
      },
      output: [{
        "http://example.com/foo" => [{"@list" => [{"@value" => 1}, {"@value" => 2}]}]
      }]
    },
    "explicit list with coerced @id values" => {
      input: {
        "@context" => {"http://example.com/foo" => {"@type" => "@id"}},
        "http://example.com/foo" => {"@list" => ["http://foo", "http://bar"]}
      },
      output: [{
        "http://example.com/foo" => [{"@list" => [{"@id" => "http://foo"}, {"@id" => "http://bar"}]}]
      }]
    },
    "explicit list with coerced datatype values" => {
      input: {
        "@context" => {"http://example.com/foo" => {"@type" => RDF::XSD.date.to_s}},
        "http://example.com/foo" => {"@list" => ["2012-04-12"]}
      },
      output: [{
        "http://example.com/foo" => [{"@list" => [{"@value" => "2012-04-12", "@type" => RDF::XSD.date.to_s}]}]
      }]
    },
    "expand-0004" => {
      # Mirrors official suite case expand-0004: a bare @list value is
      # wrapped into array form.
      input: %({
        "@context": {
          "mylist1": {"@id": "http://example.com/mylist1", "@container": "@list"},
          "mylist2": {"@id": "http://example.com/mylist2", "@container": "@list"},
          "myset2": {"@id": "http://example.com/myset2", "@container": "@set"},
          "myset3": {"@id": "http://example.com/myset3", "@container": "@set"}
        },
        "http://example.org/property": { "@list": "one item" }
      }),
      output: %([
        {
          "http://example.org/property": [
            {
              "@list": [
                {
                  "@value": "one item"
                }
              ]
            }
          ]
        }
      ])
    },
    "@list containing @list" => {
      input: {
        "http://example.com/foo" => {"@list" => [{"@list" => ["baz"]}]}
      },
      exception: JSON::LD::JsonLdError::ListOfLists
    },
    "@list containing @list (with coercion)" => {
      input: {
        "@context" => {"foo" => {"@id" => "http://example.com/foo", "@container" => "@list"}},
        "foo" => [{"@list" => ["baz"]}]
      },
      exception: JSON::LD::JsonLdError::ListOfLists
    },
    "coerced @list containing an array" => {
      input: {
        "@context" => {"foo" => {"@id" => "http://example.com/foo", "@container" => "@list"}},
        "foo" => [["baz"]]
      },
      exception: JSON::LD::JsonLdError::ListOfLists
    },
  }.each do |title, params|
    it(title) {run_expand params}
  end
end
context "@container: @set" do
  # @set objects are unwrapped to plain arrays; free-floating values inside
  # sets (no properties, no @id usage) are removed.
  {
    "empty" => {
      input: {
        "http://example.com/foo" => {"@set" => []}
      },
      output: [{
        "http://example.com/foo" => []
      }]
    },
    "coerced empty" => {
      input: {
        "@context" => {"http://example.com/foo" => {"@container" => "@set"}},
        "http://example.com/foo" => []
      },
      output: [{
        "http://example.com/foo" => []
      }]
    },
    "coerced single element" => {
      input: {
        "@context" => {"http://example.com/foo" => {"@container" => "@set"}},
        "http://example.com/foo" => [ "foo" ]
      },
      output: [{
        "http://example.com/foo" => [ {"@value" => "foo"} ]
      }]
    },
    "coerced multiple elements" => {
      input: {
        "@context" => {"http://example.com/foo" => {"@container" => "@set"}},
        "http://example.com/foo" => [ "foo", "bar" ]
      },
      output: [{
        "http://example.com/foo" => [ {"@value" => "foo"}, {"@value" => "bar"} ]
      }]
    },
    "array containing set" => {
      input: {
        "http://example.com/foo" => [{"@set" => []}]
      },
      output: [{
        "http://example.com/foo" => []
      }]
    },
    "Free-floating values in sets" => {
      input: %({
        "@context": {"property": "http://example.com/property"},
        "@graph": [{
          "@set": [
            "free-floating strings in set objects are removed",
            {"@id": "http://example.com/free-floating-node"},
            {
              "@id": "http://example.com/node",
              "property": "nodes with properties are not removed"
            }
          ]
        }]
      }),
      output: %([{
        "@id": "http://example.com/node",
        "http://example.com/property": [
          {
            "@value": "nodes with properties are not removed"
          }
        ]
      }])
    }
  }.each do |title, params|
    it(title) {run_expand params}
  end
end
context "@container: @language" do
  # Language maps: map keys become @language tags; "@none" (or an alias of
  # it) yields a value with no language tag.
  {
    "simple map" => {
      input: {
        "@context" => {
          "vocab" => "http://example.com/vocab/",
          "label" => {
            "@id" => "vocab:label",
            "@container" => "@language"
          }
        },
        "@id" => "http://example.com/queen",
        "label" => {
          "en" => "The Queen",
          "de" => [ "Die Königin", "Ihre Majestät" ]
        }
      },
      output: [
        {
          "@id" => "http://example.com/queen",
          "http://example.com/vocab/label" => [
            {"@value" => "Die Königin", "@language" => "de"},
            {"@value" => "Ihre Majestät", "@language" => "de"},
            {"@value" => "The Queen", "@language" => "en"}
          ]
        }
      ]
    },
    "simple map with @none" => {
      input: {
        "@context" => {
          "vocab" => "http://example.com/vocab/",
          "label" => {
            "@id" => "vocab:label",
            "@container" => "@language"
          }
        },
        "@id" => "http://example.com/queen",
        "label" => {
          "en" => "The Queen",
          "de" => [ "Die Königin", "Ihre Majestät" ],
          "@none" => "The Queen"
        }
      },
      output: [
        {
          "@id" => "http://example.com/queen",
          "http://example.com/vocab/label" => [
            {"@value" => "The Queen"},
            {"@value" => "Die Königin", "@language" => "de"},
            {"@value" => "Ihre Majestät", "@language" => "de"},
            {"@value" => "The Queen", "@language" => "en"},
          ]
        }
      ]
    },
    "simple map with alias of @none" => {
      input: {
        "@context" => {
          "vocab" => "http://example.com/vocab/",
          "label" => {
            "@id" => "vocab:label",
            "@container" => "@language"
          },
          "none" => "@none"
        },
        "@id" => "http://example.com/queen",
        "label" => {
          "en" => "The Queen",
          "de" => [ "Die Königin", "Ihre Majestät" ],
          "none" => "The Queen"
        }
      },
      output: [
        {
          "@id" => "http://example.com/queen",
          "http://example.com/vocab/label" => [
            {"@value" => "Die Königin", "@language" => "de"},
            {"@value" => "Ihre Majestät", "@language" => "de"},
            {"@value" => "The Queen", "@language" => "en"},
            {"@value" => "The Queen"},
          ]
        }
      ]
    },
    "expand-0035" => {
      # Mirrors official suite case expand-0035: default @language applies
      # to plain values outside the language map.
      input: {
        "@context" => {
          "@vocab" => "http://example.com/vocab/",
          "@language" => "it",
          "label" => {
            "@container" => "@language"
          }
        },
        "@id" => "http://example.com/queen",
        "label" => {
          "en" => "The Queen",
          "de" => [ "Die Königin", "Ihre Majestät" ]
        },
        "http://example.com/vocab/label" => [
          "Il re",
          { "@value" => "The king", "@language" => "en" }
        ]
      },
      output: [
        {
          "@id" => "http://example.com/queen",
          "http://example.com/vocab/label" => [
            {"@value" => "Il re", "@language" => "it"},
            {"@value" => "The king", "@language" => "en"},
            {"@value" => "Die Königin", "@language" => "de"},
            {"@value" => "Ihre Majestät", "@language" => "de"},
            {"@value" => "The Queen", "@language" => "en"},
          ]
        }
      ]
    }
  }.each do |title, params|
    it(title) {run_expand params}
  end
end
context "@container: @id" do
  # Id maps (JSON-LD 1.1 feature — note processingMode merged in below):
  # map keys become @id values on the contained node objects.
  {
    "Adds @id to object not having an @id" => {
      input: %({
        "@context": {
          "@vocab": "http://example/",
          "idmap": {"@container": "@id"}
        },
        "idmap": {
          "http://example.org/foo": {"label": "Object with @id <foo>"},
          "_:bar": {"label": "Object with @id _:bar"}
        }
      }),
      output: %([{
        "http://example/idmap": [
          {"http://example/label": [{"@value": "Object with @id _:bar"}], "@id": "_:bar"},
          {"http://example/label": [{"@value": "Object with @id <foo>"}], "@id": "http://example.org/foo"}
        ]
      }])
    },
    "Retains @id in object already having an @id" => {
      input: %({
        "@context": {
          "@vocab": "http://example/",
          "idmap": {"@container": "@id"}
        },
        "idmap": {
          "http://example.org/foo": {"@id": "http://example.org/bar", "label": "Object with @id <foo>"},
          "_:bar": {"@id": "_:foo", "label": "Object with @id _:bar"}
        }
      }),
      output: %([{
        "http://example/idmap": [
          {"@id": "_:foo", "http://example/label": [{"@value": "Object with @id _:bar"}]},
          {"@id": "http://example.org/bar", "http://example/label": [{"@value": "Object with @id <foo>"}]}
        ]
      }])
    },
    "Adds expanded @id to object" => {
      input: %({
        "@context": {
          "@vocab": "http://example/",
          "idmap": {"@container": "@id"}
        },
        "idmap": {
          "foo": {"label": "Object with @id <foo>"}
        }
      }),
      output: %([{
        "http://example/idmap": [
          {"http://example/label": [{"@value": "Object with @id <foo>"}], "@id": "http://example.org/foo"}
        ]
      }]),
      base: "http://example.org/"
    },
    "Raises InvalidContainerMapping if processingMode is not specified" => {
      # @container: @id is invalid under JSON-LD 1.0 semantics.
      input: %({
        "@context": {
          "@vocab": "http://example/",
          "idmap": {"@container": "@id"}
        },
        "idmap": {
          "http://example.org/foo": {"label": "Object with @id <foo>"},
          "_:bar": {"label": "Object with @id _:bar"}
        }
      }),
      processingMode: nil,
      exception: JSON::LD::JsonLdError::InvalidContainerMapping
    },
    "Does not add @id if it is @none, or expands to @none": {
      input: %({
        "@context": {
          "@vocab": "http://example/",
          "idmap": {"@container": "@id"},
          "none": "@none"
        },
        "idmap": {
          "@none": {"label": "Object with no @id"},
          "none": {"label": "Another object with no @id"}
        }
      }),
      output: %([{
        "http://example/idmap": [
          {"http://example/label": [{"@value": "Object with no @id"}]},
          {"http://example/label": [{"@value": "Another object with no @id"}]}
        ]
      }])
    }
  }.each do |title, params|
    it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
  end
end
context "@container: @type" do
{
"Adds @type to object not having an @type" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"}
},
"typemap": {
"http://example.org/foo": {"label": "Object with @type <foo>"},
"_:bar": {"label": "Object with @type _:bar"}
}
}),
output: %([{
"http://example/typemap": [
{"http://example/label": [{"@value": "Object with @type _:bar"}], "@type": ["_:bar"]},
{"http://example/label": [{"@value": "Object with @type <foo>"}], "@type": ["http://example.org/foo"]}
]
}])
},
"Prepends @type in object already having an @type" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"}
},
"typemap": {
"http://example.org/foo": {"@type": "http://example.org/bar", "label": "Object with @type <foo>"},
"_:bar": {"@type": "_:foo", "label": "Object with @type _:bar"}
}
}),
output: %([{
"http://example/typemap": [
{
"@type": ["_:bar", "_:foo"],
"http://example/label": [{"@value": "Object with @type _:bar"}]
},
{
"@type": ["http://example.org/foo", "http://example.org/bar"],
"http://example/label": [{"@value": "Object with @type <foo>"}]
}
]
}])
},
"Adds vocabulary expanded @type to object" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"}
},
"typemap": {
"Foo": {"label": "Object with @type <foo>"}
}
}),
output: %([{
"http://example/typemap": [
{"http://example/label": [{"@value": "Object with @type <foo>"}], "@type": ["http://example/Foo"]}
]
}])
},
"Adds document expanded @type to object" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"},
"label": "http://example/label"
},
"typemap": {
"Foo": {"label": "Object with @type <foo>"}
}
}),
output: %([{
"http://example/typemap": [
{"http://example/label": [{"@value": "Object with @type <foo>"}], "@type": ["http://example/Foo"]}
]
}])
},
"Does not add @type if it is @none, or expands to @none": {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"},
"none": "@none"
},
"typemap": {
"@none": {"label": "Object with no @type"},
"none": {"label": "Another object with no @type"}
}
}),
output: %([{
"http://example/typemap": [
{"http://example/label": [{"@value": "Object with no @type"}]},
{"http://example/label": [{"@value": "Another object with no @type"}]}
]
}])
},
"Raises InvalidContainerMapping if processingMode is not specified" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"}
},
"typemap": {
"http://example.org/foo": {"label": "Object with @type <foo>"},
"_:bar": {"label": "Object with @type _:bar"}
}
}),
processingMode: nil,
exception: JSON::LD::JsonLdError::InvalidContainerMapping
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
context "@container: @graph" do
{
"Creates a graph object given a value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": "@graph"}
},
"input": {
"value": "x"
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object within an array given a value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@set"]}
},
"input": {
"value": "x"
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Does not create an graph object if value is a graph" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": "@graph"}
},
"input": {
"@graph": {
"value": "x"
}
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
context "+ @index" do
{
"Creates a graph object given an indexed value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@index"]}
},
"input": {
"g1": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@index": "g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value with index @none" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@index"]}
},
"input": {
"@none": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value with index alias of @none" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@index"]},
"none": "@none"
},
"input": {
"none": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value with @set" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@index", "@set"]}
},
"input": {
"g1": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@index": "g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Does not create a new graph object if indexed value is already a graph object" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@index"]}
},
"input": {
"g1": {
"@graph": {
"value": "x"
}
}
}
}),
output: %([{
"http://example.org/input": [{
"@index": "g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
context "+ @id" do
{
"Creates a graph object given an indexed value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@id"]}
},
"input": {
"http://example.com/g1": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@id": "http://example.com/g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value of @none" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@id"]}
},
"input": {
"@none": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value of alias of @none" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@id"]},
"none": "@none"
},
"input": {
"none": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value with @set" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@id", "@set"]}
},
"input": {
"http://example.com/g1": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@id": "http://example.com/g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Does not create a new graph object if indexed value is already a graph object" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@id"]}
},
"input": {
"http://example.com/g1": {
"@graph": {
"value": "x"
}
}
}
}),
output: %([{
"http://example.org/input": [{
"@id": "http://example.com/g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
end
context "@nest" do
{
"Expands input using @nest" => {
input: %({
"@context": {"@vocab": "http://example.org/"},
"p1": "v1",
"@nest": {
"p2": "v2"
}
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [{"@value": "v2"}]
}])
},
"Expands input using aliased @nest" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": "@nest"
},
"p1": "v1",
"nest": {
"p2": "v2"
}
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [{"@value": "v2"}]
}])
},
"Appends nested values when property at base and nested" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": "@nest"
},
"p1": "v1",
"nest": {
"p2": "v3"
},
"p2": "v2"
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [
{"@value": "v2"},
{"@value": "v3"}
]
}])
},
"Appends nested values from all @nest aliases in term order" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest1": "@nest",
"nest2": "@nest"
},
"p1": "v1",
"nest2": {
"p2": "v4"
},
"p2": "v2",
"nest1": {
"p2": "v3"
}
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [
{"@value": "v2"},
{"@value": "v3"},
{"@value": "v4"}
]
}])
},
"Nested nested containers" => {
input: %({
"@context": {
"@vocab": "http://example.org/"
},
"p1": "v1",
"@nest": {
"p2": "v3",
"@nest": {
"p2": "v4"
}
},
"p2": "v2"
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [
{"@value": "v2"},
{"@value": "v3"},
{"@value": "v4"}
]
}])
},
"Arrays of nested values" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": "@nest"
},
"p1": "v1",
"nest": {
"p2": ["v4", "v5"]
},
"p2": ["v2", "v3"]
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [
{"@value": "v2"},
{"@value": "v3"},
{"@value": "v4"},
{"@value": "v5"}
]
}])
},
"A nest of arrays" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": "@nest"
},
"p1": "v1",
"nest": [{
"p2": "v4"
}, {
"p2": "v5"
}],
"p2": ["v2", "v3"]
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [
{"@value": "v2"},
{"@value": "v3"},
{"@value": "v4"},
{"@value": "v5"}
]
}])
},
"@nest MUST NOT have a string value" => {
input: %({
"@context": {"@vocab": "http://example.org/"},
"@nest": "This should generate an error"
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest MUST NOT have a boolen value" => {
input: %({
"@context": {"@vocab": "http://example.org/"},
"@nest": true
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest MUST NOT have a numeric value" => {
input: %({
"@context": {"@vocab": "http://example.org/"},
"@nest": 1
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest MUST NOT have a value object value" => {
input: %({
"@context": {"@vocab": "http://example.org/"},
"@nest": {"@value": "This should generate an error"}
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest in term definition MUST NOT be a non-@nest keyword" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": {"@nest": "@id"}
},
"nest": "This should generate an error"
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest in term definition MUST NOT have a boolen value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": {"@nest": true}
},
"nest": "This should generate an error"
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest in term definition MUST NOT have a numeric value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": {"@nest": 123}
},
"nest": "This should generate an error"
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"Nested @container: @list" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"list": {"@container": "@list", "@nest": "nestedlist"},
"nestedlist": "@nest"
},
"nestedlist": {
"list": ["a", "b"]
}
}),
output: %([{
"http://example.org/list": [{"@list": [
{"@value": "a"},
{"@value": "b"}
]}]
}])
},
"Nested @container: @index" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"index": {"@container": "@index", "@nest": "nestedindex"},
"nestedindex": "@nest"
},
"nestedindex": {
"index": {
"A": "a",
"B": "b"
}
}
}),
output: %([{
"http://example.org/index": [
{"@value": "a", "@index": "A"},
{"@value": "b", "@index": "B"}
]
}])
},
"Nested @container: @language" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"container": {"@container": "@language", "@nest": "nestedlanguage"},
"nestedlanguage": "@nest"
},
"nestedlanguage": {
"container": {
"en": "The Queen",
"de": "Die Königin"
}
}
}),
output: %([{
"http://example.org/container": [
{"@value": "Die Königin", "@language": "de"},
{"@value": "The Queen", "@language": "en"}
]
}])
},
"Nested @container: @type" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type", "@nest": "nestedtypemap"},
"nestedtypemap": "@nest"
},
"nestedtypemap": {
"typemap": {
"http://example.org/foo": {"label": "Object with @type <foo>"},
"_:bar": {"label": "Object with @type _:bar"}
}
}
}),
output: %([{
"http://example/typemap": [
{"http://example/label": [{"@value": "Object with @type _:bar"}], "@type": ["_:bar"]},
{"http://example/label": [{"@value": "Object with @type <foo>"}], "@type": ["http://example.org/foo"]}
]
}])
},
"Nested @container: @id" => {
input: %({
"@context": {
"@vocab": "http://example/",
"idmap": {"@container": "@id", "@nest": "nestedidmap"},
"nestedidmap": "@nest"
},
"nestedidmap": {
"idmap": {
"http://example.org/foo": {"label": "Object with @id <foo>"},
"_:bar": {"label": "Object with @id _:bar"}
}
}
}),
output: %([{
"http://example/idmap": [
{"http://example/label": [{"@value": "Object with @id _:bar"}], "@id": "_:bar"},
{"http://example/label": [{"@value": "Object with @id <foo>"}], "@id": "http://example.org/foo"}
]
}])
},
"Nest term an invalid keyword" => {
input: %({
"@context": {
"term": {"@id": "http://example/term", "@nest": "@id"}
}
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"Nest in @reverse" => {
input: %({
"@context": {
"term": {"@reverse": "http://example/term", "@nest": "@nest"}
}
}),
exception: JSON::LD::JsonLdError::InvalidReverseProperty
},
"Raises InvalidTermDefinition if processingMode is not specified" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"list": {"@container": "@list", "@nest": "nestedlist"},
"nestedlist": "@nest"
},
"nestedlist": {
"list": ["a", "b"]
}
}),
processingMode: nil,
validate: true,
exception: JSON::LD::JsonLdError::InvalidTermDefinition
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
context "scoped context" do
{
"adding new term" => {
input: %({
"@context": {
"@vocab": "http://example/",
"foo": {"@context": {"bar": "http://example.org/bar"}}
},
"foo": {
"bar": "baz"
}
}),
output: %([
{
"http://example/foo": [{"http://example.org/bar": [{"@value": "baz"}]}]
}
])
},
"overriding a term" => {
input: %({
"@context": {
"@vocab": "http://example/",
"foo": {"@context": {"bar": {"@type": "@id"}}},
"bar": {"@type": "http://www.w3.org/2001/XMLSchema#string"}
},
"foo": {
"bar": "http://example/baz"
}
}),
output: %([
{
"http://example/foo": [{"http://example/bar": [{"@id": "http://example/baz"}]}]
}
])
},
"property and value with different terms mapping to the same expanded property" => {
input: %({
"@context": {
"@vocab": "http://example/",
"foo": {"@context": {"Bar": {"@id": "bar"}}}
},
"foo": {
"Bar": "baz"
}
}),
output: %([
{
"http://example/foo": [{
"http://example/bar": [
{"@value": "baz"}
]}
]
}
])
},
"deep @context affects nested nodes" => {
input: %({
"@context": {
"@vocab": "http://example/",
"foo": {"@context": {"baz": {"@type": "@vocab"}}}
},
"foo": {
"bar": {
"baz": "buzz"
}
}
}),
output: %([
{
"http://example/foo": [{
"http://example/bar": [{
"http://example/baz": [{"@id": "http://example/buzz"}]
}]
}]
}
])
},
"scoped context layers on intemediate contexts" => {
input: %({
"@context": {
"@vocab": "http://example/",
"b": {"@context": {"c": "http://example.org/c"}}
},
"a": {
"@context": {"@vocab": "http://example.com/"},
"b": {
"a": "A in example.com",
"c": "C in example.org"
},
"c": "C in example.com"
},
"c": "C in example"
}),
output: %([{
"http://example/a": [{
"http://example.com/c": [{"@value": "C in example.com"}],
"http://example/b": [{
"http://example.com/a": [{"@value": "A in example.com"}],
"http://example.org/c": [{"@value": "C in example.org"}]
}]
}],
"http://example/c": [{"@value": "C in example"}]
}])
},
"Raises InvalidTermDefinition if processingMode is not specified" => {
input: %({
"@context": {
"@vocab": "http://example/",
"foo": {"@context": {"bar": "http://example.org/bar"}}
},
"foo": {
"bar": "baz"
}
}),
processingMode: nil,
validate: true,
exception: JSON::LD::JsonLdError::InvalidTermDefinition
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
context "scoped context on @type" do
{
"adding new term" => {
input: %({
"@context": {
"@vocab": "http://example/",
"Foo": {"@context": {"bar": "http://example.org/bar"}}
},
"a": {"@type": "Foo", "bar": "baz"}
}),
output: %([
{
"http://example/a": [{
"@type": ["http://example/Foo"],
"http://example.org/bar": [{"@value": "baz"}]
}]
}
])
},
"overriding a term" => {
input: %({
"@context": {
"@vocab": "http://example/",
"Foo": {"@context": {"bar": {"@type": "@id"}}},
"bar": {"@type": "http://www.w3.org/2001/XMLSchema#string"}
},
"a": {"@type": "Foo", "bar": "http://example/baz"}
}),
output: %([
{
"http://example/a": [{
"@type": ["http://example/Foo"],
"http://example/bar": [{"@id": "http://example/baz"}]
}]
}
])
},
"alias of @type" => {
input: %({
"@context": {
"@vocab": "http://example/",
"type": "@type",
"Foo": {"@context": {"bar": "http://example.org/bar"}}
},
"a": {"type": "Foo", "bar": "baz"}
}),
output: %([
{
"http://example/a": [{
"@type": ["http://example/Foo"],
"http://example.org/bar": [{"@value": "baz"}]
}]
}
])
},
"deep @context affects nested nodes" => {
input: %({
"@context": {
"@vocab": "http://example/",
"Foo": {"@context": {"baz": {"@type": "@vocab"}}}
},
"@type": "Foo",
"bar": {"baz": "buzz"}
}),
output: %([
{
"@type": ["http://example/Foo"],
"http://example/bar": [{
"http://example/baz": [{"@id": "http://example/buzz"}]
}]
}
])
},
"scoped context layers on intemediate contexts" => {
input: %({
"@context": {
"@vocab": "http://example/",
"B": {"@context": {"c": "http://example.org/c"}}
},
"a": {
"@context": {"@vocab": "http://example.com/"},
"@type": "B",
"a": "A in example.com",
"c": "C in example.org"
},
"c": "C in example"
}),
output: %([{
"http://example/a": [{
"@type": ["http://example/B"],
"http://example.com/a": [{"@value": "A in example.com"}],
"http://example.org/c": [{"@value": "C in example.org"}]
}],
"http://example/c": [{"@value": "C in example"}]
}])
},
"with @container: @type" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"},
"Type": {"@context": {"a": "http://example.org/a"}}
},
"typemap": {
"Type": {"a": "Object with @type <Type>"}
}
}),
output: %([{
"http://example/typemap": [
{"http://example.org/a": [{"@value": "Object with @type <Type>"}], "@type": ["http://example/Type"]}
]
}])
},
"Raises InvalidTermDefinition if processingMode is not specified" => {
input: %({
"@context": {
"@vocab": "http://example/",
"Foo": {"@context": {"bar": "http://example.org/bar"}}
},
"a": {"@type": "Foo", "bar": "baz"}
}),
processingMode: nil,
validate: true,
exception: JSON::LD::JsonLdError::InvalidTermDefinition
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
context "@reverse" do
{
"@container: @reverse" => {
input: %({
"@context": {
"@vocab": "http://example/",
"rev": { "@reverse": "forward", "@type": "@id"}
},
"@id": "http://example/one",
"rev": "http://example/two"
}),
output: %([{
"@id": "http://example/one",
"@reverse": {
"http://example/forward": [
{
"@id": "http://example/two"
}
]
}
}])
},
"expand-0037" => {
input: %({
"@context": {
"name": "http://xmlns.com/foaf/0.1/name"
},
"@id": "http://example.com/people/markus",
"name": "Markus Lanthaler",
"@reverse": {
"http://xmlns.com/foaf/0.1/knows": {
"@id": "http://example.com/people/dave",
"name": "Dave Longley"
}
}
}),
output: %([
{
"@id": "http://example.com/people/markus",
"@reverse": {
"http://xmlns.com/foaf/0.1/knows": [
{
"@id": "http://example.com/people/dave",
"http://xmlns.com/foaf/0.1/name": [
{
"@value": "Dave Longley"
}
]
}
]
},
"http://xmlns.com/foaf/0.1/name": [
{
"@value": "Markus Lanthaler"
}
]
}
])
},
"expand-0043" => {
input: %({
"@context": {
"name": "http://xmlns.com/foaf/0.1/name",
"isKnownBy": { "@reverse": "http://xmlns.com/foaf/0.1/knows" }
},
"@id": "http://example.com/people/markus",
"name": "Markus Lanthaler",
"@reverse": {
"isKnownBy": [
{
"@id": "http://example.com/people/dave",
"name": "Dave Longley"
},
{
"@id": "http://example.com/people/gregg",
"name": "Gregg Kellogg"
}
]
}
}),
output: %([
{
"@id": "http://example.com/people/markus",
"http://xmlns.com/foaf/0.1/knows": [
{
"@id": "http://example.com/people/dave",
"http://xmlns.com/foaf/0.1/name": [
{
"@value": "Dave Longley"
}
]
},
{
"@id": "http://example.com/people/gregg",
"http://xmlns.com/foaf/0.1/name": [
{
"@value": "Gregg Kellogg"
}
]
}
],
"http://xmlns.com/foaf/0.1/name": [
{
"@value": "Markus Lanthaler"
}
]
}
])
},
"@reverse object with an @id property" => {
input: %({
"@id": "http://example/foo",
"@reverse": {
"@id": "http://example/bar"
}
}),
exception: JSON::LD::JsonLdError::InvalidReversePropertyMap,
},
}.each do |title, params|
it(title) {run_expand params}
end
end
context "exceptions" do
{
"non-null @value and null @type" => {
input: {"http://example.com/foo" => {"@value" => "foo", "@type" => nil}},
exception: JSON::LD::JsonLdError::InvalidTypeValue
},
"non-null @value and null @language" => {
input: {"http://example.com/foo" => {"@value" => "foo", "@language" => nil}},
exception: JSON::LD::JsonLdError::InvalidLanguageTaggedString
},
"value with null language" => {
input: {
"@context" => {"@language" => "en"},
"http://example.org/nolang" => {"@value" => "no language", "@language" => nil}
},
exception: JSON::LD::JsonLdError::InvalidLanguageTaggedString
},
"colliding keywords" => {
input: %({
"@context": {
"id": "@id",
"ID": "@id"
},
"id": "http://example/foo",
"ID": "http://example/bar"
}),
exception: JSON::LD::JsonLdError::CollidingKeywords,
}
}.each do |title, params|
it(title) {run_expand params}
end
end
end
# Runs a single expansion example.
#
# Recognized params keys:
#   :input          - document to expand (Hash, or a JSON String to parse)
#   :output         - expected expanded form (or a JSON String to parse)
#   :processingMode - JSON-LD processing mode handed to the API
#   :base           - base IRI used when expanding
#   :exception      - error class the expansion is expected to raise
#   :pending        - optional message; example is pending when input absent
def run_expand(params)
  input  = params[:input].is_a?(String)  ? ::JSON.parse(params[:input])  : params[:input]
  output = params[:output].is_a?(String) ? ::JSON.parse(params[:output]) : params[:output]
  processingMode = params[:processingMode]
  pending params.fetch(:pending, "test implementation") unless input
  if (expected_error = params[:exception])
    # NOTE(review): the whole params hash is merged into the API options, so
    # extra keys (:input, :output, ...) ride along — behavior preserved as-is.
    expect do
      JSON::LD::API.expand(input, {processingMode: processingMode}.merge(params))
    end.to raise_error(expected_error)
  else
    expanded = JSON::LD::API.expand(input,
      base: params[:base], logger: logger, processingMode: processingMode)
    expect(expanded).to produce(output, logger)
  end
end
end
Add `@vocab: @base` example from spec.
# coding: utf-8
require_relative 'spec_helper'
describe JSON::LD::API do
let(:logger) {RDF::Spec.logger}
describe ".expand" do
{
"empty doc": {
input: {},
output: []
},
"@list coercion": {
input: %({
"@context": {
"foo": {"@id": "http://example.com/foo", "@container": "@list"}
},
"foo": [{"@value": "bar"}]
}),
output: %([{
"http://example.com/foo": [{"@list": [{"@value": "bar"}]}]
}])
},
"native values in list": {
input: %({
"http://example.com/foo": {"@list": [1, 2]}
}),
output: %([{
"http://example.com/foo": [{"@list": [{"@value": 1}, {"@value": 2}]}]
}])
},
"@graph": {
input: %({
"@context": {"ex": "http://example.com/"},
"@graph": [
{"ex:foo": {"@value": "foo"}},
{"ex:bar": {"@value": "bar"}}
]
}),
output: %([
{"http://example.com/foo": [{"@value": "foo"}]},
{"http://example.com/bar": [{"@value": "bar"}]}
])
},
"@graph value (expands to array form)": {
input: %({
"@context": {"ex": "http://example.com/"},
"ex:p": {
"@id": "ex:Sub1",
"@graph": {
"ex:q": "foo"
}
}
}),
output: %([{
"http://example.com/p": [{
"@id": "http://example.com/Sub1",
"@graph": [{
"http://example.com/q": [{"@value": "foo"}]
}]
}]
}])
},
"@type with CURIE": {
input: %({
"@context": {"ex": "http://example.com/"},
"@type": "ex:type"
}),
output: %([
{"@type": ["http://example.com/type"]}
])
},
"@type with CURIE and muliple values": {
input: %({
"@context": {"ex": "http://example.com/"},
"@type": ["ex:type1", "ex:type2"]
}),
output: %([
{"@type": ["http://example.com/type1", "http://example.com/type2"]}
])
},
"@value with false": {
input: %({"http://example.com/ex": {"@value": false}}),
output: %([{"http://example.com/ex": [{"@value": false}]}])
},
"compact IRI": {
input: %({
"@context": {"ex": "http://example.com/"},
"ex:p": {"@id": "ex:Sub1"}
}),
output: %([{
"http://example.com/p": [{"@id": "http://example.com/Sub1"}]
}])
},
}.each_pair do |title, params|
it(title) {run_expand params}
end
context "with relative IRIs" do
{
"base": {
input: %({
"@id": "",
"@type": "http://www.w3.org/2000/01/rdf-schema#Resource"
}),
output: %([{
"@id": "http://example.org/",
"@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"]
}])
},
"relative": {
input: %({
"@id": "a/b",
"@type": "http://www.w3.org/2000/01/rdf-schema#Resource"
}),
output: %([{
"@id": "http://example.org/a/b",
"@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"]
}])
},
"hash": {
input: %({
"@id": "#a",
"@type": "http://www.w3.org/2000/01/rdf-schema#Resource"
}),
output: %([{
"@id": "http://example.org/#a",
"@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"]
}])
},
"unmapped @id": {
input: %({
"http://example.com/foo": {"@id": "bar"}
}),
output: %([{
"http://example.com/foo": [{"@id": "http://example.org/bar"}]
}])
},
}.each do |title, params|
it(title) {run_expand params.merge(base: "http://example.org/")}
end
end
context "with relative property IRIs" do
{
"base": {
input: %({
"http://a/b": "foo"
}),
output: %([{
"http://a/b": [{"@value": "foo"}]
}])
},
"relative": {
input: %({
"a/b": "foo"
}),
output: %([])
},
"hash": {
input: %({
"#a": "foo"
}),
output: %([])
},
"dotseg": {
input: %({
"../a": "foo"
}),
output: %([])
},
}.each do |title, params|
it(title) {run_expand params.merge(base: "http://example.org/")}
end
context "with @vocab" do
{
"base": {
input: %({
"@context": {"@vocab": "http:///vocab/"},
"http://a/b": "foo"
}),
output: %([{
"http://a/b": [{"@value": "foo"}]
}])
},
"relative": {
input: %({
"@context": {"@vocab": "http://vocab/"},
"a/b": "foo"
}),
output: %([{
"http://vocab/a/b": [{"@value": "foo"}]
}])
},
"hash": {
input: %({
"@context": {"@vocab": "http://vocab/"},
"#a": "foo"
}),
output: %([{
"http://vocab/#a": [{"@value": "foo"}]
}])
},
"dotseg": {
input: %({
"@context": {"@vocab": "http://vocab/"},
"../a": "foo"
}),
output: %([{
"http://vocab/../a": [{"@value": "foo"}]
}])
},
}.each do |title, params|
it(title) {run_expand params.merge(base: "http://example.org/")}
end
end
context "with @vocab: @base" do
{
"base": {
input: %({
"@context": {"@vocab": "@base"},
"http://a/b": "foo"
}),
output: %([{
"http://a/b": [{"@value": "foo"}]
}])
},
"relative": {
input: %({
"@context": {"@vocab": "@base"},
"a/b": "foo"
}),
output: %([{
"http://example.org/a/b": [{"@value": "foo"}]
}])
},
"hash": {
input: %({
"@context": {"@vocab": "@base"},
"#a": "foo"
}),
output: %([{
"http://example.org/#a": [{"@value": "foo"}]
}])
},
"dotseg": {
input: %({
"@context": {"@vocab": "@base"},
"../a": "foo"
}),
output: %([{
"http://example.org/a": [{"@value": "foo"}]
}])
},
"example": {
input: %({
"@context": {
"@base": "http://example/document",
"@vocab": "@base"
},
"@id": "http://example.org/places#BrewEats",
"@type": "#Restaurant",
"#name": "Brew Eats"
}),
output: %([{
"@id": "http://example.org/places#BrewEats",
"@type": ["http://example/document#Restaurant"],
"http://example/document#name": [{"@value": "Brew Eats"}]
}])
}
}.each do |title, params|
it(title) {run_expand params.merge(base: "http://example.org/")}
end
end
end
context "keyword aliasing" do
{
"@id": {
input: %({
"@context": {"id": "@id"},
"id": "",
"@type": "http://www.w3.org/2000/01/rdf-schema#Resource"
}),
output: %([{
"@id": "",
"@type":[ "http://www.w3.org/2000/01/rdf-schema#Resource"]
}])
},
"@type": {
input: %({
"@context": {"type": "@type"},
"type": "http://www.w3.org/2000/01/rdf-schema#Resource",
"http://example.com/foo": {"@value": "bar", "type": "http://example.com/baz"}
}),
output: %([{
"@type": ["http://www.w3.org/2000/01/rdf-schema#Resource"],
"http://example.com/foo": [{"@value": "bar", "@type": "http://example.com/baz"}]
}])
},
"@language": {
input: %({
"@context": {"language": "@language"},
"http://example.com/foo": {"@value": "bar", "language": "baz"}
}),
output: %([{
"http://example.com/foo": [{"@value": "bar", "@language": "baz"}]
}])
},
"@value": {
input: %({
"@context": {"literal": "@value"},
"http://example.com/foo": {"literal": "bar"}
}),
output: %([{
"http://example.com/foo": [{"@value": "bar"}]
}])
},
"@list": {
input: %({
"@context": {"list": "@list"},
"http://example.com/foo": {"list": ["bar"]}
}),
output: %([{
"http://example.com/foo": [{"@list": [{"@value": "bar"}]}]
}])
},
}.each do |title, params|
it(title) {run_expand params}
end
end
context "native types" do
{
"true": {
input: %({
"@context": {"e": "http://example.org/vocab#"},
"e:bool": true
}),
output: %([{
"http://example.org/vocab#bool": [{"@value": true}]
}])
},
"false": {
input: %({
"@context": {"e": "http://example.org/vocab#"},
"e:bool": false
}),
output: %([{
"http://example.org/vocab#bool": [{"@value": false}]
}])
},
"double": {
input: %({
"@context": {"e": "http://example.org/vocab#"},
"e:double": 1.23
}),
output: %([{
"http://example.org/vocab#double": [{"@value": 1.23}]
}])
},
"double-zero": {
input: %({
"@context": {"e": "http://example.org/vocab#"},
"e:double-zero": 0.0e0
}),
output: %([{
"http://example.org/vocab#double-zero": [{"@value": 0.0e0}]
}])
},
"integer": {
input: %({
"@context": {"e": "http://example.org/vocab#"},
"e:integer": 123
}),
output: %([{
"http://example.org/vocab#integer": [{"@value": 123}]
}])
},
}.each do |title, params|
it(title) {run_expand params}
end
context "with @type: @id" do
{
"true": {
input: %({
"@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@id"}},
"e": true
}),
output:%( [{
"http://example.org/vocab#bool": [{"@value": true}]
}])
},
"false": {
input: %({
"@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@id"}},
"e": false
}),
output: %([{
"http://example.org/vocab#bool": [{"@value": false}]
}])
},
"double": {
input: %({
"@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@id"}},
"e": 1.23
}),
output: %([{
"http://example.org/vocab#double": [{"@value": 1.23}]
}])
},
"double-zero": {
input: %({
"@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@id"}},
"e": 0.0e0
}),
output: %([{
"http://example.org/vocab#double": [{"@value": 0.0e0}]
}])
},
"integer": {
input: %({
"@context": {"e": {"@id": "http://example.org/vocab#integer", "@type": "@id"}},
"e": 123
}),
output: %([{
"http://example.org/vocab#integer": [{"@value": 123}]
}])
},
}.each do |title, params|
it(title) {run_expand params}
end
end
context "with @type: @vocab" do
{
"true": {
input: %({
"@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@vocab"}},
"e": true
}),
output:%( [{
"http://example.org/vocab#bool": [{"@value": true}]
}])
},
"false": {
input: %({
"@context": {"e": {"@id": "http://example.org/vocab#bool", "@type": "@vocab"}},
"e": false
}),
output: %([{
"http://example.org/vocab#bool": [{"@value": false}]
}])
},
"double": {
input: %({
"@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@vocab"}},
"e": 1.23
}),
output: %([{
"http://example.org/vocab#double": [{"@value": 1.23}]
}])
},
"double-zero": {
input: %({
"@context": {"e": {"@id": "http://example.org/vocab#double", "@type": "@vocab"}},
"e": 0.0e0
}),
output: %([{
"http://example.org/vocab#double": [{"@value": 0.0e0}]
}])
},
"integer": {
input: %({
"@context": {"e": {"@id": "http://example.org/vocab#integer", "@type": "@vocab"}},
"e": 123
}),
output: %([{
"http://example.org/vocab#integer": [{"@value": 123}]
}])
},
}.each do |title, params|
it(title) {run_expand params}
end
end
end
context "coerced typed values" do
{
"boolean" => {
input: {
"@context" => {"foo" => {"@id" => "http://example.org/foo", "@type" => "http://www.w3.org/2001/XMLSchema#boolean"}},
"foo" => "true"
},
output: [{
"http://example.org/foo" => [{"@value" => "true", "@type" => "http://www.w3.org/2001/XMLSchema#boolean"}]
}]
},
"date" => {
input: {
"@context" => {"foo" => {"@id" => "http://example.org/foo", "@type" => "http://www.w3.org/2001/XMLSchema#date"}},
"foo" => "2011-03-26"
},
output: [{
"http://example.org/foo" => [{"@value" => "2011-03-26", "@type" => "http://www.w3.org/2001/XMLSchema#date"}]
}]
},
}.each do |title, params|
it(title) {run_expand params}
end
end
context "null" do
{
"value" => {
input: {"http://example.com/foo" => nil},
output: []
},
"@value" => {
input: {"http://example.com/foo" => {"@value" => nil}},
output: []
},
"@value and non-null @type" => {
input: {"http://example.com/foo" => {"@value" => nil, "@type" => "http://type"}},
output: []
},
"@value and non-null @language" => {
input: {"http://example.com/foo" => {"@value" => nil, "@language" => "en"}},
output: []
},
"array with null elements" => {
input: {
"http://example.com/foo" => [nil]
},
output: [{
"http://example.com/foo" => []
}]
},
"@set with null @value" => {
input: {
"http://example.com/foo" => [
{"@value" => nil, "@type" => "http://example.org/Type"}
]
},
output: [{
"http://example.com/foo" => []
}]
}
}.each do |title, params|
it(title) {run_expand params}
end
end
context "default language" do
{
"value with coerced null language" => {
input: {
"@context" => {
"@language" => "en",
"ex" => "http://example.org/vocab#",
"ex:german" => { "@language" => "de" },
"ex:nolang" => { "@language" => nil }
},
"ex:german" => "german",
"ex:nolang" => "no language"
},
output: [
{
"http://example.org/vocab#german" => [{"@value" => "german", "@language" => "de"}],
"http://example.org/vocab#nolang" => [{"@value" => "no language"}]
}
]
},
}.each do |title, params|
it(title) {run_expand params}
end
end
context "default vocabulary" do
{
"property" => {
input: {
"@context" => {"@vocab" => "http://example.com/"},
"verb" => {"@value" => "foo"}
},
output: [{
"http://example.com/verb" => [{"@value" => "foo"}]
}]
},
"datatype" => {
input: {
"@context" => {"@vocab" => "http://example.com/"},
"http://example.org/verb" => {"@value" => "foo", "@type" => "string"}
},
output: [
"http://example.org/verb" => [{"@value" => "foo", "@type" => "http://example.com/string"}]
]
},
"expand-0028" => {
input: {
"@context" => {
"@vocab" => "http://example.org/vocab#",
"date" => { "@type" => "dateTime" }
},
"@id" => "example1",
"@type" => "test",
"date" => "2011-01-25T00:00:00Z",
"embed" => {
"@id" => "example2",
"expandedDate" => { "@value" => "2012-08-01T00:00:00Z", "@type" => "dateTime" }
}
},
output: [
{
"@id" => "http://foo/bar/example1",
"@type" => ["http://example.org/vocab#test"],
"http://example.org/vocab#date" => [
{
"@value" => "2011-01-25T00:00:00Z",
"@type" => "http://example.org/vocab#dateTime"
}
],
"http://example.org/vocab#embed" => [
{
"@id" => "http://foo/bar/example2",
"http://example.org/vocab#expandedDate" => [
{
"@value" => "2012-08-01T00:00:00Z",
"@type" => "http://example.org/vocab#dateTime"
}
]
}
]
}
]
}
}.each do |title, params|
it(title) {run_expand params.merge(base: "http://foo/bar/")}
end
end
context "unmapped properties" do
{
"unmapped key" => {
input: {
"foo" => "bar"
},
output: []
},
"unmapped @type as datatype" => {
input: {
"http://example.com/foo" => {"@value" => "bar", "@type" => "baz"}
},
output: [{
"http://example.com/foo" => [{"@value" => "bar", "@type" => "http://example/baz"}]
}]
},
"unknown keyword" => {
input: {
"@foo" => "bar"
},
output: []
},
"value" => {
input: {
"@context" => {"ex" => {"@id" => "http://example.org/idrange", "@type" => "@id"}},
"@id" => "http://example.org/Subj",
"idrange" => "unmapped"
},
output: []
},
"context reset" => {
input: {
"@context" => {"ex" => "http://example.org/", "prop" => "ex:prop"},
"@id" => "http://example.org/id1",
"prop" => "prop",
"ex:chain" => {
"@context" => nil,
"@id" => "http://example.org/id2",
"prop" => "prop"
}
},
output: [{
"@id" => "http://example.org/id1",
"http://example.org/prop" => [{"@value" => "prop"}],
"http://example.org/chain" => [{"@id" => "http://example.org/id2"}]
}
]}
}.each do |title, params|
it(title) {run_expand params.merge(base: "http://example/")}
end
end
context "@container: @index" do
{
"string annotation" => {
input: {
"@context" => {
"container" => {
"@id" => "http://example.com/container",
"@container" => "@index"
}
},
"@id" => "http://example.com/annotationsTest",
"container" => {
"en" => "The Queen",
"de" => [ "Die Königin", "Ihre Majestät" ]
}
},
output: [
{
"@id" => "http://example.com/annotationsTest",
"http://example.com/container" => [
{"@value" => "Die Königin", "@index" => "de"},
{"@value" => "Ihre Majestät", "@index" => "de"},
{"@value" => "The Queen", "@index" => "en"}
]
}
]
},
}.each do |title, params|
it(title) {run_expand params}
end
end
context "@container: @list" do
{
"empty" => {
input: {"http://example.com/foo" => {"@list" => []}},
output: [{"http://example.com/foo" => [{"@list" => []}]}]
},
"coerced empty" => {
input: {
"@context" => {"http://example.com/foo" => {"@container" => "@list"}},
"http://example.com/foo" => []
},
output: [{"http://example.com/foo" => [{"@list" => []}]}]
},
"coerced single element" => {
input: {
"@context" => {"http://example.com/foo" => {"@container" => "@list"}},
"http://example.com/foo" => [ "foo" ]
},
output: [{"http://example.com/foo" => [{"@list" => [{"@value" => "foo"}]}]}]
},
"coerced multiple elements" => {
input: {
"@context" => {"http://example.com/foo" => {"@container" => "@list"}},
"http://example.com/foo" => [ "foo", "bar" ]
},
output: [{
"http://example.com/foo" => [{"@list" => [ {"@value" => "foo"}, {"@value" => "bar"} ]}]
}]
},
"native values in list" => {
input: {
"http://example.com/foo" => {"@list" => [1, 2]}
},
output: [{
"http://example.com/foo" => [{"@list" => [{"@value" => 1}, {"@value" => 2}]}]
}]
},
"explicit list with coerced @id values" => {
input: {
"@context" => {"http://example.com/foo" => {"@type" => "@id"}},
"http://example.com/foo" => {"@list" => ["http://foo", "http://bar"]}
},
output: [{
"http://example.com/foo" => [{"@list" => [{"@id" => "http://foo"}, {"@id" => "http://bar"}]}]
}]
},
"explicit list with coerced datatype values" => {
input: {
"@context" => {"http://example.com/foo" => {"@type" => RDF::XSD.date.to_s}},
"http://example.com/foo" => {"@list" => ["2012-04-12"]}
},
output: [{
"http://example.com/foo" => [{"@list" => [{"@value" => "2012-04-12", "@type" => RDF::XSD.date.to_s}]}]
}]
},
"expand-0004" => {
input: %({
"@context": {
"mylist1": {"@id": "http://example.com/mylist1", "@container": "@list"},
"mylist2": {"@id": "http://example.com/mylist2", "@container": "@list"},
"myset2": {"@id": "http://example.com/myset2", "@container": "@set"},
"myset3": {"@id": "http://example.com/myset3", "@container": "@set"}
},
"http://example.org/property": { "@list": "one item" }
}),
output: %([
{
"http://example.org/property": [
{
"@list": [
{
"@value": "one item"
}
]
}
]
}
])
},
"@list containing @list" => {
input: {
"http://example.com/foo" => {"@list" => [{"@list" => ["baz"]}]}
},
exception: JSON::LD::JsonLdError::ListOfLists
},
"@list containing @list (with coercion)" => {
input: {
"@context" => {"foo" => {"@id" => "http://example.com/foo", "@container" => "@list"}},
"foo" => [{"@list" => ["baz"]}]
},
exception: JSON::LD::JsonLdError::ListOfLists
},
"coerced @list containing an array" => {
input: {
"@context" => {"foo" => {"@id" => "http://example.com/foo", "@container" => "@list"}},
"foo" => [["baz"]]
},
exception: JSON::LD::JsonLdError::ListOfLists
},
}.each do |title, params|
it(title) {run_expand params}
end
end
context "@container: @set" do
{
"empty" => {
input: {
"http://example.com/foo" => {"@set" => []}
},
output: [{
"http://example.com/foo" => []
}]
},
"coerced empty" => {
input: {
"@context" => {"http://example.com/foo" => {"@container" => "@set"}},
"http://example.com/foo" => []
},
output: [{
"http://example.com/foo" => []
}]
},
"coerced single element" => {
input: {
"@context" => {"http://example.com/foo" => {"@container" => "@set"}},
"http://example.com/foo" => [ "foo" ]
},
output: [{
"http://example.com/foo" => [ {"@value" => "foo"} ]
}]
},
"coerced multiple elements" => {
input: {
"@context" => {"http://example.com/foo" => {"@container" => "@set"}},
"http://example.com/foo" => [ "foo", "bar" ]
},
output: [{
"http://example.com/foo" => [ {"@value" => "foo"}, {"@value" => "bar"} ]
}]
},
"array containing set" => {
input: {
"http://example.com/foo" => [{"@set" => []}]
},
output: [{
"http://example.com/foo" => []
}]
},
"Free-floating values in sets" => {
input: %({
"@context": {"property": "http://example.com/property"},
"@graph": [{
"@set": [
"free-floating strings in set objects are removed",
{"@id": "http://example.com/free-floating-node"},
{
"@id": "http://example.com/node",
"property": "nodes with properties are not removed"
}
]
}]
}),
output: %([{
"@id": "http://example.com/node",
"http://example.com/property": [
{
"@value": "nodes with properties are not removed"
}
]
}])
}
}.each do |title, params|
it(title) {run_expand params}
end
end
context "@container: @language" do
{
"simple map" => {
input: {
"@context" => {
"vocab" => "http://example.com/vocab/",
"label" => {
"@id" => "vocab:label",
"@container" => "@language"
}
},
"@id" => "http://example.com/queen",
"label" => {
"en" => "The Queen",
"de" => [ "Die Königin", "Ihre Majestät" ]
}
},
output: [
{
"@id" => "http://example.com/queen",
"http://example.com/vocab/label" => [
{"@value" => "Die Königin", "@language" => "de"},
{"@value" => "Ihre Majestät", "@language" => "de"},
{"@value" => "The Queen", "@language" => "en"}
]
}
]
},
"simple map with @none" => {
input: {
"@context" => {
"vocab" => "http://example.com/vocab/",
"label" => {
"@id" => "vocab:label",
"@container" => "@language"
}
},
"@id" => "http://example.com/queen",
"label" => {
"en" => "The Queen",
"de" => [ "Die Königin", "Ihre Majestät" ],
"@none" => "The Queen"
}
},
output: [
{
"@id" => "http://example.com/queen",
"http://example.com/vocab/label" => [
{"@value" => "The Queen"},
{"@value" => "Die Königin", "@language" => "de"},
{"@value" => "Ihre Majestät", "@language" => "de"},
{"@value" => "The Queen", "@language" => "en"},
]
}
]
},
"simple map with alias of @none" => {
input: {
"@context" => {
"vocab" => "http://example.com/vocab/",
"label" => {
"@id" => "vocab:label",
"@container" => "@language"
},
"none" => "@none"
},
"@id" => "http://example.com/queen",
"label" => {
"en" => "The Queen",
"de" => [ "Die Königin", "Ihre Majestät" ],
"none" => "The Queen"
}
},
output: [
{
"@id" => "http://example.com/queen",
"http://example.com/vocab/label" => [
{"@value" => "Die Königin", "@language" => "de"},
{"@value" => "Ihre Majestät", "@language" => "de"},
{"@value" => "The Queen", "@language" => "en"},
{"@value" => "The Queen"},
]
}
]
},
"expand-0035" => {
input: {
"@context" => {
"@vocab" => "http://example.com/vocab/",
"@language" => "it",
"label" => {
"@container" => "@language"
}
},
"@id" => "http://example.com/queen",
"label" => {
"en" => "The Queen",
"de" => [ "Die Königin", "Ihre Majestät" ]
},
"http://example.com/vocab/label" => [
"Il re",
{ "@value" => "The king", "@language" => "en" }
]
},
output: [
{
"@id" => "http://example.com/queen",
"http://example.com/vocab/label" => [
{"@value" => "Il re", "@language" => "it"},
{"@value" => "The king", "@language" => "en"},
{"@value" => "Die Königin", "@language" => "de"},
{"@value" => "Ihre Majestät", "@language" => "de"},
{"@value" => "The Queen", "@language" => "en"},
]
}
]
}
}.each do |title, params|
it(title) {run_expand params}
end
end
context "@container: @id" do
{
"Adds @id to object not having an @id" => {
input: %({
"@context": {
"@vocab": "http://example/",
"idmap": {"@container": "@id"}
},
"idmap": {
"http://example.org/foo": {"label": "Object with @id <foo>"},
"_:bar": {"label": "Object with @id _:bar"}
}
}),
output: %([{
"http://example/idmap": [
{"http://example/label": [{"@value": "Object with @id _:bar"}], "@id": "_:bar"},
{"http://example/label": [{"@value": "Object with @id <foo>"}], "@id": "http://example.org/foo"}
]
}])
},
"Retains @id in object already having an @id" => {
input: %({
"@context": {
"@vocab": "http://example/",
"idmap": {"@container": "@id"}
},
"idmap": {
"http://example.org/foo": {"@id": "http://example.org/bar", "label": "Object with @id <foo>"},
"_:bar": {"@id": "_:foo", "label": "Object with @id _:bar"}
}
}),
output: %([{
"http://example/idmap": [
{"@id": "_:foo", "http://example/label": [{"@value": "Object with @id _:bar"}]},
{"@id": "http://example.org/bar", "http://example/label": [{"@value": "Object with @id <foo>"}]}
]
}])
},
"Adds expanded @id to object" => {
input: %({
"@context": {
"@vocab": "http://example/",
"idmap": {"@container": "@id"}
},
"idmap": {
"foo": {"label": "Object with @id <foo>"}
}
}),
output: %([{
"http://example/idmap": [
{"http://example/label": [{"@value": "Object with @id <foo>"}], "@id": "http://example.org/foo"}
]
}]),
base: "http://example.org/"
},
"Raises InvalidContainerMapping if processingMode is not specified" => {
input: %({
"@context": {
"@vocab": "http://example/",
"idmap": {"@container": "@id"}
},
"idmap": {
"http://example.org/foo": {"label": "Object with @id <foo>"},
"_:bar": {"label": "Object with @id _:bar"}
}
}),
processingMode: nil,
exception: JSON::LD::JsonLdError::InvalidContainerMapping
},
"Does not add @id if it is @none, or expands to @none": {
input: %({
"@context": {
"@vocab": "http://example/",
"idmap": {"@container": "@id"},
"none": "@none"
},
"idmap": {
"@none": {"label": "Object with no @id"},
"none": {"label": "Another object with no @id"}
}
}),
output: %([{
"http://example/idmap": [
{"http://example/label": [{"@value": "Object with no @id"}]},
{"http://example/label": [{"@value": "Another object with no @id"}]}
]
}])
}
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
context "@container: @type" do
{
"Adds @type to object not having an @type" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"}
},
"typemap": {
"http://example.org/foo": {"label": "Object with @type <foo>"},
"_:bar": {"label": "Object with @type _:bar"}
}
}),
output: %([{
"http://example/typemap": [
{"http://example/label": [{"@value": "Object with @type _:bar"}], "@type": ["_:bar"]},
{"http://example/label": [{"@value": "Object with @type <foo>"}], "@type": ["http://example.org/foo"]}
]
}])
},
"Prepends @type in object already having an @type" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"}
},
"typemap": {
"http://example.org/foo": {"@type": "http://example.org/bar", "label": "Object with @type <foo>"},
"_:bar": {"@type": "_:foo", "label": "Object with @type _:bar"}
}
}),
output: %([{
"http://example/typemap": [
{
"@type": ["_:bar", "_:foo"],
"http://example/label": [{"@value": "Object with @type _:bar"}]
},
{
"@type": ["http://example.org/foo", "http://example.org/bar"],
"http://example/label": [{"@value": "Object with @type <foo>"}]
}
]
}])
},
"Adds vocabulary expanded @type to object" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"}
},
"typemap": {
"Foo": {"label": "Object with @type <foo>"}
}
}),
output: %([{
"http://example/typemap": [
{"http://example/label": [{"@value": "Object with @type <foo>"}], "@type": ["http://example/Foo"]}
]
}])
},
"Adds document expanded @type to object" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"},
"label": "http://example/label"
},
"typemap": {
"Foo": {"label": "Object with @type <foo>"}
}
}),
output: %([{
"http://example/typemap": [
{"http://example/label": [{"@value": "Object with @type <foo>"}], "@type": ["http://example/Foo"]}
]
}])
},
"Does not add @type if it is @none, or expands to @none": {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"},
"none": "@none"
},
"typemap": {
"@none": {"label": "Object with no @type"},
"none": {"label": "Another object with no @type"}
}
}),
output: %([{
"http://example/typemap": [
{"http://example/label": [{"@value": "Object with no @type"}]},
{"http://example/label": [{"@value": "Another object with no @type"}]}
]
}])
},
"Raises InvalidContainerMapping if processingMode is not specified" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"}
},
"typemap": {
"http://example.org/foo": {"label": "Object with @type <foo>"},
"_:bar": {"label": "Object with @type _:bar"}
}
}),
processingMode: nil,
exception: JSON::LD::JsonLdError::InvalidContainerMapping
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
context "@container: @graph" do
{
"Creates a graph object given a value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": "@graph"}
},
"input": {
"value": "x"
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object within an array given a value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@set"]}
},
"input": {
"value": "x"
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Does not create an graph object if value is a graph" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": "@graph"}
},
"input": {
"@graph": {
"value": "x"
}
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
context "+ @index" do
{
"Creates a graph object given an indexed value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@index"]}
},
"input": {
"g1": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@index": "g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value with index @none" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@index"]}
},
"input": {
"@none": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value with index alias of @none" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@index"]},
"none": "@none"
},
"input": {
"none": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value with @set" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@index", "@set"]}
},
"input": {
"g1": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@index": "g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Does not create a new graph object if indexed value is already a graph object" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@index"]}
},
"input": {
"g1": {
"@graph": {
"value": "x"
}
}
}
}),
output: %([{
"http://example.org/input": [{
"@index": "g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
context "+ @id" do
{
"Creates a graph object given an indexed value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@id"]}
},
"input": {
"http://example.com/g1": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@id": "http://example.com/g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value of @none" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@id"]}
},
"input": {
"@none": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value of alias of @none" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@id"]},
"none": "@none"
},
"input": {
"none": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Creates a graph object given an indexed value with @set" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@id", "@set"]}
},
"input": {
"http://example.com/g1": {"value": "x"}
}
}),
output: %([{
"http://example.org/input": [{
"@id": "http://example.com/g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
"Does not create a new graph object if indexed value is already a graph object" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"input": {"@container": ["@graph", "@id"]}
},
"input": {
"http://example.com/g1": {
"@graph": {
"value": "x"
}
}
}
}),
output: %([{
"http://example.org/input": [{
"@id": "http://example.com/g1",
"@graph": [{
"http://example.org/value": [{"@value": "x"}]
}]
}]
}])
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
end
context "@nest" do
{
"Expands input using @nest" => {
input: %({
"@context": {"@vocab": "http://example.org/"},
"p1": "v1",
"@nest": {
"p2": "v2"
}
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [{"@value": "v2"}]
}])
},
"Expands input using aliased @nest" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": "@nest"
},
"p1": "v1",
"nest": {
"p2": "v2"
}
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [{"@value": "v2"}]
}])
},
"Appends nested values when property at base and nested" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": "@nest"
},
"p1": "v1",
"nest": {
"p2": "v3"
},
"p2": "v2"
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [
{"@value": "v2"},
{"@value": "v3"}
]
}])
},
"Appends nested values from all @nest aliases in term order" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest1": "@nest",
"nest2": "@nest"
},
"p1": "v1",
"nest2": {
"p2": "v4"
},
"p2": "v2",
"nest1": {
"p2": "v3"
}
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [
{"@value": "v2"},
{"@value": "v3"},
{"@value": "v4"}
]
}])
},
"Nested nested containers" => {
input: %({
"@context": {
"@vocab": "http://example.org/"
},
"p1": "v1",
"@nest": {
"p2": "v3",
"@nest": {
"p2": "v4"
}
},
"p2": "v2"
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [
{"@value": "v2"},
{"@value": "v3"},
{"@value": "v4"}
]
}])
},
"Arrays of nested values" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": "@nest"
},
"p1": "v1",
"nest": {
"p2": ["v4", "v5"]
},
"p2": ["v2", "v3"]
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [
{"@value": "v2"},
{"@value": "v3"},
{"@value": "v4"},
{"@value": "v5"}
]
}])
},
"A nest of arrays" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": "@nest"
},
"p1": "v1",
"nest": [{
"p2": "v4"
}, {
"p2": "v5"
}],
"p2": ["v2", "v3"]
}),
output: %([{
"http://example.org/p1": [{"@value": "v1"}],
"http://example.org/p2": [
{"@value": "v2"},
{"@value": "v3"},
{"@value": "v4"},
{"@value": "v5"}
]
}])
},
"@nest MUST NOT have a string value" => {
input: %({
"@context": {"@vocab": "http://example.org/"},
"@nest": "This should generate an error"
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest MUST NOT have a boolen value" => {
input: %({
"@context": {"@vocab": "http://example.org/"},
"@nest": true
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest MUST NOT have a numeric value" => {
input: %({
"@context": {"@vocab": "http://example.org/"},
"@nest": 1
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest MUST NOT have a value object value" => {
input: %({
"@context": {"@vocab": "http://example.org/"},
"@nest": {"@value": "This should generate an error"}
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest in term definition MUST NOT be a non-@nest keyword" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": {"@nest": "@id"}
},
"nest": "This should generate an error"
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest in term definition MUST NOT have a boolen value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": {"@nest": true}
},
"nest": "This should generate an error"
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"@nest in term definition MUST NOT have a numeric value" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"nest": {"@nest": 123}
},
"nest": "This should generate an error"
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"Nested @container: @list" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"list": {"@container": "@list", "@nest": "nestedlist"},
"nestedlist": "@nest"
},
"nestedlist": {
"list": ["a", "b"]
}
}),
output: %([{
"http://example.org/list": [{"@list": [
{"@value": "a"},
{"@value": "b"}
]}]
}])
},
"Nested @container: @index" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"index": {"@container": "@index", "@nest": "nestedindex"},
"nestedindex": "@nest"
},
"nestedindex": {
"index": {
"A": "a",
"B": "b"
}
}
}),
output: %([{
"http://example.org/index": [
{"@value": "a", "@index": "A"},
{"@value": "b", "@index": "B"}
]
}])
},
"Nested @container: @language" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"container": {"@container": "@language", "@nest": "nestedlanguage"},
"nestedlanguage": "@nest"
},
"nestedlanguage": {
"container": {
"en": "The Queen",
"de": "Die Königin"
}
}
}),
output: %([{
"http://example.org/container": [
{"@value": "Die Königin", "@language": "de"},
{"@value": "The Queen", "@language": "en"}
]
}])
},
"Nested @container: @type" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type", "@nest": "nestedtypemap"},
"nestedtypemap": "@nest"
},
"nestedtypemap": {
"typemap": {
"http://example.org/foo": {"label": "Object with @type <foo>"},
"_:bar": {"label": "Object with @type _:bar"}
}
}
}),
output: %([{
"http://example/typemap": [
{"http://example/label": [{"@value": "Object with @type _:bar"}], "@type": ["_:bar"]},
{"http://example/label": [{"@value": "Object with @type <foo>"}], "@type": ["http://example.org/foo"]}
]
}])
},
"Nested @container: @id" => {
input: %({
"@context": {
"@vocab": "http://example/",
"idmap": {"@container": "@id", "@nest": "nestedidmap"},
"nestedidmap": "@nest"
},
"nestedidmap": {
"idmap": {
"http://example.org/foo": {"label": "Object with @id <foo>"},
"_:bar": {"label": "Object with @id _:bar"}
}
}
}),
output: %([{
"http://example/idmap": [
{"http://example/label": [{"@value": "Object with @id _:bar"}], "@id": "_:bar"},
{"http://example/label": [{"@value": "Object with @id <foo>"}], "@id": "http://example.org/foo"}
]
}])
},
"Nest term an invalid keyword" => {
input: %({
"@context": {
"term": {"@id": "http://example/term", "@nest": "@id"}
}
}),
exception: JSON::LD::JsonLdError::InvalidNestValue
},
"Nest in @reverse" => {
input: %({
"@context": {
"term": {"@reverse": "http://example/term", "@nest": "@nest"}
}
}),
exception: JSON::LD::JsonLdError::InvalidReverseProperty
},
"Raises InvalidTermDefinition if processingMode is not specified" => {
input: %({
"@context": {
"@vocab": "http://example.org/",
"list": {"@container": "@list", "@nest": "nestedlist"},
"nestedlist": "@nest"
},
"nestedlist": {
"list": ["a", "b"]
}
}),
processingMode: nil,
validate: true,
exception: JSON::LD::JsonLdError::InvalidTermDefinition
},
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
# JSON-LD 1.1 property-scoped contexts: a term definition may embed its own
# "@context", which is applied when expanding values of that term.
# Each entry maps an example title to params consumed by run_expand.
context "scoped context" do
{
"adding new term" => {
input: %({
"@context": {
"@vocab": "http://example/",
"foo": {"@context": {"bar": "http://example.org/bar"}}
},
"foo": {
"bar": "baz"
}
}),
output: %([
{
"http://example/foo": [{"http://example.org/bar": [{"@value": "baz"}]}]
}
])
},
"overriding a term" => {
input: %({
"@context": {
"@vocab": "http://example/",
"foo": {"@context": {"bar": {"@type": "@id"}}},
"bar": {"@type": "http://www.w3.org/2001/XMLSchema#string"}
},
"foo": {
"bar": "http://example/baz"
}
}),
output: %([
{
"http://example/foo": [{"http://example/bar": [{"@id": "http://example/baz"}]}]
}
])
},
"property and value with different terms mapping to the same expanded property" => {
input: %({
"@context": {
"@vocab": "http://example/",
"foo": {"@context": {"Bar": {"@id": "bar"}}}
},
"foo": {
"Bar": "baz"
}
}),
output: %([
{
"http://example/foo": [{
"http://example/bar": [
{"@value": "baz"}
]}
]
}
])
},
"deep @context affects nested nodes" => {
input: %({
"@context": {
"@vocab": "http://example/",
"foo": {"@context": {"baz": {"@type": "@vocab"}}}
},
"foo": {
"bar": {
"baz": "buzz"
}
}
}),
output: %([
{
"http://example/foo": [{
"http://example/bar": [{
"http://example/baz": [{"@id": "http://example/buzz"}]
}]
}]
}
])
},
# NOTE(review): "intemediate" is a typo for "intermediate"; left as-is here
# because the title is a runtime string (this is a documentation-only pass).
"scoped context layers on intemediate contexts" => {
input: %({
"@context": {
"@vocab": "http://example/",
"b": {"@context": {"c": "http://example.org/c"}}
},
"a": {
"@context": {"@vocab": "http://example.com/"},
"b": {
"a": "A in example.com",
"c": "C in example.org"
},
"c": "C in example.com"
},
"c": "C in example"
}),
output: %([{
"http://example/a": [{
"http://example.com/c": [{"@value": "C in example.com"}],
"http://example/b": [{
"http://example.com/a": [{"@value": "A in example.com"}],
"http://example.org/c": [{"@value": "C in example.org"}]
}]
}],
"http://example/c": [{"@value": "C in example"}]
}])
},
# Scoped contexts are a JSON-LD 1.1 feature; with processingMode forced to
# nil the processor must reject the embedded term context.
"Raises InvalidTermDefinition if processingMode is not specified" => {
input: %({
"@context": {
"@vocab": "http://example/",
"foo": {"@context": {"bar": "http://example.org/bar"}}
},
"foo": {
"bar": "baz"
}
}),
processingMode: nil,
validate: true,
exception: JSON::LD::JsonLdError::InvalidTermDefinition
},
# Every example runs under JSON-LD 1.1 unless its params override processingMode.
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
# JSON-LD 1.1 type-scoped contexts: a term used as a value of @type may carry
# its own "@context", applied to the node object it types.
context "scoped context on @type" do
{
"adding new term" => {
input: %({
"@context": {
"@vocab": "http://example/",
"Foo": {"@context": {"bar": "http://example.org/bar"}}
},
"a": {"@type": "Foo", "bar": "baz"}
}),
output: %([
{
"http://example/a": [{
"@type": ["http://example/Foo"],
"http://example.org/bar": [{"@value": "baz"}]
}]
}
])
},
"overriding a term" => {
input: %({
"@context": {
"@vocab": "http://example/",
"Foo": {"@context": {"bar": {"@type": "@id"}}},
"bar": {"@type": "http://www.w3.org/2001/XMLSchema#string"}
},
"a": {"@type": "Foo", "bar": "http://example/baz"}
}),
output: %([
{
"http://example/a": [{
"@type": ["http://example/Foo"],
"http://example/bar": [{"@id": "http://example/baz"}]
}]
}
])
},
# The scoped context must also apply when @type is referenced via an alias.
"alias of @type" => {
input: %({
"@context": {
"@vocab": "http://example/",
"type": "@type",
"Foo": {"@context": {"bar": "http://example.org/bar"}}
},
"a": {"type": "Foo", "bar": "baz"}
}),
output: %([
{
"http://example/a": [{
"@type": ["http://example/Foo"],
"http://example.org/bar": [{"@value": "baz"}]
}]
}
])
},
"deep @context affects nested nodes" => {
input: %({
"@context": {
"@vocab": "http://example/",
"Foo": {"@context": {"baz": {"@type": "@vocab"}}}
},
"@type": "Foo",
"bar": {"baz": "buzz"}
}),
output: %([
{
"@type": ["http://example/Foo"],
"http://example/bar": [{
"http://example/baz": [{"@id": "http://example/buzz"}]
}]
}
])
},
# NOTE(review): "intemediate" is a typo for "intermediate"; left as-is since
# the title is a runtime string (documentation-only pass).
"scoped context layers on intemediate contexts" => {
input: %({
"@context": {
"@vocab": "http://example/",
"B": {"@context": {"c": "http://example.org/c"}}
},
"a": {
"@context": {"@vocab": "http://example.com/"},
"@type": "B",
"a": "A in example.com",
"c": "C in example.org"
},
"c": "C in example"
}),
output: %([{
"http://example/a": [{
"@type": ["http://example/B"],
"http://example.com/a": [{"@value": "A in example.com"}],
"http://example.org/c": [{"@value": "C in example.org"}]
}],
"http://example/c": [{"@value": "C in example"}]
}])
},
# Keys of a type map become @type values and trigger the type-scoped context.
"with @container: @type" => {
input: %({
"@context": {
"@vocab": "http://example/",
"typemap": {"@container": "@type"},
"Type": {"@context": {"a": "http://example.org/a"}}
},
"typemap": {
"Type": {"a": "Object with @type <Type>"}
}
}),
output: %([{
"http://example/typemap": [
{"http://example.org/a": [{"@value": "Object with @type <Type>"}], "@type": ["http://example/Type"]}
]
}])
},
# Type-scoped contexts are 1.1-only; a nil processingMode must raise.
"Raises InvalidTermDefinition if processingMode is not specified" => {
input: %({
"@context": {
"@vocab": "http://example/",
"Foo": {"@context": {"bar": "http://example.org/bar"}}
},
"a": {"@type": "Foo", "bar": "baz"}
}),
processingMode: nil,
validate: true,
exception: JSON::LD::JsonLdError::InvalidTermDefinition
},
# Every example runs under JSON-LD 1.1 unless its params override processingMode.
}.each do |title, params|
it(title) {run_expand({processingMode: "json-ld-1.1"}.merge(params))}
end
end
# Expansion of reverse properties: terms declared with "@reverse" and explicit
# "@reverse" maps in node objects. Example names like expand-0037 refer to the
# corresponding entries in the JSON-LD expansion test suite.
context "@reverse" do
{
"@container: @reverse" => {
input: %({
"@context": {
"@vocab": "http://example/",
"rev": { "@reverse": "forward", "@type": "@id"}
},
"@id": "http://example/one",
"rev": "http://example/two"
}),
output: %([{
"@id": "http://example/one",
"@reverse": {
"http://example/forward": [
{
"@id": "http://example/two"
}
]
}
}])
},
"expand-0037" => {
input: %({
"@context": {
"name": "http://xmlns.com/foaf/0.1/name"
},
"@id": "http://example.com/people/markus",
"name": "Markus Lanthaler",
"@reverse": {
"http://xmlns.com/foaf/0.1/knows": {
"@id": "http://example.com/people/dave",
"name": "Dave Longley"
}
}
}),
output: %([
{
"@id": "http://example.com/people/markus",
"@reverse": {
"http://xmlns.com/foaf/0.1/knows": [
{
"@id": "http://example.com/people/dave",
"http://xmlns.com/foaf/0.1/name": [
{
"@value": "Dave Longley"
}
]
}
]
},
"http://xmlns.com/foaf/0.1/name": [
{
"@value": "Markus Lanthaler"
}
]
}
])
},
# A term defined with @reverse used inside an explicit @reverse map cancels
# out: the relationship expands in the forward direction.
"expand-0043" => {
input: %({
"@context": {
"name": "http://xmlns.com/foaf/0.1/name",
"isKnownBy": { "@reverse": "http://xmlns.com/foaf/0.1/knows" }
},
"@id": "http://example.com/people/markus",
"name": "Markus Lanthaler",
"@reverse": {
"isKnownBy": [
{
"@id": "http://example.com/people/dave",
"name": "Dave Longley"
},
{
"@id": "http://example.com/people/gregg",
"name": "Gregg Kellogg"
}
]
}
}),
output: %([
{
"@id": "http://example.com/people/markus",
"http://xmlns.com/foaf/0.1/knows": [
{
"@id": "http://example.com/people/dave",
"http://xmlns.com/foaf/0.1/name": [
{
"@value": "Dave Longley"
}
]
},
{
"@id": "http://example.com/people/gregg",
"http://xmlns.com/foaf/0.1/name": [
{
"@value": "Gregg Kellogg"
}
]
}
],
"http://xmlns.com/foaf/0.1/name": [
{
"@value": "Markus Lanthaler"
}
]
}
])
},
# An @reverse map may only contain properties, not @id/keywords.
"@reverse object with an @id property" => {
input: %({
"@id": "http://example/foo",
"@reverse": {
"@id": "http://example/bar"
}
}),
exception: JSON::LD::JsonLdError::InvalidReversePropertyMap,
},
}.each do |title, params|
it(title) {run_expand params}
end
end
# Error cases: malformed value objects and conflicting keyword aliases must
# raise the corresponding JsonLdError subclasses during expansion.
context "exceptions" do
{
"non-null @value and null @type" => {
input: {"http://example.com/foo" => {"@value" => "foo", "@type" => nil}},
exception: JSON::LD::JsonLdError::InvalidTypeValue
},
"non-null @value and null @language" => {
input: {"http://example.com/foo" => {"@value" => "foo", "@language" => nil}},
exception: JSON::LD::JsonLdError::InvalidLanguageTaggedString
},
# A null @language on the value object is invalid even when the context
# supplies a default language.
"value with null language" => {
input: {
"@context" => {"@language" => "en"},
"http://example.org/nolang" => {"@value" => "no language", "@language" => nil}
},
exception: JSON::LD::JsonLdError::InvalidLanguageTaggedString
},
# Two different terms aliasing @id on the same node collide.
"colliding keywords" => {
input: %({
"@context": {
"id": "@id",
"ID": "@id"
},
"id": "http://example/foo",
"ID": "http://example/bar"
}),
exception: JSON::LD::JsonLdError::CollidingKeywords,
}
}.each do |title, params|
it(title) {run_expand params}
end
end
end
# Drives one expansion example described by +params+:
# - :input / :output may be JSON text (parsed here) or already-parsed data
# - a missing :input marks the example pending
# - :exception, when present, asserts that expansion raises that error;
#   otherwise the expanded document is compared against :output.
def run_expand(params)
  input, output, processingMode = params.values_at(:input, :output, :processingMode)
  input = ::JSON.parse(input) if input.is_a?(String)
  output = ::JSON.parse(output) if output.is_a?(String)
  pending params.fetch(:pending, "test implementation") unless input

  if (expected_error = params[:exception])
    expect {
      JSON::LD::API.expand(input, {processingMode: processingMode}.merge(params))
    }.to raise_error(expected_error)
  else
    expanded = JSON::LD::API.expand(input, base: params[:base], logger: logger, processingMode: processingMode)
    expect(expanded).to produce(output, logger)
  end
end
end
|
require 'spec_helper'
require 'fernet'
# Specs for the Fernet token API: generation, verification, TTL handling,
# and optional payload encryption.
describe Fernet do
# Reset global configuration that individual examples mutate.
after { Fernet::Configuration.run }
let(:token_data) do
{ :email => 'harold@heroku.com', :id => '123', :arbitrary => 'data' }
end
let(:secret) { 'JrdICDH6x3M7duQeM8dJEMK4Y5TkBIsYDw1lPy35RiY=' }
let(:bad_secret) { 'badICDH6x3M7duQeM8dJEMK4Y5TkBIsYDw1lPy35RiY=' }
it 'can verify tokens it generates' do
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(
Fernet.verify(secret, token) do |verifier|
verifier.data['email'] == 'harold@heroku.com'
end
).to be_true
end
it 'fails with a bad secret' do
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(
Fernet.verify(bad_secret, token) do |verifier|
verifier.data['email'] == 'harold@heroku.com'
end
).to be_false
end
it 'fails with a bad custom verification' do
token = Fernet.generate(secret) do |generator|
generator.data = { :email => 'harold@heroku.com' }
end
expect(
Fernet.verify(secret, token) do |verifier|
verifier.data['email'] == 'lol@heroku.com'
end
).to be_false
end
it 'fails if the token is too old' do
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(
Fernet.verify(secret, token) do |verifier|
verifier.ttl = 1
# BUGFIX: the previous version set `verifier.ttl = 0` as the last
# expression of the block (returning 0, which is truthy in Ruby) and
# relied on the token being "older than 0 seconds" at verify time —
# a sub-second timing race. Stub the verifier clock two seconds ahead
# (the same technique used by the TTL-ignore examples below) so the
# 1-second TTL has deterministically expired.
def verifier.now
Time.now + 2
end
true
end
).to be_false
end
it 'verifies without a custom verification' do
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(Fernet.verify(secret, token)).to be_true
end
it 'can ignore TTL enforcement' do
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(
Fernet.verify(secret, token) do |verifier|
# Push the clock far into the future; enforce_ttl = false must still pass.
def verifier.now
Time.now + 99999999999
end
verifier.enforce_ttl = false
true
end
).to be_true
end
it 'can ignore TTL enforcement via global config' do
Fernet::Configuration.run do |config|
config.enforce_ttl = false
end
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(
Fernet.verify(secret, token) do |verifier|
def verifier.now
Time.now + 99999999999
end
true
end
).to be_true
end
it 'generates without custom data' do
token = Fernet.generate(secret)
expect(Fernet.verify(secret, token)).to be_true
end
it 'can encrypt the payload' do
token = Fernet.generate(secret, true) do |generator|
generator.data['password'] = 'password1'
end
# The encrypted payload must not leak the plaintext.
expect(Base64.decode64(token)).not_to match /password1/
Fernet.verify(secret, token) do |verifier|
expect(verifier.data['password']).to eq('password1')
end
end
it 'does not encrypt when asked nicely' do
token = Fernet.generate(secret, false) do |generator|
generator.data['password'] = 'password1'
end
expect(Base64.decode64(token)).to match /password1/
Fernet.verify(secret, token, false) do |verifier|
expect(verifier.data['password']).to eq('password1')
end
end
it 'can disable encryption via global configuration' do
Fernet::Configuration.run { |c| c.encrypt = false }
token = Fernet.generate(secret) do |generator|
generator.data['password'] = 'password1'
end
expect(Base64.decode64(token)).to match /password1/
Fernet.verify(secret, token) do |verifier|
expect(verifier.data['password']).to eq('password1')
end
end
it 'returns the unencrypted message upon verify' do
token = Fernet.generate(secret) do |generator|
generator.data['password'] = 'password1'
end
verifier = Fernet.verifier(secret, token)
expect(verifier.valid?).to be_true
expect(verifier.data['password']).to eq('password1')
end
end
Add a failing test demonstrating the TTL-expiry verification problem.
require 'spec_helper'
require 'fernet'
# Specs for the Fernet token API: generation, verification, TTL handling,
# and optional payload encryption. This revision replaces the earlier
# timing-sensitive TTL example with a stubbed-clock version.
describe Fernet do
# Reset global configuration that individual examples mutate.
after { Fernet::Configuration.run }
let(:token_data) do
{ :email => 'harold@heroku.com', :id => '123', :arbitrary => 'data' }
end
let(:secret) { 'JrdICDH6x3M7duQeM8dJEMK4Y5TkBIsYDw1lPy35RiY=' }
let(:bad_secret) { 'badICDH6x3M7duQeM8dJEMK4Y5TkBIsYDw1lPy35RiY=' }
it 'can verify tokens it generates' do
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(
Fernet.verify(secret, token) do |verifier|
verifier.data['email'] == 'harold@heroku.com'
end
).to be_true
end
it 'fails with a bad secret' do
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(
Fernet.verify(bad_secret, token) do |verifier|
verifier.data['email'] == 'harold@heroku.com'
end
).to be_false
end
it 'fails with a bad custom verification' do
token = Fernet.generate(secret) do |generator|
generator.data = { :email => 'harold@heroku.com' }
end
expect(
Fernet.verify(secret, token) do |verifier|
verifier.data['email'] == 'lol@heroku.com'
end
).to be_false
end
it 'fails if the token is too old' do
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(
Fernet.verify(secret, token) do |verifier|
verifier.ttl = 1
# Stub the verifier clock two seconds ahead so the 1-second TTL has
# deterministically expired.
# NOTE(review): `now.second + 2` overflows when the current second is
# >= 58 — DateTime.new raises ArgumentError for second values > 59.
# `Time.now + 2` (as used below) would be safe; confirm and simplify.
def verifier.now
now = DateTime.now
DateTime.new(now.year, now.month, now.day, now.hour,
now.minute, now.second + 2, now.offset)
end
true
end
).to be_false
end
it 'verifies without a custom verification' do
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(Fernet.verify(secret, token)).to be_true
end
it 'can ignore TTL enforcement' do
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(
Fernet.verify(secret, token) do |verifier|
# Push the clock far into the future; enforce_ttl = false must still pass.
def verifier.now
Time.now + 99999999999
end
verifier.enforce_ttl = false
true
end
).to be_true
end
it 'can ignore TTL enforcement via global config' do
Fernet::Configuration.run do |config|
config.enforce_ttl = false
end
token = Fernet.generate(secret) do |generator|
generator.data = token_data
end
expect(
Fernet.verify(secret, token) do |verifier|
def verifier.now
Time.now + 99999999999
end
true
end
).to be_true
end
it 'generates without custom data' do
token = Fernet.generate(secret)
expect(Fernet.verify(secret, token)).to be_true
end
it 'can encrypt the payload' do
token = Fernet.generate(secret, true) do |generator|
generator.data['password'] = 'password1'
end
# The encrypted payload must not leak the plaintext.
expect(Base64.decode64(token)).not_to match /password1/
Fernet.verify(secret, token) do |verifier|
expect(verifier.data['password']).to eq('password1')
end
end
it 'does not encrypt when asked nicely' do
token = Fernet.generate(secret, false) do |generator|
generator.data['password'] = 'password1'
end
expect(Base64.decode64(token)).to match /password1/
Fernet.verify(secret, token, false) do |verifier|
expect(verifier.data['password']).to eq('password1')
end
end
it 'can disable encryption via global configuration' do
Fernet::Configuration.run { |c| c.encrypt = false }
token = Fernet.generate(secret) do |generator|
generator.data['password'] = 'password1'
end
expect(Base64.decode64(token)).to match /password1/
Fernet.verify(secret, token) do |verifier|
expect(verifier.data['password']).to eq('password1')
end
end
it 'returns the unencrypted message upon verify' do
token = Fernet.generate(secret) do |generator|
generator.data['password'] = 'password1'
end
verifier = Fernet.verifier(secret, token)
expect(verifier.valid?).to be_true
expect(verifier.data['password']).to eq('password1')
end
end
|
require 'spec_helper'
require 'open3'
require 'timecop'
require 'yaml'
# Paths to fixture files used throughout the HowIs specs.
HOW_IS_CONFIG_FILE = File.expand_path('./data/how_is.yml', __dir__)
HOW_IS_EXAMPLE_REPOSITORY_JSON_REPORT = File.expand_path('./data/how-is-example-repository-report.json', __dir__)
HOW_IS_EXAMPLE_REPOSITORY_HTML_REPORT = File.expand_path('./data/how-is-example-repository-report.html', __dir__)
HOW_IS_EXAMPLE_EMPTY_REPOSITORY_HTML_REPORT = File.expand_path('./data/how-is-example-empty-repository-report.html', __dir__)
# Jekyll frontmatter expected at the top of generated HTML reports.
JEKYLL_HEADER = <<-EOF
---
title: rubygems/rubygems report
layout: default
---
EOF
# Integration-style specs for HowIs report generation. HTTP interactions are
# replayed from VCR cassettes, and the clock is frozen so generated reports
# match the stored fixtures byte-for-byte.
describe HowIs do
before do
# 2016-11-01 00:00:00 UTC.
# See note in lib/how_is/report.rb about new_offset.
# TODO: Stop pretending to always be in UTC.
date = DateTime.parse('2016-11-01').new_offset(0)
Timecop.freeze(date)
end
after do
Timecop.return
end
# Round-trip: parsing a JSON report and re-serializing it is lossless.
it 'from_json(json) works' do
expected = File.open(HOW_IS_EXAMPLE_REPOSITORY_JSON_REPORT).read
actual = HowIs.from_json(expected).to_json
expect(expected.strip).to eq(actual.strip)
end
context 'with a config' do
it 'generates valid report files' do
# Run inside a temp dir because from_config writes report files to
# relative paths.
Dir.mktmpdir { |dir|
Dir.chdir(dir) {
reports = nil
VCR.use_cassette("how-is-with-config-file") do
expect {
reports = HowIs.from_config(YAML.load_file(HOW_IS_CONFIG_FILE))
}.to_not output.to_stderr
end
html_report = reports['./report.html']
json_report = reports['./report.json']
expect(html_report).to include(JEKYLL_HEADER)
}
}
end
end
context 'HTML report for how-is/example-repository' do
it 'generates a valid report' do
expected = File.open(HOW_IS_EXAMPLE_REPOSITORY_HTML_REPORT).read.chomp
actual = nil
VCR.use_cassette("how-is-example-repository") do
expect {
actual = HowIs.new('how-is/example-repository').to_html
}.to_not output.to_stderr
end
expect(expected).to eq(actual)
end
end
context 'JSON report for how-is/example-repository' do
it 'generates a valid report file' do
expected = File.open(HOW_IS_EXAMPLE_REPOSITORY_JSON_REPORT).read.chomp
actual = nil
VCR.use_cassette("how-is-example-repository") do
expect {
actual = HowIs.new('how-is/example-repository').to_json
}.to_not output.to_stderr
end
expect(expected).to eq(actual)
end
end
context 'HTML report for repository with no PRs or issues' do
it 'generates a valid report file' do
expected = File.open(HOW_IS_EXAMPLE_EMPTY_REPOSITORY_HTML_REPORT).read.chomp
actual = nil
VCR.use_cassette("how-is-example-empty-repository") do
expect {
actual = HowIs.new('how-is/example-empty-repository').to_html
}.to_not output.to_stderr
end
expect(expected).to eq(actual)
end
end
# generate_frontmatter interpolates report_data into the frontmatter values
# regardless of whether keys are Strings or Symbols.
context '#generate_frontmatter' do
it 'works with frontmatter parameter using String keys, report_data using String keys' do
actual = nil
expected = nil
VCR.use_cassette("how-is-example-repository") do
actual = HowIs.generate_frontmatter({'foo' => "bar %{baz}"}, {'baz' => "asdf"})
expected = "---\nfoo: bar asdf\n"
end
expect(actual).to eq(expected)
end
it 'works with frontmatter parameter using Symbol keys, report_data using Symbol keys' do
actual = nil
expected = nil
VCR.use_cassette("how-is-example-repository") do
actual = HowIs.generate_frontmatter({:foo => "bar %{baz}"}, {:baz => "asdf"})
expected = "---\nfoo: bar asdf\n"
end
expect(actual).to eq(expected)
end
end
context '#from_config' do
let(:config) {
file = File.expand_path('./data/how_is/cli_spec/how_is.yml', __dir__)
YAML.load_file(file)
}
let(:issues) { JSON.parse(open(File.expand_path('./data/issues.json', __dir__)).read) }
let(:pulls) { JSON.parse(open(File.expand_path('./data/pulls.json', __dir__)).read) }
# Stubbed GitHub client backed by the JSON fixtures above.
let(:github) {
instance_double('GitHub',
issues: instance_double('GitHub::Issues', list: issues),
pulls: instance_double('GitHub::Pulls', list: pulls)
)
}
# Stand-in report class whose export is a fixed string, so the test can
# focus on frontmatter handling.
let(:report_class) {
Class.new {
def self.export(analysis, format)
"[report]"
end
}
}
it 'generates a report, with correct frontmatter' do
reports = nil
VCR.use_cassette("how-is-from-config-frontmatter") do
reports = HowIs.from_config(config, github: github, report_class: report_class)
end
actual_html = reports['output/report.html']
actual_json = reports['output/report.json']
expected_html = <<-EOF
---
title: rubygems/rubygems report
layout: default
---
[report]
EOF
# Not valid JSON, because report_class.export() is the same static string
# regardless of format.
expected_json = "[report]\n"
expect(actual_html).to eq(expected_html)
expect(actual_json).to eq(expected_json)
end
end
end
Remove an unnecessary blank line.
require 'spec_helper'
require 'open3'
require 'timecop'
require 'yaml'
# Paths to fixture files used throughout the HowIs specs.
HOW_IS_CONFIG_FILE = File.expand_path('./data/how_is.yml', __dir__)
HOW_IS_EXAMPLE_REPOSITORY_JSON_REPORT = File.expand_path('./data/how-is-example-repository-report.json', __dir__)
HOW_IS_EXAMPLE_REPOSITORY_HTML_REPORT = File.expand_path('./data/how-is-example-repository-report.html', __dir__)
HOW_IS_EXAMPLE_EMPTY_REPOSITORY_HTML_REPORT = File.expand_path('./data/how-is-example-empty-repository-report.html', __dir__)
# Jekyll frontmatter expected at the top of generated HTML reports.
JEKYLL_HEADER = <<-EOF
---
title: rubygems/rubygems report
layout: default
---
EOF
# Integration-style specs for HowIs report generation. HTTP interactions are
# replayed from VCR cassettes, and the clock is frozen so generated reports
# match the stored fixtures byte-for-byte.
describe HowIs do
before do
# 2016-11-01 00:00:00 UTC.
# See note in lib/how_is/report.rb about new_offset.
# TODO: Stop pretending to always be in UTC.
date = DateTime.parse('2016-11-01').new_offset(0)
Timecop.freeze(date)
end
after do
Timecop.return
end
# Round-trip: parsing a JSON report and re-serializing it is lossless.
it 'from_json(json) works' do
expected = File.open(HOW_IS_EXAMPLE_REPOSITORY_JSON_REPORT).read
actual = HowIs.from_json(expected).to_json
expect(expected.strip).to eq(actual.strip)
end
context 'with a config' do
it 'generates valid report files' do
# Run inside a temp dir because from_config writes report files to
# relative paths.
Dir.mktmpdir { |dir|
Dir.chdir(dir) {
reports = nil
VCR.use_cassette("how-is-with-config-file") do
expect {
reports = HowIs.from_config(YAML.load_file(HOW_IS_CONFIG_FILE))
}.to_not output.to_stderr
end
html_report = reports['./report.html']
json_report = reports['./report.json']
expect(html_report).to include(JEKYLL_HEADER)
}
}
end
end
context 'HTML report for how-is/example-repository' do
it 'generates a valid report' do
expected = File.open(HOW_IS_EXAMPLE_REPOSITORY_HTML_REPORT).read.chomp
actual = nil
VCR.use_cassette("how-is-example-repository") do
expect {
actual = HowIs.new('how-is/example-repository').to_html
}.to_not output.to_stderr
end
expect(expected).to eq(actual)
end
end
context 'JSON report for how-is/example-repository' do
it 'generates a valid report file' do
expected = File.open(HOW_IS_EXAMPLE_REPOSITORY_JSON_REPORT).read.chomp
actual = nil
VCR.use_cassette("how-is-example-repository") do
expect {
actual = HowIs.new('how-is/example-repository').to_json
}.to_not output.to_stderr
end
expect(expected).to eq(actual)
end
end
context 'HTML report for repository with no PRs or issues' do
it 'generates a valid report file' do
expected = File.open(HOW_IS_EXAMPLE_EMPTY_REPOSITORY_HTML_REPORT).read.chomp
actual = nil
VCR.use_cassette("how-is-example-empty-repository") do
expect {
actual = HowIs.new('how-is/example-empty-repository').to_html
}.to_not output.to_stderr
end
expect(expected).to eq(actual)
end
end
# generate_frontmatter interpolates report_data into the frontmatter values
# regardless of whether keys are Strings or Symbols.
context '#generate_frontmatter' do
it 'works with frontmatter parameter using String keys, report_data using String keys' do
actual = nil
expected = nil
VCR.use_cassette("how-is-example-repository") do
actual = HowIs.generate_frontmatter({'foo' => "bar %{baz}"}, {'baz' => "asdf"})
expected = "---\nfoo: bar asdf\n"
end
expect(actual).to eq(expected)
end
it 'works with frontmatter parameter using Symbol keys, report_data using Symbol keys' do
actual = nil
expected = nil
VCR.use_cassette("how-is-example-repository") do
actual = HowIs.generate_frontmatter({:foo => "bar %{baz}"}, {:baz => "asdf"})
expected = "---\nfoo: bar asdf\n"
end
expect(actual).to eq(expected)
end
end
context '#from_config' do
let(:config) {
file = File.expand_path('./data/how_is/cli_spec/how_is.yml', __dir__)
YAML.load_file(file)
}
let(:issues) { JSON.parse(open(File.expand_path('./data/issues.json', __dir__)).read) }
let(:pulls) { JSON.parse(open(File.expand_path('./data/pulls.json', __dir__)).read) }
# Stubbed GitHub client backed by the JSON fixtures above.
let(:github) {
instance_double('GitHub',
issues: instance_double('GitHub::Issues', list: issues),
pulls: instance_double('GitHub::Pulls', list: pulls)
)
}
# Stand-in report class whose export is a fixed string, so the test can
# focus on frontmatter handling.
let(:report_class) {
Class.new {
def self.export(analysis, format)
"[report]"
end
}
}
it 'generates a report, with correct frontmatter' do
reports = nil
VCR.use_cassette("how-is-from-config-frontmatter") do
reports = HowIs.from_config(config, github: github, report_class: report_class)
end
actual_html = reports['output/report.html']
actual_json = reports['output/report.json']
expected_html = <<-EOF
---
title: rubygems/rubygems report
layout: default
---
[report]
EOF
# Not valid JSON, because report_class.export() is the same static string
# regardless of format.
expected_json = "[report]\n"
expect(actual_html).to eq(expected_html)
expect(actual_json).to eq(expected_json)
end
end
end
|
# -*- encoding: utf-8 -*-
$:.push File.expand_path('../lib', __FILE__)
# Gem specification for redis-activesupport: an ActiveSupport cache store
# backed by redis-store.
Gem::Specification.new do |s|
s.name = 'redis-activesupport'
s.version = '4.1.5'
s.authors = ['Luca Guidi', 'Ryan Bigg']
s.email = ['me@lucaguidi.com', 'me@ryanbigg.com']
s.homepage = 'http://redis-store.org/redis-activesupport'
s.summary = %q{Redis store for ActiveSupport}
s.description = %q{Redis store for ActiveSupport}
s.license = 'MIT'
# NOTE(review): rubyforge_project is deprecated in modern RubyGems (RubyForge
# is gone); safe to drop when the supported RubyGems floor allows.
s.rubyforge_project = 'redis-activesupport'
# Package everything tracked by git; test files and executables are derived
# the same way.
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ['lib']
s.add_runtime_dependency 'redis-store', '~> 1.1.0'
s.add_runtime_dependency 'activesupport', '>= 3', '< 6'
s.add_development_dependency 'rake', '~> 10'
s.add_development_dependency 'bundler', '~> 1.3'
s.add_development_dependency 'mocha', '~> 0.14.0'
s.add_development_dependency 'minitest', '>= 4.2', '< 6'
s.add_development_dependency 'connection_pool', '~> 1.2.0'
s.add_development_dependency 'redis-store-testing'
end
v5.0.0.pre adds support for Rails 5
# -*- encoding: utf-8 -*-
$:.push File.expand_path('../lib', __FILE__)
# Gem specification for redis-activesupport 5.0.0.pre (pre-release adding
# Rails 5 support).
Gem::Specification.new do |s|
s.name = 'redis-activesupport'
s.version = '5.0.0.pre'
s.authors = ['Luca Guidi', 'Ryan Bigg']
s.email = ['me@lucaguidi.com', 'me@ryanbigg.com']
s.homepage = 'http://redis-store.org/redis-activesupport'
s.summary = %q{Redis store for ActiveSupport}
s.description = %q{Redis store for ActiveSupport}
s.license = 'MIT'
# NOTE(review): rubyforge_project is deprecated in modern RubyGems; safe to
# drop when the supported RubyGems floor allows.
s.rubyforge_project = 'redis-activesupport'
# Package everything tracked by git; test files and executables are derived
# the same way.
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ['lib']
s.add_runtime_dependency 'redis-store', '~> 1.1.0'
s.add_runtime_dependency 'activesupport', '>= 3', '< 6'
s.add_development_dependency 'rake', '~> 10'
s.add_development_dependency 'bundler', '~> 1.3'
s.add_development_dependency 'mocha', '~> 0.14.0'
s.add_development_dependency 'minitest', '>= 4.2', '< 6'
s.add_development_dependency 'connection_pool', '~> 1.2.0'
s.add_development_dependency 'redis-store-testing'
end
|
$:.push File.expand_path("../lib", __FILE__)
require "redis-content-store/version"
# Gem specification for redis-content-store: content-aware Rails caching
# built on redis-activesupport.
Gem::Specification.new do |s|
s.name = "redis-content-store"
s.version = RedisContentStore::VERSION
s.authors = ["Eric Richardson", "Bryan Ricker"]
s.email = ["bricker@scpr.org"]
# NOTE(review): homepage points at the secretary-rails repository — looks
# like a copy-paste from another gemspec; confirm the intended URL.
s.homepage = "https://github.com/scpr/secretary-rails"
s.license = "MIT"
s.summary = "Content-aware caching for Rails"
s.description = "Content-aware caching for Rails"
s.files = Dir["{lib}/**/*"] +
["LICENSE", "Rakefile", "README.md"]
s.test_files = Dir["spec/**/*"]
s.add_runtime_dependency("redis-activesupport", "~> 4.0.0")
end
Update gem dependencies
$:.push File.expand_path("../lib", __FILE__)
require "redis-content-store/version"
# Gem specification for redis-content-store: content-aware Rails caching
# built on redis-activesupport (this revision widens the dependency range).
Gem::Specification.new do |s|
s.name = "redis-content-store"
s.version = RedisContentStore::VERSION
s.authors = ["Eric Richardson", "Bryan Ricker"]
s.email = ["bricker@scpr.org"]
# NOTE(review): homepage points at the secretary-rails repository — looks
# like a copy-paste from another gemspec; confirm the intended URL.
s.homepage = "https://github.com/scpr/secretary-rails"
s.license = "MIT"
s.summary = "Content-aware caching for Rails"
s.description = "Content-aware caching for Rails"
s.files = Dir["{lib}/**/*"] +
["LICENSE", "Rakefile", "README.md"]
s.test_files = Dir["spec/**/*"]
s.add_runtime_dependency "redis-activesupport", [">= 3.2.0", "< 5"]
end
|
# Gem specification for redis-session-store: a Redis-backed Rails session
# store.
Gem::Specification.new do |gem|
gem.name = 'redis-session-store'
gem.authors = ['Mathias Meyer']
gem.email = ['meyer@paperplanes.de']
gem.summary = 'A drop-in replacement for e.g. MemCacheStore to ' \
'store Rails sessions (and Rails sessions only) in Redis.'
gem.description = gem.summary + ' For great glory!'
gem.homepage = 'https://github.com/roidrage/redis-session-store'
gem.license = 'MIT'
# NOTE(review): has_rdoc= has been deprecated (a no-op) since RubyGems 1.7;
# safe to delete when convenient.
gem.has_rdoc = true
gem.extra_rdoc_files = %w(LICENSE AUTHORS.md CONTRIBUTING.md)
gem.files = `git ls-files -z`.split("\x0")
gem.require_paths = %w(lib)
# Single-source the version by scraping the VERSION constant from the lib.
gem.version = File.read('lib/redis-session-store.rb')
.match(/^  VERSION = '(.*)'/)[1]
gem.add_runtime_dependency 'redis', '~> 0'
gem.add_runtime_dependency 'actionpack', '>= 3', '< 5.1'
gem.add_development_dependency 'fakeredis', '~> 0'
gem.add_development_dependency 'rake', '~> 0'
gem.add_development_dependency 'rspec', '~> 0'
gem.add_development_dependency 'rubocop', '~> 0'
gem.add_development_dependency 'simplecov', '~> 0'
end
Pin dependency versions more precisely.
# Gem specification for redis-session-store (this revision pins dependency
# versions more precisely than the earlier '~> 0' constraints).
Gem::Specification.new do |gem|
gem.name = 'redis-session-store'
gem.authors = ['Mathias Meyer']
gem.email = ['meyer@paperplanes.de']
gem.summary = 'A drop-in replacement for e.g. MemCacheStore to ' \
'store Rails sessions (and Rails sessions only) in Redis.'
gem.description = gem.summary + ' For great glory!'
gem.homepage = 'https://github.com/roidrage/redis-session-store'
gem.license = 'MIT'
# NOTE(review): has_rdoc= has been deprecated (a no-op) since RubyGems 1.7;
# safe to delete when convenient.
gem.has_rdoc = true
gem.extra_rdoc_files = %w(LICENSE AUTHORS.md CONTRIBUTING.md)
gem.files = `git ls-files -z`.split("\x0")
gem.require_paths = %w(lib)
# Single-source the version by scraping the VERSION constant from the lib.
gem.version = File.read('lib/redis-session-store.rb')
.match(/^  VERSION = '(.*)'/)[1]
gem.add_runtime_dependency 'redis', '~> 3'
gem.add_runtime_dependency 'actionpack', '>= 3', '< 5.1'
gem.add_development_dependency 'fakeredis', '~> 0.5'
gem.add_development_dependency 'rake', '~> 11'
gem.add_development_dependency 'rspec', '~> 3'
gem.add_development_dependency 'rubocop', '~> 0.41'
gem.add_development_dependency 'simplecov', '~> 0.11'
end
|
Add initial spec dir
# Placeholder spec: marks the Joseph suite as pending until features exist.
describe Joseph do
pending 'features'
end
|
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
require 'timeout'
# Specs for Kanban::Backlog — a Redis-backed task queue with todo/doing
# states, claims with TTLs, and completed/unworkable terminal states.
# Requires a running Redis server; all keys live under the kanban:test
# namespace and are removed when the suite finishes.
describe 'Backlog' do
let(:redis) { Redis.new }
let(:backlog) { Kanban::Backlog.new backend: redis, namespace: 'kanban:test' }
# Seed five identical tasks so claim/complete examples have work available.
before do
task = { 'test' => 'data' }
5.times { backlog.add(task) }
end
# Remove every test key once the whole suite has run.
after(:all) do
redis = Redis.new
redis.keys('kanban:test:*').each do |key|
redis.del key
end
end
describe '#new' do
it 'should require a backend' do
expect { Kanban::Backlog.new }.to raise_error(ArgumentError)
end
subject { backlog }
it { is_expected.to be_an_instance_of Kanban::Backlog }
# Default namespace/queue/item key names.
context 'when no optional parameters are given' do
let(:backlog) { Kanban::Backlog.new backend: redis }
describe '.namespace' do
subject { backlog.namespace }
it { is_expected.to eq 'default' }
end
describe '.queue' do
subject { backlog.queue }
it { is_expected.to eq 'default:tasks' }
end
describe '.item' do
subject { backlog.item }
it { is_expected.to eq 'default:task' }
end
end
context 'when :namespace is "testing"' do
let(:backlog) { Kanban::Backlog.new backend: redis, namespace: 'testing' }
describe '.namespace' do
subject { backlog.namespace }
it { is_expected.to eq 'testing' }
end
end
# Custom queue/item names are still prefixed with the namespace.
context 'when :queue is "tests"' do
let(:backlog) { Kanban::Backlog.new backend: redis, queue: 'tests' }
describe '.queue' do
subject { backlog.queue }
it { is_expected.to eq 'default:tests' }
end
end
context 'when :item is "test"' do
let(:backlog) { Kanban::Backlog.new backend: redis, item: 'test' }
describe '.item' do
subject { backlog.item }
it { is_expected.to eq 'default:test' }
end
end
end
describe '#get' do
context 'when the task does not exist' do
subject { backlog.get 0 }
it { is_expected.to be_empty }
end
context 'when the task is {"test"=>"data"}' do
let(:task) { ({ 'test' => 'data' }) }
let(:id) { backlog.add task }
subject { backlog.get id }
it { is_expected.to eq task }
end
end
describe '#next_id' do
describe 'should return incrementing values' do
let!(:last_id) { backlog.next_id }
subject { backlog.next_id }
it { is_expected.to be > last_id }
end
end
describe '#todo' do
context 'when there are no tasks pending' do
before { redis.del "#{backlog.queue}:todo" }
subject { backlog.todo }
it { is_expected.to be_empty }
end
context 'when there are tasks pending' do
let(:task) { ({ 'test' => 'data' }) }
let!(:id) { backlog.add task }
subject { backlog.todo }
it { is_expected.to_not be_empty }
it { is_expected.to include id }
end
end
describe '#add' do
# add enforces string keys via a contract; add! (below) does not.
context 'when task is a hash with symbol keys' do
let(:task) { ({ foo: 'bar' }) }
it 'should raise a ParamContractError' do
expect { backlog.add task }.to raise_error(ParamContractError)
end
end
context 'when task is a hash with string keys' do
let(:task) { ({ 'test' => 'data' }) }
let!(:id) { backlog.next_id + 1}
subject { backlog.add task }
it { is_expected.to eq id }
end
end
describe '#add!' do
context 'when task is a hash with symbol keys' do
let(:task) { ({ test: 'data' }) }
let!(:id) { backlog.next_id + 1 }
subject { backlog.add! task }
it { is_expected.to eq id }
end
end
describe '#claimed?' do
context 'when a claim does not exist' do
subject { backlog.claimed? 0 }
it { is_expected.to be false }
end
context 'when a claim exists' do
let(:id) { backlog.claim }
subject { backlog.claimed? id }
it { is_expected.to be true }
end
# Claims expire after their duration; sleeping past the 1-second claim.
context 'when a claim has expired' do
let!(:id) { backlog.claim duration: 1 }
before { sleep 1.1 }
subject { backlog.claimed? id }
it { is_expected.to be false }
end
end
describe '#claim' do
# claim blocks until a task is available, hence the Timeout guard.
context 'when there are no pending tasks' do
before { redis.del "#{backlog.queue}:todo" }
it 'should block' do
expect do
Timeout.timeout(0.1) do
backlog.claim
end
end.to raise_error(Timeout::Error)
end
end
context 'when there are pending tasks' do
before { backlog.add ({ 'test' => 'data' }) }
subject { backlog.claim }
# NOTE(review): Fixnum is deprecated since Ruby 2.4 (unified into
# Integer); consider `be_an Integer` when the supported Ruby allows.
it { is_expected.to be_a Fixnum }
end
end
describe '#doing' do
let!(:id) { backlog.claim }
subject { backlog.doing }
it { is_expected.to include id }
end
# Terminal-state transitions return true the first time and false on repeats.
it 'should allow indicating completion of a task only once' do
expect(backlog.complete(1)).to be true
expect(backlog.complete(1)).to be false
end
it 'should check if a task is completed' do
expect(backlog.completed?(2)).to be false
backlog.complete 2
expect(backlog.completed?(2)).to be true
end
it 'should allow indicating a task should not be retried' do
expect(backlog.unworkable(3)).to be true
expect(backlog.unworkable(3)).to be false
end
it 'should check if a task is unworkable' do
expect(backlog.unworkable?(4)).to be false
backlog.unworkable 4
expect(backlog.unworkable?(4)).to be true
end
it 'should consider a task that is completed or unworkable to be done' do
expect(backlog.done?(0)).to be false
backlog.complete(5)
expect(backlog.done?(5)).to be true
backlog.unworkable(6)
expect(backlog.done?(6)).to be true
end
it 'should be able to release a task from being in progress' do
id = backlog.claim
expect(backlog.release(id)).to be true
expect(backlog.release(id)).to be false
expect(backlog.doing).to_not include(id)
end
it 'should be able to forcibly expire a claim' do
expect(backlog.expire_claim(0)).to be false
id = backlog.claim
expect(backlog.expire_claim(id)).to be true
expect(backlog.claimed?(id)).to be false
end
it 'should expire any active claims when a task is released' do
id = backlog.claim
expect(backlog.claimed?(id)).to be true
backlog.release(id)
expect(backlog.claimed?(id)).to be false
end
# Requeue moves a claimed task back to todo and out of doing.
it 'should be able to requeue a task' do
id = backlog.claim
expect(backlog.requeue(id)).to be true
expect(backlog.todo).to include(id)
expect(backlog.doing).to_not include(id)
end
end
spec/kanban: Better tests for #complete
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
require 'timeout'
describe 'Backlog' do
let(:redis) { Redis.new }
let(:backlog) { Kanban::Backlog.new backend: redis, namespace: 'kanban:test' }
before do
task = { 'test' => 'data' }
5.times { backlog.add(task) }
end
after(:all) do
redis = Redis.new
redis.keys('kanban:test:*').each do |key|
redis.del key
end
end
describe '#new' do
it 'should require a backend' do
expect { Kanban::Backlog.new }.to raise_error(ArgumentError)
end
subject { backlog }
it { is_expected.to be_an_instance_of Kanban::Backlog }
context 'when no optional parameters are given' do
let(:backlog) { Kanban::Backlog.new backend: redis }
describe '.namespace' do
subject { backlog.namespace }
it { is_expected.to eq 'default' }
end
describe '.queue' do
subject { backlog.queue }
it { is_expected.to eq 'default:tasks' }
end
describe '.item' do
subject { backlog.item }
it { is_expected.to eq 'default:task' }
end
end
context 'when :namespace is "testing"' do
let(:backlog) { Kanban::Backlog.new backend: redis, namespace: 'testing' }
describe '.namespace' do
subject { backlog.namespace }
it { is_expected.to eq 'testing' }
end
end
context 'when :queue is "tests"' do
let(:backlog) { Kanban::Backlog.new backend: redis, queue: 'tests' }
describe '.queue' do
subject { backlog.queue }
it { is_expected.to eq 'default:tests' }
end
end
context 'when :item is "test"' do
let(:backlog) { Kanban::Backlog.new backend: redis, item: 'test' }
describe '.item' do
subject { backlog.item }
it { is_expected.to eq 'default:test' }
end
end
end
describe '#get' do
context 'when the task does not exist' do
subject { backlog.get 0 }
it { is_expected.to be_empty }
end
context 'when the task is {"test"=>"data"}' do
let(:task) { ({ 'test' => 'data' }) }
let(:id) { backlog.add task }
subject { backlog.get id }
it { is_expected.to eq task }
end
end
describe '#next_id' do
describe 'should return incrementing values' do
let!(:last_id) { backlog.next_id }
subject { backlog.next_id }
it { is_expected.to be > last_id }
end
end
describe '#todo' do
context 'when there are no tasks pending' do
before { redis.del "#{backlog.queue}:todo" }
subject { backlog.todo }
it { is_expected.to be_empty }
end
context 'when there are tasks pending' do
let(:task) { ({ 'test' => 'data' }) }
let!(:id) { backlog.add task }
subject { backlog.todo }
it { is_expected.to_not be_empty }
it { is_expected.to include id }
end
end
describe '#add' do
context 'when task is a hash with symbol keys' do
let(:task) { ({ foo: 'bar' }) }
it 'should raise a ParamContractError' do
expect { backlog.add task }.to raise_error(ParamContractError)
end
end
context 'when task is a hash with string keys' do
let(:task) { ({ 'test' => 'data' }) }
let!(:id) { backlog.next_id + 1}
subject { backlog.add task }
it { is_expected.to eq id }
end
end
describe '#add!' do
context 'when task is a hash with symbol keys' do
let(:task) { ({ test: 'data' }) }
let!(:id) { backlog.next_id + 1 }
subject { backlog.add! task }
it { is_expected.to eq id }
end
end
describe '#claimed?' do
context 'when a claim does not exist' do
subject { backlog.claimed? 0 }
it { is_expected.to be false }
end
context 'when a claim exists' do
let(:id) { backlog.claim }
subject { backlog.claimed? id }
it { is_expected.to be true }
end
context 'when a claim has expired' do
let!(:id) { backlog.claim duration: 1 }
before { sleep 1.1 }
subject { backlog.claimed? id }
it { is_expected.to be false }
end
end
describe '#claim' do
context 'when there are no pending tasks' do
before { redis.del "#{backlog.queue}:todo" }
it 'should block' do
expect do
Timeout.timeout(0.1) do
backlog.claim
end
end.to raise_error(Timeout::Error)
end
end
context 'when there are pending tasks' do
before { backlog.add ({ 'test' => 'data' }) }
subject { backlog.claim }
it { is_expected.to be_a Fixnum }
end
end
describe '#doing' do
let!(:id) { backlog.claim }
subject { backlog.doing }
it { is_expected.to include id }
end
describe '#complete' do
context 'when task has not been marked complete' do
subject { backlog.complete 1 }
it { is_expected.to be true }
end
context 'when task has been marked complete' do
before { backlog.complete 1 }
subject { backlog.complete 1 }
it { is_expected.to be false }
end
end
it 'should check if a task is completed' do
expect(backlog.completed?(2)).to be false
backlog.complete 2
expect(backlog.completed?(2)).to be true
end
it 'should allow indicating a task should not be retried' do
expect(backlog.unworkable(3)).to be true
expect(backlog.unworkable(3)).to be false
end
it 'should check if a task is unworkable' do
expect(backlog.unworkable?(4)).to be false
backlog.unworkable 4
expect(backlog.unworkable?(4)).to be true
end
it 'should consider a task that is completed or unworkable to be done' do
expect(backlog.done?(0)).to be false
backlog.complete(5)
expect(backlog.done?(5)).to be true
backlog.unworkable(6)
expect(backlog.done?(6)).to be true
end
it 'should be able to release a task from being in progress' do
id = backlog.claim
expect(backlog.release(id)).to be true
expect(backlog.release(id)).to be false
expect(backlog.doing).to_not include(id)
end
it 'should be able to forcibly expire a claim' do
expect(backlog.expire_claim(0)).to be false
id = backlog.claim
expect(backlog.expire_claim(id)).to be true
expect(backlog.claimed?(id)).to be false
end
it 'should expire any active claims when a task is released' do
id = backlog.claim
expect(backlog.claimed?(id)).to be true
backlog.release(id)
expect(backlog.claimed?(id)).to be false
end
it 'should be able to requeue a task' do
id = backlog.claim
expect(backlog.requeue(id)).to be true
expect(backlog.todo).to include(id)
expect(backlog.doing).to_not include(id)
end
end
|
require 'spec_helper'
require './examples/lambda_report'
require 'roo'
describe LambdaReport do
include_context 'with report', LambdaReport
it 'have 11 rows' do
expect(xlsx.sheet(0).last_row).to eq 11
end
it 'use lambda to calculate value' do
expect(xlsx.sheet(0).cell('B', 6)).to eq true
expect(xlsx.sheet(0).cell('C', 6)).to eq false
end
end
Adapt boolean tests for updated roo.
require 'spec_helper'
require './examples/lambda_report'
require 'roo'
describe LambdaReport do
include_context 'with report', LambdaReport
it 'have 11 rows' do
expect(xlsx.sheet(0).last_row).to eq 11
end
it 'use lambda to calculate value' do
expect(xlsx.sheet(0).cell('B', 6)).to eq "TRUE"
expect(xlsx.sheet(0).cell('C', 6)).to eq "FALSE"
end
end
|
require 'spec_helper'
require 'webmock/rspec'
module CS
describe Client do
describe "client with authentication" do
let(:client) do
create_client
end
let!(:logged_in_client) do
client = CS::Client.new(base_uri: base_uri)
client.session_id = '123499'
client
end
describe "current_user" do
it "should return current user information" do
user = EndPoint::User.new
EndPoint::User.any_instance.stub(current_user: user)
current_user = client.current_user
current_user.should == user
current_user.to_h.should be_kind_of Hash
end
end
describe "current_groups" do
it "should return groups that current user belongs to" do
groups = [ EndPoint::Group.new ]
EndPoint::Group.any_instance.stub(current_groups: groups)
groups = logged_in_client.current_groups
groups.should_not be_empty
end
end
describe "sensors" do
it "should return Sensors relation" do
client.sensors.should be_a_kind_of(CS::Relation::SensorRelation)
end
end
describe "logger" do
context "when login using user & password" do
it "should assign the new session" do
logger = double()
Session.any_instance.stub(login: '1234')
client.logger = logger
client.login('foo', 'bar')
client.session.logger.should == logger
end
end
context "when login using oauth" do
it "should assign logger" do
logger = double()
client.logger = logger
client.oauth('', '', '', '')
client.session.logger.should == logger
end
end
context "when specifying session_id" do
it "should assign logger" do
logger = double()
client.logger = logger
client.session_id = '1234'
client.session.logger.should == logger
end
end
context "setting logger" do
it "it should set logger to existing session" do
logger = double()
client.session_id = '1234'
client.logger = logger
client.session.logger.should == logger
client.logger = nil
client.session.logger.should be_nil
end
end
end
end
end
end
Prepare spec for method delegation
Signed-off-by: Ahmy Yulrizka <a3d8d8a23998e2eb60cbb1a209b876c0975f08c6@gmail.com>
require 'spec_helper'
require 'webmock/rspec'
module CS
describe Client do
describe "client with authentication" do
let(:client) do
create_client
end
let!(:logged_in_client) do
client = CS::Client.new(base_uri: base_uri)
client.session_id = '123499'
client
end
describe "current_user" do
it "should return current user information" do
user = EndPoint::User.new
EndPoint::User.any_instance.stub(current_user: user)
current_user = client.current_user
current_user.should == user
current_user.to_h.should be_kind_of Hash
end
end
describe "current_groups" do
it "should return groups that current user belongs to" do
groups = [ EndPoint::Group.new ]
EndPoint::Group.any_instance.stub(current_groups: groups)
groups = logged_in_client.current_groups
groups.should_not be_empty
end
end
describe "sensors" do
it "should return Sensors relation" do
client.sensors.should be_a_kind_of(CS::Relation::SensorRelation)
end
end
describe "logger" do
context "when login using user & password" do
it "should assign the new session" do
logger = double()
Session.any_instance.stub(login: '1234')
client.logger = logger
client.login('foo', 'bar')
client.session.logger.should == logger
end
end
context "when login using oauth" do
it "should assign logger" do
logger = double()
client.logger = logger
client.oauth('', '', '', '')
client.session.logger.should == logger
end
end
context "when specifying session_id" do
it "should assign logger" do
logger = double()
client.logger = logger
client.session_id = '1234'
client.session.logger.should == logger
end
end
context "setting logger" do
it "it should set logger to existing session" do
logger = double()
client.session_id = '1234'
client.logger = logger
client.session.logger.should == logger
client.logger = nil
client.session.logger.should be_nil
end
end
end
describe "get" do
it 'shoudl delegate method to session' do
end
end
describe "post" do
it 'shoudl delegate method to session' do
end
end
describe "put" do
it 'shoudl delegate method to session' do
end
end
describe "delete" do
it 'shoudl delegate method to session' do
end
end
describe "head" do
it 'shoudl delegate method to session' do
end
end
describe "response_code" do
it 'shoudl delegate method to session' do
end
end
describe "response_body" do
it 'shoudl delegate method to session' do
end
end
describe "response_headers" do
it 'shoudl delegate method to session' do
end
end
describe "errors" do
it 'shoudl delegate method to session' do
end
end
end
end
end
|
#encoding: utf-8
require "spec_helper"
require 'mailer'
require 'configuration'
def create_seeds
Speaker.all.destroy!
Session.all.destroy!
@speaker = Speaker.new(:firstname => 'John', :lastname => 'Doe', :email => 'john@doe.org')
@speaker.save
date = DateTime.parse('2010/05/31 10:30')
@diner = Session.new(:title => 'diner', :speaker => @speaker, :scheduled_at => date)
@diner.save
pub = Session.new(:title => 'pub', :speaker => @speaker)
pub.save
@sessions = [@diner, pub]
end
def configure_test_mail
Mail.defaults do
delivery_method :test
end
Mail::TestMailer.deliveries.clear
end
def configure_test_database
Configuration.new.test
create_seeds()
end
describe 'Mailer' do
before do
configure_test_mail()
configure_test_database()
@mailer = Mailer.new
end
describe 'mail_speaker_having_at_least_one_scheduled_session' do
before do
@mails = @mailer.mail_speaker_having_at_least_one_scheduled_session
end
it 'should make a message to John Doe' do
@mails.length.should == 1
mail = @mails[0]
mail.from.should == ['orga@conf.agile-france.org']
mail.to.should == [@speaker.email]
mail.subject.should == 'vous avez une session retenue au programme de la conférence Agile France'
mail.body.raw_source.should == <<eos
Bonjour John Doe, nous avons regardé vos propositions de session sur http://conf.agile-france.org/
Le comité de sélection a retenu
- diner
Le programme sera publié prochainement
Contactez nous pour toute question, remarque ou contrainte
L'Organisation de la conférence Agile France
eos
end
it 'should send it' do
Mail::TestMailer.deliveries.should == @mails
end
end
describe 'confirm_speaker' do
before do
@mails = @mailer.mail_confirm_schedule_time_to_speaker
end
it 'should make a message to John Doe' do
@mails.length.should == 1
mail = @mails[0]
mail.from.should == ['orga@conf.agile-france.org']
mail.to.should == [@speaker.email]
mail.subject.should == 'heure de vos sessions à la conférence Agile France'
mail.body.raw_source.should == <<eos
Bonjour John Doe
Nous avons plusieurs information pour vous
== Programmation ==
Nous avons programmé
- diner, le 31/05/2010 à 10:30, pour une durée de 60 minutes
Le programme sera en ligne prochainement sur http://conf.agile-france.org/
== Matériel visuel ==
Il y a un logo 'speaker' sur http://github.com/thierryhenrio/agile-france-publicity/blob/master/speaker-2010.png
Le mettre sur un blog, un site, c'est aussi promouvoir l'évènement
== Nous prenons en charge un orateur par session ==
La prise en charge comprend les droits d'entrée pour la conférence et le diner du 31/05
Si vous binomez avec Paul Dupont, Paul Dupont doit s'acquitter des droits d'entrée pour la conférence
Contactez nous pour toute question, remarque ou contrainte
L'Organisation de la conférence Agile France
eos
end
it 'should send it' do
Mail::TestMailer.deliveries.should == @mails
end
end
describe 'ask_for_capacity' do
describe ', with speaker having session without capacity' do
before do
@mails = @mailer.mail_ask_for_capacity
end
it 'should make a message to John Doe for diner session' do
@mails.length.should == 1
mail = @mails[0]
mail.from.should == ['orga@conf.agile-france.org']
mail.to.should == [@speaker.email]
mail.subject.should == "nombre de participants que vous pouvez accueillir"
mail.body.raw_source.should == <<eos
Bonjour John Doe
Nous nous sommes aperçu que le nombre de participants n'est pas requis lors de la soumission de session
Et nous aimerions communiquer cette information aux participants
Nos objectifs sont les suivants
- les participants ont accès à cette information pour prendre les bonnes décisions
par exemple, la session 'Audience réduite' a une limite de 20, je vais rejoindre la salle 5 minutes avant,
plutot qu'arriver juste à temps
== Vos sessions ==
Pour les sessions suivantes, avez vous une limite de participation : 50, 40, 30, autre ?
- diner, le 31/05/2010 à 10:30
L'Organisation de la conférence Agile France
eos
end
it 'should send it' do
Mail::TestMailer.deliveries.should == @mails
end
end
describe ', with speaker having scheduled session with capacity' do
before do
@diner.capacity = 10
@diner.save!
@mails = @mailer.mail_ask_for_capacity
end
it 'should not mail John Doe' do
@mails.length.should == 0
end
end
end
describe 'refusal' do
before do
@mails = @mailer.mail_communicate_refusal
end
it 'should inform John Doe that pub session is not scheduled' do
@mails.length.should == 1
mail = @mails[0]
mail.from.should == ['orga@conf.agile-france.org']
mail.to.should == [@speaker.email]
mail.subject.should == 'les sessions suivantes ne sont pas retenues'
mail.body.raw_source.should == <<eos
Bonjour John Doe
les sessions suivantes ne sont pas retenues au programme
- pub
Nous vous remercions d'avoir proposé, car cela nous a permis de choisir
L'Organisation de la conférence Agile France
eos
end
it 'should send it' do
Mail::TestMailer.deliveries.should == @mails
end
end
describe 'communicate_session_is_rescheduled' do
before do
@diner.scheduled_at = DateTime.parse('31/05/2010 14h')
@diner.save!
date = Date.parse('06/06/2006')
@lunch = Session.new(:title => 'lunch', :speaker => @speaker, :scheduled => true, :scheduled_at => date)
@lunch.save!
@mails = @mailer.communicate_session_is_rescheduled @diner
end
it 'should inform John Doe that pub session is not scheduled' do
@mails.length.should == 1
mail = @mails[0]
mail.from.should == ['orga@conf.agile-france.org']
mail.to.should == [@speaker.email]
mail.subject.should == 'une de vos sessions a été reprogrammée'
mail.body.raw_source.should == <<eos
Bonjour John Doe
Pour exploiter au mieux vos contraintes ou les capacités des salles, nous avons reprogrammé au moins une de vos session
Nous vous confirmons votre agenda
- diner, le 31/05/2010 à 14:00
- lunch, le 06/06/2006 à 00:00
L'Organisation de la conférence Agile France
eos
end
it 'should send it' do
Mail::TestMailer.deliveries.should == @mails
end
end
end
ouch forgot one bad :scheduled attribute
#encoding: utf-8
require "spec_helper"
require 'mailer'
require 'configuration'
def create_seeds
Speaker.all.destroy!
Session.all.destroy!
@speaker = Speaker.new(:firstname => 'John', :lastname => 'Doe', :email => 'john@doe.org')
@speaker.save
date = DateTime.parse('2010/05/31 10:30')
@diner = Session.new(:title => 'diner', :speaker => @speaker, :scheduled_at => date)
@diner.save
pub = Session.new(:title => 'pub', :speaker => @speaker)
pub.save
@sessions = [@diner, pub]
end
def configure_test_mail
Mail.defaults do
delivery_method :test
end
Mail::TestMailer.deliveries.clear
end
def configure_test_database
Configuration.new.test
create_seeds()
end
describe 'Mailer' do
before do
configure_test_mail()
configure_test_database()
@mailer = Mailer.new
end
describe 'mail_speaker_having_at_least_one_scheduled_session' do
before do
@mails = @mailer.mail_speaker_having_at_least_one_scheduled_session
end
it 'should make a message to John Doe' do
@mails.length.should == 1
mail = @mails[0]
mail.from.should == ['orga@conf.agile-france.org']
mail.to.should == [@speaker.email]
mail.subject.should == 'vous avez une session retenue au programme de la conférence Agile France'
mail.body.raw_source.should == <<eos
Bonjour John Doe, nous avons regardé vos propositions de session sur http://conf.agile-france.org/
Le comité de sélection a retenu
- diner
Le programme sera publié prochainement
Contactez nous pour toute question, remarque ou contrainte
L'Organisation de la conférence Agile France
eos
end
it 'should send it' do
Mail::TestMailer.deliveries.should == @mails
end
end
describe 'confirm_speaker' do
before do
@mails = @mailer.mail_confirm_schedule_time_to_speaker
end
it 'should make a message to John Doe' do
@mails.length.should == 1
mail = @mails[0]
mail.from.should == ['orga@conf.agile-france.org']
mail.to.should == [@speaker.email]
mail.subject.should == 'heure de vos sessions à la conférence Agile France'
mail.body.raw_source.should == <<eos
Bonjour John Doe
Nous avons plusieurs information pour vous
== Programmation ==
Nous avons programmé
- diner, le 31/05/2010 à 10:30, pour une durée de 60 minutes
Le programme sera en ligne prochainement sur http://conf.agile-france.org/
== Matériel visuel ==
Il y a un logo 'speaker' sur http://github.com/thierryhenrio/agile-france-publicity/blob/master/speaker-2010.png
Le mettre sur un blog, un site, c'est aussi promouvoir l'évènement
== Nous prenons en charge un orateur par session ==
La prise en charge comprend les droits d'entrée pour la conférence et le diner du 31/05
Si vous binomez avec Paul Dupont, Paul Dupont doit s'acquitter des droits d'entrée pour la conférence
Contactez nous pour toute question, remarque ou contrainte
L'Organisation de la conférence Agile France
eos
end
it 'should send it' do
Mail::TestMailer.deliveries.should == @mails
end
end
describe 'ask_for_capacity' do
describe ', with speaker having session without capacity' do
before do
@mails = @mailer.mail_ask_for_capacity
end
it 'should make a message to John Doe for diner session' do
@mails.length.should == 1
mail = @mails[0]
mail.from.should == ['orga@conf.agile-france.org']
mail.to.should == [@speaker.email]
mail.subject.should == "nombre de participants que vous pouvez accueillir"
mail.body.raw_source.should == <<eos
Bonjour John Doe
Nous nous sommes aperçu que le nombre de participants n'est pas requis lors de la soumission de session
Et nous aimerions communiquer cette information aux participants
Nos objectifs sont les suivants
- les participants ont accès à cette information pour prendre les bonnes décisions
par exemple, la session 'Audience réduite' a une limite de 20, je vais rejoindre la salle 5 minutes avant,
plutot qu'arriver juste à temps
== Vos sessions ==
Pour les sessions suivantes, avez vous une limite de participation : 50, 40, 30, autre ?
- diner, le 31/05/2010 à 10:30
L'Organisation de la conférence Agile France
eos
end
it 'should send it' do
Mail::TestMailer.deliveries.should == @mails
end
end
describe ', with speaker having scheduled session with capacity' do
before do
@diner.capacity = 10
@diner.save!
@mails = @mailer.mail_ask_for_capacity
end
it 'should not mail John Doe' do
@mails.length.should == 0
end
end
end
describe 'refusal' do
before do
@mails = @mailer.mail_communicate_refusal
end
it 'should inform John Doe that pub session is not scheduled' do
@mails.length.should == 1
mail = @mails[0]
mail.from.should == ['orga@conf.agile-france.org']
mail.to.should == [@speaker.email]
mail.subject.should == 'les sessions suivantes ne sont pas retenues'
mail.body.raw_source.should == <<eos
Bonjour John Doe
les sessions suivantes ne sont pas retenues au programme
- pub
Nous vous remercions d'avoir proposé, car cela nous a permis de choisir
L'Organisation de la conférence Agile France
eos
end
it 'should send it' do
Mail::TestMailer.deliveries.should == @mails
end
end
describe 'communicate_session_is_rescheduled' do
before do
@diner.scheduled_at = DateTime.parse('31/05/2010 14h')
@diner.save!
date = Date.parse('06/06/2006')
@lunch = Session.new(:title => 'lunch', :speaker => @speaker, :scheduled_at => date)
@lunch.save!
@mails = @mailer.communicate_session_is_rescheduled @diner
end
it 'should inform John Doe that pub session is not scheduled' do
@mails.length.should == 1
mail = @mails[0]
mail.from.should == ['orga@conf.agile-france.org']
mail.to.should == [@speaker.email]
mail.subject.should == 'une de vos sessions a été reprogrammée'
mail.body.raw_source.should == <<eos
Bonjour John Doe
Pour exploiter au mieux vos contraintes ou les capacités des salles, nous avons reprogrammé au moins une de vos session
Nous vous confirmons votre agenda
- diner, le 31/05/2010 à 14:00
- lunch, le 06/06/2006 à 00:00
L'Organisation de la conférence Agile France
eos
end
it 'should send it' do
Mail::TestMailer.deliveries.should == @mails
end
end
end |
ModelStubbing.define_models do
time 2007, 6, 15
model Site do
stub :name => 'default', :host => ''
end
model User do
stub :login => 'normal-user', :email => 'normal-user@example.com', :state => 'active',
:salt => '7e3041ebc2fc05a40c60028e2c4901a81035d3cd', :crypted_password => '00742970dc9e6319f8019fd54864d3ea740f04b1',
:created_at => current_time - 5.days, :site => all_stubs(:site), :remember_token => 'foo-bar', :remember_token_expires_at => current_time + 5.days,
:activation_code => '8f24789ae988411ccf33ab0c30fe9106fab32e9b', :activated_at => current_time - 4.days, :posts_count => 3
end
model Forum do
stub :name => "Default", :topics_count => 2, :posts_count => 2, :position => 1, :state => 'public', :site => all_stubs(:site)
stub :other, :name => "Other", :topics_count => 1, :posts_count => 1, :position => 0
end
model Topic do
stub :forum => all_stubs(:forum), :user => all_stubs(:user), :title => "initial", :hits => 0, :sticky => 0, :posts_count => 1,
:last_post_id => 1000, :last_updated_at => current_time - 5.days
stub :other, :title => "Other", :last_updated_at => current_time - 4.days
stub :other_forum, :forum => all_stubs(:other_forum)
end
model Post do
stub :topic => all_stubs(:topic), :forum => all_stubs(:forum), :user => all_stubs(:user), :body => 'initial', :created_at => current_time - 5.days
stub :other, :topic => all_stubs(:other_topic), :body => 'other', :created_at => current_time - 13.days
stub :other_forum, :forum => all_stubs(:other_forum), :topic => all_stubs(:other_forum_topic)
end
end
fix 2 topic specs
ModelStubbing.define_models do
time 2007, 6, 15
model Site do
stub :name => 'default', :host => ''
end
model User do
stub :login => 'normal-user', :email => 'normal-user@example.com', :state => 'active',
:salt => '7e3041ebc2fc05a40c60028e2c4901a81035d3cd', :crypted_password => '00742970dc9e6319f8019fd54864d3ea740f04b1',
:created_at => current_time - 5.days, :site => all_stubs(:site), :remember_token => 'foo-bar', :remember_token_expires_at => current_time + 5.days,
:activation_code => '8f24789ae988411ccf33ab0c30fe9106fab32e9b', :activated_at => current_time - 4.days, :posts_count => 3
end
model Forum do
stub :name => "Default", :topics_count => 2, :posts_count => 2, :position => 1, :state => 'public', :site => all_stubs(:site)
stub :other, :name => "Other", :topics_count => 1, :posts_count => 1, :position => 0
end
model Topic do
stub :forum => all_stubs(:forum), :user => all_stubs(:user), :title => "initial", :hits => 0, :sticky => 0, :posts_count => 1,
:last_post_id => 1000, :last_updated_at => current_time - 5.days
stub :other, :title => "Other", :last_updated_at => current_time - 4.days
stub :other_forum, :forum => all_stubs(:other_forum)
end
model Post do
stub :topic => all_stubs(:topic), :forum => all_stubs(:forum), :user => all_stubs(:user), :body => 'initial', :created_at => current_time - 5.days
stub :other, :topic => all_stubs(:other_topic), :body => 'other', :created_at => current_time - 13.days
stub :other_forum, :forum => all_stubs(:other_forum), :topic => all_stubs(:other_forum_topic)
end
model Moderatorship
model Monitorship
end |
require 'nokogiri'
require 'open-uri'
RSpec.describe "Data classes" do
Reso::DataDictionary.specification.resources.each do |resource|
it "#{resource} can be initialized" do
expect(resource.constantize.new).to be_a(resource.constantize)
end
res_arr = resource.split("::")
if res_arr.count > 2
it "#{res_arr.join(" -> ")} relationships" do
(0..res_arr.count-3).each do |index|
parent_arr = res_arr[0..index + 1]
child_arr = res_arr[0..index + 2]
parent = parent_arr.join("::").classify.constantize.new
child = child_arr.join("::").classify.constantize.new
parent.assign_attributes(child_arr.last.underscore.to_sym => child)
expect(parent.send(child_arr.last.underscore.to_sym)).to be_a(child_arr.join("::").constantize)
end
end
end
end
end
# Reso::DataDictionary.specification.resources.each do |resource|
# RSpec.describe "#{resource} attributes" do
# Reso::DataDictionary.specification.fields_for_class(resource).map{|f| f[:attribute_name]}.each do |attr|
# it attr do
# end
# end
# end
# end
Cleanup
require 'nokogiri'
require 'open-uri'
RSpec.describe "Data classes" do
Reso::DataDictionary.specification.resources.each do |resource|
it "#{resource} can be initialized" do
expect(resource.constantize.new).to be_a(resource.constantize)
end
res_arr = resource.split("::")
if res_arr.count > 2
it "#{res_arr.join(" -> ")} relationships" do
(0..res_arr.count-3).each do |index|
parent_arr = res_arr[0..index + 1]
child_arr = res_arr[0..index + 2]
parent = parent_arr.join("::").classify.constantize.new
child = child_arr.join("::").classify.constantize.new
parent.assign_attributes(child_arr.last.underscore.to_sym => child)
expect(parent.send(child_arr.last.underscore.to_sym)).to be_a(child_arr.join("::").constantize)
end
end
end
# Reso::DataDictionary.specification.fields_for_class(resource).map{|f| f[:attribute_name]}.each do |attr|
# it attr do
# end
# end
end
end
|
describe "A Bureau in the Open State" do
before do
class UIViewController < UIResponder
include Bureau::Controller
end
class SomeController < UIViewController; end
@x = UIViewController.new
@y = UIViewController.new
class X
attr_accessor :num
def initialize; @num = 5; end
def do_nothing; end
def change_num; @num = 10; end
end
@z = X.new
@structure = [
{
drawers:
[
{target: @z, action: :do_nothing},
{controller: @x},
]
},
{
drawers:
[
{controller: SomeController},
]
},
{
drawers:
[
{target: @z, action: :change_num},
{controller: @y, open: true},
]
},
]
@bureau = Bureau::Bureau.new(structure:@structure, state: :open)
end
it "removes the previously open drawer when a new one with a controller is tapped" do
new_active_path = NSIndexPath.indexPathForRow(1, inSection:0)
@bureau.tableView(@bureau.table, didSelectRowAtIndexPath:new_active_path)
@bureau.structure.last[:drawers].last[:open].should.not == true
@bureau.childViewControllers.should.not.include @y
@bureau.view.subviews.should.not.include @y.view
end
it "does not remove the previously open drawer when a new one without a controller is tapped" do
new_active_path = NSIndexPath.indexPathForRow(0, inSection:0)
@bureau.tableView(@bureau.table, didSelectRowAtIndexPath:new_active_path)
@bureau.structure.last[:drawers].last[:open].should == true
@bureau.childViewControllers.should.include @y
@bureau.view.subviews.should.include @y.view
end
it "opens the touched drawer when a new one with a controller is tapped" do
new_active_path = NSIndexPath.indexPathForRow(1, inSection:0)
@bureau.tableView(@bureau.table, didSelectRowAtIndexPath:new_active_path)
@bureau.structure[0][:drawers][1][:open].should == true
@bureau.childViewControllers.should.include @x
@bureau.view.subviews.should.include @x.view
@x.view.frame.should == Bureau::Frame::closed
end
it "executes the action of a target/action drawer when one is tapped" do
tapped_path = NSIndexPath.indexPathForRow(0, inSection:2)
@z.num.should == 5
@bureau.tableView(@bureau.table, didSelectRowAtIndexPath:tapped_path)
@z.num.should == 10
end
it "instantiates controllers set as classes when tapped" do
tapped_path = NSIndexPath.indexPathForRow(0, inSection:1)
@bureau.tableView(@bureau.table, didSelectRowAtIndexPath:tapped_path)
@bureau.open_drawer[:controller_instance].should.be.instance_of SomeController
end
it "re-instantiates controllers set as classes when tapped" do
first_tapped_path = NSIndexPath.indexPathForRow(0, inSection:1)
@bureau.tableView(@bureau.table, didSelectRowAtIndexPath:first_tapped_path)
old_instance = @bureau.open_drawer[:controller_instance]
second_tapped_path = NSIndexPath.indexPathForRow(1, inSection:0)
@bureau.tableView(@bureau.table, didSelectRowAtIndexPath:second_tapped_path)
@bureau.tableView(@bureau.table, didSelectRowAtIndexPath:first_tapped_path)
new_instance = @bureau.open_drawer[:controller_instance]
new_instance.should.not == old_instance
end
end
Added test to ensure reinstantiated class-controllers get their .bureau variable set
# Exercises drawer selection behaviour when the Bureau is created in the :open state.
describe "A Bureau in the Open State" do
  before do
    # Stub UIViewController so the Bureau::Controller mixin is present on
    # every controller used in these specs.
    class UIViewController < UIResponder
      include Bureau::Controller
    end
    class SomeController < UIViewController; end
    @x = UIViewController.new
    @y = UIViewController.new
    # Plain Ruby target/action receiver: change_num mutates @num so the
    # specs can observe that a tapped drawer's action really ran.
    class X
      attr_accessor :num
      def initialize; @num = 5; end
      def do_nothing; end
      def change_num; @num = 10; end
    end
    @z = X.new
    # Section 0: target/action drawer + instance-controller drawer.
    # Section 1: class-based controller drawer (instantiated on tap).
    # Section 2: target/action drawer + the drawer that starts open (@y).
    @structure = [
      {
        drawers:
          [
            {target: @z, action: :do_nothing},
            {controller: @x},
          ]
      },
      {
        drawers:
          [
            {controller: SomeController},
          ]
      },
      {
        drawers:
          [
            {target: @z, action: :change_num},
            {controller: @y, open: true},
          ]
      },
    ]
    @bureau = Bureau::Bureau.new(structure:@structure, state: :open)
  end
  it "removes the previously open drawer when a new one with a controller is tapped" do
    new_active_path = NSIndexPath.indexPathForRow(1, inSection:0)
    @bureau.tableView(@bureau.table, didSelectRowAtIndexPath:new_active_path)
    # @y (last drawer of the last section) was the open one; it must be closed
    # and fully detached from the view/controller hierarchy.
    @bureau.structure.last[:drawers].last[:open].should.not == true
    @bureau.childViewControllers.should.not.include @y
    @bureau.view.subviews.should.not.include @y.view
  end
  it "does not remove the previously open drawer when a new one without a controller is tapped" do
    # Row 0 of section 0 is a target/action drawer — tapping it should leave
    # the open controller drawer (@y) alone.
    new_active_path = NSIndexPath.indexPathForRow(0, inSection:0)
    @bureau.tableView(@bureau.table, didSelectRowAtIndexPath:new_active_path)
    @bureau.structure.last[:drawers].last[:open].should == true
    @bureau.childViewControllers.should.include @y
    @bureau.view.subviews.should.include @y.view
  end
  it "opens the touched drawer when a new one with a controller is tapped" do
    new_active_path = NSIndexPath.indexPathForRow(1, inSection:0)
    @bureau.tableView(@bureau.table, didSelectRowAtIndexPath:new_active_path)
    @bureau.structure[0][:drawers][1][:open].should == true
    @bureau.childViewControllers.should.include @x
    @bureau.view.subviews.should.include @x.view
    @x.view.frame.should == Bureau::Frame::closed
  end
  it "executes the action of a target/action drawer when one is tapped" do
    tapped_path = NSIndexPath.indexPathForRow(0, inSection:2)
    @z.num.should == 5
    @bureau.tableView(@bureau.table, didSelectRowAtIndexPath:tapped_path)
    @z.num.should == 10
  end
  it "instantiates controllers set as classes when tapped" do
    tapped_path = NSIndexPath.indexPathForRow(0, inSection:1)
    @bureau.tableView(@bureau.table, didSelectRowAtIndexPath:tapped_path)
    @bureau.open_drawer[:controller_instance].should.be.instance_of SomeController
    # The freshly built instance must get its back-reference to the bureau.
    @bureau.open_drawer[:controller_instance].bureau.should == @bureau
  end
  it "re-instantiates controllers set as classes when tapped" do
    # Open the class-backed drawer, switch away, then reopen it: a new
    # controller instance should be built on each open.
    first_tapped_path = NSIndexPath.indexPathForRow(0, inSection:1)
    @bureau.tableView(@bureau.table, didSelectRowAtIndexPath:first_tapped_path)
    old_instance = @bureau.open_drawer[:controller_instance]
    second_tapped_path = NSIndexPath.indexPathForRow(1, inSection:0)
    @bureau.tableView(@bureau.table, didSelectRowAtIndexPath:second_tapped_path)
    @bureau.tableView(@bureau.table, didSelectRowAtIndexPath:first_tapped_path)
    new_instance = @bureau.open_drawer[:controller_instance]
    new_instance.should.not == old_instance
  end
end
|
require 'yajl/ffi'
require 'minitest/autorun'
describe Yajl::FFI::Parser do
subject { Yajl::FFI::Parser.new }
describe 'parsing a document' do
it 'rejects documents containing bad start character' do
expected = [:error]
assert_equal expected, events('a')
end
it 'rejects documents starting with period' do
expected = [:error]
assert_equal expected, events('.')
end
it 'parses a null value document' do
expected = [:start_document, [:value, nil], :end_document]
assert_equal expected, events('null')
end
it 'parses a false value document' do
expected = [:start_document, [:value, false], :end_document]
assert_equal expected, events('false')
end
it 'parses a true value document' do
expected = [:start_document, [:value, true], :end_document]
assert_equal expected, events('true')
end
it 'parses a string document' do
expected = [:start_document, [:value, "test"], :end_document]
assert_equal expected, events('"test"')
end
it 'parses an integer value document' do
expected = [:start_document, [:value, 12], :end_document]
events = events('12', subject)
assert events.empty?
subject.finish
assert_equal expected, events
end
it 'parses a float value document' do
expected = [:start_document, [:value, 12.1], :end_document]
events = events('12.1', subject)
assert events.empty?
subject.finish
assert_equal expected, events
end
it 'parses a value document with leading whitespace' do
expected = [:start_document, [:value, false], :end_document]
assert_equal expected, events(' false ')
end
it 'parses array documents' do
expected = [:start_document, :start_array, :end_array, :end_document]
assert_equal expected, events('[]')
assert_equal expected, events('[ ]')
assert_equal expected, events(' [] ')
assert_equal expected, events(' [ ] ')
end
it 'parses object documents' do
expected = [:start_document, :start_object, :end_object, :end_document]
assert_equal expected, events('{}')
assert_equal expected, events('{ }')
assert_equal expected, events(' {} ')
assert_equal expected, events(' { } ')
end
it 'rejects documents with trailing characters' do
expected = [:start_document, :start_object, :end_object, :end_document, :error]
assert_equal expected, events('{}a')
assert_equal expected, events('{ } 12')
assert_equal expected, events(' {} false')
assert_equal expected, events(' { }, {}')
end
it 'ignores whitespace around tokens, preserves it within strings' do
json = %Q{
{ " key 1 " : \t [
1, 2, " my string ",\r
false, true, null ]
}
}
expected = [
:start_document,
:start_object,
[:key, " key 1 "],
:start_array,
[:value, 1],
[:value, 2],
[:value, " my string "],
[:value, false],
[:value, true],
[:value, nil],
:end_array,
:end_object,
:end_document
]
assert_equal expected, events(json)
end
it 'rejects partial keyword tokens' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events('[tru]')
assert_equal expected, events('[fal]')
assert_equal expected, events('[nul,true]')
assert_equal expected, events('[fals1]')
end
it 'parses single keyword tokens' do
expected = [:start_document, :start_array, [:value, true], :end_array, :end_document]
assert_equal expected, events('[true]')
end
it 'parses keywords in series' do
expected = [:start_document, :start_array, [:value, true], [:value, nil], :end_array, :end_document]
assert_equal expected, events('[true, null]')
end
end
describe 'finishing the parse' do
it 'rejects finish with no json data provided' do
-> { subject.finish }.must_raise Yajl::FFI::ParserError
end
it 'rejects partial null keyword' do
subject << 'nul'
-> { subject.finish }.must_raise Yajl::FFI::ParserError
end
it 'rejects partial true keyword' do
subject << 'tru'
-> { subject.finish }.must_raise Yajl::FFI::ParserError
end
it 'rejects partial false keyword' do
subject << 'fals'
-> { subject.finish }.must_raise Yajl::FFI::ParserError
end
it 'rejects partial float literal' do
subject << '42.'
-> { subject.finish }.must_raise Yajl::FFI::ParserError
end
it 'does nothing on subsequent finish' do
begin
subject << 'false'
subject.finish
subject.finish
rescue
fail 'raised unexpected error'
end
end
end
  # JSON number grammar: zero handling, sign rules, floats, and exponents.
  describe 'parsing number tokens' do
    it 'rejects invalid negative numbers' do
      expected = [:start_document, :start_array, :error]
      assert_equal expected, events('[-]')
      expected = [:start_document, :start_array, [:value, 1], :error]
      assert_equal expected, events('[1-0]')
    end
    it 'parses integer zero' do
      expected = [:start_document, :start_array, [:value, 0], :end_array, :end_document]
      assert_equal expected, events('[0]')
      assert_equal expected, events('[-0]')
    end
    it 'parses float zero' do
      expected = [:start_document, :start_array, [:value, 0.0], :end_array, :end_document]
      assert_equal expected, events('[0.0]')
      assert_equal expected, events('[-0.0]')
    end
    it 'rejects multi zero' do
      expected = [:start_document, :start_array, [:value, 0], :error]
      assert_equal expected, events('[00]')
      assert_equal expected, events('[-00]')
    end
    it 'rejects integers that start with zero' do
      expected = [:start_document, :start_array, [:value, 0], :error]
      assert_equal expected, events('[01]')
      assert_equal expected, events('[-01]')
    end
    it 'parses integer tokens' do
      expected = [:start_document, :start_array, [:value, 1], :end_array, :end_document]
      assert_equal expected, events('[1]')
      expected = [:start_document, :start_array, [:value, -1], :end_array, :end_document]
      assert_equal expected, events('[-1]')
      expected = [:start_document, :start_array, [:value, 123], :end_array, :end_document]
      assert_equal expected, events('[123]')
      expected = [:start_document, :start_array, [:value, -123], :end_array, :end_document]
      assert_equal expected, events('[-123]')
    end
    it 'parses float tokens' do
      expected = [:start_document, :start_array, [:value, 1.0], :end_array, :end_document]
      assert_equal expected, events('[1.0]')
      assert_equal expected, events('[1.00]')
    end
    it 'parses negative floats' do
      expected = [:start_document, :start_array, [:value, -1.0], :end_array, :end_document]
      assert_equal expected, events('[-1.0]')
      assert_equal expected, events('[-1.00]')
    end
    it 'parses multi-digit floats' do
      expected = [:start_document, :start_array, [:value, 123.012], :end_array, :end_document]
      assert_equal expected, events('[123.012]')
      assert_equal expected, events('[123.0120]')
    end
    it 'parses negative multi-digit floats' do
      expected = [:start_document, :start_array, [:value, -123.012], :end_array, :end_document]
      assert_equal expected, events('[-123.012]')
      assert_equal expected, events('[-123.0120]')
    end
    it 'rejects floats missing leading zero' do
      expected = [:start_document, :start_array, :error]
      assert_equal expected, events('[.1]')
      assert_equal expected, events('[-.1]')
      assert_equal expected, events('[.01]')
      assert_equal expected, events('[-.01]')
    end
    it 'rejects float missing fraction' do
      expected = [:start_document, :start_array, :error]
      assert_equal expected, events('[.]')
      assert_equal expected, events('[..]')
      assert_equal expected, events('[0.]')
      assert_equal expected, events('[12.]')
    end
    it 'parses positive exponent integers' do
      # 2.12e2 == 212: when the exponent cancels the fraction the parser
      # reports an Integer value rather than a Float.
      expected = [:start_document, :start_array, [:value, 212], :end_array, :end_document]
      assert_equal expected, events('[2.12e2]')
      assert_equal expected, events('[2.12e02]')
      assert_equal expected, events('[2.12e+2]')
      assert_equal expected, events('[2.12e+02]')
    end
    it 'parses positive exponent floats' do
      expected = [:start_document, :start_array, [:value, 21.2], :end_array, :end_document]
      assert_equal expected, events('[2.12e1]')
      assert_equal expected, events('[2.12e01]')
      assert_equal expected, events('[2.12e+1]')
      assert_equal expected, events('[2.12e+01]')
    end
it 'parses negative exponent' do
expected = [:start_document, :start_array, [:value, 0.0212], :end_array, :end_document]
assert_equal expected, events('[2.12e-2]')
assert_equal expected, events('[2.12e-02]')
assert_equal expected, events('[2.12e-2]')
assert_equal expected, events('[2.12e-02]')
end
    it 'parses zero exponent floats' do
      expected = [:start_document, :start_array, [:value, 2.12], :end_array, :end_document]
      assert_equal expected, events('[2.12e0]')
      assert_equal expected, events('[2.12e00]')
      assert_equal expected, events('[2.12e-0]')
      assert_equal expected, events('[2.12e-00]')
    end
    it 'parses zero exponent integers' do
      # An exponent forces a Float result even when it is zero: [:value, 2.0].
      expected = [:start_document, :start_array, [:value, 2.0], :end_array, :end_document]
      assert_equal expected, events('[2e0]')
      assert_equal expected, events('[2e00]')
      assert_equal expected, events('[2e-0]')
      assert_equal expected, events('[2e-00]')
    end
    it 'rejects missing exponent' do
      expected = [:start_document, :start_array, :error]
      assert_equal expected, events('[e]')
      assert_equal expected, events('[1e]')
      assert_equal expected, events('[1e-]')
      assert_equal expected, events('[1e--]')
      assert_equal expected, events('[1e+]')
      assert_equal expected, events('[1e++]')
      assert_equal expected, events('[0.e]')
      assert_equal expected, events('[10.e]')
    end
    it 'rejects float with trailing character' do
      expected = [:start_document, :start_array, [:value, 0.0], :error]
      assert_equal expected, events('[0.0q]')
    end
    it 'rejects integer with trailing character' do
      expected = [:start_document, :start_array, [:value, 1], :error]
      assert_equal expected, events('[1q]')
    end
  end
describe 'parsing string tokens' do
describe 'parsing two-character escapes' do
it 'rejects invalid escape characters' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events('["\\a"]')
end
it 'parses quotation mark' do
expected = [:start_document, :start_array, [:value, "\""], :end_array, :end_document]
assert_equal expected, events('["\""]')
end
it 'parses reverse solidus' do
expected = [:start_document, :start_array, [:value, "\\"], :end_array, :end_document]
assert_equal expected, events('["\\\"]')
end
it 'parses solidus' do
expected = [:start_document, :start_array, [:value, "/"], :end_array, :end_document]
assert_equal expected, events('["\/"]')
end
it 'parses backspace' do
expected = [:start_document, :start_array, [:value, "\b"], :end_array, :end_document]
assert_equal expected, events('["\b"]')
end
it 'parses form feed' do
expected = [:start_document, :start_array, [:value, "\f"], :end_array, :end_document]
assert_equal expected, events('["\f"]')
end
it 'parses line feed' do
expected = [:start_document, :start_array, [:value, "\n"], :end_array, :end_document]
assert_equal expected, events('["\n"]')
end
it 'parses carriage return' do
expected = [:start_document, :start_array, [:value, "\r"], :end_array, :end_document]
assert_equal expected, events('["\r"]')
end
it 'parses tab' do
expected = [:start_document, :start_array, [:value, "\t"], :end_array, :end_document]
assert_equal expected, events('["\t"]')
end
it 'parses a series of escapes with whitespace' do
expected = [:start_document, :start_array, [:value, "\" \\ / \b \f \n \r \t"], :end_array, :end_document]
assert_equal expected, events('["\" \\\ \/ \b \f \n \r \t"]')
end
it 'parses a series of escapes without whitespace' do
expected = [:start_document, :start_array, [:value, "\"\\/\b\f\n\r\t"], :end_array, :end_document]
assert_equal expected, events('["\"\\\\/\b\f\n\r\t"]')
end
it 'parses a series of escapes with duplicate characters between them' do
expected = [:start_document, :start_array, [:value, "\"t\\b/f\bn\f/\nn\rr\t"], :end_array, :end_document]
assert_equal expected, events('["\"t\\\b\/f\bn\f/\nn\rr\t"]')
end
end
describe 'parsing control characters' do
it 'rejects control character in array' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events("[\" \u0000 \"]")
end
it 'rejects control character in object' do
expected = [:start_document, :start_object, :error]
assert_equal expected, events("{\" \u0000 \":12}")
end
it 'parses escaped control character' do
expected = [:start_document, :start_array, [:value, "\u0000"], :end_array, :end_document]
assert_equal expected, events('["\\u0000"]')
end
it 'parses escaped control character in object key' do
expected = [:start_document, :start_object, [:key, "\u0000"], [:value, 12], :end_object, :end_document]
assert_equal expected, events('{"\\u0000": 12}')
end
it 'parses non-control character' do
# del ascii 127 is allowed unescaped in json
expected = [:start_document, :start_array, [:value, " \u007F "], :end_array, :end_document]
assert_equal expected, events("[\" \u007f \"]")
end
end
describe 'parsing unicode escape sequences' do
it 'parses escaped ascii character' do
a = "\x61"
escaped = '\u0061'
expected = [:start_document, :start_array, [:value, a], :end_array, :end_document]
assert_equal expected, events('["' + escaped + '"]')
end
it 'parses un-escaped raw unicode' do
# U+1F602 face with tears of joy
face = "\xf0\x9f\x98\x82"
expected = [:start_document, :start_array, [:value, face], :end_array, :end_document]
assert_equal expected, events('["' + face + '"]')
end
it 'parses escaped unicode surrogate pairs' do
# U+1F602 face with tears of joy
face = "\xf0\x9f\x98\x82"
escaped = '\uD83D\uDE02'
expected = [:start_document, :start_array, [:value, face], :end_array, :end_document]
assert_equal expected, events('["' + escaped + '"]')
end
it 'rejects partial unicode escapes' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events('[" \\u "]')
assert_equal expected, events('[" \\u2 "]')
assert_equal expected, events('[" \\u26 "]')
assert_equal expected, events('[" \\u260 "]')
end
it 'parses unicode escapes' do
# U+2603 snowman
snowman = "\xe2\x98\x83"
escaped = '\u2603'
expected = [:start_document, :start_array, [:value, snowman], :end_array, :end_document]
assert_equal expected, events('["' + escaped + '"]')
expected = [:start_document, :start_array, [:value, 'snow' + snowman + ' man'], :end_array, :end_document]
assert_equal expected, events('["snow' + escaped + ' man"]')
expected = [:start_document, :start_array, [:value, 'snow' + snowman + '3 man'], :end_array, :end_document]
assert_equal expected, events('["snow' + escaped + '3 man"]')
expected = [:start_document, :start_object, [:key, 'snow' + snowman + '3 man'], [:value, 1], :end_object, :end_document]
assert_equal expected, events('{"snow\\u26033 man": 1}')
end
end
describe 'parsing unicode escapes with surrogate pairs' do
it 'converts missing second pair to question mark' do
expected = [:start_document, :start_array, [:value, '?'], :end_array, :end_document]
assert_equal expected, events('["\uD834"]')
end
it 'rejects missing first pair' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events('["\uDD1E"]')
end
it 'rejects double second pair' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events('["\uDD1E\uDD1E"]')
end
it 'rejects reversed pair' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events('["\uDD1E\uD834"]')
end
it 'parses correct pairs in object keys and values' do
# U+1D11E G-Clef
clef = "\xf0\x9d\x84\x9e"
expected = [
:start_document,
:start_object,
[:key, clef],
[:value, "g\u{1D11E}clef"],
:end_object,
:end_document
]
assert_equal expected, events(%q{ {"\uD834\uDD1E": "g\uD834\uDD1Eclef"} })
end
end
end
  # Array structure: nesting, comma placement, and malformed closers.
  describe 'parsing arrays' do
    it 'rejects trailing comma' do
      expected = [:start_document, :start_array, [:value, 12], :error]
      assert_equal expected, events('[12, ]')
    end
    it 'parses nested empty array' do
      expected = [:start_document, :start_array, :start_array, :end_array, :end_array, :end_document]
      assert_equal expected, events('[[]]')
    end
    it 'parses nested array with value' do
      expected = [:start_document, :start_array, :start_array, [:value, 2.1], :end_array, :end_array, :end_document]
      assert_equal expected, events('[[ 2.10 ]]')
    end
    it 'rejects malformed arrays' do
      expected = [:start_document, :start_array, :error]
      assert_equal expected, events('[}')
      assert_equal expected, events('[,]')
      assert_equal expected, events('[, 12]')
    end
it 'rejects malformed nested arrays' do
expected = [:start_document, :start_array, :start_array, :error]
assert_equal(expected, events('[[}]'))
assert_equal expected, events('[[}]')
assert_equal expected, events('[[,]]')
end
    it 'rejects malformed array value lists' do
      expected = [:start_document, :start_array, [:value, "test"], :error]
      assert_equal expected, events('["test"}')
      assert_equal expected, events('["test",]')
      assert_equal expected, events('["test" "test"]')
      assert_equal expected, events('["test" 12]')
    end
    it 'parses array with value' do
      expected = [:start_document, :start_array, [:value, "test"], :end_array, :end_document]
      assert_equal expected, events('["test"]')
    end
    it 'parses array with value list' do
      expected = [
        :start_document,
        :start_array,
        [:value, 1],
        [:value, 2],
        [:value, nil],
        [:value, 12.1],
        [:value, "test"],
        :end_array,
        :end_document
      ]
      assert_equal expected, events('[1,2, null, 12.1,"test"]')
    end
  end
describe 'parsing objects' do
it 'rejects malformed objects' do
expected = [:start_document, :start_object, :error]
assert_equal expected, events('{]')
assert_equal expected, events('{:}')
end
it 'parses single key object' do
expected = [:start_document, :start_object, [:key, "key 1"], [:value, 12], :end_object, :end_document]
assert_equal expected, events('{"key 1" : 12}')
end
it 'parses object key value list' do
expected = [
:start_document,
:start_object,
[:key, "key 1"], [:value, 12],
[:key, "key 2"], [:value, "two"],
:end_object,
:end_document
]
assert_equal expected, events('{"key 1" : 12, "key 2":"two"}')
end
it 'rejects object key with no value' do
expected = [
:start_document,
:start_object,
[:key, "key"],
:start_array,
[:value, nil],
[:value, false],
[:value, true],
:end_array,
[:key, "key 2"],
:error
]
assert_equal expected, events('{"key": [ null , false , true ] ,"key 2"}')
end
it 'rejects object with trailing comma' do
expected = [:start_document, :start_object, [:key, "key 1"], [:value, 12], :error]
assert_equal expected, events('{"key 1" : 12,}')
end
end
describe 'parsing unicode bytes' do
it 'parses single byte utf-8' do
expected = [:start_document, :start_array, [:value, "test"], :end_array, :end_document]
assert_equal expected, events('["test"]')
end
it 'parses full two byte utf-8' do
expected = [
:start_document,
:start_array,
[:value, "résumé"],
[:value, "éé"],
:end_array,
:end_document
]
assert_equal expected, events("[\"résumé\", \"é\xC3\xA9\"]")
end
# Parser should throw an error when only one byte of a two byte character
# is available. The \xC3 byte is the first byte of the é character.
it 'rejects a partial two byte utf-8 string' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events("[\"\xC3\"]")
end
it 'parses valid two byte utf-8 string' do
expected = [:start_document, :start_array, [:value, 'é'], :end_array, :end_document]
assert_equal expected, events("[\"\xC3\xA9\"]")
end
it 'parses full three byte utf-8 string' do
expected = [
:start_document,
:start_array,
[:value, "snow\u2603man"],
[:value, "\u2603\u2603"],
:end_array,
:end_document
]
assert_equal expected, events("[\"snow\u2603man\", \"\u2603\u2603\"]")
end
it 'rejects one byte of three byte utf-8 string' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events("[\"\xE2\"]")
end
it 'rejects two bytes of three byte utf-8 string' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events("[\"\xE2\x98\"]")
end
it 'parses full three byte utf-8 string' do
expected = [:start_document, :start_array, [:value, "\u2603"], :end_array, :end_document]
assert_equal expected, events("[\"\xE2\x98\x83\"]")
end
it 'parses full four byte utf-8 string' do
expected = [
:start_document,
:start_array,
[:value, "\u{10102} check mark"],
:end_array,
:end_document
]
assert_equal expected, events("[\"\u{10102} check mark\"]")
end
it 'rejects one byte of four byte utf-8 string' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events("[\"\xF0\"]")
end
it 'rejects two bytes of four byte utf-8 string' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events("[\"\xF0\x90\"]")
end
it 'rejects three bytes of four byte utf-8 string' do
expected = [:start_document, :start_array, :error]
assert_equal expected, events("[\"\xF0\x90\x84\"]")
end
it 'parses full four byte utf-8 string' do
expected = [:start_document, :start_array, [:value, "\u{10102}"], :end_array, :end_document]
assert_equal expected, events("[\"\xF0\x90\x84\x82\"]")
end
end
describe 'parsing json text from the module' do
it 'parses an array document' do
result = Yajl::FFI::Parser.parse('[1,2,3]')
assert_equal [1, 2, 3], result
end
it 'parses a true keyword literal document' do
result = Yajl::FFI::Parser.parse('true')
assert_equal true, result
end
it 'parses a false keyword literal document' do
result = Yajl::FFI::Parser.parse('false')
assert_equal false, result
end
it 'parses a null keyword literal document' do
result = Yajl::FFI::Parser.parse('null')
assert_equal nil, result
end
it 'parses a string literal document' do
result = Yajl::FFI::Parser.parse('"hello"')
assert_equal 'hello', result
end
it 'parses an integer literal document' do
result = Yajl::FFI::Parser.parse('42')
assert_equal 42, result
end
it 'parses a float literal document' do
result = Yajl::FFI::Parser.parse('42.12')
assert_equal 42.12, result
end
it 'rejects a partial float literal document' do
assert_raises(Yajl::FFI::ParserError) do
Yajl::FFI::Parser.parse('42.')
end
end
it 'rejects a partial document' do
assert_raises(Yajl::FFI::ParserError) do
Yajl::FFI::Parser.parse('{')
end
end
it 'rejects an empty document' do
assert_raises(Yajl::FFI::ParserError) do
Yajl::FFI::Parser.parse('')
end
end
end
it 'registers observers in initializer block' do
events = []
parser = Yajl::FFI::Parser.new do
start_document { events << :start_document }
end_document { events << :end_document }
start_object { events << :start_object }
end_object { events << :end_object }
key {|k| events << [:key, k] }
value {|v| events << [:value, v] }
end
parser << '{"key":12}'
expected = [:start_document, :start_object, [:key, "key"], [:value, 12], :end_object, :end_document]
assert_equal expected, events
end
private
# Run a worst case, one byte at a time, parse against the JSON string and
# return a list of events generated by the parser. A special :error event is
# included if the parser threw an exception.
#
# json - The String to parse.
# parser - The optional Parser instance to use.
#
  # Returns the Array of events collected by the Events instance.
def events(json, parser = nil)
parser ||= Yajl::FFI::Parser.new
collector = Events.new(parser)
begin
json.each_byte {|byte| parser << [byte].pack('C') }
rescue Yajl::FFI::ParserError
collector.error
end
collector.events
end
# Dynamically map methods in this class to parser callback methods
# so we can collect parser events for inspection by test cases.
class Events
  METHODS = %w[start_document end_document start_object end_object start_array end_array key value]

  attr_reader :events

  # Subscribe one recorder method per parser callback.
  def initialize(parser)
    @events = []
    METHODS.each { |name| parser.send(name, &method(name)) }
  end

  # Record a parse failure observed by the caller.
  def error
    @events.push(:error)
  end

  # One recorder per callback: events with no payload are stored as a bare
  # symbol, events carrying arguments as [symbol, *args].
  METHODS.each do |name|
    define_method(name) do |*args|
      if args.empty?
        @events.push(name.to_sym)
      else
        @events.push([name.to_sym, *args])
      end
    end
  end
end
end
Add zero with exponent tests.
require 'yajl/ffi'
require 'minitest/autorun'
describe Yajl::FFI::Parser do
subject { Yajl::FFI::Parser.new }
# Document-level parsing: top-level values, arrays, objects, whitespace
# handling and keyword tokens. All input is fed one byte at a time via
# the `events` helper below.
describe 'parsing a document' do
  it 'rejects documents containing bad start character' do
    expected = [:error]
    assert_equal expected, events('a')
  end

  it 'rejects documents starting with period' do
    expected = [:error]
    assert_equal expected, events('.')
  end

  it 'parses a null value document' do
    expected = [:start_document, [:value, nil], :end_document]
    assert_equal expected, events('null')
  end

  it 'parses a false value document' do
    expected = [:start_document, [:value, false], :end_document]
    assert_equal expected, events('false')
  end

  it 'parses a true value document' do
    expected = [:start_document, [:value, true], :end_document]
    assert_equal expected, events('true')
  end

  it 'parses a string document' do
    expected = [:start_document, [:value, "test"], :end_document]
    assert_equal expected, events('"test"')
  end

  # A bare number document is not complete until finish is called, because
  # more digits could still arrive; no events are expected before finish.
  # Note: the local `events` holds the collector's array, which is mutated
  # in place when finish fires the callbacks.
  it 'parses an integer value document' do
    expected = [:start_document, [:value, 12], :end_document]
    events = events('12', subject)
    assert events.empty?
    subject.finish
    assert_equal expected, events
  end

  it 'parses a float value document' do
    expected = [:start_document, [:value, 12.1], :end_document]
    events = events('12.1', subject)
    assert events.empty?
    subject.finish
    assert_equal expected, events
  end

  it 'parses a value document with leading whitespace' do
    expected = [:start_document, [:value, false], :end_document]
    assert_equal expected, events(' false ')
  end

  it 'parses array documents' do
    expected = [:start_document, :start_array, :end_array, :end_document]
    assert_equal expected, events('[]')
    assert_equal expected, events('[ ]')
    assert_equal expected, events(' [] ')
    assert_equal expected, events(' [ ] ')
  end

  it 'parses object documents' do
    expected = [:start_document, :start_object, :end_object, :end_document]
    assert_equal expected, events('{}')
    assert_equal expected, events('{ }')
    assert_equal expected, events(' {} ')
    assert_equal expected, events(' { } ')
  end

  it 'rejects documents with trailing characters' do
    expected = [:start_document, :start_object, :end_object, :end_document, :error]
    assert_equal expected, events('{}a')
    assert_equal expected, events('{ } 12')
    assert_equal expected, events(' {} false')
    assert_equal expected, events(' { }, {}')
  end

  it 'ignores whitespace around tokens, preserves it within strings' do
    json = %Q{
      { " key 1 " : \t [
        1, 2, " my string ",\r
        false, true, null ]
      }
    }
    expected = [
      :start_document,
      :start_object,
      [:key, " key 1 "],
      :start_array,
      [:value, 1],
      [:value, 2],
      [:value, " my string "],
      [:value, false],
      [:value, true],
      [:value, nil],
      :end_array,
      :end_object,
      :end_document
    ]
    assert_equal expected, events(json)
  end

  it 'rejects partial keyword tokens' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events('[tru]')
    assert_equal expected, events('[fal]')
    assert_equal expected, events('[nul,true]')
    assert_equal expected, events('[fals1]')
  end

  it 'parses single keyword tokens' do
    expected = [:start_document, :start_array, [:value, true], :end_array, :end_document]
    assert_equal expected, events('[true]')
  end

  it 'parses keywords in series' do
    expected = [:start_document, :start_array, [:value, true], [:value, nil], :end_array, :end_document]
    assert_equal expected, events('[true, null]')
  end
end
# Calling finish flushes any buffered token; incomplete keywords and
# number literals must raise at that point.
#
# NOTE(review): the original used `-> { ... }.must_raise`, which Minitest
# deprecated for bare procs (expectations require the `_()` wrapper).
# Rewritten with assert_raises, matching the module-level parse tests in
# this file.
describe 'finishing the parse' do
  it 'rejects finish with no json data provided' do
    assert_raises(Yajl::FFI::ParserError) { subject.finish }
  end

  it 'rejects partial null keyword' do
    subject << 'nul'
    assert_raises(Yajl::FFI::ParserError) { subject.finish }
  end

  it 'rejects partial true keyword' do
    subject << 'tru'
    assert_raises(Yajl::FFI::ParserError) { subject.finish }
  end

  it 'rejects partial false keyword' do
    subject << 'fals'
    assert_raises(Yajl::FFI::ParserError) { subject.finish }
  end

  it 'rejects partial float literal' do
    subject << '42.'
    assert_raises(Yajl::FFI::ParserError) { subject.finish }
  end

  # An unexpected raise fails the test on its own, with a real backtrace;
  # the original begin/rescue/fail wrapper only obscured the failure site.
  it 'does nothing on subsequent finish' do
    subject << 'false'
    subject.finish
    subject.finish
  end
end
# Number token parsing: integers, floats and exponent forms, including
# the literal-zero edge cases.
describe 'parsing number tokens' do
  it 'rejects invalid negative numbers' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events('[-]')
    expected = [:start_document, :start_array, [:value, 1], :error]
    assert_equal expected, events('[1-0]')
  end

  it 'parses integer zero' do
    expected = [:start_document, :start_array, [:value, 0], :end_array, :end_document]
    assert_equal expected, events('[0]')
    assert_equal expected, events('[-0]')
  end

  it 'parses float zero' do
    expected = [:start_document, :start_array, [:value, 0.0], :end_array, :end_document]
    assert_equal expected, events('[0.0]')
    assert_equal expected, events('[-0.0]')
  end

  it 'rejects multi zero' do
    expected = [:start_document, :start_array, [:value, 0], :error]
    assert_equal expected, events('[00]')
    assert_equal expected, events('[-00]')
  end

  it 'rejects integers that start with zero' do
    expected = [:start_document, :start_array, [:value, 0], :error]
    assert_equal expected, events('[01]')
    assert_equal expected, events('[-01]')
  end

  it 'parses integer tokens' do
    expected = [:start_document, :start_array, [:value, 1], :end_array, :end_document]
    assert_equal expected, events('[1]')
    expected = [:start_document, :start_array, [:value, -1], :end_array, :end_document]
    assert_equal expected, events('[-1]')
    expected = [:start_document, :start_array, [:value, 123], :end_array, :end_document]
    assert_equal expected, events('[123]')
    expected = [:start_document, :start_array, [:value, -123], :end_array, :end_document]
    assert_equal expected, events('[-123]')
  end

  it 'parses float tokens' do
    expected = [:start_document, :start_array, [:value, 1.0], :end_array, :end_document]
    assert_equal expected, events('[1.0]')
    assert_equal expected, events('[1.00]')
  end

  it 'parses negative floats' do
    expected = [:start_document, :start_array, [:value, -1.0], :end_array, :end_document]
    assert_equal expected, events('[-1.0]')
    assert_equal expected, events('[-1.00]')
  end

  it 'parses multi-digit floats' do
    expected = [:start_document, :start_array, [:value, 123.012], :end_array, :end_document]
    assert_equal expected, events('[123.012]')
    assert_equal expected, events('[123.0120]')
  end

  it 'parses negative multi-digit floats' do
    expected = [:start_document, :start_array, [:value, -123.012], :end_array, :end_document]
    assert_equal expected, events('[-123.012]')
    assert_equal expected, events('[-123.0120]')
  end

  it 'rejects floats missing leading zero' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events('[.1]')
    assert_equal expected, events('[-.1]')
    assert_equal expected, events('[.01]')
    assert_equal expected, events('[-.01]')
  end

  it 'rejects float missing fraction' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events('[.]')
    assert_equal expected, events('[..]')
    assert_equal expected, events('[0.]')
    assert_equal expected, events('[12.]')
  end

  # Zero with any exponent is yielded as a Float, not an Integer.
  it 'parses zero with implicit positive exponent as float' do
    expected = [:start_document, :start_array, [:value, 0.0], :end_array, :end_document]
    events = events('[0e2]')
    assert_equal expected, events
    assert_kind_of Float, events[2][1]
  end

  it 'parses zero with explicit positive exponent as float' do
    expected = [:start_document, :start_array, [:value, 0.0], :end_array, :end_document]
    events = events('[0e+2]')
    assert_equal expected, events
    assert_kind_of Float, events[2][1]
  end

  it 'parses zero with negative exponent as float' do
    expected = [:start_document, :start_array, [:value, 0.0], :end_array, :end_document]
    events = events('[0e-2]')
    assert_equal expected, events
    assert_kind_of Float, events[2][1]
  end

  it 'parses positive exponent integers' do
    expected = [:start_document, :start_array, [:value, 212], :end_array, :end_document]
    assert_equal expected, events('[2.12e2]')
    assert_equal expected, events('[2.12e02]')
    assert_equal expected, events('[2.12e+2]')
    assert_equal expected, events('[2.12e+02]')
  end

  it 'parses positive exponent floats' do
    expected = [:start_document, :start_array, [:value, 21.2], :end_array, :end_document]
    assert_equal expected, events('[2.12e1]')
    assert_equal expected, events('[2.12e01]')
    assert_equal expected, events('[2.12e+1]')
    assert_equal expected, events('[2.12e+01]')
  end

  # Fix: the original asserted '[2.12e-2]' and '[2.12e-02]' twice each;
  # the duplicated pair has been removed.
  it 'parses negative exponent' do
    expected = [:start_document, :start_array, [:value, 0.0212], :end_array, :end_document]
    assert_equal expected, events('[2.12e-2]')
    assert_equal expected, events('[2.12e-02]')
  end

  it 'parses zero exponent floats' do
    expected = [:start_document, :start_array, [:value, 2.12], :end_array, :end_document]
    assert_equal expected, events('[2.12e0]')
    assert_equal expected, events('[2.12e00]')
    assert_equal expected, events('[2.12e-0]')
    assert_equal expected, events('[2.12e-00]')
  end

  # An exponent promotes an integer mantissa to a Float (hence 2.0).
  it 'parses zero exponent integers' do
    expected = [:start_document, :start_array, [:value, 2.0], :end_array, :end_document]
    assert_equal expected, events('[2e0]')
    assert_equal expected, events('[2e00]')
    assert_equal expected, events('[2e-0]')
    assert_equal expected, events('[2e-00]')
  end

  it 'rejects missing exponent' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events('[e]')
    assert_equal expected, events('[1e]')
    assert_equal expected, events('[1e-]')
    assert_equal expected, events('[1e--]')
    assert_equal expected, events('[1e+]')
    assert_equal expected, events('[1e++]')
    assert_equal expected, events('[0.e]')
    assert_equal expected, events('[10.e]')
  end

  it 'rejects float with trailing character' do
    expected = [:start_document, :start_array, [:value, 0.0], :error]
    assert_equal expected, events('[0.0q]')
  end

  it 'rejects integer with trailing character' do
    expected = [:start_document, :start_array, [:value, 1], :error]
    assert_equal expected, events('[1q]')
  end
end
# String token parsing: two-character escapes, control characters and
# unicode escape sequences including surrogate pairs. The escape literals
# below mix Ruby-level and JSON-level escaping deliberately — do not
# "clean up" the quoting without re-deriving each byte sequence.
describe 'parsing string tokens' do
  describe 'parsing two-character escapes' do
    it 'rejects invalid escape characters' do
      expected = [:start_document, :start_array, :error]
      assert_equal expected, events('["\\a"]')
    end

    it 'parses quotation mark' do
      expected = [:start_document, :start_array, [:value, "\""], :end_array, :end_document]
      assert_equal expected, events('["\""]')
    end

    it 'parses reverse solidus' do
      expected = [:start_document, :start_array, [:value, "\\"], :end_array, :end_document]
      assert_equal expected, events('["\\\"]')
    end

    it 'parses solidus' do
      expected = [:start_document, :start_array, [:value, "/"], :end_array, :end_document]
      assert_equal expected, events('["\/"]')
    end

    it 'parses backspace' do
      expected = [:start_document, :start_array, [:value, "\b"], :end_array, :end_document]
      assert_equal expected, events('["\b"]')
    end

    it 'parses form feed' do
      expected = [:start_document, :start_array, [:value, "\f"], :end_array, :end_document]
      assert_equal expected, events('["\f"]')
    end

    it 'parses line feed' do
      expected = [:start_document, :start_array, [:value, "\n"], :end_array, :end_document]
      assert_equal expected, events('["\n"]')
    end

    it 'parses carriage return' do
      expected = [:start_document, :start_array, [:value, "\r"], :end_array, :end_document]
      assert_equal expected, events('["\r"]')
    end

    it 'parses tab' do
      expected = [:start_document, :start_array, [:value, "\t"], :end_array, :end_document]
      assert_equal expected, events('["\t"]')
    end

    it 'parses a series of escapes with whitespace' do
      expected = [:start_document, :start_array, [:value, "\" \\ / \b \f \n \r \t"], :end_array, :end_document]
      assert_equal expected, events('["\" \\\ \/ \b \f \n \r \t"]')
    end

    it 'parses a series of escapes without whitespace' do
      expected = [:start_document, :start_array, [:value, "\"\\/\b\f\n\r\t"], :end_array, :end_document]
      assert_equal expected, events('["\"\\\\/\b\f\n\r\t"]')
    end

    it 'parses a series of escapes with duplicate characters between them' do
      expected = [:start_document, :start_array, [:value, "\"t\\b/f\bn\f/\nn\rr\t"], :end_array, :end_document]
      assert_equal expected, events('["\"t\\\b\/f\bn\f/\nn\rr\t"]')
    end
  end

  describe 'parsing control characters' do
    # Raw (unescaped) control characters are illegal inside JSON strings.
    it 'rejects control character in array' do
      expected = [:start_document, :start_array, :error]
      assert_equal expected, events("[\" \u0000 \"]")
    end

    it 'rejects control character in object' do
      expected = [:start_document, :start_object, :error]
      assert_equal expected, events("{\" \u0000 \":12}")
    end

    it 'parses escaped control character' do
      expected = [:start_document, :start_array, [:value, "\u0000"], :end_array, :end_document]
      assert_equal expected, events('["\\u0000"]')
    end

    it 'parses escaped control character in object key' do
      expected = [:start_document, :start_object, [:key, "\u0000"], [:value, 12], :end_object, :end_document]
      assert_equal expected, events('{"\\u0000": 12}')
    end

    it 'parses non-control character' do
      # del ascii 127 is allowed unescaped in json
      expected = [:start_document, :start_array, [:value, " \u007F "], :end_array, :end_document]
      assert_equal expected, events("[\" \u007f \"]")
    end
  end

  describe 'parsing unicode escape sequences' do
    it 'parses escaped ascii character' do
      a = "\x61"
      escaped = '\u0061'
      expected = [:start_document, :start_array, [:value, a], :end_array, :end_document]
      assert_equal expected, events('["' + escaped + '"]')
    end

    it 'parses un-escaped raw unicode' do
      # U+1F602 face with tears of joy
      face = "\xf0\x9f\x98\x82"
      expected = [:start_document, :start_array, [:value, face], :end_array, :end_document]
      assert_equal expected, events('["' + face + '"]')
    end

    it 'parses escaped unicode surrogate pairs' do
      # U+1F602 face with tears of joy
      face = "\xf0\x9f\x98\x82"
      escaped = '\uD83D\uDE02'
      expected = [:start_document, :start_array, [:value, face], :end_array, :end_document]
      assert_equal expected, events('["' + escaped + '"]')
    end

    it 'rejects partial unicode escapes' do
      expected = [:start_document, :start_array, :error]
      assert_equal expected, events('[" \\u "]')
      assert_equal expected, events('[" \\u2 "]')
      assert_equal expected, events('[" \\u26 "]')
      assert_equal expected, events('[" \\u260 "]')
    end

    it 'parses unicode escapes' do
      # U+2603 snowman
      snowman = "\xe2\x98\x83"
      escaped = '\u2603'
      expected = [:start_document, :start_array, [:value, snowman], :end_array, :end_document]
      assert_equal expected, events('["' + escaped + '"]')
      expected = [:start_document, :start_array, [:value, 'snow' + snowman + ' man'], :end_array, :end_document]
      assert_equal expected, events('["snow' + escaped + ' man"]')
      expected = [:start_document, :start_array, [:value, 'snow' + snowman + '3 man'], :end_array, :end_document]
      assert_equal expected, events('["snow' + escaped + '3 man"]')
      expected = [:start_document, :start_object, [:key, 'snow' + snowman + '3 man'], [:value, 1], :end_object, :end_document]
      assert_equal expected, events('{"snow\\u26033 man": 1}')
    end
  end

  describe 'parsing unicode escapes with surrogate pairs' do
    # A high surrogate with no following low surrogate is replaced by '?'.
    it 'converts missing second pair to question mark' do
      expected = [:start_document, :start_array, [:value, '?'], :end_array, :end_document]
      assert_equal expected, events('["\uD834"]')
    end

    it 'rejects missing first pair' do
      expected = [:start_document, :start_array, :error]
      assert_equal expected, events('["\uDD1E"]')
    end

    it 'rejects double second pair' do
      expected = [:start_document, :start_array, :error]
      assert_equal expected, events('["\uDD1E\uDD1E"]')
    end

    it 'rejects reversed pair' do
      expected = [:start_document, :start_array, :error]
      assert_equal expected, events('["\uDD1E\uD834"]')
    end

    it 'parses correct pairs in object keys and values' do
      # U+1D11E G-Clef
      clef = "\xf0\x9d\x84\x9e"
      expected = [
        :start_document,
        :start_object,
        [:key, clef],
        [:value, "g\u{1D11E}clef"],
        :end_object,
        :end_document
      ]
      assert_equal expected, events(%q{ {"\uD834\uDD1E": "g\uD834\uDD1Eclef"} })
    end
  end
end
# Array parsing: nesting, separators and malformed structures.
describe 'parsing arrays' do
  it 'rejects trailing comma' do
    expected = [:start_document, :start_array, [:value, 12], :error]
    assert_equal expected, events('[12, ]')
  end

  it 'parses nested empty array' do
    expected = [:start_document, :start_array, :start_array, :end_array, :end_array, :end_document]
    assert_equal expected, events('[[]]')
  end

  it 'parses nested array with value' do
    expected = [:start_document, :start_array, :start_array, [:value, 2.1], :end_array, :end_array, :end_document]
    assert_equal expected, events('[[ 2.10 ]]')
  end

  it 'rejects malformed arrays' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events('[}')
    assert_equal expected, events('[,]')
    assert_equal expected, events('[, 12]')
  end

  # Fix: the original asserted '[[}]' twice (once with parens, once
  # without); the duplicate has been removed.
  it 'rejects malformed nested arrays' do
    expected = [:start_document, :start_array, :start_array, :error]
    assert_equal expected, events('[[}]')
    assert_equal expected, events('[[,]]')
  end

  it 'rejects malformed array value lists' do
    expected = [:start_document, :start_array, [:value, "test"], :error]
    assert_equal expected, events('["test"}')
    assert_equal expected, events('["test",]')
    assert_equal expected, events('["test" "test"]')
    assert_equal expected, events('["test" 12]')
  end

  it 'parses array with value' do
    expected = [:start_document, :start_array, [:value, "test"], :end_array, :end_document]
    assert_equal expected, events('["test"]')
  end

  it 'parses array with value list' do
    expected = [
      :start_document,
      :start_array,
      [:value, 1],
      [:value, 2],
      [:value, nil],
      [:value, 12.1],
      [:value, "test"],
      :end_array,
      :end_document
    ]
    assert_equal expected, events('[1,2, null, 12.1,"test"]')
  end
end
# Object parsing: key/value pairs, separators and malformed structures.
describe 'parsing objects' do
  it 'rejects malformed objects' do
    expected = [:start_document, :start_object, :error]
    assert_equal expected, events('{]')
    assert_equal expected, events('{:}')
  end

  it 'parses single key object' do
    expected = [:start_document, :start_object, [:key, "key 1"], [:value, 12], :end_object, :end_document]
    assert_equal expected, events('{"key 1" : 12}')
  end

  it 'parses object key value list' do
    expected = [
      :start_document,
      :start_object,
      [:key, "key 1"], [:value, 12],
      [:key, "key 2"], [:value, "two"],
      :end_object,
      :end_document
    ]
    assert_equal expected, events('{"key 1" : 12, "key 2":"two"}')
  end

  # The second key is emitted before the error because the key token itself
  # is valid; the failure happens at the closing brace with no value.
  it 'rejects object key with no value' do
    expected = [
      :start_document,
      :start_object,
      [:key, "key"],
      :start_array,
      [:value, nil],
      [:value, false],
      [:value, true],
      :end_array,
      [:key, "key 2"],
      :error
    ]
    assert_equal expected, events('{"key": [ null , false , true ] ,"key 2"}')
  end

  it 'rejects object with trailing comma' do
    expected = [:start_document, :start_object, [:key, "key 1"], [:value, 12], :error]
    assert_equal expected, events('{"key 1" : 12,}')
  end
end
# Raw UTF-8 byte handling: multi-byte characters must survive the
# one-byte-at-a-time feed, and truncated sequences must be rejected.
#
# Fix: two pairs of `it` blocks shared identical descriptions ("parses
# full three byte utf-8 string" and "parses full four byte utf-8 string");
# the single-character variants are renamed "parses valid ..." to match
# the two byte section's naming. (Minitest auto-numbers spec methods, so
# the duplicates did run — but failures were ambiguous to locate.)
describe 'parsing unicode bytes' do
  it 'parses single byte utf-8' do
    expected = [:start_document, :start_array, [:value, "test"], :end_array, :end_document]
    assert_equal expected, events('["test"]')
  end

  it 'parses full two byte utf-8' do
    expected = [
      :start_document,
      :start_array,
      [:value, "résumé"],
      [:value, "éé"],
      :end_array,
      :end_document
    ]
    assert_equal expected, events("[\"résumé\", \"é\xC3\xA9\"]")
  end

  # Parser should throw an error when only one byte of a two byte character
  # is available. The \xC3 byte is the first byte of the é character.
  it 'rejects a partial two byte utf-8 string' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events("[\"\xC3\"]")
  end

  it 'parses valid two byte utf-8 string' do
    expected = [:start_document, :start_array, [:value, 'é'], :end_array, :end_document]
    assert_equal expected, events("[\"\xC3\xA9\"]")
  end

  it 'parses full three byte utf-8 string' do
    expected = [
      :start_document,
      :start_array,
      [:value, "snow\u2603man"],
      [:value, "\u2603\u2603"],
      :end_array,
      :end_document
    ]
    assert_equal expected, events("[\"snow\u2603man\", \"\u2603\u2603\"]")
  end

  it 'rejects one byte of three byte utf-8 string' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events("[\"\xE2\"]")
  end

  it 'rejects two bytes of three byte utf-8 string' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events("[\"\xE2\x98\"]")
  end

  it 'parses valid three byte utf-8 string' do
    expected = [:start_document, :start_array, [:value, "\u2603"], :end_array, :end_document]
    assert_equal expected, events("[\"\xE2\x98\x83\"]")
  end

  it 'parses full four byte utf-8 string' do
    expected = [
      :start_document,
      :start_array,
      [:value, "\u{10102} check mark"],
      :end_array,
      :end_document
    ]
    assert_equal expected, events("[\"\u{10102} check mark\"]")
  end

  it 'rejects one byte of four byte utf-8 string' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events("[\"\xF0\"]")
  end

  it 'rejects two bytes of four byte utf-8 string' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events("[\"\xF0\x90\"]")
  end

  it 'rejects three bytes of four byte utf-8 string' do
    expected = [:start_document, :start_array, :error]
    assert_equal expected, events("[\"\xF0\x90\x84\"]")
  end

  it 'parses valid four byte utf-8 string' do
    expected = [:start_document, :start_array, [:value, "\u{10102}"], :end_array, :end_document]
    assert_equal expected, events("[\"\xF0\x90\x84\x82\"]")
  end
end
# Convenience API: Parser.parse builds the whole Ruby value in one call.
describe 'parsing json text from the module' do
  it 'parses an array document' do
    result = Yajl::FFI::Parser.parse('[1,2,3]')
    assert_equal [1, 2, 3], result
  end

  it 'parses a true keyword literal document' do
    result = Yajl::FFI::Parser.parse('true')
    assert_equal true, result
  end

  it 'parses a false keyword literal document' do
    result = Yajl::FFI::Parser.parse('false')
    assert_equal false, result
  end

  it 'parses a null keyword literal document' do
    result = Yajl::FFI::Parser.parse('null')
    # Fix: `assert_equal nil, result` is deprecated in Minitest (removed
    # in Minitest 6); assert_nil is the supported nil assertion.
    assert_nil result
  end

  it 'parses a string literal document' do
    result = Yajl::FFI::Parser.parse('"hello"')
    assert_equal 'hello', result
  end

  it 'parses an integer literal document' do
    result = Yajl::FFI::Parser.parse('42')
    assert_equal 42, result
  end

  it 'parses a float literal document' do
    result = Yajl::FFI::Parser.parse('42.12')
    assert_equal 42.12, result
  end

  it 'rejects a partial float literal document' do
    assert_raises(Yajl::FFI::ParserError) do
      Yajl::FFI::Parser.parse('42.')
    end
  end

  it 'rejects a partial document' do
    assert_raises(Yajl::FFI::ParserError) do
      Yajl::FFI::Parser.parse('{')
    end
  end

  it 'rejects an empty document' do
    assert_raises(Yajl::FFI::ParserError) do
      Yajl::FFI::Parser.parse('')
    end
  end
end
# The constructor block registers observers without an explicit receiver,
# yet still closes over the local `events` array — presumably the block is
# instance-evaled by Parser.new; TODO confirm against the Parser source.
it 'registers observers in initializer block' do
  events = []
  parser = Yajl::FFI::Parser.new do
    start_document { events << :start_document }
    end_document { events << :end_document }
    start_object { events << :start_object }
    end_object { events << :end_object }
    key {|k| events << [:key, k] }
    value {|v| events << [:value, v] }
  end
  parser << '{"key":12}'
  expected = [:start_document, :start_object, [:key, "key"], [:value, 12], :end_object, :end_document]
  assert_equal expected, events
end
private
# Run a worst case, one byte at a time, parse against the JSON string and
# return a list of events generated by the parser. A special :error event is
# included if the parser threw an exception.
#
# json - The String to parse.
# parser - The optional Parser instance to use.
#
# Returns the Array of collected events (symbols and [symbol, value]
# pairs) — not the Events collector itself, despite the original comment.
def events(json, parser = nil)
  parser ||= Yajl::FFI::Parser.new
  collector = Events.new(parser)
  begin
    # One byte per << exercises every resumable-parse boundary.
    json.each_byte {|byte| parser << [byte].pack('C') }
  rescue Yajl::FFI::ParserError
    collector.error
  end
  collector.events
end
# Dynamically map methods in this class to parser callback methods
# so we can collect parser events for inspection by test cases.
class Events
  METHODS = %w[start_document end_document start_object end_object start_array end_array key value]

  attr_reader :events

  # Subscribe one recorder method per parser callback.
  def initialize(parser)
    @events = []
    METHODS.each { |name| parser.send(name, &method(name)) }
  end

  # Record a parse failure observed by the caller.
  def error
    @events.push(:error)
  end

  # One recorder per callback: events with no payload are stored as a bare
  # symbol, events carrying arguments as [symbol, *args].
  METHODS.each do |name|
    define_method(name) do |*args|
      if args.empty?
        @events.push(name.to_sym)
      else
        @events.push([name.to_sym, *args])
      end
    end
  end
end
end
|
#! /usr/bin/env ruby
# frozen_string_literal: true
#
# check-disk
#
# DESCRIPTION:
# Uses the sys-filesystem gem to get filesystem mount points and metrics
#
# OUTPUT:
# plain text
#
# PLATFORMS:
# Linux, BSD, Windows
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: sys-filesystem
#
# USAGE:
#
# NOTES:
#
# LICENSE:
# Copyright 2015 Yieldbot Inc <Sensu-Plugins>
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'sensu-plugin/check/cli'
require 'sys/filesystem'
#
# Check Disk
#
# Sensu check that walks every mounted filesystem (via sys-filesystem) and
# reports WARNING/CRITICAL when byte or inode usage crosses the configured
# thresholds. Fixes: typos in user-visible strings — 'prvileged' ->
# 'privileged' in the -r option help, 'occured' -> 'occurred' in the
# unknown-status messages.
class CheckDisk < Sensu::Plugin::Check::CLI
  include Sys

  option :fstype,
         short: '-t TYPE[,TYPE]',
         description: 'Only check fs type(s)',
         proc: proc { |a| a.split(',') }

  option :ignoretype,
         short: '-x TYPE[,TYPE]',
         description: 'Ignore fs type(s)',
         proc: proc { |a| a.split(',') }

  option :ignoremnt,
         short: '-i MNT[,MNT]',
         description: 'Ignore mount point(s)',
         proc: proc { |a| a.split(',') }

  option :includemnt,
         description: 'Include only mount point(s)',
         short: '-I MNT[,MNT]',
         proc: proc { |a| a.split(',') }

  option :ignorepathre,
         short: '-p PATHRE',
         description: 'Ignore mount point(s) matching regular expression',
         proc: proc { |a| Regexp.new(a) }

  option :ignoreopt,
         short: '-o TYPE[.TYPE]',
         description: 'Ignore option(s)',
         proc: proc { |a| a.split('.') }

  option :ignore_reserved,
         description: 'Ignore bytes reserved for privileged processes',
         short: '-r',
         long: '--ignore-reserved',
         boolean: true,
         default: false

  option :bwarn,
         short: '-w PERCENT',
         description: 'Warn if PERCENT or more of disk full',
         proc: proc(&:to_i),
         default: 85

  option :bcrit,
         short: '-c PERCENT',
         description: 'Critical if PERCENT or more of disk full',
         proc: proc(&:to_i),
         default: 95

  option :iwarn,
         short: '-W PERCENT',
         description: 'Warn if PERCENT or more of inodes used',
         proc: proc(&:to_i),
         default: 85

  option :icrit,
         short: '-K PERCENT',
         description: 'Critical if PERCENT or more of inodes used',
         proc: proc(&:to_i),
         default: 95

  option :magic,
         short: '-m MAGIC',
         description: 'Magic factor to adjust warn/crit thresholds. Example: .9',
         proc: proc(&:to_f),
         default: 1.0

  option :normal,
         short: '-n NORMAL',
         description: 'Levels are not adapted for filesystems of exactly this '\
           'size, where levels are reduced for smaller filesystems and raised '\
           'for larger filesystems.',
         proc: proc(&:to_f),
         default: 20

  option :minimum,
         short: '-l MINIMUM',
         description: 'Minimum size to adjust (in GB)',
         proc: proc(&:to_f),
         default: 100

  # Setup variables
  #
  def initialize
    super
    @crit_fs = []
    @warn_fs = []
  end

  # Get mount data and run check_mount for every mount point that is not
  # excluded by the configured type/mount/option filters.
  #
  def fs_mounts
    begin
      mounts = Filesystem.mounts
    rescue StandardError
      unknown 'An error occurred getting the mount info'
    end
    mounts.each do |line|
      begin
        next if config[:fstype] && !config[:fstype].include?(line.mount_type)
        next if config[:ignoretype]&.include?(line.mount_type)
        next if config[:ignoremnt]&.include?(line.mount_point)
        next if config[:ignorepathre]&.match(line.mount_point)
        next if config[:ignoreopt]&.include?(line.options)
        next if config[:includemnt] && !config[:includemnt].include?(line.mount_point)
      rescue StandardError
        unknown 'An error occurred getting the mount info'
      end
      check_mount(line)
    end
  end

  # Adjust the percentages based on volume size
  #
  def adj_percent(size, percent)
    hsize = (size / (1024.0 * 1024.0)) / config[:normal].to_f
    felt = hsize**config[:magic]
    scale = felt / hsize
    100 - ((100 - percent) * scale)
  end

  # Check a single mount point against the byte and inode thresholds,
  # recording any breach in @crit_fs / @warn_fs.
  #
  def check_mount(line)
    begin
      fs_info = Filesystem.stat(line.mount_point)
    rescue StandardError
      @warn_fs << "#{line.mount_point} Unable to read."
      return
    end

    # devfs is a virtual filesystem; its usage numbers are meaningless.
    if line.mount_type == 'devfs'
      return
    end

    if fs_info.respond_to?(:inodes) && !fs_info.inodes.nil? # needed for windows
      percent_i = percent_inodes(fs_info)
      if percent_i >= config[:icrit]
        @crit_fs << "#{line.mount_point} #{percent_i}% inode usage"
      elsif percent_i >= config[:iwarn]
        @warn_fs << "#{line.mount_point} #{percent_i}% inode usage"
      end
    end

    percent_b = percent_bytes(fs_info)

    # Volumes below the -l minimum use the raw thresholds; larger ones get
    # thresholds scaled by the magic factor.
    if fs_info.bytes_total < (config[:minimum] * 1_000_000_000)
      bcrit = config[:bcrit]
      bwarn = config[:bwarn]
    else
      bcrit = adj_percent(fs_info.bytes_total, config[:bcrit])
      bwarn = adj_percent(fs_info.bytes_total, config[:bwarn])
    end

    used = to_human(fs_info.bytes_used)
    total = to_human(fs_info.bytes_total)
    if percent_b >= bcrit
      @crit_fs << "#{line.mount_point} #{percent_b}% bytes usage (#{used}/#{total})"
    elsif percent_b >= bwarn
      @warn_fs << "#{line.mount_point} #{percent_b}% bytes usage (#{used}/#{total})"
    end
  end

  # Format a byte count as a human readable string, e.g. "1.50 GiB".
  #
  def to_human(size)
    unit = [[1_099_511_627_776, 'TiB'], [1_073_741_824, 'GiB'], [1_048_576, 'MiB'], [1024, 'KiB'], [0, 'B']].detect { |u| size >= u[0] }
    format("%.2f #{unit[1]}", (size >= 1024 ? size.to_f / unit[0] : size))
  end

  # Determine the percent inode usage
  #
  def percent_inodes(fs_info)
    (100.0 - (100.0 * fs_info.inodes_free / fs_info.inodes)).round(2)
  end

  # Determine the percent byte usage. With --ignore-reserved, usage is
  # computed from bytes_available (space usable by unprivileged processes)
  # instead of bytes_free.
  #
  def percent_bytes(fs_info)
    if config[:ignore_reserved]
      (100.0 - (100.0 * fs_info.bytes_available / fs_info.bytes_total)).round(2)
    else
      (100.0 - (100.0 * fs_info.bytes_free / fs_info.bytes_total)).round(2)
    end
  end

  # Generate output
  #
  def usage_summary
    (@crit_fs + @warn_fs).join(', ')
  end

  # Main function
  #
  def run
    fs_mounts
    critical usage_summary unless @crit_fs.empty?
    warning usage_summary unless @warn_fs.empty?
    ok "All disk usage under #{config[:bwarn]}% and inode usage under #{config[:iwarn]}%"
  end
end
More accurate disk percent usage ignoring privileged bytes
#! /usr/bin/env ruby
# frozen_string_literal: true
#
# check-disk
#
# DESCRIPTION:
# Uses the sys-filesystem gem to get filesystem mount points and metrics
#
# OUTPUT:
# plain text
#
# PLATFORMS:
# Linux, BSD, Windows
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: sys-filesystem
#
# USAGE:
#
# NOTES:
#
# LICENSE:
# Copyright 2015 Yieldbot Inc <Sensu-Plugins>
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'sensu-plugin/check/cli'
require 'sys/filesystem'
#
# Check Disk
#
# CheckDisk -- Sensu check for filesystem byte and inode usage.
#
# Mounts can be filtered in or out by fs type, mount point, mount options
# or a regular expression. Byte thresholds are scaled by volume size via
# the classic "magic factor" adjustment (see #adj_percent).
class CheckDisk < Sensu::Plugin::Check::CLI
  include Sys

  option :fstype,
         short: '-t TYPE[,TYPE]',
         description: 'Only check fs type(s)',
         proc: proc { |a| a.split(',') }

  option :ignoretype,
         short: '-x TYPE[,TYPE]',
         description: 'Ignore fs type(s)',
         proc: proc { |a| a.split(',') }

  option :ignoremnt,
         short: '-i MNT[,MNT]',
         description: 'Ignore mount point(s)',
         proc: proc { |a| a.split(',') }

  option :includemnt,
         description: 'Include only mount point(s)',
         short: '-I MNT[,MNT]',
         proc: proc { |a| a.split(',') }

  option :ignorepathre,
         short: '-p PATHRE',
         description: 'Ignore mount point(s) matching regular expression',
         proc: proc { |a| Regexp.new(a) }

  option :ignoreopt,
         short: '-o TYPE[.TYPE]',
         description: 'Ignore option(s)',
         proc: proc { |a| a.split('.') }

  option :ignore_reserved,
         description: 'Ignore bytes reserved for privileged processes',
         short: '-r',
         long: '--ignore-reserved',
         boolean: true,
         default: false

  option :bwarn,
         short: '-w PERCENT',
         description: 'Warn if PERCENT or more of disk full',
         proc: proc(&:to_i),
         default: 85

  option :bcrit,
         short: '-c PERCENT',
         description: 'Critical if PERCENT or more of disk full',
         proc: proc(&:to_i),
         default: 95

  option :iwarn,
         short: '-W PERCENT',
         description: 'Warn if PERCENT or more of inodes used',
         proc: proc(&:to_i),
         default: 85

  option :icrit,
         short: '-K PERCENT',
         description: 'Critical if PERCENT or more of inodes used',
         proc: proc(&:to_i),
         default: 95

  option :magic,
         short: '-m MAGIC',
         description: 'Magic factor to adjust warn/crit thresholds. Example: .9',
         proc: proc(&:to_f),
         default: 1.0

  option :normal,
         short: '-n NORMAL',
         description: 'Levels are not adapted for filesystems of exactly this '\
                      'size, where levels are reduced for smaller filesystems and raised '\
                      'for larger filesystems.',
         proc: proc(&:to_f),
         default: 20

  option :minimum,
         short: '-l MINIMUM',
         description: 'Minimum size to adjust (in GB)',
         proc: proc(&:to_f),
         default: 100

  # Setup variables
  #
  def initialize
    super
    @crit_fs = []
    @warn_fs = []
  end

  # Get mount data
  #
  # Iterates every mounted filesystem, skips the ones excluded by the
  # configured filters and feeds the rest to #check_mount.
  def fs_mounts
    begin
      mounts = Filesystem.mounts
    rescue StandardError
      unknown 'An error occurred getting the mount info'
    end
    mounts.each do |line|
      begin
        next if config[:fstype] && !config[:fstype].include?(line.mount_type)
        next if config[:ignoretype]&.include?(line.mount_type)
        next if config[:ignoremnt]&.include?(line.mount_point)
        next if config[:ignorepathre]&.match(line.mount_point)
        next if config[:ignoreopt]&.include?(line.options)
        next if config[:includemnt] && !config[:includemnt].include?(line.mount_point)
      rescue StandardError
        unknown 'An error occurred getting the mount info'
      end
      check_mount(line)
    end
  end

  # Adjust the percentages based on volume size
  #
  # Implements the "magic factor" threshold adjustment: the threshold is
  # left alone for volumes of config[:normal] size, relaxed for larger
  # volumes and tightened for smaller ones. +size+ is in bytes,
  # +percent+ is the configured base threshold.
  def adj_percent(size, percent)
    hsize = (size / (1024.0 * 1024.0)) / config[:normal].to_f
    felt = hsize**config[:magic]
    scale = felt / hsize
    100 - ((100 - percent) * scale)
  end

  # Stat a single mount entry and append any threshold breach to
  # @warn_fs / @crit_fs.
  def check_mount(line)
    begin
      fs_info = Filesystem.stat(line.mount_point)
    rescue StandardError
      @warn_fs << "#{line.mount_point} Unable to read."
      return
    end
    # devfs pseudo filesystems are skipped entirely.
    if line.mount_type == 'devfs'
      return
    end
    if fs_info.respond_to?(:inodes) && !fs_info.inodes.nil? # needed for windows
      percent_i = percent_inodes(fs_info)
      if percent_i >= config[:icrit]
        @crit_fs << "#{line.mount_point} #{percent_i}% inode usage"
      elsif percent_i >= config[:iwarn]
        @warn_fs << "#{line.mount_point} #{percent_i}% inode usage"
      end
    end
    percent_b = percent_bytes(fs_info)
    # Only apply the magic-factor adjustment to volumes at or above the
    # configured minimum size.
    if fs_info.bytes_total < (config[:minimum] * 1_000_000_000)
      bcrit = config[:bcrit]
      bwarn = config[:bwarn]
    else
      bcrit = adj_percent(fs_info.bytes_total, config[:bcrit])
      bwarn = adj_percent(fs_info.bytes_total, config[:bwarn])
    end
    used = to_human(fs_info.bytes_used)
    total = to_human(fs_info.bytes_total)
    if percent_b >= bcrit
      @crit_fs << "#{line.mount_point} #{percent_b}% bytes usage (#{used}/#{total})"
    elsif percent_b >= bwarn
      @warn_fs << "#{line.mount_point} #{percent_b}% bytes usage (#{used}/#{total})"
    end
  end

  # Render a byte count as a human-readable string using binary units.
  def to_human(size)
    unit = [[1_099_511_627_776, 'TiB'], [1_073_741_824, 'GiB'], [1_048_576, 'MiB'], [1024, 'KiB'], [0, 'B']].detect { |u| size >= u[0] }
    format("%.2f #{unit[1]}", (size >= 1024 ? size.to_f / unit[0] : size))
  end

  # Determine the percent inode usage
  #
  def percent_inodes(fs_info)
    (100.0 - (100.0 * fs_info.inodes_free / fs_info.inodes)).round(2)
  end

  # Determine the percent byte usage
  #
  # With --ignore-reserved, usage is computed against the space visible
  # to unprivileged users (used + available) and rounded up.
  def percent_bytes(fs_info)
    if config[:ignore_reserved]
      u100 = fs_info.bytes_used * 100.0
      nonroot_total = fs_info.bytes_used + fs_info.bytes_available
      if nonroot_total.zero?
        0
      else
        (u100 / nonroot_total + (u100 % nonroot_total != 0 ? 1 : 0)).truncate(2)
      end
    else
      (100.0 - (100.0 * fs_info.bytes_free / fs_info.bytes_total)).round(2)
    end
  end

  # Generate output
  #
  def usage_summary
    (@crit_fs + @warn_fs).join(', ')
  end

  # Main function
  #
  def run
    fs_mounts
    critical usage_summary unless @crit_fs.empty?
    warning usage_summary unless @warn_fs.empty?
    ok "All disk usage under #{config[:bwarn]}% and inode usage under #{config[:iwarn]}%"
  end
end
|
Parser tests
new file: spec/parser_spec.rb
require "exchange_rate/parser"

# Specs for the plain XML parser wrapper.
describe Parser::Plain do
  before(:all) do
    @xml = File.read("spec/fixtures/example.xml")
    @parser = Parser::Plain.new @xml
  end

  it "should not accept a File type as an argument" do
    expect {
      Parser::Plain.new(File.open "spec/fixtures/example.xml").parse
    }.to raise_error TypeError
  end

  it "should accept a String as an argument" do
    expect {
      Parser::Plain.new(File.read("spec/fixtures/example.xml")).parse
    }.to_not raise_error
  end

  context "Parser::Plain#parse" do
    it "should return a Ox Document" do
      expect(@parser.parse).to be_a Ox::Document
    end
  end
end
|
Added RSpec tests to player.rb
require "spec_helper"
module TicTacToe
# Specs for the Player value object (color + name).
describe Player do
  context "#initialize" do
    it "raises an exception when initialized with {}" do
      expect { Player.new({}) }.to raise_error
    end

    # Fixed description typo: "does to raise" -> "does not raise".
    it "does not raise an error when initialized with a valid input hash" do
      input = { color: "X", name: "Fred" }
      expect { Player.new(input) }.to_not raise_error
    end
  end

  context "#color" do
    it "returns the color" do
      input = { color: "X", name: "Fred" }
      player = Player.new(input)
      expect(player.color).to eq "X"
    end
  end

  context "#player_name" do
    it "returns the player's name" do
      input = { color: "X", name: "Fred" }
      player = Player.new(input)
      expect(player.name).to eq "Fred"
    end
  end
end
end |
require 'spec_helper'
# Fixed typo: require_relatite -> require_relative (the misspelling raises
# NoMethodError before any example can run).
require_relative '../src/team.rb'

describe Team do
end
Fix typo in require call.
require 'spec_helper'
require_relative '../src/team.rb'
# Placeholder spec for Team; no examples written yet.
describe Team do
end
|
require 'helper'

# Specs for top-level Plucky helpers.
# be_true/be_false were removed in RSpec 3; replaced with explicit
# comparisons (matching the corrected version of this spec).
describe Plucky do
  describe ".to_object_id" do
    before do
      @id = BSON::ObjectId.new
    end

    it "converts nil to nil" do
      Plucky.to_object_id(nil).should be_nil
    end

    it "converts blank to nil" do
      Plucky.to_object_id('').should be_nil
    end

    it "leaves object id alone" do
      Plucky.to_object_id(@id).should equal(@id)
    end

    it "converts string to object id" do
      Plucky.to_object_id(@id.to_s).should == @id
    end

    it "not convert string that is not legal object id" do
      Plucky.to_object_id('foo').should == 'foo'
      Plucky.to_object_id(1).should == 1
    end
  end

  describe ".modifier?" do
    context "with a string" do
      it "returns true if modifier" do
        Plucky.modifier?('$in').should == true
      end

      it "returns false if not modifier" do
        Plucky.modifier?('nope').should == false
      end
    end

    context "with a symbol" do
      it "returns true if modifier" do
        Plucky.modifier?(:$in).should == true
      end

      it "returns false if not modifier" do
        Plucky.modifier?(:nope).should == false
      end
    end
  end

  describe "::Methods" do
    it "returns array of methods" do
      Plucky::Methods.should == [
        :where, :filter,
        :sort, :order, :reverse,
        :paginate, :per_page, :limit, :skip, :offset,
        :fields, :ignore, :only,
        :each, :find_each, :find_one, :find,
        :count, :size, :distinct,
        :last, :first, :all, :to_a,
        :exists?, :exist?, :empty?,
        :remove,
      ].sort_by(&:to_s)
    end
  end
end
Minor spec fix
require 'helper'

# Specs for top-level Plucky helpers.
describe Plucky do
  describe ".to_object_id" do
    before do
      @id = BSON::ObjectId.new
    end

    it "converts nil to nil" do
      Plucky.to_object_id(nil).should be_nil
    end

    it "converts blank to nil" do
      Plucky.to_object_id('').should be_nil
    end

    it "leaves object id alone" do
      Plucky.to_object_id(@id).should equal(@id)
    end

    it "converts string to object id" do
      Plucky.to_object_id(@id.to_s).should eq(@id)
    end

    it "not convert string that is not legal object id" do
      Plucky.to_object_id('foo').should eq('foo')
      Plucky.to_object_id(1).should eq(1)
    end
  end

  describe ".modifier?" do
    context "with a string" do
      it "returns true if modifier" do
        Plucky.modifier?('$in').should eq(true)
      end

      it "returns false if not modifier" do
        Plucky.modifier?('nope').should eq(false)
      end
    end

    context "with a symbol" do
      it "returns true if modifier" do
        Plucky.modifier?(:$in).should eq(true)
      end

      it "returns false if not modifier" do
        Plucky.modifier?(:nope).should eq(false)
      end
    end
  end

  describe "::Methods" do
    it "returns array of methods" do
      Plucky::Methods.should eq(
        [
          :where, :filter,
          :sort, :order, :reverse,
          :paginate, :per_page, :limit, :skip, :offset,
          :fields, :ignore, :only,
          :each, :find_each, :find_one, :find,
          :count, :size, :distinct,
          :last, :first, :all, :to_a,
          :exists?, :exist?, :empty?,
          :remove
        ].sort_by(&:to_s)
      )
    end
  end
end
|
require File.expand_path('../spec_helper', __FILE__)

module Alf
  # Behavioural specs for the Rename operator.
  describe Rename do
    let(:input) do
      [{ a: "a", b: "b" }]
    end

    let(:expected) do
      [{ z: "a", b: "b" }]
    end

    subject { operator.to_a }

    describe "When factored with Lispy" do
      let(:operator) { Lispy.rename(input, { a: :z }) }
      it { should == expected }
    end

    describe "When factored from commandline args" do
      let(:operator) { Rename.new.set_args(['a', 'z']) }
      before { operator.pipe(input) }
      it { should == expected }
    end
  end
end
Moved spec coverage a bit upper on Rename
require File.expand_path('../spec_helper', __FILE__)
module Alf
  # Behavioural specs for the Rename operator.
  describe Rename do
    let(:input) {[
      {:a => "a", :b => "b"},
    ]}
    let(:expected){[
      {:z => "a", :b => "b"},
    ]}
    subject{ operator.to_a }
    describe "When factored with Lispy" do
      let(:operator){ Lispy.rename(input, {:a => :z}) }
      it{ should == expected }
    end
    describe "When factored from commandline args" do
      # NOTE(review): '--' appears to separate command options from the
      # rename operands -- confirm against Alf's CLI conventions.
      let(:operator){ Rename.run(['--', 'a', 'z']) }
      before{ operator.pipe(input) }
      it{ should == expected }
    end
  end
end
|
#!/usr/bin/env ruby
# encoding: utf-8
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

# Specs for RRadio::Task with the DBus session bus fully stubbed out so
# no radiotray daemon is needed.
describe RRadio::Task do
  context 'when radiotray has 3 channels' do
    before do
      # Stand-in for the radiotray DBus remote object.
      player = double('player')
      player.stub(:default_iface=).with('net.sourceforge.radiotray').and_return(true)
      player.stub(:introspect)
      player.stub(:listRadios).and_return([['test', 'test', 'test']])
      player.stub(:playRadio).with('test').and_return('test')
      service = double('service')
      service.stub(:object).with('/net/sourceforge/radiotray').and_return(player)
      @dbus = mock(DBus::SessionBus)
      @dbus.stub(:service).with('net.sourceforge.radiotray').and_return(service)
      DBus::SessionBus.stub(:instance).and_return(@dbus)
    end
    describe ':list action' do
      before do
        @task = RRadio::Task.new(['list'], {}, {})
      end
      it 'should return 3 channels' do
        $stdout.should_receive(:puts).with(/test/).exactly(3).times
        @task.list.length.should eq 3
      end
    end
  end
end
Updated spec for task actions
#!/usr/bin/env ruby
# encoding: utf-8
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

# Specs for RRadio::Task with the DBus session bus stubbed out.
describe RRadio::Task do
  before do
    # Stand-in for the radiotray DBus remote object.
    @player = double('player')
    @player.stub(:default_iface=).with('net.sourceforge.radiotray').and_return(true)
    @player.stub(:introspect)
    @service = double('service')
    @service.stub(:object).with('/net/sourceforge/radiotray').and_return(@player)
    @dbus = mock(DBus::SessionBus)
    @dbus.stub(:service).with('net.sourceforge.radiotray').and_return(@service)
    DBus::SessionBus.stub(:instance).and_return(@dbus)
  end

  context 'when radiotray has 3 channels' do
    before do
      @player.stub(:listRadios).and_return([['Jazz', 'R&B', 'Country']])
    end

    context 'when playing Jazz channel' do
      before do
        @player.stub(:playRadio).with('Jazz')
        @player.stub(:getCurrentRadio).and_return(['Jazz'])
        @player.stub(:turnOff).and_return(true)
        @player.stub(:volumeUp).and_return(true)
        @player.stub(:volumeDown).and_return(true)
      end

      describe ':list action' do
        before do
          @task = RRadio::Task.new ['list']
        end

        it 'should return 3 channels' do
          $stdout.should_receive(:puts).with(/Country|Jazz|R&B/).exactly(3).times
          @task.list.length.should eq 3
        end

        it 'should return sorted list' do
          $stdout.should_receive(:puts).with(/Country|Jazz|R&B/).exactly(3).times
          @task.list.should eq ['Country', 'Jazz', 'R&B']
        end
      end

      describe ':play action' do
        before do
          @task = RRadio::Task.new ['play']
        end

        it 'should display error with invalid index' do
          $stdout.should_receive(:puts).with(/does\snot\sexist/).exactly(1).times
          @task.play('5').should eq nil
        end

        it 'should display channel name' do
          $stdout.should_receive(:puts).with(/Jazz/).exactly(1).times
          @task.play('1').should eq nil
        end
      end

      describe ':off action' do
        before do
          @task = RRadio::Task.new ['off']
        end

        it 'should display channel name' do
          $stdout.should_receive(:puts).with(/Jazz/).once
          @task.off.should eq nil
        end
      end

      describe ':show action' do
        before do
          @task = RRadio::Task.new ['show']
        end

        it 'should display channel name' do
          $stdout.should_receive(:puts).with(/Jazz/).once
          @task.show.should eq nil
        end
      end

      describe ':volume action' do
        before do
          @task = RRadio::Task.new ['volume']
        end

        it 'should respond volume up with 1-5' do
          @task.volume('up', '1').should eq 1
          @task.volume('up', '3').should eq 3
        end

        it 'should respond volume up with too big value' do
          @task.volume('up', '99').should eq 5
        end

        # These two examples previously exercised 'up' (copy/paste);
        # they are about 'down', whose stub (volumeDown) was never hit.
        it 'should respond volume down with 1-5' do
          @task.volume('down', '2').should eq 2
          @task.volume('down', '4').should eq 4
        end

        it 'should respond volume down with too big value' do
          @task.volume('down', '99').should eq 5
        end

        it 'should not respond with invalid action' do
          @task.volume('keep', '5').should eq nil
        end
      end
    end

    context 'when not playing radio' do
      before do
        @player.stub(:getCurrentRadio).and_return(nil)
      end

      describe ':off action' do
        before do
          @task = RRadio::Task.new ['off']
        end

        it 'should not respond and display nothing' do
          $stdout.should_not_receive(:puts)
          @task.off.should eq nil
        end
      end

      describe ':show action' do
        before do
          @task = RRadio::Task.new ['show']
        end

        it 'should not display channel name' do
          $stdout.should_receive(:puts).with(nil)
          @task.show.should eq nil
        end
      end
    end
  end
end
|
require 'spec_helper'

# Specs for AttrSecure::Secure, the encrypt/decrypt wrapper around a
# base64-encoded secret key.
describe AttrSecure::Secure do
  context "with a simple key" do
    subject { described_class.new(secret) }
    let(:secret) { 'fWSvpC6Eh1/FFE1TUgXpcEzMmmGc9IZSqoexzEslzKI=' }
    describe '#encrypt' do
      it "should encrypt a string" do
        expect(subject.encrypt('encrypted')).to be_a(String)
        expect(subject.encrypt('encrypted')).to_not be_empty
        # Successive calls are expected to yield different ciphertexts.
        expect(subject.encrypt('encrypted')).to_not eq(subject.encrypt('encrypted'))
      end
    end
    describe '#decrypt' do
      let(:encrypted_value) { subject.encrypt('decrypted') }
      it "should decrypt a string" do
        expect(subject.decrypt(encrypted_value)).to eq('decrypted')
      end
    end
  end
  # context "with an array of keys" do
  #   subject { described_class.new(secret) }
  #   let(:secret) { 'fWSvpC6Eh1/FFE1TUgXpcEzMmmGc9IZSqoexzEslzKI=,d9ssNmUYn7UpMoSc0eM2glVUG2DPYwXveLTDU7j8pBY=' }
  #   describe '#encrypt' do
  #     it "should encrypt a string" do
  #       expect(subject.encrypt('encrypted')).to be_a(String)
  #       expect(subject.encrypt('encrypted')).to_not be_empty
  #       expect(subject.encrypt('encrypted')).to_not eq(subject.encrypt('encrypted'))
  #     end
  #   end
  #   describe '#decrypt' do
  #     let(:encrypted_value) { subject.encrypt('decrypted') }
  #     it "should decrypt a string" do
  #       expect(subject.decrypt(encrypted_value)).to eq('decrypted')
  #     end
  #   end
  # end
end
Test illustrating issue with Secure#secret=
require 'spec_helper'

# Specs for AttrSecure::Secure, the encrypt/decrypt wrapper around a
# base64-encoded secret key.
describe AttrSecure::Secure do
  context "with a simple key" do
    subject { described_class.new(secret1) }
    let(:secret1) { 'fWSvpC6Eh1/FFE1TUgXpcEzMmmGc9IZSqoexzEslzKI=' }
    let(:secret2) { 'd9ssNmUYn7UpMoSc0eM2glVUG2DPYwXveLTDU7j8pBY=' }
    describe '#secret=' do
      # Illustrates the issue with Secure#secret=: assigning a new
      # secret must replace the stored list, not keep the old one.
      it "should update the list of secrets" do
        expect(subject.secret).to eq([secret1])
        subject.secret = secret2
        expect(subject.secret).to eq([secret2])
      end
    end
    describe '#encrypt' do
      it "should encrypt a string" do
        expect(subject.encrypt('encrypted')).to be_a(String)
        expect(subject.encrypt('encrypted')).to_not be_empty
        # Successive calls are expected to yield different ciphertexts.
        expect(subject.encrypt('encrypted')).to_not eq(subject.encrypt('encrypted'))
      end
    end
    describe '#decrypt' do
      let(:encrypted_value) { subject.encrypt('decrypted') }
      it "should decrypt a string" do
        expect(subject.decrypt(encrypted_value)).to eq('decrypted')
      end
    end
  end
  # context "with an array of keys" do
  #   subject { described_class.new(secret) }
  #   let(:secret) { 'fWSvpC6Eh1/FFE1TUgXpcEzMmmGc9IZSqoexzEslzKI=,d9ssNmUYn7UpMoSc0eM2glVUG2DPYwXveLTDU7j8pBY=' }
  #   describe '#encrypt' do
  #     it "should encrypt a string" do
  #       expect(subject.encrypt('encrypted')).to be_a(String)
  #       expect(subject.encrypt('encrypted')).to_not be_empty
  #       expect(subject.encrypt('encrypted')).to_not eq(subject.encrypt('encrypted'))
  #     end
  #   end
  #   describe '#decrypt' do
  #     let(:encrypted_value) { subject.encrypt('decrypted') }
  #     it "should decrypt a string" do
  #       expect(subject.decrypt(encrypted_value)).to eq('decrypted')
  #     end
  #   end
  # end
end
|
require 'spec_helper'

# ChefSpec coverage for the nagios::server recipe.
describe 'nagios::server' do
  let(:chef_run) { runner.converge 'nagios::server' }
  subject { chef_run }

  before do
    stub_search(:users, 'groups:sysadmin NOT action:remove').and_return([])
    # Chef indexes nodes under 'name', not 'hostname' (matches the
    # corrected search stub used by the sibling copy of this spec).
    stub_search(:node, 'name:* AND chef_environment:test').and_return([])
    stub_search(:role, '*:*').and_return([])
    Chef::DataBag.stub(:list).and_return([])
    # nagios::server_package stubs
    stub_command('dpkg -l nagios3').and_return(true)
  end

  it { should install_package 'nagios3' }
  it { should enable_service 'nagios' }
  it { should start_service 'nagios' }
end
Fix the search mock for the chef spec. Also fix indentation in the
server_spec file.
require 'spec_helper'

# ChefSpec coverage for the nagios::server recipe.
describe 'nagios::server' do
  let(:chef_run) { runner.converge 'nagios::server' }
  subject { chef_run }
  before do
    # Searches performed by the recipe, stubbed to return no results.
    stub_search(:users, 'groups:sysadmin NOT action:remove').and_return([])
    stub_search(:node, 'name:* AND chef_environment:test').and_return([])
    stub_search(:role, '*:*').and_return([])
    Chef::DataBag.stub(:list).and_return([])
    # nagios::server_package stubs
    stub_command('dpkg -l nagios3').and_return(true)
  end
  it { should install_package 'nagios3' }
  it { should enable_service 'nagios' }
  it { should start_service 'nagios' }
end
|
require 'rubygems'
# Only set up Bundler when running from a git checkout.
using_git = File.exist?(File.expand_path('../../.git/', __FILE__))
if using_git
  require 'bundler'
  Bundler.setup
end
require 'rspec'
Dir['./spec/support/**/*.rb'].each { |f| require f }
require 'vcr'
require 'monkey_patches'
module VCR
  SPEC_ROOT = File.dirname(__FILE__)
  # Wipes VCR's configuration and points it back at the given stubbing
  # library (FakeWeb by default) so each example starts clean.
  def reset!(stubbing_lib = :fakeweb)
    @configuration = nil
    configuration.stub_with stubbing_lib if stubbing_lib
  end
end
RSpec.configure do |config|
  config.color_enabled = true
  # Debugger support only for local MRI runs (not 1.9.3, not CI).
  config.debug = (using_git && RUBY_INTERPRETER == :mri && !%w[ 1.9.3 ].include?(RUBY_VERSION) && !ENV['CI'])
  config.treat_symbols_as_metadata_keys_with_true_values = true
  tmp_dir = File.expand_path('../../tmp/cassette_library_dir', __FILE__)
  # Reset VCR state before every example.
  config.before(:each) do
    VCR.reset!
    VCR.configuration.cassette_library_dir = tmp_dir
    VCR.turn_on! unless VCR.turned_on?
    VCR.eject_cassette while VCR.current_cassette
    # NOTE(review): the three WebMock/FakeWeb lines below appear to
    # duplicate work done by the per-adapter reset! on the last line --
    # confirm and consider removing.
    WebMock.allow_net_connect!
    FakeWeb.allow_net_connect = true
    FakeWeb.clean_registry
    VCR::HttpStubbingAdapters::Common.adapters.each(&:reset!)
  end
  config.after(:each) do
    FileUtils.rm_rf tmp_dir
    VCR::HttpStubbingAdapters::Common.adapters.each do |a|
      a.ignored_hosts = []
    end
  end
  # Silence expected warnings for groups tagged :disable_warnings.
  config.before(:all, :disable_warnings => true) do
    @orig_std_err = $stderr
    $stderr = StringIO.new
  end
  config.after(:all, :disable_warnings => true) do
    $stderr = @orig_std_err
  end
  config.filter_run :focus => true
  config.run_all_when_everything_filtered = true
  config.alias_it_should_behave_like_to :it_performs, 'it performs'
end
# Load every bundled HTTP stubbing adapter (Typhoeus is MRI-only).
http_stubbing_dir = File.join(File.dirname(__FILE__), '..', 'lib', 'vcr', 'http_stubbing_adapters')
Dir[File.join(http_stubbing_dir, '*.rb')].each do |file|
  next if RUBY_INTERPRETER != :mri && file =~ /(typhoeus)/
  require "vcr/http_stubbing_adapters/#{File.basename(file)}"
end
Remove unnecessary cleanup lines from global before(:each) hook.
require 'rubygems'
# Only set up Bundler when running from a git checkout.
using_git = File.exist?(File.expand_path('../../.git/', __FILE__))
if using_git
  require 'bundler'
  Bundler.setup
end
require 'rspec'
Dir['./spec/support/**/*.rb'].each { |f| require f }
require 'vcr'
require 'monkey_patches'
module VCR
  SPEC_ROOT = File.dirname(__FILE__)
  # Wipes VCR's configuration and points it back at the given stubbing
  # library (FakeWeb by default) so each example starts clean.
  def reset!(stubbing_lib = :fakeweb)
    @configuration = nil
    configuration.stub_with stubbing_lib if stubbing_lib
  end
end
RSpec.configure do |config|
  config.color_enabled = true
  # Debugger support only for local MRI runs (not 1.9.3, not CI).
  config.debug = (using_git && RUBY_INTERPRETER == :mri && !%w[ 1.9.3 ].include?(RUBY_VERSION) && !ENV['CI'])
  config.treat_symbols_as_metadata_keys_with_true_values = true
  tmp_dir = File.expand_path('../../tmp/cassette_library_dir', __FILE__)
  # Reset VCR state and every stubbing adapter before each example.
  config.before(:each) do
    VCR.reset!
    VCR.configuration.cassette_library_dir = tmp_dir
    VCR.turn_on! unless VCR.turned_on?
    VCR.eject_cassette while VCR.current_cassette
    VCR::HttpStubbingAdapters::Common.adapters.each(&:reset!)
  end
  config.after(:each) do
    FileUtils.rm_rf tmp_dir
    VCR::HttpStubbingAdapters::Common.adapters.each do |a|
      a.ignored_hosts = []
    end
  end
  # Silence expected warnings for groups tagged :disable_warnings.
  config.before(:all, :disable_warnings => true) do
    @orig_std_err = $stderr
    $stderr = StringIO.new
  end
  config.after(:all, :disable_warnings => true) do
    $stderr = @orig_std_err
  end
  config.filter_run :focus => true
  config.run_all_when_everything_filtered = true
  config.alias_it_should_behave_like_to :it_performs, 'it performs'
end
# Load every bundled HTTP stubbing adapter (Typhoeus is MRI-only).
http_stubbing_dir = File.join(File.dirname(__FILE__), '..', 'lib', 'vcr', 'http_stubbing_adapters')
Dir[File.join(http_stubbing_dir, '*.rb')].each do |file|
  next if RUBY_INTERPRETER != :mri && file =~ /(typhoeus)/
  require "vcr/http_stubbing_adapters/#{File.basename(file)}"
end
|
# This file is copied to ~/spec when you run 'ruby script/generate rspec'
# from the project root directory.
ENV["RAILS_ENV"] = "test"
require File.expand_path(File.dirname(__FILE__) + "/../config/environment")
require 'spec'
require 'spec/rails'
include AuthenticatedTestHelper
require 'mock_controller'
require File.dirname(__FILE__) + '/toolbox_spec_helper'
include ToolboxSpecHelper
# Global RSpec (pre-2.x "Spec") runner configuration.
Spec::Runner.configure do |config|
  # If you're not using ActiveRecord you should remove these
  # lines, delete config/database.yml and disable :active_record
  # in your config/boot.rb
  config.use_transactional_fixtures = true
  config.use_instantiated_fixtures = false
  config.fixture_path = RAILS_ROOT + '/spec/fixtures/'
  # == Fixtures
  #
  # You can declare fixtures for each example_group like this:
  # describe "...." do
  # fixtures :table_a, :table_b
  #
  # Alternatively, if you prefer to declare them only once, you can
  # do so right here. Just uncomment the next line and replace the fixture
  # names with your fixtures.
  #
  # config.global_fixtures = :table_a, :table_b
  #
  # If you declare global fixtures, be aware that they will be declared
  # for all of your examples, even those that don't use them.
  #
  # == Mock Framework
  #
  # RSpec uses it's own mocking framework by default. If you prefer to
  # use mocha, flexmock or RR, uncomment the appropriate line:
  #
  # config.mock_with :mocha
  # config.mock_with :flexmock
  # config.mock_with :rr
end
# Returns the :simple_preview part fixture (see spec/fixtures).
def simple_preview
  #sp.stub!(:liquid).and_return(%Q[<h2>{{ simple_preview.title }}</h2>\n<p>{{ simple_preview.body }}</p>])
  parts(:simple_preview)
end
Break MVC in specs, according to ApplicationController.
git-svn-id: d95c7f39bc46a13f717da3baac7edcc188a27d76@1729 9df96bbd-a318-0410-8f51-977bef1cc4bb
# This file is copied to ~/spec when you run 'ruby script/generate rspec'
# from the project root directory.
ENV["RAILS_ENV"] = "test"
require File.expand_path(File.dirname(__FILE__) + "/../config/environment")
require 'spec'
require 'spec/rails'
include AuthenticatedTestHelper
require 'mock_controller'
require File.dirname(__FILE__) + '/toolbox_spec_helper'
include ToolboxSpecHelper
# Global RSpec (pre-2.x "Spec") runner configuration.
Spec::Runner.configure do |config|
  # If you're not using ActiveRecord you should remove these
  # lines, delete config/database.yml and disable :active_record
  # in your config/boot.rb
  config.use_transactional_fixtures = true
  config.use_instantiated_fixtures = false
  config.fixture_path = RAILS_ROOT + '/spec/fixtures/'
  # == Fixtures
  #
  # You can declare fixtures for each example_group like this:
  # describe "...." do
  # fixtures :table_a, :table_b
  #
  # Alternatively, if you prefer to declare them only once, you can
  # do so right here. Just uncomment the next line and replace the fixture
  # names with your fixtures.
  #
  # config.global_fixtures = :table_a, :table_b
  #
  # If you declare global fixtures, be aware that they will be declared
  # for all of your examples, even those that don't use them.
  #
  # == Mock Framework
  #
  # RSpec uses it's own mocking framework by default. If you prefer to
  # use mocha, flexmock or RR, uncomment the appropriate line:
  #
  # config.mock_with :mocha
  # config.mock_with :flexmock
  # config.mock_with :rr
  config.before(:each) do
    # break MVC like in ApplicationController
    [Part, Rendering, Page].each do |klass|
      klass.active_controller = mock_controller
    end
  end
end
# Returns the :simple_preview part fixture (see spec/fixtures).
def simple_preview
  sp = parts(:simple_preview)
  #sp.stub!(:liquid).and_return(%Q[<h2>{{ simple_preview.title }}</h2>\n<p>{{ simple_preview.body }}</p>])
  return sp
end
# Builds a stand-in PublicController wired to fixture view templates,
# used by the before(:each) hook above.
def mock_controller
  finder = mock(ActionView::TemplateFinder)
  # Resolve templates from spec/fixtures/views; false when missing.
  # NOTE(review): File.exists? is deprecated in modern Ruby; File.exist?
  # is the preferred spelling.
  def finder.pick_template(ppath,ext)
    path = File.join(RAILS_ROOT, 'spec', 'fixtures', 'views', "#{ppath}.#{ext}")
    File.exists?(path) ? path : false
  end
  view = mock(ActionView::Base)
  view.stub!(:finder).and_return(finder)
  controller = mock(PublicController)
  controller.stub!(:current_theme).and_return('cool')
  controller.stub!(:view_paths).and_return([ File.join(RAILS_ROOT,'spec','fixtures','views') ])
  controller.stub!(:master_helper_module).and_return(PublicController.new.master_helper_module)
  # there is no such thing as PublicController#template
  controller.instance_variable_set('@template', view)
  controller
end
|
require 'vcs_toolkit'
# Global RSpec configuration for the suite.
RSpec.configure do |config|
  # Use defaults for now
end
Use expect syntax for all tests
require 'vcs_toolkit'
# Global RSpec configuration for the suite.
RSpec.configure do |config|
  # Restrict rspec-expectations to the modern expect syntax only.
  config.expect_with :rspec do |expect_config|
    expect_config.syntax = :expect
  end
end
require 'subtitulos_downloader'
# Global RSpec output configuration for the suite.
RSpec.configure do |config|
  config.color_enabled = true
  config.formatter = 'progress'
end
Changed formatter for spec tests
require 'subtitulos_downloader'
# Global RSpec output configuration for the suite.
RSpec.configure do |config|
  config.color_enabled = true
  config.formatter = 'documentation'
end
require 'simplecov'
SimpleCov.start do
  add_filter 'vendor'
end
require 'aws-sdk'
require File.expand_path('../../lib/opendelivery/domain.rb', __FILE__)
require File.expand_path('../../lib/opendelivery/stack.rb', __FILE__)

# choose your own adventure!
# load File.expand_path("~/.aws/aws.config")
#load File.expand_path("c:\\aws\\aws.config")
config_file = "/opt/aws/aws.config"
# Don't abort the suite when no local credentials file is present
# (e.g. on CI boxes) -- matches the guarded sibling copy of this helper.
load File.expand_path(config_file) if File.exist?(config_file)
don't fail if you can't find creds file
require 'simplecov'
SimpleCov.start do
  add_filter 'vendor'
end
require 'aws-sdk'
require File.expand_path('../../lib/opendelivery/domain.rb', __FILE__)
require File.expand_path('../../lib/opendelivery/stack.rb', __FILE__)
# choose your own adventure!
#config_file = "~/.aws/aws.config"
#config_file = "c:\\aws\\aws.config"
config_file = "/opt/aws/aws.config"
# Only load credentials when the file is present (e.g. skip on CI).
# NOTE(review): File.exist? runs on the unexpanded path; fine for the
# absolute default, but the "~/..." variant would need expanding first.
if File.exist?(config_file)
  load File.expand_path(config_file)
end
|
# Creates +path+, yields to the caller, then removes the file again.
#
# Fixes: the handle was never closed before deletion, and File.delete
# was handed the (still open) File object instead of the path. Opening
# with a block closes the handle before the delete.
def transient_file(path)
  File.open(path, 'w') { yield }
  File.delete(path)
end
must close file before deleting
# Creates +path+ for writing, runs the caller's block, then deletes the
# file. The open happens in block form so the handle is closed before
# File.delete receives it (File.delete accepts any object responding to
# #to_path, which File does even after closing).
def transient_file(path)
  closed_handle = File.open(path, 'w') do |f|
    yield
    f
  end
  File.delete(closed_handle)
end
|
require 'rspec'
require 'json'
require 'rdota'
require 'webmock/rspec'
# Load shared support files before the suite runs.
Dir["./spec/support/**/*.rb"].sort.each {|f| require f}
# Global RSpec output configuration.
RSpec.configure do |config|
  config.color_enabled = true
  config.formatter = 'documentation'
end
#TODO: Maybe refactor this so the client is only created for tests that need it
# Helper methods mixed into example groups.
module SpecHelper
  # Memoized Rdota client configured with a dummy API key.
  def client
    @client ||= test_client
  end

  # Builds the Rdota wrapper object for the given JSON fixture category.
  def fixture_for(category)
    data = json_for(category)
    klass = class_from_string("Rdota::#{camelize(category)}")
    klass.new(data)
  end

  # TODO: this should be a method on String
  # 'match_history' -> 'MatchHistory'
  def camelize(str)
    str.split('_').map(&:capitalize).join
  end

  private

  # Fresh client with a dummy key (requests are stubbed by webmock).
  def test_client
    Rdota::Client.new.tap do |c|
      c.configure do |configuration|
        configuration.api_key = 'API_KEY'
      end
    end
  end

  # Parses the JSON fixture file for a category.
  def json_for(category)
    filepath = File.expand_path("spec/fixtures/#{category}.json")
    JSON.parse(File.read(filepath))
  end

  # Resolves a fully qualified constant name such as "Rdota::Client".
  def class_from_string(str)
    str.split('::').reduce(Object) do |mod, const_name|
      mod.const_get(const_name)
    end
  end
end
fixed indentation in spec_helper
require 'rspec'
require 'json'
require 'rdota'
require 'webmock/rspec'
# Load shared support files before the suite runs.
Dir["./spec/support/**/*.rb"].sort.each {|f| require f}
# Global RSpec output configuration.
RSpec.configure do |config|
  config.color_enabled = true
  config.formatter = 'documentation'
end
#TODO: Maybe refactor this so the client is only created for tests that need it
# Helper methods mixed into example groups.
module SpecHelper
  # Memoized Rdota client configured with a dummy API key.
  def client
    @client ||= test_client
  end
  # Builds the Rdota wrapper object for the given JSON fixture category.
  def fixture_for(category)
    json = json_for(category)
    classname = camelize(category)
    class_for_category = class_from_string("Rdota::#{classname}")
    class_for_category.new(json)
  end
  # TODO: this should be a method on String
  # 'match_history' -> 'MatchHistory'
  def camelize(str)
    str.split('_').map(&:capitalize).join
  end
  private
  # Fresh client with a dummy key (requests are stubbed by webmock).
  def test_client
    client = Rdota::Client.new
    client.configure do |configuration|
      configuration.api_key = 'API_KEY'
    end
    client
  end
  # Parses the JSON fixture file for a category.
  def json_for(category)
    filepath = File.expand_path("spec/fixtures/#{category}.json")
    JSON.parse(IO.read(filepath))
  end
  # Resolves a fully qualified constant name such as "Rdota::Client".
  def class_from_string(str)
    str.split('::').inject(Object) do |mod, inner_module|
      mod.const_get(inner_module)
    end
  end
end
|
require_relative '../lib/bamboozled'
#dependencies
require 'minitest/autorun'
require 'webmock/minitest'
require 'turn'
# Turn (minitest reporter) output configuration.
Turn.config do |c|
  # :outline - turn's original case/test outline mode [default]
  c.format = :outline
  # turn on invoke/execute tracing, enable full backtrace
  c.trace = true
  # use humanized test names (works only with :outline format)
  c.natural = true
end
Remove turn gem to get tests to run
require_relative '../lib/bamboozled'
#dependencies
require 'minitest/autorun'
require 'webmock/minitest'
|
require 'simplecov'
SimpleCov.start 'rails'
# This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# avoid https://github.com/mbj/mutant#mutations-with-infinite-runtimes
config.around(:each) do |example|
Timeout.timeout(60, &example)
end
if ENV['CI']
config.before(focus: true) { fail 'Remove focused specs before commit!' }
config.before(skip: true) { fail 'Remove skipped specs before commit!' }
config.before(:each) { |example| fail 'Add issues metadata!' if example.metadata.fetch(:issues, []).empty? }
end
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
config.before(:suite) do
DatabaseCleaner.strategy = :truncation
end
config.before(:each) do
DatabaseCleaner.clean
end
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# This allows you to limit a spec run to individual examples or groups
# you care about by tagging them with `:focus` metadata. When nothing
# is tagged with `:focus`, all examples get run. RSpec also provides
# aliases for `it`, `describe`, and `context` that include `:focus`
# metadata: `fit`, `fdescribe` and `fcontext`, respectively.
config.filter_run_when_matching :focus
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = 'spec/tmp/examples.txt'
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
end
refuse_coverage_drop below 100%
require 'simplecov'
SimpleCov.start 'rails'
SimpleCov.minimum_coverage 100
SimpleCov.refuse_coverage_drop
# This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# avoid https://github.com/mbj/mutant#mutations-with-infinite-runtimes
config.around(:each) do |example|
Timeout.timeout(60, &example)
end
if ENV['CI']
config.before(focus: true) { fail 'Remove focused specs before commit!' }
config.before(skip: true) { fail 'Remove skipped specs before commit!' }
config.before(:each) { |example| fail 'Add issues metadata!' if example.metadata.fetch(:issues, []).empty? }
end
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
config.before(:suite) do
DatabaseCleaner.strategy = :truncation
end
config.before(:each) do
DatabaseCleaner.clean
end
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# This allows you to limit a spec run to individual examples or groups
# you care about by tagging them with `:focus` metadata. When nothing
# is tagged with `:focus`, all examples get run. RSpec also provides
# aliases for `it`, `describe`, and `context` that include `:focus`
# metadata: `fit`, `fdescribe` and `fcontext`, respectively.
config.filter_run_when_matching :focus
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = 'spec/tmp/examples.txt'
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
end
|
require 'chefspec'
require 'chefspec/berkshelf'
RSpec.configure do |config|
config.color = true
config.formatter = 'doc'
config.log_level = :fatal
end
at_exit { ChefSpec::Coverage.report! }
Use standard chefspec helper
Signed-off-by: Tim Smith <764ef62106582a09ed09dfa0b6bff7c05fd7d1e4@chef.io>
require 'chefspec'
require 'chefspec/berkshelf'
RSpec.configure do |config|
config.color = true # Use color in STDOUT
config.formatter = :documentation # Use the specified formatter
config.log_level = :error # Avoid deprecation notice SPAM
end
|
add spec_helper
require 'rspec'
require 'typekitty'
require 'webmock/rspec'
|
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require 'rspec'
require 'brick'
RSpec.shared_context "local paths" do
def project_dir
File.expand_path(File.join(File.dirname(__FILE__), '..'))
end
end
add ostruct to spec classpath
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require 'rspec'
require 'brick'
require 'ostruct'
RSpec.shared_context "local paths" do
def project_dir
File.expand_path(File.join(File.dirname(__FILE__), '..'))
end
end
|
$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require 'bravo'
require 'rspec'
require 'ruby-debug'
class SpecHelper
include Savon::Logger
end
# Requires supporting files with custom matchers and macros, etc,
# in ./support/ and its subdirectories.
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each {|f| require f}
Savon::Request.log = false unless ENV["VERBOSE"] == "true"
Bravo.pkey = "spec/fixtures/pkey"
Bravo.cert = "spec/fixtures/cert.crt"
Bravo.cuit = "30711034389"
Bravo.sale_point = "0002"
Bravo.service_url = "http://wswhomo.afip.gov.ar/wsfev1/service.asmx?WSDL"
Bravo.default_concepto = "Productos y Servicios"
Bravo.default_documento = "CUIT"
Bravo.default_moneda = :peso
Bravo.own_iva_cond = :responsable_inscripto
CUIT is set through env variable for testing.
$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require 'bravo'
require 'rspec'
require 'ruby-debug'
class SpecHelper
include Savon::Logger
end
# Requires supporting files with custom matchers and macros, etc,
# in ./support/ and its subdirectories.
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each {|f| require f}
Savon::Request.log = false unless ENV["VERBOSE"] == "true"
Bravo.pkey = "spec/fixtures/pkey"
Bravo.cert = "spec/fixtures/cert.crt"
Bravo.cuit = ENV["CUIT"] || raise(Bravo::NullOrInvalidAttribute.new, "Please set CUIT env variable.")
Bravo.sale_point = "0002"
Bravo.service_url = "http://wswhomo.afip.gov.ar/wsfev1/service.asmx?WSDL"
Bravo.default_concepto = "Productos y Servicios"
Bravo.default_documento = "CUIT"
Bravo.default_moneda = :peso
Bravo.own_iva_cond = :responsable_inscripto
|
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.join(Gem.loaded_specs['kaminari'].gem_dir, 'spec'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
begin
require 'rails'
rescue LoadError
end
require 'bundler/setup'
Bundler.require
require 'capybara/rspec'
require 'database_cleaner'
if defined? Rails
require 'fake_app/rails_app'
require 'rspec/rails'
end
# Requires supporting files with custom matchers and macros, etc,
# in ./support/ and its subdirectories.
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each {|f| require f}
Dir["#{File.join(Gem.loaded_specs['kaminari'].gem_dir, 'spec')}/support/**/*.rb"].each {|f| require f}
RSpec.configure do |config|
config.mock_with :rr
config.filter_run_excluding :generator_spec => true if !ENV['GENERATOR_SPEC']
end
Assume that Rails is available for testing
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.join(Gem.loaded_specs['kaminari'].gem_dir, 'spec'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'rails'
require 'bundler/setup'
Bundler.require
require 'capybara/rspec'
require 'database_cleaner'
require 'fake_app/rails_app'
require 'rspec/rails'
# Requires supporting files with custom matchers and macros, etc,
# in ./support/ and its subdirectories.
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each {|f| require f}
Dir["#{File.join(Gem.loaded_specs['kaminari'].gem_dir, 'spec')}/support/**/*.rb"].each {|f| require f}
RSpec.configure do |config|
config.mock_with :rr
config.filter_run_excluding :generator_spec => true if !ENV['GENERATOR_SPEC']
end
|
require "pry"
require "rspec"
require "rdkafka"
def rdkafka_config
config = {
:"bootstrap.servers" => "localhost:9092",
:"group.id" => "ruby-test-#{Random.new.rand(0..10_000)}",
:"auto.offset.reset" => "earliest",
:"enable.partition.eof" => false
}
if ENV["DEBUG_PRODUCER"]
config[:debug] = "broker,topic,msg"
elsif ENV["DEBUG_CONSUMER"]
config[:debug] = "cgrp,topic,fetch"
end
Rdkafka::Config.new(config)
end
def native_client
config = rdkafka_config
config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer)
end
def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30)
offset = delivery_report.offset - 1
consumer = rdkafka_config.consumer
consumer.subscribe(topic)
timeout = Time.now.to_i + timeout_in_seconds
loop do
if timeout <= Time.now.to_i
raise "Timeout of #{timeout_in_seconds} seconds reached in wait_for_message"
end
message = consumer.poll(100)
if message && message.offset == offset
return message
end
end
ensure
consumer.close
end
Take partition into account in wait_for_message spec helper
require "pry"
require "rspec"
require "rdkafka"
def rdkafka_config
config = {
:"bootstrap.servers" => "localhost:9092",
:"group.id" => "ruby-test-#{Random.new.rand(0..1_000_000)}",
:"auto.offset.reset" => "earliest",
:"enable.partition.eof" => false
}
if ENV["DEBUG_PRODUCER"]
config[:debug] = "broker,topic,msg"
elsif ENV["DEBUG_CONSUMER"]
config[:debug] = "cgrp,topic,fetch"
end
Rdkafka::Config.new(config)
end
def native_client
config = rdkafka_config
config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer)
end
def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30)
consumer = rdkafka_config.consumer
consumer.subscribe(topic)
timeout = Time.now.to_i + timeout_in_seconds
loop do
if timeout <= Time.now.to_i
raise "Timeout of #{timeout_in_seconds} seconds reached in wait_for_message"
end
message = consumer.poll(100)
if message &&
message.partition == delivery_report.partition &&
message.offset == delivery_report.offset - 1
return message
end
end
ensure
consumer.close
end
|
require 'rubygems'
require "bundler/setup"
require 'rolify'
require 'rolify/matchers'
require 'rails'
require 'its'
require 'coveralls'
Coveralls.wear_merged!
ENV['ADAPTER'] ||= 'active_record'
load File.dirname(__FILE__) + "/support/adapters/#{ENV['ADAPTER']}.rb"
load File.dirname(__FILE__) + '/support/data.rb'
begin
require 'pry'
rescue LoadError
end
def reset_defaults
Rolify.use_defaults
Rolify.use_mongoid if ENV['ADAPTER'] == 'mongoid'
end
def provision_user(user, roles)
roles.each do |role|
if role.is_a? Array
user.add_role *role
else
user.add_role role
end
end
user
end
def silence_warnings(&block)
warn_level = $VERBOSE
$VERBOSE = nil
result = block.call
$VERBOSE = warn_level
result
end
Try to fix loading error
require 'rubygems'
require "bundler/setup"
require 'rolify'
require 'rolify/matchers'
require 'rails'
require 'its'
require 'coveralls'
Coveralls.wear_merged!
ENV['ADAPTER'] ||= 'active_record'
begin
load File.dirname(__FILE__) + "/support/adapters/#{ENV['ADAPTER']}.rb"
rescue NameError => e
if e.message =~ /uninitialized constant RSpec::Matchers::BuiltIn::MatchArray/
RSpec::Matchers::OperatorMatcher.register(ActiveRecord::Relation, '=~', RSpec::Matchers::BuiltIn::MatchArray)
end
end
load File.dirname(__FILE__) + '/support/data.rb'
begin
require 'pry'
rescue LoadError
end
def reset_defaults
Rolify.use_defaults
Rolify.use_mongoid if ENV['ADAPTER'] == 'mongoid'
end
def provision_user(user, roles)
roles.each do |role|
if role.is_a? Array
user.add_role *role
else
user.add_role role
end
end
user
end
def silence_warnings(&block)
warn_level = $VERBOSE
$VERBOSE = nil
result = block.call
$VERBOSE = warn_level
result
end
|
# encoding: utf-8
require 'sql'
require 'devtools/spec_helper'
if ENV['COVERAGE'] == 'true'
require 'simplecov'
require 'coveralls'
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start do
command_name 'spec:unit'
add_filter 'config/'
add_filter 'spec/'
minimum_coverage 100
end
end
RSpec.configure do |config|
config.include(SQL::NodeHelper)
config.extend(SQL::NodeHelper)
config.expect_with :rspec do |expect_with|
expect_with.syntax = :expect
end
end
Fix coverage reporting
if ENV['COVERAGE'] == 'true'
require 'simplecov'
require 'coveralls'
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start do
command_name 'spec:unit'
add_filter 'config/'
add_filter 'spec/'
minimum_coverage 100
end
end
# encoding: utf-8
require 'sql'
require 'devtools/spec_helper'
RSpec.configure do |config|
config.include(SQL::NodeHelper)
config.extend(SQL::NodeHelper)
config.expect_with :rspec do |expect_with|
expect_with.syntax = :expect
end
end
|
if ENV['RAILS_ENV'] == 'test'
require 'simplecov'
SimpleCov.start 'rails'
if ENV['CIRCLE_ARTIFACTS'] # https://circleci.com/docs/code-coverage
dir = File.join("..", "..", "..", ENV['CIRCLE_ARTIFACTS'], "coverage")
SimpleCov.coverage_dir(dir)
end
end
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'rspec/its'
require 'webmock/rspec'
WebMock.disable_net_connect!(allow_localhost: true)
require 'capybara/poltergeist'
Capybara.javascript_driver = :poltergeist
require 'capybara-screenshot/rspec'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
Dir[Rails.root.join("spec/*/shared_examples/**/*.rb")].each { |f| require f }
RSpec.configure do |config|
# ## Mock Framework
#
# If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
#
# config.mock_with :mocha
# config.mock_with :flexmock
# config.mock_with :rr
# Syntax sugar to use the FactoryGirl methods directly instead FactoryGirl.create ete.
config.include FactoryGirl::Syntax::Methods
config.include LoginMacro
config.include JavascriptMacro
config.include StubDaemon
config.include ConfigHistories
# If true, the base class of anonymous controllers will be inferred
# automatically. This will be the default behavior in future versions of
# rspec-rails.
config.infer_base_class_for_anonymous_controllers = false
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = "random"
# allow `should`
config.expect_with :rspec do |c|
c.syntax = [:should, :expect]
end
config.mock_with :rspec do |c|
c.syntax = [:should, :expect]
end
# rspec 2.99
config.infer_spec_type_from_file_location!
end
Clean up temporary data_dir after all spec finished
if ENV['RAILS_ENV'] == 'test'
require 'simplecov'
SimpleCov.start 'rails'
if ENV['CIRCLE_ARTIFACTS'] # https://circleci.com/docs/code-coverage
dir = File.join("..", "..", "..", ENV['CIRCLE_ARTIFACTS'], "coverage")
SimpleCov.coverage_dir(dir)
end
end
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'rspec/its'
require 'webmock/rspec'
WebMock.disable_net_connect!(allow_localhost: true)
require 'capybara/poltergeist'
Capybara.javascript_driver = :poltergeist
require 'capybara-screenshot/rspec'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
Dir[Rails.root.join("spec/*/shared_examples/**/*.rb")].each { |f| require f }
RSpec.configure do |config|
# ## Mock Framework
#
# If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
#
# config.mock_with :mocha
# config.mock_with :flexmock
# config.mock_with :rr
# Syntax sugar to use the FactoryGirl methods directly instead FactoryGirl.create ete.
config.include FactoryGirl::Syntax::Methods
config.include LoginMacro
config.include JavascriptMacro
config.include StubDaemon
config.include ConfigHistories
# If true, the base class of anonymous controllers will be inferred
# automatically. This will be the default behavior in future versions of
# rspec-rails.
config.infer_base_class_for_anonymous_controllers = false
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = "random"
# allow `should`
config.expect_with :rspec do |c|
c.syntax = [:should, :expect]
end
config.mock_with :rspec do |c|
c.syntax = [:should, :expect]
end
# rspec 2.99
config.infer_spec_type_from_file_location!
config.after(:all) do
FileUtils.rm_rf FluentdUI.data_dir
end
end
|
# encoding UTF-8
require 'rubygems'
require 'rspec'
require 'vcr'
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
ENV['INSTAGRAM_API_TOKEN'] = 'TEST-TOKEN-NOT-RELEVANT' unless ENV['INSTAGRAM_API_TOKEN']
require 'instagram_reporter'
RSpec.configure do |c|
c.order = "random"
c.tty = true
c.color = true
end
VCR.configure do |c|
c.configure_rspec_metadata!
c.cassette_library_dir = 'spec/cassettes'
c.hook_into :typhoeus
c.default_cassette_options = { :record => :new_episodes }
c.filter_sensitive_data('<API_TOKEN>') { ENV['INSTAGRAM_API_TOKEN'] }
end
Log output iff specs run in verbose mode
# encoding UTF-8
require 'rubygems'
require 'rspec'
require 'vcr'
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
ENV['INSTAGRAM_API_TOKEN'] = 'TEST-TOKEN-NOT-RELEVANT' unless ENV['INSTAGRAM_API_TOKEN']
require 'instagram_reporter'
RSpec.configure do |c|
c.order = "random"
c.tty = true
c.color = true
end
VCR.configure do |c|
c.configure_rspec_metadata!
c.cassette_library_dir = 'spec/cassettes'
c.hook_into :typhoeus
c.default_cassette_options = { :record => :new_episodes }
c.filter_sensitive_data('<API_TOKEN>') { ENV['INSTAGRAM_API_TOKEN'] }
end
def puts(*args); end unless ENV['VERBOSE']
|
require 'rspec/core'
require 'autotest/rspec2'
Dir['./spec/support/**/*.rb'].map {|f| require f}
class NullObject
private
def method_missing(method, *args, &block)
# ignore
end
end
def sandboxed(&block)
begin
@orig_config = RSpec.configuration
@orig_world = RSpec.world
new_config = RSpec::Core::Configuration.new
new_config.include(RSpec::Matchers)
new_world = RSpec::Core::World.new(new_config)
RSpec.instance_variable_set(:@configuration, new_config)
RSpec.instance_variable_set(:@world, new_world)
object = Object.new
object.extend(RSpec::Core::ObjectExtensions)
object.extend(RSpec::Core::SharedExampleGroup)
(class << RSpec::Core::ExampleGroup; self; end).class_eval do
alias_method :orig_run, :run
def run(reporter=nil)
@orig_mock_space = RSpec::Mocks::space
RSpec::Mocks::space = RSpec::Mocks::Space.new
orig_run(reporter || NullObject.new)
ensure
RSpec::Mocks::space = @orig_mock_space
end
end
object.instance_eval(&block)
ensure
(class << RSpec::Core::ExampleGroup; self; end).class_eval do
remove_method :run
alias_method :run, :orig_run
remove_method :orig_run
end
RSpec.instance_variable_set(:@configuration, @orig_config)
RSpec.instance_variable_set(:@world, @orig_world)
end
end
def in_editor?
ENV.has_key?('TM_MODE') || ENV.has_key?('EMACS') || ENV.has_key?('VIM')
end
RSpec.configure do |c|
c.color_enabled = !in_editor?
c.filter_run :focus => true
c.run_all_when_everything_filtered = true
c.filter_run_excluding :ruby => lambda {|version|
case version.to_s
when "!jruby"
RUBY_ENGINE != "jruby"
when /^> (.*)/
!(RUBY_VERSION.to_s > $1)
else
!(RUBY_VERSION.to_s =~ /^#{version.to_s}/)
end
}
c.around do |example|
sandboxed { example.run }
end
end
no need to include matchers in spec helper
require 'rspec/core'
require 'autotest/rspec2'
Dir['./spec/support/**/*.rb'].map {|f| require f}
class NullObject
private
def method_missing(method, *args, &block)
# ignore
end
end
def sandboxed(&block)
begin
@orig_config = RSpec.configuration
@orig_world = RSpec.world
new_config = RSpec::Core::Configuration.new
new_world = RSpec::Core::World.new(new_config)
RSpec.instance_variable_set(:@configuration, new_config)
RSpec.instance_variable_set(:@world, new_world)
object = Object.new
object.extend(RSpec::Core::ObjectExtensions)
object.extend(RSpec::Core::SharedExampleGroup)
(class << RSpec::Core::ExampleGroup; self; end).class_eval do
alias_method :orig_run, :run
def run(reporter=nil)
@orig_mock_space = RSpec::Mocks::space
RSpec::Mocks::space = RSpec::Mocks::Space.new
orig_run(reporter || NullObject.new)
ensure
RSpec::Mocks::space = @orig_mock_space
end
end
object.instance_eval(&block)
ensure
(class << RSpec::Core::ExampleGroup; self; end).class_eval do
remove_method :run
alias_method :run, :orig_run
remove_method :orig_run
end
RSpec.instance_variable_set(:@configuration, @orig_config)
RSpec.instance_variable_set(:@world, @orig_world)
end
end
def in_editor?
ENV.has_key?('TM_MODE') || ENV.has_key?('EMACS') || ENV.has_key?('VIM')
end
RSpec.configure do |c|
c.color_enabled = !in_editor?
c.filter_run :focus => true
c.run_all_when_everything_filtered = true
c.filter_run_excluding :ruby => lambda {|version|
case version.to_s
when "!jruby"
RUBY_ENGINE != "jruby"
when /^> (.*)/
!(RUBY_VERSION.to_s > $1)
else
!(RUBY_VERSION.to_s =~ /^#{version.to_s}/)
end
}
c.around do |example|
sandboxed { example.run }
end
end
|
$LOAD_PATH.unshift(File.dirname(__FILE__))
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require 'rubygems' rescue nil
require 'rhubarb/rhubarb'
require 'mrg/grid/config'
require 'enumerator'
require 'spec'
require 'spec/autorun'
Spec::Runner.configure do |config|
end
include Mrg::Grid::Config
def setup_rhubarb(kwargs=nil)
kwargs ||= {}
dbname = kwargs[:dbname] || ":memory:"
classes = kwargs[:classes] || (MAIN_DB_TABLES + SNAP_DB_TABLES)
Rhubarb::Persistence::open(dbname)
classes.each {|cl| cl.create_table}
end
def teardown_rhubarb
Rhubarb::Persistence::close
end
module DescribeGetterAndSetter
def describe_getter_and_setter(setmsg, getmsg, values, key=nil)
key ||= @gskey
param = @store.send(@add_msg, key)
values.each do |val|
param = @store.send(@find_msg, key)
param.send(setmsg, val)
param = @store.send(@find_msg, key)
param.send(getmsg).should == val
end
end
end
Disabled prepared statements in test suite; they don't currently play well with temporary tables.
$LOAD_PATH.unshift(File.dirname(__FILE__))
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require 'rubygems' rescue nil
require 'rhubarb/rhubarb'
require 'mrg/grid/config'
require 'enumerator'
require 'spec'
require 'spec/autorun'
Spec::Runner.configure do |config|
end
include Mrg::Grid::Config
def setup_rhubarb(kwargs=nil)
kwargs ||= {}
dbname = kwargs[:dbname] || ":memory:"
classes = kwargs[:classes] || (MAIN_DB_TABLES + SNAP_DB_TABLES)
Rhubarb::Persistence::open(dbname, :default, false)
classes.each {|cl| cl.create_table}
end
def teardown_rhubarb
Rhubarb::Persistence::close
end
module DescribeGetterAndSetter
def describe_getter_and_setter(setmsg, getmsg, values, key=nil)
key ||= @gskey
param = @store.send(@add_msg, key)
values.each do |val|
param = @store.send(@find_msg, key)
param.send(setmsg, val)
param = @store.send(@find_msg, key)
param.send(getmsg).should == val
end
end
end
|
# encoding: utf-8
$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$:.unshift(File.dirname(__FILE__))
require 'rspec'
require 'rails/all'
require 'js-routes'
require "active_support/core_ext/hash/slice"
require 'coffee-script'
if defined?(JRUBY_VERSION)
require 'rhino'
def jscontext
@context ||= Rhino::Context.new
end
def js_error_class
Rhino::JSError
end
else
require "v8"
def jscontext
@context ||= V8::Context.new
end
def js_error_class
V8::JSError
end
end
def evaljs(string)
jscontext.eval(string)
end
def routes
App.routes.url_helpers
end
def blog_routes
BlogEngine::Engine.routes.url_helpers
end
module BlogEngine
class Engine < Rails::Engine
isolate_namespace BlogEngine
end
end
class App < Rails::Application
# Enable the asset pipeline
config.assets.enabled = true
# initialize_on_precompile
config.assets.initialize_on_precompile = true
end
# Defines the full set of routes the specs generate JS helpers for,
# covering engines, nesting, namespaces, scopes, globs, optional
# segments, format constraints and non-ASCII paths.
def draw_routes
  BlogEngine::Engine.routes.draw do
    resources :posts
  end
  App.routes.draw do
    # Deeply nested resources.
    resources :inboxes do
      resources :messages do
        resources :attachments
      end
    end
    root :to => "inboxes#index"
    namespace :admin do
      resources :users
    end
    # Scope with a required dynamic segment.
    scope "/returns/:return" do
      resources :objects
    end
    resources :returns
    # Scope with an optional dynamic segment.
    scope "(/optional/:optional_id)" do
      resources :things
    end
    get "/other_optional/(:optional_id)" => "foo#foo", :as => :foo
    # Glob ("*section") routes, before and after the named segment.
    get 'books/*section/:title' => 'books#show', :as => :book
    get 'books/:title/*section' => 'books#show', :as => :book_title
    mount BlogEngine::Engine => "/blog", :as => :blog_app
    # Format handling: suppressed entirely, and constrained to json.
    get '/no_format' => "foo#foo", :format => false, :as => :no_format
    get '/json_only' => "foo#foo", :format => true, :constraints => {:format => /json/}, :as => :json_only
    # Non-ASCII route path.
    get '/привет' => "foo#foo", :as => :hello
  end
end
# prevent warning
Rails.configuration.active_support.deprecation = :log
# Requires supporting files with custom matchers and macros, etc,
# in ./support/ and its subdirectories.
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each {|f| require f}
RSpec.configure do |config|
config.before(:each) do
evaljs("var window = this;")
jscontext[:log] = lambda {|context, value| puts value.inspect}
end
config.before(:all) do
# compile all js files begin
Dir["#{File.expand_path(File.join(File.dirname(__FILE__), "..", "lib"))}/**/*.coffee"].each do |coffee|
File.open(coffee.gsub(/\.coffee$/, ""), 'w') {|f| f.write(CoffeeScript.compile(File.read(coffee))) }
end
# compile all js files end
draw_routes
end
end
DRY solution: extract the duplicated JS engine setup into a shared JS_LIB_CLASS constant
# encoding: utf-8
$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$:.unshift(File.dirname(__FILE__))
require 'rspec'
require 'rails/all'
require 'js-routes'
require "active_support/core_ext/hash/slice"
require 'coffee-script'
# Pick the JS engine once: Rhino on JRuby, V8 elsewhere. Both gems expose
# a Context class and a JSError class under the same constant names.
if defined?(JRUBY_VERSION)
  require 'rhino'
  JS_LIB_CLASS = Rhino
else
  require "v8"
  JS_LIB_CLASS = V8
end
# Lazily-built, memoized evaluation context for the selected engine.
def jscontext
  @context ||= JS_LIB_CLASS::Context.new
end

# Exception class the selected engine raises for JavaScript errors.
def js_error_class
  JS_LIB_CLASS::JSError
end

# Evaluates a JavaScript string in the shared engine context.
def evaljs(string)
  jscontext.eval(string)
end

# URL helpers for the dummy application's routes.
def routes
  App.routes.url_helpers
end

# URL helpers for the mounted BlogEngine's routes.
def blog_routes
  BlogEngine::Engine.routes.url_helpers
end
module BlogEngine
class Engine < Rails::Engine
isolate_namespace BlogEngine
end
end
class App < Rails::Application
# Enable the asset pipeline
config.assets.enabled = true
# initialize_on_precompile
config.assets.initialize_on_precompile = true
end
def draw_routes
BlogEngine::Engine.routes.draw do
resources :posts
end
App.routes.draw do
resources :inboxes do
resources :messages do
resources :attachments
end
end
root :to => "inboxes#index"
namespace :admin do
resources :users
end
scope "/returns/:return" do
resources :objects
end
resources :returns
scope "(/optional/:optional_id)" do
resources :things
end
get "/other_optional/(:optional_id)" => "foo#foo", :as => :foo
get 'books/*section/:title' => 'books#show', :as => :book
get 'books/:title/*section' => 'books#show', :as => :book_title
mount BlogEngine::Engine => "/blog", :as => :blog_app
get '/no_format' => "foo#foo", :format => false, :as => :no_format
get '/json_only' => "foo#foo", :format => true, :constraints => {:format => /json/}, :as => :json_only
get '/привет' => "foo#foo", :as => :hello
end
end
# prevent warning
Rails.configuration.active_support.deprecation = :log
# Requires supporting files with custom matchers and macros, etc,
# in ./support/ and its subdirectories.
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each {|f| require f}
RSpec.configure do |config|
config.before(:each) do
evaljs("var window = this;")
jscontext[:log] = lambda {|context, value| puts value.inspect}
end
config.before(:all) do
# compile all js files begin
Dir["#{File.expand_path(File.join(File.dirname(__FILE__), "..", "lib"))}/**/*.coffee"].each do |coffee|
File.open(coffee.gsub(/\.coffee$/, ""), 'w') {|f| f.write(CoffeeScript.compile(File.read(coffee))) }
end
# compile all js files end
draw_routes
end
end
|
# Gems from the gemspec.
require 'webmock'
require 'webmock/rspec'
require 'rspec/wait'
require 'rack'
require 'rack/test'
require 'rake'
require 'airbrake'
require 'airbrake/rake/tasks'
Airbrake.configure do |c|
c.project_id = 113743
c.project_key = 'fd04e13d806a90f96614ad8e529b2822'
c.logger = Logger.new('/dev/null')
c.app_version = '1.2.3'
c.workers = 5
end
RSpec.configure do |c|
c.order = 'random'
c.color = true
c.disable_monkey_patching!
c.wait_timeout = 3
c.include Rack::Test::Methods
end
# Load integration tests only when they're run through appraisals.
if ENV['APPRAISAL_INITIALIZED']
# Gems from appraisals that every application uses.
require 'warden'
# Load a Rails app or skip.
begin
ENV['RAILS_ENV'] = 'test'
if RUBY_ENGINE == 'jruby'
require 'activerecord-jdbcsqlite3-adapter'
else
require 'sqlite3'
end
require 'rails'
rails_vsn = Gem::Version.new(Rails.version)
ENV['DATABASE_URL'] = if rails_vsn <= Gem::Version.new('4.2')
'sqlite3:///:memory:'
else
'sqlite3::memory:'
end
require 'action_controller'
require 'action_view'
require 'action_view/testing/resolvers'
require 'active_record/railtie'
if rails_vsn >= Gem::Version.new('4.2')
require 'active_job'
# Silence logger.
ActiveJob::Base.logger.level = 99
end
require 'resque'
require 'resque_spec'
require 'airbrake/resque/failure'
Resque::Failure.backend = Resque::Failure::Airbrake
require 'delayed_job'
require 'delayed_job_active_record'
require 'airbrake/delayed_job/plugin'
Delayed::Worker.delay_jobs = false
require 'airbrake/rails/railtie'
load 'apps/rails/dummy_task.rake'
require 'apps/rails/dummy_app'
rescue LoadError
puts '** Skipped Rails specs'
end
# Load a Rack app or skip.
begin
# Don't load the Rack app since we want to test Sinatra if it's loaded.
raise LoadError if defined?(Sinatra)
require 'apps/rack/dummy_app'
rescue LoadError
puts '** Skipped Rack specs'
end
end
# Make sure tests that use async requests fail.
Thread.abort_on_exception = true
AirbrakeTestError = Class.new(StandardError)
# Print header with versions information. This simplifies debugging of build
# failures on CircleCI.
versions = <<EOS
#{'#' * 80}
# RUBY_VERSION: #{RUBY_VERSION}
# RUBY_ENGINE: #{RUBY_ENGINE}
EOS
versions << "# JRUBY_VERSION #{JRUBY_VERSION}\n" if defined?(JRUBY_VERSION)
versions << "# Rails version: #{Rails.version}\n" if defined?(Rails)
versions << "# Rack release: #{Rack.release}\n"
versions << '#' * 80
puts versions
spec: require the mistakenly forgotten pry dependency
# Gems from the gemspec.
require 'webmock'
require 'webmock/rspec'
require 'rspec/wait'
require 'rack'
require 'rack/test'
require 'rake'
require 'pry'
require 'airbrake'
require 'airbrake/rake/tasks'
Airbrake.configure do |c|
c.project_id = 113743
c.project_key = 'fd04e13d806a90f96614ad8e529b2822'
c.logger = Logger.new('/dev/null')
c.app_version = '1.2.3'
c.workers = 5
end
RSpec.configure do |c|
c.order = 'random'
c.color = true
c.disable_monkey_patching!
c.wait_timeout = 3
c.include Rack::Test::Methods
end
# Load integration tests only when they're run through appraisals.
if ENV['APPRAISAL_INITIALIZED']
# Gems from appraisals that every application uses.
require 'warden'
# Load a Rails app or skip.
begin
ENV['RAILS_ENV'] = 'test'
if RUBY_ENGINE == 'jruby'
require 'activerecord-jdbcsqlite3-adapter'
else
require 'sqlite3'
end
require 'rails'
rails_vsn = Gem::Version.new(Rails.version)
ENV['DATABASE_URL'] = if rails_vsn <= Gem::Version.new('4.2')
'sqlite3:///:memory:'
else
'sqlite3::memory:'
end
require 'action_controller'
require 'action_view'
require 'action_view/testing/resolvers'
require 'active_record/railtie'
if rails_vsn >= Gem::Version.new('4.2')
require 'active_job'
# Silence logger.
ActiveJob::Base.logger.level = 99
end
require 'resque'
require 'resque_spec'
require 'airbrake/resque/failure'
Resque::Failure.backend = Resque::Failure::Airbrake
require 'delayed_job'
require 'delayed_job_active_record'
require 'airbrake/delayed_job/plugin'
Delayed::Worker.delay_jobs = false
require 'airbrake/rails/railtie'
load 'apps/rails/dummy_task.rake'
require 'apps/rails/dummy_app'
rescue LoadError
puts '** Skipped Rails specs'
end
# Load a Rack app or skip.
begin
# Don't load the Rack app since we want to test Sinatra if it's loaded.
raise LoadError if defined?(Sinatra)
require 'apps/rack/dummy_app'
rescue LoadError
puts '** Skipped Rack specs'
end
end
# Make sure tests that use async requests fail.
Thread.abort_on_exception = true
AirbrakeTestError = Class.new(StandardError)
# Print header with versions information. This simplifies debugging of build
# failures on CircleCI.
versions = <<EOS
#{'#' * 80}
# RUBY_VERSION: #{RUBY_VERSION}
# RUBY_ENGINE: #{RUBY_ENGINE}
EOS
versions << "# JRUBY_VERSION #{JRUBY_VERSION}\n" if defined?(JRUBY_VERSION)
versions << "# Rails version: #{Rails.version}\n" if defined?(Rails)
versions << "# Rack release: #{Rack.release}\n"
versions << '#' * 80
puts versions
|
require 'rspec'
require_relative '../lib/messagemedia-soap'
Fix require path in spec_helper
require 'rspec'
require_relative '../lib/messagemedia-ruby'
|
$:.unshift File.expand_path(File.join(__dir__, '..', 'lib'))
require 'fluent/test'
require 'fluent/plugin/out_jubatus'
require 'fluent/plugin/jubatus'
Add test coverage reporting with SimpleCov
require 'simplecov'
SimpleCov.start do
add_filter 'spec'
end
$:.unshift File.expand_path(File.join(__dir__, '..', 'lib'))
require 'fluent/test'
require 'fluent/plugin/out_jubatus'
require 'fluent/plugin/jubatus'
|
require 'rubygems'
require 'bundler/setup'
require 'openfire_admin'
require 'fakeweb'
# Spec helpers for stubbing the admin console's HTTP endpoints with FakeWeb
# and asserting on the requests the client issued.
module FakeWebHelper
  # Absolute URL on the local admin console for the given path.
  def path_of(url)
    "http://localhost:9090#{url}"
  end

  # Filesystem path of a fixture file under spec/fixtures.
  def fixture(path)
    File.join(File.dirname(__FILE__), "fixtures", path)
  end

  # Contents of a fixture file.
  # Fixed: was `open(fixture(path)).read`, which leaked the file handle and
  # used Kernel#open (which can spawn a subprocess for "|"-prefixed paths).
  def read_fixture(path)
    File.read(fixture(path))
  end

  # FakeWeb response options describing a 302 redirect to +path+.
  def redirect_to(path)
    {
      :status => ["302"],
      :location => path
    }
  end

  # Registers a stubbed response. A +url+ starting with "/" is expanded via
  # path_of; a String +option+ starting with "/" is treated as a fixture
  # path, any other String as a literal response body.
  def register_uri(method, url, option)
    url = path_of(url) if url =~ %r[^/]
    if option.is_a?(String)
      if option =~ %r[^/]
        option = { :body => read_fixture(option) }
      else
        option = { :body => option }
      end
    end
    FakeWeb.register_uri(method, url, option)
  end

  # Decoded form fields of the most recent request body.
  def get_last_form
    Hash[URI.decode_www_form(FakeWeb.last_request.body)]
  end

  # Asserts the last request was a POST containing +form_values+; on failure
  # re-raises with a per-key diff to ease debugging.
  def last_post_is form_values
    FakeWeb.last_request.should be_a_kind_of(Net::HTTP::Post)
    begin
      ex = get_last_form
      ex.should include(form_values)
    rescue RSpec::Expectations::ExpectationNotMetError => e
      ret = [e.message]
      form_values.keys.each{|k|
        unless form_values[k]==ex[k]
          if ex[k].nil?
            ret << "#{k}: '#{form_values[k]}' != nil"
          else
            ret << "#{k}: '#{form_values[k]}' != '#{ex[k]}'"
          end
        end
      }
      raise RSpec::Expectations::ExpectationNotMetError.new( "#{ret.join("\n")}")
    end
  end

  # Runs the block with a clean FakeWeb registry and network access disabled.
  def access
    FakeWeb.clean_registry
    FakeWeb.last_request = nil
    FakeWeb.allow_net_connect = false
    ret = yield
    FakeWeb.clean_registry
    ret
  end

  # Stubs a GET at +path+ and asserts the block actually issued a GET.
  def expect_get(path, option)
    access{
      register_uri(:get, path, option)
      ret = yield
      FakeWeb.last_request.should be_a_kind_of(Net::HTTP::Get)
      ret
    }
  end

  # Stubs a POST at +path+ and asserts the block posted +form_values+.
  def expect_post(path, form_values, option)
    access{
      register_uri(:post, path, option)
      ret = yield
      last_post_is form_values
      ret
    }
  end

  # Registers each [method, url, option] triple, then runs the block.
  def expect_requests(expects)
    access{
      expects.each{|ex| register_uri(*ex) }
      yield
    }
  end
end
RSpec.configure do |config|
config.include FakeWebHelper
# some (optional) config here
end
Add Coveralls coverage reporting
require 'rubygems'
require 'bundler/setup'
require 'openfire_admin'
require 'fakeweb'
require 'coveralls'
Coveralls.wear!
# Spec helpers for stubbing the admin console's HTTP endpoints with FakeWeb
# and asserting on the requests the client issued.
module FakeWebHelper
  # Absolute URL on the local admin console for the given path.
  def path_of(url)
    "http://localhost:9090#{url}"
  end

  # Filesystem path of a fixture file under spec/fixtures.
  def fixture(path)
    File.join(File.dirname(__FILE__), "fixtures", path)
  end

  # Contents of a fixture file.
  # Fixed: was `open(fixture(path)).read`, which leaked the file handle and
  # used Kernel#open (which can spawn a subprocess for "|"-prefixed paths).
  def read_fixture(path)
    File.read(fixture(path))
  end

  # FakeWeb response options describing a 302 redirect to +path+.
  def redirect_to(path)
    {
      :status => ["302"],
      :location => path
    }
  end

  # Registers a stubbed response. A +url+ starting with "/" is expanded via
  # path_of; a String +option+ starting with "/" is treated as a fixture
  # path, any other String as a literal response body.
  def register_uri(method, url, option)
    url = path_of(url) if url =~ %r[^/]
    if option.is_a?(String)
      if option =~ %r[^/]
        option = { :body => read_fixture(option) }
      else
        option = { :body => option }
      end
    end
    FakeWeb.register_uri(method, url, option)
  end

  # Decoded form fields of the most recent request body.
  def get_last_form
    Hash[URI.decode_www_form(FakeWeb.last_request.body)]
  end

  # Asserts the last request was a POST containing +form_values+; on failure
  # re-raises with a per-key diff to ease debugging.
  def last_post_is form_values
    FakeWeb.last_request.should be_a_kind_of(Net::HTTP::Post)
    begin
      ex = get_last_form
      ex.should include(form_values)
    rescue RSpec::Expectations::ExpectationNotMetError => e
      ret = [e.message]
      form_values.keys.each{|k|
        unless form_values[k]==ex[k]
          if ex[k].nil?
            ret << "#{k}: '#{form_values[k]}' != nil"
          else
            ret << "#{k}: '#{form_values[k]}' != '#{ex[k]}'"
          end
        end
      }
      raise RSpec::Expectations::ExpectationNotMetError.new( "#{ret.join("\n")}")
    end
  end

  # Runs the block with a clean FakeWeb registry and network access disabled.
  def access
    FakeWeb.clean_registry
    FakeWeb.last_request = nil
    FakeWeb.allow_net_connect = false
    ret = yield
    FakeWeb.clean_registry
    ret
  end

  # Stubs a GET at +path+ and asserts the block actually issued a GET.
  def expect_get(path, option)
    access{
      register_uri(:get, path, option)
      ret = yield
      FakeWeb.last_request.should be_a_kind_of(Net::HTTP::Get)
      ret
    }
  end

  # Stubs a POST at +path+ and asserts the block posted +form_values+.
  def expect_post(path, form_values, option)
    access{
      register_uri(:post, path, option)
      ret = yield
      last_post_is form_values
      ret
    }
  end

  # Registers each [method, url, option] triple, then runs the block.
  def expect_requests(expects)
    access{
      expects.each{|ex| register_uri(*ex) }
      yield
    }
  end
end
RSpec.configure do |config|
config.include FakeWebHelper
end
|
require 'rspec'
require 'spidr/version'
include Spidr
Reset Webmock after each spec.
require 'rspec'
require 'webmock/rspec'
require 'spidr/version'
include Spidr
RSpec.configure do |config|
config.after { WebMock.reset! }
end
|
if RUBY_VERSION > '1.9' && RUBY_VERSION < '2.2'
require 'simplecov'
require 'coveralls'
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start do
# report groups
add_group 'Wire Protocol', 'lib/mongo/protocol'
# filters
add_filter 'tasks'
add_filter 'spec'
add_filter 'bin'
end
end
require 'mongo'
require 'support/helpers'
require 'support/matchers'
require 'support/monitoring'
require 'support/cluster_simulator'
Mongo::Logger.logger = Logger.new($stdout, Logger::DEBUG)
# Mongo::Logger.logger = Logger.new(StringIO.new, Logger::DEBUG)
RSpec.configure do |config|
config.color = true
config.fail_fast = true unless ENV['CI'] || ENV['JENKINS_HOME']
config.formatter = 'documentation'
config.include Helpers
config.include ClusterSimulator::Helpers
ClusterSimulator.configure(config)
config.after do
Mongo::Server::Monitor.threads.each do |object_id, thread|
thread.kill
end
end
config.before(:suite) do
`mongo ruby-driver ./spec/support/users_24.js`
`mongo ruby-driver ./spec/support/users_26.js`
end
end
TEST_DB = 'ruby-driver'
TEST_COLL = 'test'
TEST_SET = 'ruby-driver-rs'
COVERAGE_MIN = 90
# require all shared examples
Dir['./spec/support/shared/*.rb'].sort.each { |file| require file }
Print the output of the user-addition setup scripts for debugging
if RUBY_VERSION > '1.9' && RUBY_VERSION < '2.2'
require 'simplecov'
require 'coveralls'
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start do
# report groups
add_group 'Wire Protocol', 'lib/mongo/protocol'
# filters
add_filter 'tasks'
add_filter 'spec'
add_filter 'bin'
end
end
require 'mongo'
require 'support/helpers'
require 'support/matchers'
require 'support/monitoring'
require 'support/cluster_simulator'
Mongo::Logger.logger = Logger.new($stdout, Logger::DEBUG)
# Mongo::Logger.logger = Logger.new(StringIO.new, Logger::DEBUG)
RSpec.configure do |config|
config.color = true
config.fail_fast = true unless ENV['CI'] || ENV['JENKINS_HOME']
config.formatter = 'documentation'
config.include Helpers
config.include ClusterSimulator::Helpers
ClusterSimulator.configure(config)
config.after do
Mongo::Server::Monitor.threads.each do |object_id, thread|
thread.kill
end
end
config.before(:suite) do
p `mongo ruby-driver ./spec/support/users_24.js`
p `mongo ruby-driver ./spec/support/users_26.js`
end
end
TEST_DB = 'ruby-driver'
TEST_COLL = 'test'
TEST_SET = 'ruby-driver-rs'
COVERAGE_MIN = 90
# require all shared examples
Dir['./spec/support/shared/*.rb'].sort.each { |file| require file }
|
require 'rubygems'
require 'simplecov'
SimpleCov.start do
add_filter "/spec"
add_filter "/features"
# internet_connection mostly contains logic copied from the ruby 1.8.7
# stdlib for which I haven't written tests.
add_filter "internet_connection"
end
SimpleCov.at_exit do
File.open(File.join(SimpleCov.coverage_path, 'coverage_percent.txt'), 'w') do |f|
f.write SimpleCov.result.covered_percent
end
SimpleCov.result.format!
end
using_git = File.exist?(File.expand_path('../../.git/', __FILE__))
if using_git
require 'bundler'
Bundler.setup
end
require 'rspec'
Dir['./spec/support/**/*.rb'].each { |f| require f }
require 'vcr'
require 'monkey_patches'
module VCR
  # Root directory of the spec suite.
  SPEC_ROOT = File.dirname(__FILE__)

  # Resets VCR's singleton state between examples by nilling every instance
  # variable, then re-attaching the given HTTP stubbing hook (pass a falsy
  # hook to leave VCR unhooked).
  def reset!(hook = :fakeweb)
    instance_variables.each do |ivar|
      instance_variable_set(ivar, nil)
    end
    configuration.hook_into hook if hook
  end
end
RSpec.configure do |config|
config.order = :rand
config.color_enabled = true
config.debug = (using_git && RUBY_INTERPRETER == :mri && !%w[ 1.9.3 ].include?(RUBY_VERSION) && !ENV['CI'])
config.treat_symbols_as_metadata_keys_with_true_values = true
tmp_dir = File.expand_path('../../tmp/cassette_library_dir', __FILE__)
config.before(:each) do
unless example.metadata[:skip_vcr_reset]
VCR.reset!
VCR.configuration.cassette_library_dir = tmp_dir
end
end
config.after(:each) do
FileUtils.rm_rf tmp_dir
end
config.before(:all, :disable_warnings => true) do
@orig_std_err = $stderr
$stderr = StringIO.new
end
config.after(:all, :disable_warnings => true) do
$stderr = @orig_std_err
end
config.filter_run :focus => true
config.run_all_when_everything_filtered = true
config.alias_it_should_behave_like_to :it_performs, 'it performs'
end
VCR::SinatraApp.boot
Only uses simplecov on MRI 1.9.
Rubinius in 1.9 mode has problems with it:
https://github.com/rubinius/rubinius/issues/1575
require 'rubygems'
if RUBY_VERSION =~ /1.9/ && RUBY_ENGINE == 'ruby'
require 'simplecov'
SimpleCov.start do
add_filter "/spec"
add_filter "/features"
# internet_connection mostly contains logic copied from the ruby 1.8.7
# stdlib for which I haven't written tests.
add_filter "internet_connection"
end
SimpleCov.at_exit do
File.open(File.join(SimpleCov.coverage_path, 'coverage_percent.txt'), 'w') do |f|
f.write SimpleCov.result.covered_percent
end
SimpleCov.result.format!
end
end
using_git = File.exist?(File.expand_path('../../.git/', __FILE__))
if using_git
require 'bundler'
Bundler.setup
end
require 'rspec'
Dir['./spec/support/**/*.rb'].each { |f| require f }
require 'vcr'
require 'monkey_patches'
module VCR
  # Root directory of the spec suite.
  SPEC_ROOT = File.dirname(__FILE__)

  # Resets VCR's singleton state between examples by nilling every instance
  # variable, then re-attaching the given HTTP stubbing hook (pass a falsy
  # hook to leave VCR unhooked).
  def reset!(hook = :fakeweb)
    instance_variables.each do |ivar|
      instance_variable_set(ivar, nil)
    end
    configuration.hook_into hook if hook
  end
end
RSpec.configure do |config|
config.order = :rand
config.color_enabled = true
config.debug = (using_git && RUBY_INTERPRETER == :mri && !%w[ 1.9.3 ].include?(RUBY_VERSION) && !ENV['CI'])
config.treat_symbols_as_metadata_keys_with_true_values = true
tmp_dir = File.expand_path('../../tmp/cassette_library_dir', __FILE__)
config.before(:each) do
unless example.metadata[:skip_vcr_reset]
VCR.reset!
VCR.configuration.cassette_library_dir = tmp_dir
end
end
config.after(:each) do
FileUtils.rm_rf tmp_dir
end
config.before(:all, :disable_warnings => true) do
@orig_std_err = $stderr
$stderr = StringIO.new
end
config.after(:all, :disable_warnings => true) do
$stderr = @orig_std_err
end
config.filter_run :focus => true
config.run_all_when_everything_filtered = true
config.alias_it_should_behave_like_to :it_performs, 'it performs'
end
VCR::SinatraApp.boot
|
if RUBY_VERSION > '1.9'
require 'simplecov'
require 'coveralls'
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start do
add_filter "/spec/"
end
end
require 'rubygems'
require 'bundler/setup'
require 'health_inspector'
# Helpers mixed into the examples: locate the fixture chef-repo and build a
# stubbed context object pointing at it.
module HealthInspector::SpecHelpers
  # Absolute path to the fixture chef-repo shipped with the specs (memoized).
  def repo_path
    @repo_path ||= File.expand_path("../chef-repo", __FILE__)
  end

  # Stubbed context exposing the repo path and cookbook search path (memoized).
  def health_inspector_context
    @health_inspector_context ||=
      double(
        :repo_path => repo_path,
        :cookbook_path => ["#{repo_path}/cookbooks"]
      )
  end
end
RSpec.configure do |c|
c.include HealthInspector::SpecHelpers
end
shared_examples "a chef model" do
let(:pairing) { described_class.new(health_inspector_context, :name => "dummy") }
it "should detect if an item does not exist locally" do
pairing.server = {}
pairing.local = nil
pairing.validate
pairing.errors.should_not be_empty
pairing.errors.first.should == "exists on server but not locally"
end
it "should detect if an item does not exist on server" do
pairing.server = nil
pairing.local = {}
pairing.validate
pairing.errors.should_not be_empty
pairing.errors.first.should == "exists locally but not on server"
end
it "should detect if an item does not exist locally or on server" do
pairing.server = nil
pairing.local = nil
pairing.validate
pairing.errors.to_a.should == ["does not exist locally or on server"]
end
end
shared_examples "a chef model that can be represented in json" do
let(:pairing) { described_class.new(health_inspector_context, :name => "dummy") }
it "should detect if an item is different" do
pairing.server = {"foo" => "bar"}
pairing.local = {"foo" => "baz"}
pairing.validate
pairing.errors.should_not be_empty
pairing.errors.first.should == {"foo"=>{"server"=>"bar", "local"=>"baz"}}
end
it "should detect if a nested hash is different" do
pairing.server = {"foo" => {"bar" => {"fizz" => "buzz"}}}
pairing.local = {"foo" => {"baz" => {"fizz" => "buzz"}}}
pairing.validate
pairing.errors.should_not be_empty
expected_errors = {
"foo" => {
"bar" => { "server" => {"fizz" => "buzz"}, "local" => nil },
"baz" => { "server" => nil, "local" => {"fizz" => "buzz"} }
}
}
pairing.errors.first.should == expected_errors
end
it "should detect if an item is the same" do
pairing.server = {"foo" => "bar"}
pairing.local = {"foo" => "bar"}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if an string and symbol keys convert to the same values" do
pairing.server = {"foo" => "bar"}
pairing.local = {:foo => "bar"}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if matching hashes are the same" do
pairing.server = {"foo" => {"bar" => "fizz"}}
pairing.local = {"foo" => {"bar" => "fizz"}}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if matching hashes with mismatched symbols and keys are the same" do
pairing.server = {"foo" => {"bar" => "fizz"}}
pairing.local = {:foo => {:bar => "fizz"}}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if matching arrays are the same" do
pairing.server = {"foo" => ["bar", "fizz"]}
pairing.local = {"foo" => ["bar", "fizz"]}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if matching arrays with hashes are the same" do
pairing.server = {"foo" => ["bar", {"fizz" => "buzz"}]}
pairing.local = {"foo" => ["bar", {"fizz" => "buzz"}]}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if matching arrays with hashes containing symbols/strings are the same" do
pairing.server = {"foo" => ["bar", {"fizz" => "buzz"}]}
pairing.local = {"foo" => ["bar", {:fizz => "buzz"}]}
pairing.validate
pairing.errors.should be_empty
end
end
shared_examples 'a knife inspect runner' do
describe '#run' do
context 'when passing an item as an argument' do
let :inspect_runner do
described_class.new ['some_item']
end
let :validator do
double
end
let :item do
double
end
it 'inspects this item' do
expect(checklist).to receive(:new).
with(inspect_runner).
and_return validator
expect(validator).to receive(:load_item).
with('some_item').and_return item
expect(validator).to receive(:validate_item).
with(item).and_return true
expect(inspect_runner).to receive(:exit).with true
inspect_runner.run
end
end
context 'when not passing arguments' do
let :inspect_runner do
described_class.new
end
it 'inspects all the items' do
expect(checklist).to receive(:run).
with(inspect_runner).
and_return true
expect(inspect_runner).to receive(:exit).with true
inspect_runner.run
end
end
end
end
Ignore vendor directory in simplecov
if RUBY_VERSION > '1.9'
require 'simplecov'
require 'coveralls'
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start do
add_filter '/spec/'
add_filter '/vendor/'
end
end
require 'rubygems'
require 'bundler/setup'
require 'health_inspector'
# Helpers mixed into the examples: locate the fixture chef-repo and build a
# stubbed context object pointing at it.
module HealthInspector::SpecHelpers
  # Absolute path to the fixture chef-repo shipped with the specs (memoized).
  def repo_path
    @repo_path ||= File.expand_path("../chef-repo", __FILE__)
  end

  # Stubbed context exposing the repo path and cookbook search path (memoized).
  def health_inspector_context
    @health_inspector_context ||=
      double(
        :repo_path => repo_path,
        :cookbook_path => ["#{repo_path}/cookbooks"]
      )
  end
end
RSpec.configure do |c|
c.include HealthInspector::SpecHelpers
end
shared_examples "a chef model" do
let(:pairing) { described_class.new(health_inspector_context, :name => "dummy") }
it "should detect if an item does not exist locally" do
pairing.server = {}
pairing.local = nil
pairing.validate
pairing.errors.should_not be_empty
pairing.errors.first.should == "exists on server but not locally"
end
it "should detect if an item does not exist on server" do
pairing.server = nil
pairing.local = {}
pairing.validate
pairing.errors.should_not be_empty
pairing.errors.first.should == "exists locally but not on server"
end
it "should detect if an item does not exist locally or on server" do
pairing.server = nil
pairing.local = nil
pairing.validate
pairing.errors.to_a.should == ["does not exist locally or on server"]
end
end
shared_examples "a chef model that can be represented in json" do
let(:pairing) { described_class.new(health_inspector_context, :name => "dummy") }
it "should detect if an item is different" do
pairing.server = {"foo" => "bar"}
pairing.local = {"foo" => "baz"}
pairing.validate
pairing.errors.should_not be_empty
pairing.errors.first.should == {"foo"=>{"server"=>"bar", "local"=>"baz"}}
end
it "should detect if a nested hash is different" do
pairing.server = {"foo" => {"bar" => {"fizz" => "buzz"}}}
pairing.local = {"foo" => {"baz" => {"fizz" => "buzz"}}}
pairing.validate
pairing.errors.should_not be_empty
expected_errors = {
"foo" => {
"bar" => { "server" => {"fizz" => "buzz"}, "local" => nil },
"baz" => { "server" => nil, "local" => {"fizz" => "buzz"} }
}
}
pairing.errors.first.should == expected_errors
end
it "should detect if an item is the same" do
pairing.server = {"foo" => "bar"}
pairing.local = {"foo" => "bar"}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if an string and symbol keys convert to the same values" do
pairing.server = {"foo" => "bar"}
pairing.local = {:foo => "bar"}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if matching hashes are the same" do
pairing.server = {"foo" => {"bar" => "fizz"}}
pairing.local = {"foo" => {"bar" => "fizz"}}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if matching hashes with mismatched symbols and keys are the same" do
pairing.server = {"foo" => {"bar" => "fizz"}}
pairing.local = {:foo => {:bar => "fizz"}}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if matching arrays are the same" do
pairing.server = {"foo" => ["bar", "fizz"]}
pairing.local = {"foo" => ["bar", "fizz"]}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if matching arrays with hashes are the same" do
pairing.server = {"foo" => ["bar", {"fizz" => "buzz"}]}
pairing.local = {"foo" => ["bar", {"fizz" => "buzz"}]}
pairing.validate
pairing.errors.should be_empty
end
it "should detect if matching arrays with hashes containing symbols/strings are the same" do
pairing.server = {"foo" => ["bar", {"fizz" => "buzz"}]}
pairing.local = {"foo" => ["bar", {:fizz => "buzz"}]}
pairing.validate
pairing.errors.should be_empty
end
end
shared_examples 'a knife inspect runner' do
describe '#run' do
context 'when passing an item as an argument' do
let :inspect_runner do
described_class.new ['some_item']
end
let :validator do
double
end
let :item do
double
end
it 'inspects this item' do
expect(checklist).to receive(:new).
with(inspect_runner).
and_return validator
expect(validator).to receive(:load_item).
with('some_item').and_return item
expect(validator).to receive(:validate_item).
with(item).and_return true
expect(inspect_runner).to receive(:exit).with true
inspect_runner.run
end
end
context 'when not passing arguments' do
let :inspect_runner do
described_class.new
end
it 'inspects all the items' do
expect(checklist).to receive(:run).
with(inspect_runner).
and_return true
expect(inspect_runner).to receive(:exit).with true
inspect_runner.run
end
end
end
end
|
# frozen_string_literal: true
#
require 'simplecov'
SimpleCov.start 'rails'
SimpleCov.start do
add_filter '/config/'
add_filter '/spec/'
refuse_coverage_drop if ENV['CI']
end
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'rspec/rails'
require 'devise'
require 'factory_bot_rails'
ActiveRecord::Migration.maintain_test_schema!
Capybara.default_driver = :selenium
RSpec.configure do |config|
config.order = :random
config.include FactoryBot::Syntax::Methods
config.include Devise::Test::ControllerHelpers, type: :controller
config.include Devise::Test::IntegrationHelpers, type: :system
config.before :all do
FactoryBot.reload
end
config.expect_with :rspec do |expectations|
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
config.mock_with :rspec do |mocks|
mocks.verify_partial_doubles = true
end
config.use_transactional_fixtures = true
config.infer_spec_type_from_file_location!
config.filter_rails_from_backtrace!
end
# Populates the incident form's required base fields with valid sample data.
def fill_in_base_incident_fields
  select Time.now.strftime('%Y'), from: 'Year'
  select Time.now.strftime('%B'), from: 'Month'
  select Time.now.strftime('%-e'), from: 'Day'
  select Time.now.strftime('%I %p'), from: 'Hour'
  # Fixed: '%m' is the zero-padded MONTH directive; '%M' is the minute.
  select Time.now.strftime('%M'), from: 'Minute'
  fill_in 'Bus #', with: '1803'
  fill_in 'Location', with: 'Mill and Locust'
  fill_in 'ZIP', with: '01108'
  select 'Springfield', from: 'Town'
  select 'North', from: 'Direction'
  fill_in 'Describe the incident in detail.',
          with: 'Lorem ipsum dolor sit amet.'
end
# Builds an incident whose driver belongs to the given divisions.
# A trailing Hash in *traits is treated as extra incident attributes.
def incident_in_divisions(divisions, *traits)
  extra_attributes = traits.last.is_a?(Hash) ? traits.pop : {}
  driver = create(:user, :driver, divisions: divisions)
  report = create(:incident_report, user: driver)
  create(:incident, *traits,
         extra_attributes.merge(driver_incident_report: report))
end
# source: https://robots.thoughtbot.com/automatically-wait-for-ajax-with-capybara
# Blocks until jQuery reports no in-flight AJAX requests, raising
# Timeout::Error if the Capybara wait timeout elapses first.
# Assumes jQuery is loaded on the current page — TODO confirm callers.
def wait_for_ajax!
  Timeout.timeout Capybara.default_max_wait_time do
    loop until page.evaluate_script('jQuery.active').zero?
  end
end
# Crude wait for CSS/JS animations to finish: a fixed half-second sleep.
def wait_for_animation!
  sleep 0.5
end
# Signs in the given user for the example. Accepts a User instance, nil
# (signed out), or a Symbol naming a factory trait to build a user from.
def when_current_user_is(user)
  current_user =
    case user
    when Symbol then create(:user, user)
    when User, nil then user
    end
  sign_in current_user
end
Break up the AJAX-wait loop so pages where jQuery is unavailable can be detected
# frozen_string_literal: true
#
require 'simplecov'
SimpleCov.start 'rails'
SimpleCov.start do
add_filter '/config/'
add_filter '/spec/'
refuse_coverage_drop if ENV['CI']
end
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'rspec/rails'
require 'devise'
require 'factory_bot_rails'
ActiveRecord::Migration.maintain_test_schema!
Capybara.default_driver = :selenium
RSpec.configure do |config|
config.order = :random
config.include FactoryBot::Syntax::Methods
config.include Devise::Test::ControllerHelpers, type: :controller
config.include Devise::Test::IntegrationHelpers, type: :system
config.before :all do
FactoryBot.reload
end
config.expect_with :rspec do |expectations|
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
config.mock_with :rspec do |mocks|
mocks.verify_partial_doubles = true
end
config.use_transactional_fixtures = true
config.infer_spec_type_from_file_location!
config.filter_rails_from_backtrace!
end
# Populates the incident form's required base fields with valid sample data.
def fill_in_base_incident_fields
  select Time.now.strftime('%Y'), from: 'Year'
  select Time.now.strftime('%B'), from: 'Month'
  select Time.now.strftime('%-e'), from: 'Day'
  select Time.now.strftime('%I %p'), from: 'Hour'
  # Fixed: '%m' is the zero-padded MONTH directive; '%M' is the minute.
  select Time.now.strftime('%M'), from: 'Minute'
  fill_in 'Bus #', with: '1803'
  fill_in 'Location', with: 'Mill and Locust'
  fill_in 'ZIP', with: '01108'
  select 'Springfield', from: 'Town'
  select 'North', from: 'Direction'
  fill_in 'Describe the incident in detail.',
          with: 'Lorem ipsum dolor sit amet.'
end
# Creates an incident whose driver belongs to the given divisions.
# Trailing Hash in *traits is treated as attribute overrides for the
# incident factory; remaining symbols are factory traits.
def incident_in_divisions(divisions, *traits)
  attributes = traits.last.is_a?(Hash) ? traits.pop : {}
  driver = create :user, :driver, divisions: divisions
  report = create :incident_report, user: driver
  create :incident, *traits, attributes.merge(driver_incident_report: report)
end
# source: https://robots.thoughtbot.com/automatically-wait-for-ajax-with-capybara
# Polls until jQuery reports zero active requests, bounded by Capybara's
# wait time. Raises if jQuery is not loaded on the current page at all.
def wait_for_ajax!
  Timeout.timeout Capybara.default_wait_time do
    loop do
      # BUG FIX: the original compared page.find('jQuery') to the bare
      # name `undefined`, which is a NameError in Ruby (and 'jQuery' is
      # not a CSS selector). Ask JavaScript directly instead.
      if page.evaluate_script('typeof jQuery') == 'undefined'
        raise "User doesn't have correct traits to access the page."
      end
      active = page.evaluate_script 'jQuery.active'
      break if active == 0
    end
  end
end
# Fixed pause intended to outlast UI animations before asserting on the page.
def wait_for_animation!
  sleep 0.5
end
# Signs in the given user: a User instance, nil (signed-out), or a
# Symbol naming a user-factory trait to create fresh.
# Any other argument type falls through the case and signs in nil.
def when_current_user_is(user)
  current_user = case user
                 when User, nil then user
                 when Symbol then create(:user, user)
                 end
  sign_in current_user
end
|
ENV['RAILS_ENV'] ||= 'test'
require 'vcr'
# Record/replay HTTP interactions via WebMock-backed cassettes.
VCR.configure do |c|
  c.cassette_library_dir = 'spec/vcr_cassettes'
  c.hook_into :webmock
end
RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  config.mock_with :rspec do |mocks|
    # Reject stubs of methods the real object does not respond to.
    mocks.verify_partial_doubles = true
  end
  # Run only examples tagged :focus; if none are tagged, run everything.
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  # Persist pass/fail state to enable --only-failures.
  config.example_status_persistence_file_path = "spec/examples.txt"
  config.disable_monkey_patching!
  # Verbose output when a single file is run.
  if config.files_to_run.one?
    config.default_formatter = 'doc'
  end
  # Report the ten slowest examples after each run.
  config.profile_examples = 10
  Kernel.srand config.seed
end
Don't profile specs
ENV['RAILS_ENV'] ||= 'test'
require 'vcr'
# Record/replay HTTP interactions via WebMock-backed cassettes.
VCR.configure do |c|
  c.cassette_library_dir = 'spec/vcr_cassettes'
  c.hook_into :webmock
end
RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  config.mock_with :rspec do |mocks|
    # Reject stubs of methods the real object does not respond to.
    mocks.verify_partial_doubles = true
  end
  # Run only examples tagged :focus; if none are tagged, run everything.
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  # Persist pass/fail state to enable --only-failures.
  config.example_status_persistence_file_path = "spec/examples.txt"
  config.disable_monkey_patching!
  # Verbose output when a single file is run.
  if config.files_to_run.one?
    config.default_formatter = 'doc'
  end
end
|
# Start CodeClimate/SimpleCov before the app loads so all files are tracked.
require "codeclimate-test-reporter"
CodeClimate::TestReporter.start
require 'simplecov'
SimpleCov.start "rails"
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
# Commented out for the benefit of zeus
#require 'rspec/autorun'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.check_pending! if defined?(ActiveRecord::Migration)
# Record HTTP interactions, but let CodeClimate coverage uploads through.
VCR.configure do |c|
  c.cassette_library_dir = 'spec/fixtures/vcr_cassettes'
  c.hook_into :webmock
  c.ignore_hosts 'codeclimate.com'
end
# We don't want webmock to get involved with the excon library at all
# Otherwise tests that involve streaming output from a docker container
# just buffer. It took way too long to figure out where the problem was!
WebMock::HttpLibAdapters::ExconAdapter.disable!
RSpec.configure do |config|
  # ## Mock Framework
  #
  # If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
  #
  # config.mock_with :mocha
  # config.mock_with :flexmock
  # config.mock_with :rr
  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  config.fixture_path = "#{::Rails.root}/spec/fixtures"
  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = true
  # If true, the base class of anonymous controllers will be inferred
  # automatically. This will be the default behavior in future versions of
  # rspec-rails.
  config.infer_base_class_for_anonymous_controllers = false
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  # --seed 1234
  config.order = "random"
  config.infer_spec_type_from_file_location!
  config.include FactoryGirl::Syntax::Methods
  config.before(:suite) do
    # Lint all factories once up front; the scraper factories hit the
    # network, so replay their HTTP traffic from a cassette.
    begin
      DatabaseCleaner.start
      VCR.use_cassette('scraper_validations') { FactoryGirl.lint }
    ensure
      DatabaseCleaner.clean
    end
  end
  # For tests marked as :docker tests don't use VCR
  config.around(:each) do |ex|
    if ex.metadata.key?(:docker)
      VCR.turned_off do
        WebMock.allow_net_connect!
        ex.run
      end
    else
      ex.run
    end
  end
  # Env-var switches for slow or Docker-dependent examples.
  config.filter_run_excluding docker: true if ENV["DONT_RUN_DOCKER_TESTS"]
  config.filter_run_excluding slow: true unless ENV["RUN_SLOW_TESTS"]
end
Use single quotes rather than double quotes where possible
# Start CodeClimate/SimpleCov before the app loads so all files are tracked.
require 'codeclimate-test-reporter'
CodeClimate::TestReporter.start
require 'simplecov'
SimpleCov.start 'rails'
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
require 'rspec/rails'
# Commented out for the benefit of zeus
# require 'rspec/autorun'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.check_pending! if defined?(ActiveRecord::Migration)
# Record HTTP interactions, but let CodeClimate coverage uploads through.
VCR.configure do |c|
  c.cassette_library_dir = 'spec/fixtures/vcr_cassettes'
  c.hook_into :webmock
  c.ignore_hosts 'codeclimate.com'
end
# We don't want webmock to get involved with the excon library at all
# Otherwise tests that involve streaming output from a docker container
# just buffer. It took way too long to figure out where the problem was!
WebMock::HttpLibAdapters::ExconAdapter.disable!
RSpec.configure do |config|
  # ## Mock Framework
  #
  # If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
  #
  # config.mock_with :mocha
  # config.mock_with :flexmock
  # config.mock_with :rr
  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  config.fixture_path = "#{::Rails.root}/spec/fixtures"
  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = true
  # If true, the base class of anonymous controllers will be inferred
  # automatically. This will be the default behavior in future versions of
  # rspec-rails.
  config.infer_base_class_for_anonymous_controllers = false
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  # --seed 1234
  config.order = 'random'
  config.infer_spec_type_from_file_location!
  config.include FactoryGirl::Syntax::Methods
  config.before(:suite) do
    # Lint all factories once up front; scraper factories replay their
    # HTTP traffic from a cassette.
    begin
      DatabaseCleaner.start
      VCR.use_cassette('scraper_validations') { FactoryGirl.lint }
    ensure
      DatabaseCleaner.clean
    end
  end
  # For tests marked as :docker tests don't use VCR
  config.around(:each) do |ex|
    if ex.metadata.key?(:docker)
      VCR.turned_off do
        WebMock.allow_net_connect!
        ex.run
      end
    else
      ex.run
    end
  end
  # Env-var switches for slow or Docker-dependent examples.
  config.filter_run_excluding docker: true if ENV['DONT_RUN_DOCKER_TESTS']
  config.filter_run_excluding slow: true unless ENV['RUN_SLOW_TESTS']
end
|
require 'puppetlabs_spec_helper/module_spec_helper'
RSpec.configure do |c|
  c.include PuppetlabsSpec::Files
  c.before :each do
    # Ensure that we don't accidentally cache facts and environment
    # between test cases.
    Facter::Util::Loader.any_instance.stubs(:load_all)
    Facter.clear
    Facter.clear_messages
    # Store any environment variables away to be restored later
    # NOTE(review): @old_env is never restored anywhere in this file —
    # presumably another support file reads it; verify, or restore it
    # in the after(:each) hook below.
    @old_env = {}
    ENV.each_key {|k| @old_env[k] = ENV[k]}
    # strict_variables only exists on Puppet >= 3.5.
    if Gem::Version.new(`puppet --version`) >= Gem::Version.new('3.5')
      Puppet.settings[:strict_variables]=true
    end
  end
  c.after :each do
    # Remove any temp files created via PuppetlabsSpec::Files.
    PuppetlabsSpec::Files.cleanup
  end
end
Use modulesync to manage meta files
require 'puppetlabs_spec_helper/module_spec_helper'
RSpec.configure do |c|
  c.include PuppetlabsSpec::Files
  c.before :each do
    # Ensure that we don't accidentally cache facts and environment
    # between test cases.
    Facter::Util::Loader.any_instance.stubs(:load_all)
    Facter.clear
    Facter.clear_messages
    # Store any environment variables away to be restored later
    # NOTE(review): saved but not restored within this file — confirm a
    # sibling helper consumes @old_env.
    @old_env = {}
    ENV.each_key {|k| @old_env[k] = ENV[k]}
    # strict_variables only exists on Puppet >= 3.5.
    if Gem::Version.new(`puppet --version`) >= Gem::Version.new('3.5')
      Puppet.settings[:strict_variables]=true
    end
  end
  c.after :each do
    PuppetlabsSpec::Files.cleanup
  end
end
require 'pathname'
# Point Puppet's module path at the spec fixtures directory.
dir = Pathname.new(__FILE__).parent
Puppet[:modulepath] = File.join(dir, 'fixtures', 'modules')
# There's no real need to make this version dependent, but it helps find
# regressions in Puppet
#
# 1. Workaround for issue #16277 where default settings aren't initialised from
#    a spec and so the libdir is never initialised (3.0.x)
# 2. Workaround for 2.7.20 that now only loads types for the current node
#    environment (#13858) so Puppet[:modulepath] seems to get ignored
# 3. Workaround for 3.5 where context hasn't been configured yet,
#    ticket https://tickets.puppetlabs.com/browse/MODULES-823
#
# Strip any "-rc1"-style suffix before comparing versions.
ver = Gem::Version.new(Puppet.version.split('-').first)
if Gem::Requirement.new("~> 2.7.20") =~ ver || Gem::Requirement.new("~> 3.0.0") =~ ver || Gem::Requirement.new("~> 3.5") =~ ver
  puts "augeasproviders: setting Puppet[:libdir] to work around broken type autoloading"
  # libdir is only a single dir, so it can only workaround loading of one external module
  Puppet[:libdir] = "#{Puppet[:modulepath]}/augeasproviders_core/lib"
end
|
# Make lib/ and spec/ requirable without bundler path setup.
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'rspec'
require 'fakeredis'
require "fakeredis/rspec"
require "support/shared_examples/sortable"
RSpec.configure do |config|
  # replaces -b -fdoc --color in .rspec
  config.color = true
  config.default_formatter = "doc"
  # Empty exclusion list == full backtraces (the -b flag).
  config.backtrace_exclusion_patterns = []
end
# Lets specs branch on whether they run against fakeredis or a real server.
def fakeredis?
  true
end
Enable both should and expect syntax for now
# Make lib/ and spec/ requirable without bundler path setup.
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'rspec'
require 'fakeredis'
require "fakeredis/rspec"
require "support/shared_examples/sortable"
RSpec.configure do |config|
  # replaces -b -fdoc --color in .rspec
  config.color = true
  config.default_formatter = "doc"
  # Empty exclusion list == full backtraces (the -b flag).
  config.backtrace_exclusion_patterns = []
  config.mock_with :rspec do |c|
    # TODO: upgrade should syntax to expect syntax
    c.syntax = [:should, :expect]
  end
  config.expect_with :rspec do |c|
    # TODO: upgrade should syntax to expect syntax
    c.syntax = [:should, :expect]
  end
end
# Lets specs branch on whether they run against fakeredis or a real server.
def fakeredis?
  true
end
|
# Minimal RSpec configuration: strict partial doubles, descriptive matchers.
RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  config.mock_with :rspec do |mocks|
    # Reject stubs of methods the real object does not respond to.
    mocks.verify_partial_doubles = true
  end
end
Require minimum libraries to run test
# Load only what the gem's specs need.
require 'ruboty'
require 'ruboty/hibari_bento'
require 'rspec'
RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  config.mock_with :rspec do |mocks|
    # Reject stubs of methods the real object does not respond to.
    mocks.verify_partial_doubles = true
  end
end
|
require "simplecov"
require "three_little_pigs"
require "pry-byebug"
RSpec.configure do |config|
  # Silence the library's stdout output during every example.
  config.before { allow($stdout).to receive(:puts) }
end
Add callout to codeclimate
require "simplecov"
# Only report coverage to CodeClimate from CI builds.
if ENV["TRAVIS"]
  require "codeclimate-test-reporter"
  CodeClimate::TestReporter.start
end
require "three_little_pigs"
require "pry-byebug"
RSpec.configure do |config|
  # Silence the library's stdout output during every example.
  config.before { allow($stdout).to receive(:puts) }
end
|
# Legacy (Rails 1/2-era, RSpec 1.x "Spec") test bootstrap for make_resourceful.
$: << File.dirname(__FILE__) + '/../lib'
require 'rubygems'
# Batch-require the framework pieces plus the two vendored matchers.
%w[spec rails/version action_pack active_record resourceful/maker
   action_controller action_controller/test_process action_controller/integration
   spec/rspec-rails/redirect_to spec/rspec-rails/render_template].each &method(:require)
Spec::Runner.configure do |config|
  config.mock_with :mocha
end
# Returns a proc that MUST be invoked before the example ends (mocha
# expectation); the optional block refines the expectation, e.g.
# should_be_called { with(:foo) }.
def should_be_called(&block)
  pstub = stub
  pstub.expects(:call).instance_eval(&(block || proc {}))
  proc { |*args| pstub.call(*args) }
end
# Builds an anonymous model-like class named +name+ whose instances
# stub each constructor attribute as a reader (mocha).
def stub_model(name)
  model = Class.new do
    include Resourceful::Serialize::Model
    # Class name comes from the class-level @name ivar set below.
    def self.to_s
      @name
    end
    def initialize(attrs = {})
      attrs.each do |k, v|
        self.stubs(k).returns(v)
      end
    end
    def inspect
      "#<#{self.class.send(:instance_variable_get, '@name')}>"
    end
  end
  model.send(:instance_variable_set, '@name', name)
  model
end
# Defines a top-level constant +name+ (if absent) as a plain object whose
# to_s/inspect return the constant name; returns the constant either way.
def stub_const(name)
  unless Object.const_defined?(name)
    obj = Object.new
    obj.extend Spec::MetaClass
    obj.metaclass.send(:define_method, :to_s) { name.to_s }
    obj.metaclass.send(:alias_method, :inspect, :to_s)
    Object.const_set(name, obj)
  end
  Object.const_get(name)
end
# Builds an array of +size+ mocha stubs, optionally named "name_0",
# "name_1", ...; yields each stub to the block for per-stub setup.
def stub_list(size, name = nil, &block)
  stubs = (0...size).map { |idx| name ? stub("#{name}_#{idx}") : stub }
  stubs.each(&block) if block
  stubs
end
module Spec::Matchers
  # Matcher: passes when any element of the collection satisfies the block.
  def have_any(&proc)
    satisfy { |a| a.any?(&proc) }
  end
end
# Helpers for testing make_resourceful against a fully mocked controller
# class ("kontroller") without booting Rails routing/dispatch.
module ControllerMocks
  def mock_kontroller(*to_extend)
    # NOTE(review): options is split off a trailing Hash but never used
    # below — presumably kept for call-site compatibility; verify.
    options = to_extend.last.is_a?(Hash) ? to_extend.slice!(-1) : {}
    @kontroller = Class.new
    @kontroller.extend Resourceful::Maker
    to_extend.each(&@kontroller.method(:extend))
    @hidden_actions = Resourceful::ACTIONS.dup
    @kontroller.stubs(:hidden_actions).returns(@hidden_actions)
    @kontroller.stubs(:plural_action?).returns(false)
    @kontroller.stubs(:include)
    @kontroller.stubs(:before_filter)
    @kontroller.stubs(:helper_method)
  end
  # Instance counterpart of mock_kontroller.
  def mock_controller(*to_extend)
    mock_kontroller
    @controller = @kontroller.new
    to_extend.each(&@controller.method(:extend))
  end
  # Replaces Resourceful::Builder with an inert stub so make_resourceful
  # can run without applying anything.
  def mock_builder(inherited = false)
    @builder = stub
    @builder.stubs(:response_for)
    @builder.stubs(:apply)
    @builder.stubs(:instance_eval).yields(@buildercc )
    @builder.stubs(:inherited?).returns(inherited)
    Resourceful::Base.stubs(:made_resourceful).returns([])
    Resourceful::Builder.stubs(:new).returns(@builder)
  end
  # Real builder whose made_resourceful simply instance_evals the block.
  def create_builder
    @builder = Resourceful::Builder.new(@kontroller)
    class << @builder
      alias_method :made_resourceful, :instance_eval
    end
  end
  def responses
    @kontroller.read_inheritable_attribute(:resourceful_responses)
  end
  def callbacks
    @kontroller.read_inheritable_attribute(:resourceful_callbacks)
  end
  def parents
    @kontroller.read_inheritable_attribute(:parents)
  end
  # Evaluates the made_resourceful block of mod (a module)
  # in the context of @builder.
  # @builder should be initialized via create_builder.
  def made_resourceful(mod)
    mod.included(@builder)
  end
end
# Integration-style harness: builds a real (anonymous) ActionController
# subclass, wires routes and test request/response, and exposes the
# RSpec-Rails redirect/render matchers. Targets Rails 1.x/2.x + RSpec 1.x.
module RailsMocks
  attr_reader :response, :request, :controller, :kontroller
  # NOTE(review): included() drops straight into the debugger — looks
  # like leftover debugging scaffolding; confirm it is intentional.
  def included(mod)
    require 'ruby-debug'
    debugger
  end
  # Entry point: set up controller + routes named options[:name]
  # (default "things") and apply the make_resourceful block.
  def mock_resourceful(options = {}, &block)
    options = {
      :name => "things"
    }.merge options
    init_kontroller options
    init_routes options
    stub_const(options[:name].singularize.camelize)
    kontroller.make_resourceful(&block)
    init_controller options
  end
  def assigns(name)
    controller.instance_variable_get("@#{name}")
  end
  def redirect_to(opts)
    Spec::Rails::Matchers::RedirectTo.new(request, opts)
  end
  def render_template(path)
    Spec::Rails::Matchers::RenderTemplate.new(path.to_s, @controller)
  end
  private
  # Anonymous controller class with name/path/inspect defined on both
  # the class (metaclass) and its instances.
  def init_kontroller(options)
    @kontroller = Class.new ActionController::Base
    @kontroller.extend Resourceful::Maker
    @kontroller.extend Spec::MetaClass
    @kontroller.metaclass.send(:define_method, :controller_name) { options[:name] }
    @kontroller.metaclass.send(:define_method, :controller_path) { options[:name] }
    @kontroller.metaclass.send(:define_method, :inspect) { "#{options[:name].camelize}Controller" }
    @kontroller.metaclass.send(:alias_method, :to_s, :inspect)
    @kontroller.send(:define_method, :controller_name) { options[:name] }
    @kontroller.send(:define_method, :controller_path) { options[:name] }
    @kontroller.send(:define_method, :inspect) { "#<#{options[:name].camelize}Controller>" }
    @kontroller.send(:alias_method, :to_s, :inspect)
    @kontroller.send(:include, ControllerMethods)
    @kontroller
  end
  # Replaces the global route set with map.resources(name) or a custom block.
  def init_routes(options)
    ActionController::Routing::Routes.clear!
    route_block = options[:routes] || proc { |map| map.resources options[:name] }
    ActionController::Routing::Routes.draw(&route_block)
  end
  def init_controller(options)
    @controller = kontroller.new
    @request = ActionController::TestRequest.new
    @response = ActionController::TestResponse.new
    @controller.request = @request
    @controller.response = @response
    @request.accept = '*/*'
    @request.env['HTTP_REFERER'] = 'http://test.host'
    @controller
  end
  # Default params per RESTful action. Uses the Ruby 1.8-only
  # "when x: expr" colon syntax — do not reformat on modern Ruby.
  def action_params(action, params = {})
    params.merge case action
      when :show, :edit, :destroy: {:id => 12}
      when :update: {:id => 12, :thing => {}}
      when :create: {:thing => {}}
      else {}
    end
  end
  # HTTP verb helper (get/put/post/delete) for the given action.
  def action_method(action)
    method case action
      when :index, :show, :edit, :new: :get
      when :update: :put
      when :create: :post
      when :destroy: :delete
    end
  end
  module ControllerMethods
    # From rspec-rails ControllerExampleGroup
    def render(options=nil, deprecated_status_or_extra_options=nil, &block)
      if ::Rails::VERSION::STRING >= '2.0.0' && deprecated_status_or_extra_options.nil?
        deprecated_status_or_extra_options = {}
      end
      unless block_given?
        if @template.respond_to?(:finder)
          (class << @template.finder; self; end).class_eval do
            define_method :file_exists? do; true; end
          end
        else
          (class << @template; self; end).class_eval do
            define_method :file_exists? do; true; end
          end
        end
        (class << @template; self; end).class_eval do
          define_method :render_file do |*args|
            @first_render ||= args[0] unless args[0] =~ /^layouts/
            @_first_render ||= args[0] unless args[0] =~ /^layouts/
          end
          define_method :_pick_template do |*args|
            @_first_render ||= args[0] unless args[0] =~ /^layouts/
            PickedTemplate.new
          end
        end
      end
      super(options, deprecated_status_or_extra_options, &block)
    end
    # Null template object: swallows render_template/render_partial.
    class PickedTemplate
      def render_template(*ignore_args); end
      def render_partial(*ignore_args); end
    end
  end
end
# Example-group macros shared by the resourceful integration specs.
# Ruby 1.8-only "when x: expr" syntax throughout — do not reformat.
module Spec::Example::ExampleGroupMethods
  def should_render_html(action)
    it "should render HTML by default for #{action_string(action)}" do
      action_method(action)[action, action_params(action)]
      response.should be_success
      response.content_type.should == 'text/html'
    end
  end
  def should_render_js(action)
    it "should render JS for #{action_string(action)}" do
      action_method(action)[action, action_params(action, :format => 'js')]
      response.should be_success
      response.content_type.should == 'text/javascript'
    end
  end
  # Asserts the XML format is refused with 406 Not Acceptable.
  def shouldnt_render_xml(action)
    it "should render XML for #{action_string(action)}" do
      action_method(action)[action, action_params(action, :format => 'xml')]
      response.should_not be_success
      response.code.should == '406'
    end
  end
  # Human-readable "VERB /path" label for spec descriptions.
  def action_string(action)
    case action
      when :index: "GET /things"
      when :show: "GET /things/12"
      when :edit: "GET /things/12/edit"
      when :update: "PUT /things/12"
      when :create: "POST /things"
      when :new: "GET /things/new"
      when :destroy: "DELETE /things/12"
    end
  end
end
module Spec::Example
  # Example group registered under :integration; mixes the Rails test
  # process helpers and the RailsMocks harness into those specs.
  class IntegrationExampleGroup < Spec::Example::ExampleGroup
    include ActionController::TestProcess
    include ActionController::Assertions
    include RailsMocks
    Spec::Example::ExampleGroupFactory.register(:integration, self)
  end
end
pedantic change to remove redundant namespaces
$: << File.dirname(__FILE__) + '/../lib'
require 'rubygems'
%w[spec rails/version action_pack active_record resourceful/maker
action_controller action_controller/test_process action_controller/integration
spec/rspec-rails/redirect_to spec/rspec-rails/render_template].each &method(:require)
Spec::Runner.configure do |config|
config.mock_with :mocha
end
def should_be_called(&block)
pstub = stub
pstub.expects(:call).instance_eval(&(block || proc {}))
proc { |*args| pstub.call(*args) }
end
def stub_model(name)
model = Class.new do
include Resourceful::Serialize::Model
def self.to_s
@name
end
def initialize(attrs = {})
attrs.each do |k, v|
self.stubs(k).returns(v)
end
end
def inspect
"#<#{self.class.send(:instance_variable_get, '@name')}>"
end
end
model.send(:instance_variable_set, '@name', name)
model
end
def stub_const(name)
unless Object.const_defined?(name)
obj = Object.new
obj.extend Spec::MetaClass
obj.metaclass.send(:define_method, :to_s) { name.to_s }
obj.metaclass.send(:alias_method, :inspect, :to_s)
Object.const_set(name, obj)
end
Object.const_get(name)
end
def stub_list(size, name = nil, &block)
list = Array.new(size) { |i| name ? stub("#{name}_#{i}") : stub }
list.each(&block) if block
list
end
module Spec::Matchers
def have_any(&proc)
satisfy { |a| a.any?(&proc) }
end
end
module ControllerMocks
def mock_kontroller(*to_extend)
options = to_extend.last.is_a?(Hash) ? to_extend.slice!(-1) : {}
@kontroller = Class.new
@kontroller.extend Resourceful::Maker
to_extend.each(&@kontroller.method(:extend))
@hidden_actions = Resourceful::ACTIONS.dup
@kontroller.stubs(:hidden_actions).returns(@hidden_actions)
@kontroller.stubs(:plural_action?).returns(false)
@kontroller.stubs(:include)
@kontroller.stubs(:before_filter)
@kontroller.stubs(:helper_method)
end
def mock_controller(*to_extend)
mock_kontroller
@controller = @kontroller.new
to_extend.each(&@controller.method(:extend))
end
def mock_builder(inherited = false)
@builder = stub
@builder.stubs(:response_for)
@builder.stubs(:apply)
@builder.stubs(:instance_eval).yields(@buildercc )
@builder.stubs(:inherited?).returns(inherited)
Resourceful::Base.stubs(:made_resourceful).returns([])
Resourceful::Builder.stubs(:new).returns(@builder)
end
def create_builder
@builder = Resourceful::Builder.new(@kontroller)
class << @builder
alias_method :made_resourceful, :instance_eval
end
end
def responses
@kontroller.read_inheritable_attribute(:resourceful_responses)
end
def callbacks
@kontroller.read_inheritable_attribute(:resourceful_callbacks)
end
def parents
@kontroller.read_inheritable_attribute(:parents)
end
# Evaluates the made_resourceful block of mod (a module)
# in the context of @builder.
# @builder should be initialized via create_builder.
def made_resourceful(mod)
mod.included(@builder)
end
end
module RailsMocks
attr_reader :response, :request, :controller, :kontroller
def included(mod)
require 'ruby-debug'
debugger
end
def mock_resourceful(options = {}, &block)
options = {
:name => "things"
}.merge options
init_kontroller options
init_routes options
stub_const(options[:name].singularize.camelize)
kontroller.make_resourceful(&block)
init_controller options
end
def assigns(name)
controller.instance_variable_get("@#{name}")
end
def redirect_to(opts)
Spec::Rails::Matchers::RedirectTo.new(request, opts)
end
def render_template(path)
Spec::Rails::Matchers::RenderTemplate.new(path.to_s, @controller)
end
private
def init_kontroller(options)
@kontroller = Class.new ActionController::Base
@kontroller.extend Resourceful::Maker
@kontroller.extend Spec::MetaClass
@kontroller.metaclass.send(:define_method, :controller_name) { options[:name] }
@kontroller.metaclass.send(:define_method, :controller_path) { options[:name] }
@kontroller.metaclass.send(:define_method, :inspect) { "#{options[:name].camelize}Controller" }
@kontroller.metaclass.send(:alias_method, :to_s, :inspect)
@kontroller.send(:define_method, :controller_name) { options[:name] }
@kontroller.send(:define_method, :controller_path) { options[:name] }
@kontroller.send(:define_method, :inspect) { "#<#{options[:name].camelize}Controller>" }
@kontroller.send(:alias_method, :to_s, :inspect)
@kontroller.send(:include, ControllerMethods)
@kontroller
end
def init_routes(options)
ActionController::Routing::Routes.clear!
route_block = options[:routes] || proc { |map| map.resources options[:name] }
ActionController::Routing::Routes.draw(&route_block)
end
def init_controller(options)
@controller = kontroller.new
@request = ActionController::TestRequest.new
@response = ActionController::TestResponse.new
@controller.request = @request
@controller.response = @response
@request.accept = '*/*'
@request.env['HTTP_REFERER'] = 'http://test.host'
@controller
end
def action_params(action, params = {})
params.merge case action
when :show, :edit, :destroy: {:id => 12}
when :update: {:id => 12, :thing => {}}
when :create: {:thing => {}}
else {}
end
end
def action_method(action)
method case action
when :index, :show, :edit, :new: :get
when :update: :put
when :create: :post
when :destroy: :delete
end
end
module ControllerMethods
# From rspec-rails ControllerExampleGroup
def render(options=nil, deprecated_status_or_extra_options=nil, &block)
if ::Rails::VERSION::STRING >= '2.0.0' && deprecated_status_or_extra_options.nil?
deprecated_status_or_extra_options = {}
end
unless block_given?
if @template.respond_to?(:finder)
(class << @template.finder; self; end).class_eval do
define_method :file_exists? do; true; end
end
else
(class << @template; self; end).class_eval do
define_method :file_exists? do; true; end
end
end
(class << @template; self; end).class_eval do
define_method :render_file do |*args|
@first_render ||= args[0] unless args[0] =~ /^layouts/
@_first_render ||= args[0] unless args[0] =~ /^layouts/
end
define_method :_pick_template do |*args|
@_first_render ||= args[0] unless args[0] =~ /^layouts/
PickedTemplate.new
end
end
end
super(options, deprecated_status_or_extra_options, &block)
end
class PickedTemplate
def render_template(*ignore_args); end
def render_partial(*ignore_args); end
end
end
end
module Spec::Example::ExampleGroupMethods
def should_render_html(action)
it "should render HTML by default for #{action_string(action)}" do
action_method(action)[action, action_params(action)]
response.should be_success
response.content_type.should == 'text/html'
end
end
def should_render_js(action)
it "should render JS for #{action_string(action)}" do
action_method(action)[action, action_params(action, :format => 'js')]
response.should be_success
response.content_type.should == 'text/javascript'
end
end
def shouldnt_render_xml(action)
it "should render XML for #{action_string(action)}" do
action_method(action)[action, action_params(action, :format => 'xml')]
response.should_not be_success
response.code.should == '406'
end
end
def action_string(action)
case action
when :index: "GET /things"
when :show: "GET /things/12"
when :edit: "GET /things/12/edit"
when :update: "PUT /things/12"
when :create: "POST /things"
when :new: "GET /things/new"
when :destroy: "DELETE /things/12"
end
end
end
# Legacy RSpec 1.x example group for integration-style specs: mixes in the
# Rails test-process helpers, assertions, and the suite's RailsMocks.
module Spec::Example
  class IntegrationExampleGroup < Spec::Example::ExampleGroup
    include ActionController::TestProcess
    include ActionController::Assertions
    include RailsMocks
    # Make `describe ..., :type => :integration` resolve to this group.
    ExampleGroupFactory.register(:integration, self)
  end
end
|
RSpec added to the impressionist directory, so we can test in isolation.
require 'lib/impressionist'
|
# This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require 'rspec/rails'
require 'capybara/poltergeist'
# NOTE: hooks of the same kind run in registration order, so the ordering of
# the before(:each) blocks below is significant.
RSpec.configure do |config|
  # Use color in STDOUT
  config.color = true
  Capybara.javascript_driver = :poltergeist
  # Use color not only in STDOUT but also in pagers and files
  # config.tty = true
  # by default capybara wraps every test in an undoable transaction, however
  # js tests run asynchronously and thus break the pattern. DatabaseCleaner
  # solves this problem by manually making sure the db is clean after tests.
  config.use_transactional_fixtures = false
  config.before(:suite) do
    # Fail fast if transactional fixtures get re-enabled elsewhere.
    if config.use_transactional_fixtures?
      raise(<<-MSG)
        Delete line `config.use_transactional_fixtures = true` from rails_helper.rb
        (or set it to false) to prevent uncommitted transactions being used in
        JavaScript-dependent specs.
        During testing, the app-under-test that the browser driver connects to
        uses a different database connection to the database connection used by
        the spec. The app's database connection would not be able to access
        uncommitted transaction data setup over the spec's database connection.
      MSG
    end
    DatabaseCleaner.clean_with(:truncation)
  end
  # Browser-driven feature specs need truncation (separate DB connection).
  config.before(:each, type: :feature) do
    if Capybara.current_driver != :rack_test
      DatabaseCleaner.strategy = :truncation
    end
  end
  config.before(:each) do
    DatabaseCleaner.start
  end
  config.after(:each) do
    DatabaseCleaner.clean
  end
  # rspec-expectations config goes here. You can use an alternate
  # assertion/expectation library such as wrong or the stdlib/minitest
  # assertions if you prefer.
  config.expect_with :rspec do |expectations|
    # This option will default to `true` in RSpec 4. It makes the `description`
    # and `failure_message` of custom matchers include text for helper methods
    # defined using `chain`, e.g.:
    # be_bigger_than(2).and_smaller_than(4).description
    # # => "be bigger than 2 and smaller than 4"
    # ...rather than:
    # # => "be bigger than 2"
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  # rspec-mocks config goes here. You can use an alternate test double
  # library (such as bogus or mocha) by changing the `mock_with` option here.
  config.mock_with :rspec do |mocks|
    # Prevents you from mocking or stubbing a method that does not exist on
    # a real object. This is generally recommended, and will default to
    # `true` in RSpec 4.
    mocks.verify_partial_doubles = true
  end
  # This option will default to `:apply_to_host_groups` in RSpec 4 (and will
  # have no way to turn it off -- the option exists only for backwards
  # compatibility in RSpec 3). It causes shared context metadata to be
  # inherited by the metadata hash of host groups and examples, rather than
  # triggering implicit auto-inclusion in groups with matching metadata.
  config.shared_context_metadata_behavior = :apply_to_host_groups
  # The settings below are suggested to provide a good initial experience
  # with RSpec, but feel free to customize to your heart's content.
=begin
  # This allows you to limit a spec run to individual examples or groups
  # you care about by tagging them with `:focus` metadata. When nothing
  # is tagged with `:focus`, all examples get run. RSpec also provides
  # aliases for `it`, `describe`, and `context` that include `:focus`
  # metadata: `fit`, `fdescribe` and `fcontext`, respectively.
  config.filter_run_when_matching :focus
  # Allows RSpec to persist some state between runs in order to support
  # the `--only-failures` and `--next-failure` CLI options. We recommend
  # you configure your source control system to ignore this file.
  config.example_status_persistence_file_path = "spec/examples.txt"
  # Limits the available syntax to the non-monkey patched syntax that is
  # recommended. For more details, see:
  # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
  # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
  # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
  config.disable_monkey_patching!
  # Many RSpec users commonly either run the entire suite or an individual
  # file, and it's useful to allow more verbose output when running an
  # individual spec file.
  if config.files_to_run.one?
    # Use the documentation formatter for detailed output,
    # unless a formatter has already been configured
    # (e.g. via a command-line flag).
    config.default_formatter = 'doc'
  end
  # Print the 10 slowest examples and example groups at the
  # end of the spec run, to help surface which specs are running
  # particularly slow.
  config.profile_examples = 10
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  # --seed 1234
  config.order = :random
  # Seed global randomization in this process using the `--seed` CLI option.
  # Setting this allows you to use `--seed` to deterministically reproduce
  # test failures related to randomization by passing the same `--seed` value
  # as the one that triggered the failure.
  Kernel.srand config.seed
=end
end
Skip JS tests if driver is not installed. (#156)
# This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require 'rspec/rails'
require 'capybara/poltergeist'
# NOTE: hooks of the same kind run in registration order, so the ordering of
# the before(:each) blocks below is significant.
RSpec.configure do |config|
  # Use color in STDOUT
  config.color = true
  Capybara.javascript_driver = :poltergeist
  # Skip PhantomJS tests if PhantomJS is not installed.
  # Uses `!` instead of the low-precedence `not` keyword, and a normal method
  # call instead of `Cliver::detect`. Cliver is a dependency of poltergeist,
  # which is required above, so the constant is available here.
  config.filter_run_excluding :js => !Cliver.detect('phantomjs')
  # Use color not only in STDOUT but also in pagers and files
  # config.tty = true
  # by default capybara wraps every test in an undoable transaction, however
  # js tests run asynchronously and thus break the pattern. DatabaseCleaner
  # solves this problem by manually making sure the db is clean after tests.
  config.use_transactional_fixtures = false
  config.before(:suite) do
    # Fail fast if transactional fixtures get re-enabled elsewhere.
    if config.use_transactional_fixtures?
      raise(<<-MSG)
        Delete line `config.use_transactional_fixtures = true` from rails_helper.rb
        (or set it to false) to prevent uncommitted transactions being used in
        JavaScript-dependent specs.
        During testing, the app-under-test that the browser driver connects to
        uses a different database connection to the database connection used by
        the spec. The app's database connection would not be able to access
        uncommitted transaction data setup over the spec's database connection.
      MSG
    end
    DatabaseCleaner.clean_with(:truncation)
  end
  # Browser-driven feature specs need truncation (separate DB connection).
  config.before(:each, type: :feature) do
    if Capybara.current_driver != :rack_test
      DatabaseCleaner.strategy = :truncation
    end
  end
  config.before(:each) do
    DatabaseCleaner.start
  end
  config.after(:each) do
    DatabaseCleaner.clean
  end
  # rspec-expectations config goes here. You can use an alternate
  # assertion/expectation library such as wrong or the stdlib/minitest
  # assertions if you prefer.
  config.expect_with :rspec do |expectations|
    # This option will default to `true` in RSpec 4. It makes the `description`
    # and `failure_message` of custom matchers include text for helper methods
    # defined using `chain`, e.g.:
    # be_bigger_than(2).and_smaller_than(4).description
    # # => "be bigger than 2 and smaller than 4"
    # ...rather than:
    # # => "be bigger than 2"
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  # rspec-mocks config goes here. You can use an alternate test double
  # library (such as bogus or mocha) by changing the `mock_with` option here.
  config.mock_with :rspec do |mocks|
    # Prevents you from mocking or stubbing a method that does not exist on
    # a real object. This is generally recommended, and will default to
    # `true` in RSpec 4.
    mocks.verify_partial_doubles = true
  end
  # This option will default to `:apply_to_host_groups` in RSpec 4 (and will
  # have no way to turn it off -- the option exists only for backwards
  # compatibility in RSpec 3). It causes shared context metadata to be
  # inherited by the metadata hash of host groups and examples, rather than
  # triggering implicit auto-inclusion in groups with matching metadata.
  config.shared_context_metadata_behavior = :apply_to_host_groups
  # The settings below are suggested to provide a good initial experience
  # with RSpec, but feel free to customize to your heart's content.
=begin
  # This allows you to limit a spec run to individual examples or groups
  # you care about by tagging them with `:focus` metadata. When nothing
  # is tagged with `:focus`, all examples get run. RSpec also provides
  # aliases for `it`, `describe`, and `context` that include `:focus`
  # metadata: `fit`, `fdescribe` and `fcontext`, respectively.
  config.filter_run_when_matching :focus
  # Allows RSpec to persist some state between runs in order to support
  # the `--only-failures` and `--next-failure` CLI options. We recommend
  # you configure your source control system to ignore this file.
  config.example_status_persistence_file_path = "spec/examples.txt"
  # Limits the available syntax to the non-monkey patched syntax that is
  # recommended. For more details, see:
  # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
  # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
  # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
  config.disable_monkey_patching!
  # Many RSpec users commonly either run the entire suite or an individual
  # file, and it's useful to allow more verbose output when running an
  # individual spec file.
  if config.files_to_run.one?
    # Use the documentation formatter for detailed output,
    # unless a formatter has already been configured
    # (e.g. via a command-line flag).
    config.default_formatter = 'doc'
  end
  # Print the 10 slowest examples and example groups at the
  # end of the spec run, to help surface which specs are running
  # particularly slow.
  config.profile_examples = 10
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  # --seed 1234
  config.order = :random
  # Seed global randomization in this process using the `--seed` CLI option.
  # Setting this allows you to use `--seed` to deterministically reproduce
  # test failures related to randomization by passing the same `--seed` value
  # as the one that triggered the failure.
  Kernel.srand config.seed
=end
end
|
Add spec_helper
$LOAD_PATH.unshift(File.dirname(__FILE__))
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), "..", "lib"))
require 'rspec'
require 'rails_best_practices'
require 'coveralls'
Coveralls.wear!
RSpec.configure do |config|
  # Prepares caches global state between runs; reset it after every example.
  config.after do
    RailsBestPractices::Prepares.clear
  end
  # Run only :focus-tagged examples when any exist; otherwise run everything.
  config.filter_run focus: true
  config.run_all_when_everything_filtered = true
end
|
require 'puppetlabs_spec_helper/module_spec_helper'
require 'rspec/core/shared_context'
# Shared context for Puppet 4: prepends the fixture site.pp (when present)
# as a pre-condition to every compiled catalog.
module Puppet4Helper
  extend RSpec::Core::SharedContext

  # Contents of the fixture site manifest, wrapped in an array; empty string
  # when the file is absent. An explicit existence check replaces the former
  # blanket inline `rescue ''`, which silently swallowed every StandardError
  # (permission problems included), not just a missing file.
  def site_pp
    path = 'spec/fixtures/manifests/site.pp'
    [File.exist?(path) ? File.read(path) : '']
  end

  let(:pre_condition) { site_pp }
end
RSpec.configure do |c|
  # Only Puppet 4+ gets the site.pp pre-condition context.
  # NOTE(review): `Puppet.version >= '4'` is a lexicographic string compare;
  # it misorders e.g. "10" vs "4" -- confirm acceptable for expected versions.
  c.include Puppet4Helper if Puppet.version >= '4'
  c.color = true
  # Profiling is opt-in and only useful on an interactive terminal.
  c.profile_examples = true if $stdin.isatty && ENV['PROFILE']
  c.module_path = 'spec/fixtures/modules'
  c.manifest_dir = 'spec/fixtures/manifests'
end
Fix future parser warts.
require 'puppetlabs_spec_helper/module_spec_helper'
require 'rspec/core/shared_context'
# Shared context for Puppet 4 (or the 3.x future parser): prepends the
# fixture site.pp (when present) as a pre-condition to every compiled catalog.
module Puppet4Helper
  extend RSpec::Core::SharedContext

  # Contents of the fixture site manifest, wrapped in an array; empty string
  # when the file is absent. An explicit existence check replaces the former
  # blanket inline `rescue ''`, which silently swallowed every StandardError
  # (permission problems included), not just a missing file.
  def site_pp
    path = 'spec/fixtures/manifests/site.pp'
    [File.exist?(path) ? File.read(path) : '']
  end

  let(:pre_condition) { site_pp }
end
RSpec.configure do |c|
  # Puppet 4+ -- or Puppet 3 running the future parser -- gets the site.pp
  # pre-condition context.
  # NOTE(review): `Puppet.version >= '4'` is a lexicographic string compare;
  # it misorders e.g. "10" vs "4" -- confirm acceptable for expected versions.
  c.include Puppet4Helper if Puppet.version >= '4' || ENV['FUTURE_PARSER']
  c.color = true
  # Profiling is opt-in and only useful on an interactive terminal.
  c.profile_examples = true if $stdin.isatty && ENV['PROFILE']
  c.module_path = 'spec/fixtures/modules'
  c.manifest_dir = 'spec/fixtures/manifests'
end
|
require 'tmpdir'
require 'vimrunner'

# Boot one GUI Vim for the whole suite and load this plugin into it.
# NOTE(review): Vim is started at load time here, before RSpec runs anything;
# a before(:suite) hook would delay the cost until the suite actually runs.
VIM = Vimrunner.start_gui_vim
VIM.add_plugin(File.expand_path('../..', __FILE__), 'plugin/runspec.vim')

RSpec.configure do |config|
  # cd into a temporary directory for every example.
  config.around do |example|
    Dir.mktmpdir do |dir|
      Dir.chdir(dir) do
        # Keep the shared Vim's working directory in sync with the example's.
        VIM.command("cd #{dir}")
        example.call
      end
    end
  end
  # Shut the shared Vim instance down once the whole suite has finished.
  config.after(:suite) do
    VIM.kill
  end
end
Start Vim in a before(:suite) hook.
require 'tmpdir'
require 'vimrunner'

RSpec.configure do |config|
  # cd into a temporary directory for every example.
  # VIM is safe to reference here: before(:suite) runs before any example,
  # so the constant is assigned by the time this around hook fires.
  config.around do |example|
    Dir.mktmpdir do |dir|
      Dir.chdir(dir) do
        # Keep the shared Vim's working directory in sync with the example's.
        VIM.command("cd #{dir}")
        example.call
      end
    end
  end
  # Start one GUI Vim lazily, only when the suite actually runs, and load
  # this plugin into it. (Constant assignment inside a block is legal; only
  # method bodies forbid it.)
  config.before(:suite) do
    VIM = Vimrunner.start_gui_vim
    VIM.add_plugin(File.expand_path('../..', __FILE__), 'plugin/runspec.vim')
  end
  # Shut the shared Vim instance down once the whole suite has finished.
  config.after(:suite) do
    VIM.kill
  end
end
|
# Coverage is opt-in locally (COVERAGE) and always on for CI (TRAVIS).
# NOTE(review): `RUBY_VERSION > '1.9'` is a lexicographic string comparison;
# it happens to work for 1.9/2.x but is not a real version check.
if RUBY_VERSION > '1.9' && (ENV['COVERAGE'] || ENV['TRAVIS'])
  require 'simplecov'
  require 'coveralls'
  # `MultiFormatter[]` is deprecated in SimpleCov >= 0.9; the supported API
  # is `MultiFormatter.new` with an array of formatter classes.
  SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter.new([
    SimpleCov::Formatter::HTMLFormatter,
    Coveralls::SimpleCov::Formatter
  ])
  SimpleCov.start do
    command_name 'spec'
    add_filter 'spec'
  end
end
require 'tty-command'
# Mix-ins shared by the specs.
module TestHelpers
  # Filesystem locations used by the suite.
  module Paths
    # Absolute path to the gem checkout (one level above spec/).
    def gem_root
      File.expand_path("..", File.dirname(__FILE__))
    end

    # Joins +args+ under the gem root, creating the directory on demand,
    # and resolves any symlinks in the result.
    def dir_path(*args)
      joined = File.join(gem_root, *args)
      FileUtils.mkdir_p(joined) unless ::File.exist?(joined)
      File.realpath(joined)
    end

    # Absolute path inside tmp/ (the directory is created on first use).
    def tmp_path(*args)
      File.expand_path(File.join(dir_path('tmp'), *args))
    end

    # Absolute path inside spec/fixtures (created on first use).
    def fixtures_path(*args)
      File.expand_path(File.join(dir_path('spec/fixtures'), *args))
    end
  end

  # Runtime-platform predicates.
  module Platform
    # True when running on JRuby.
    def jruby?
      "java" == RUBY_PLATFORM
    end
  end
end
RSpec.configure do |config|
  config.include(TestHelpers::Paths)
  config.include(TestHelpers::Platform)
  # CLI specs write into tmp/; wipe it after each one.
  config.after(:each, type: :cli) do
    FileUtils.rm_rf(tmp_path)
  end
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  config.mock_with :rspec do |mocks|
    mocks.verify_partial_doubles = true
  end
  # Limits the available syntax to the non-monkey patched syntax that is recommended.
  config.disable_monkey_patching!
  # This setting enables warnings. It's recommended, but in some cases may
  # be too noisy due to issues in dependencies.
  config.warnings = true
  # Verbose output when running a single spec file.
  if config.files_to_run.one?
    config.default_formatter = 'doc'
  end
  config.profile_examples = 2
  config.order = :random
  # Let `--seed` reproduce randomization-related failures deterministically.
  Kernel.srand config.seed
end
Remove Ruby version check and fix MultiFormatter warning
# frozen_string_literal: true
# Coverage is opt-in locally (COVERAGE) and always on for CI (TRAVIS).
if ENV["COVERAGE"] || ENV["TRAVIS"]
  require "simplecov"
  require "coveralls"
  # Report both locally (HTML) and to Coveralls.
  SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter.new([
    SimpleCov::Formatter::HTMLFormatter,
    Coveralls::SimpleCov::Formatter
  ])
  SimpleCov.start do
    command_name "spec"
    add_filter "spec"
  end
end
require "tty-command"
# Mix-ins shared by the specs.
module TestHelpers
  # Filesystem locations used by the suite.
  module Paths
    # Absolute path to the gem checkout (one level above spec/).
    def gem_root
      File.expand_path("..", File.dirname(__FILE__))
    end

    # Joins +args+ under the gem root, creating the directory on demand,
    # and resolves any symlinks in the result.
    def dir_path(*args)
      joined = File.join(gem_root, *args)
      FileUtils.mkdir_p(joined) unless ::File.exist?(joined)
      File.realpath(joined)
    end

    # Absolute path inside tmp/ (the directory is created on first use).
    def tmp_path(*args)
      File.expand_path(File.join(dir_path("tmp"), *args))
    end

    # Absolute path inside spec/fixtures (created on first use).
    def fixtures_path(*args)
      File.expand_path(File.join(dir_path("spec/fixtures"), *args))
    end
  end

  # Runtime-platform predicates.
  module Platform
    # True when running on JRuby.
    def jruby?
      "java" == RUBY_PLATFORM
    end
  end
end
RSpec.configure do |config|
  config.include(TestHelpers::Paths)
  config.include(TestHelpers::Platform)
  # CLI specs write into tmp/; wipe it after each one.
  config.after(:each, type: :cli) do
    FileUtils.rm_rf(tmp_path)
  end
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  config.mock_with :rspec do |mocks|
    mocks.verify_partial_doubles = true
  end
  # Limits the available syntax to the non-monkey patched syntax that is recommended.
  config.disable_monkey_patching!
  # This setting enables warnings. It's recommended, but in some cases may
  # be too noisy due to issues in dependencies.
  config.warnings = true
  # Verbose output when running a single spec file.
  if config.files_to_run.one?
    config.default_formatter = "doc"
  end
  config.profile_examples = 2
  config.order = :random
  # Let `--seed` reproduce randomization-related failures deterministically.
  Kernel.srand config.seed
end
|
# encoding: UTF-8
require 'rspec'

# Directory holding the specs (and their support/ files).
Spec_dir = File.expand_path(File.dirname(__FILE__))

# Code coverage -- exclude vendored code, executables and the specs themselves.
require 'simplecov'
SimpleCov.start do
  add_filter "/vendor/"
  add_filter "/bin/"
  add_filter "/spec/"
end

# Pull in every support helper.
Dir[File.join(Spec_dir, "/support/**/*.rb")].each { |f| require f }
Use rspec-its because I like it.
# encoding: UTF-8
require 'rspec'
require 'rspec/its'

# Directory holding the specs (and their support/ files).
Spec_dir = File.expand_path(File.dirname(__FILE__))

# Code coverage -- exclude vendored code, executables and the specs themselves.
require 'simplecov'
SimpleCov.start do
  add_filter "/vendor/"
  add_filter "/bin/"
  add_filter "/spec/"
end

# Pull in every support helper.
Dir[File.join(Spec_dir, "/support/**/*.rb")].each { |f| require f }
# -*- coding: utf-8 -*-
require "rubygems"
require "bundler"
Bundler.setup

require 'rspec/core'
require "pry"
require 'yaml'
require 'active_record'
require 'database_cleaner'
require "seed_express"

# Load & set database configuration.
# YAML.load_file replaces YAML.load(File.open(path).read), which leaked the
# File handle -- it was opened but never closed.
database_config_yaml = File.expand_path(File.dirname(__FILE__)) + "/config/database.yml"
database_config = YAML.load_file(database_config_yaml)
# NOTE(review): Rails is not required by this file; presumably one of the
# gems above defines Rails.env -- verify before relying on it.
ActiveRecord::Base.establish_connection(database_config[Rails.env])

# Migrate schema: run every numbered migration found in both directories.
[
  File.expand_path(File.join(File.dirname(__FILE__), '/../db/migrate')),
  File.expand_path(File.join(File.dirname(__FILE__), '/migrations')),
].each do |path|
  Dir.glob("#{path}/[0-9]*_*.rb").each do |file|
    # Leading digits of the filename are the migration version.
    version = file.sub(%r!^#{path}/!, '').sub(/_.*$/, '').to_i
    ActiveRecord::Migrator.run(:up, path, version)
  end
end

# Load models defined only for the specs.
require "database_models"

# Load application/library models.
model_path = File.expand_path(File.dirname(__FILE__)) + "/../app/models"
Dir.glob("#{model_path}/*.rb").each do |model_file|
  require model_file
end
# These are the DatabaseCleaner settings.
RSpec.configure do |config|
  config.before(:suite) do
    DatabaseCleaner.clean_with(:truncation)
  end
  # Choose a strategy first: transactions by default...
  config.before(:each) do
    DatabaseCleaner.strategy = :transaction
  end
  # ...but JS-driven examples use a separate DB connection that cannot see an
  # open transaction, so truncate for those instead.
  config.before(:each, :js => true) do
    DatabaseCleaner.strategy = :truncation
  end
  # Then start the chosen strategy. before(:each) hooks run in registration
  # order, so the strategy set above is already in place here.
  config.before(:each) do
    DatabaseCleaner.start
  end
  config.after(:each) do
    DatabaseCleaner.clean
  end
end
Refactors spec_helper.rb
# -*- coding: utf-8 -*-
require "rubygems"
require "bundler"
Bundler.setup

require 'rspec/core'
require "pry"
require 'yaml'
require 'active_record'
require 'database_cleaner'
require "seed_express"

# Load & set database configuration.
# YAML.load_file replaces YAML.load(File.open(path).read), which leaked the
# File handle -- it was opened but never closed.
database_config_yaml = File.expand_path(File.dirname(__FILE__)) + "/config/database.yml"
database_config = YAML.load_file(database_config_yaml)
# NOTE(review): Rails is not required by this file; presumably one of the
# gems above defines Rails.env -- verify before relying on it.
ActiveRecord::Base.establish_connection(database_config[Rails.env])

# Migrate schema: run every numbered migration found in both directories.
['/../db/migrate', '/migrations'].each do |relative_path|
  path = File.expand_path(File.join(File.dirname(__FILE__), relative_path))
  Dir.glob("#{path}/[0-9]*_*.rb").each do |file|
    # Leading digits of the filename are the migration version.
    version = file.sub(%r!^#{path}/!, '').sub(/_.*$/, '').to_i
    ActiveRecord::Migrator.run(:up, path, version)
  end
end

# Load models defined only for the specs.
require "database_models"

# Load application/library models.
model_path = File.expand_path(File.dirname(__FILE__)) + "/../app/models"
Dir.glob("#{model_path}/*.rb").each do |model_file|
  require model_file
end
# The following are the DatabaseCleaner settings.
# NOTE: "config.use_transactional_fixtures = true" did not work here; the
# reason is unknown, hence the manual cleaning below.
RSpec.configure do |config|
  config.before(:suite) do
    DatabaseCleaner.clean_with(:truncation)
  end
  # Choose a strategy first: transactions by default...
  config.before(:each) do
    DatabaseCleaner.strategy = :transaction
  end
  # ...but JS-driven examples use a separate DB connection that cannot see an
  # open transaction, so truncate for those instead.
  config.before(:each, :js => true) do
    DatabaseCleaner.strategy = :truncation
  end
  # Then start the chosen strategy. before(:each) hooks run in registration
  # order, so the strategy set above is already in place here.
  config.before(:each) do
    DatabaseCleaner.start
  end
  config.after(:each) do
    DatabaseCleaner.clean
  end
end
|
require 'rubygems'
require 'spec'
require File.dirname(__FILE__) + '/../lib/github'
# Reopen Module so every class/module can expose its singleton class, which
# older Rubies had no public accessor for.
class Module
  # Returns the receiver's singleton (eigen) class.
  def metaclass
    class << self
      self
    end
  end
end
# Per-example "guard" support: temporarily replaces a method with one that
# raises, so specs cannot shell out, fork or exec by accident. Guards are
# recorded in @guards and restored via unguard/unguard_all.
module Spec::Example::ExampleGroupSubclassMethods
  # Replace klass#name (or klass.name when is_class) with a raising stub and
  # remember the original implementation so it can be restored later.
  def add_guard(klass, name, is_class = false)
    guarded = nil # define variable now for scoping
    target = (is_class ? klass.metaclass : klass)
    sep = (is_class ? "." : "#")
    target.class_eval do
      # Capture the original method before overwriting it.
      guarded = instance_method(name)
      define_method name do |*args|
        raise "Testing guards violated: Cannot call #{klass}#{sep}#{name}"
      end
    end
    @guards ||= []
    @guards << [klass, name, is_class, guarded]
  end

  # Guard a class-level (singleton) method.
  def add_class_guard(klass, name)
    add_guard(klass, name, true)
  end

  # Restore one guarded method; raises if it was never guarded.
  def unguard(klass, name, is_class = false)
    row = @guards.find { |(k,n,i)| k == klass and n == name and i == is_class }
    raise "#{klass}#{is_class ? "." : "#"}#{name} is not guarded" if row.nil?
    (is_class ? klass.metaclass : klass).class_eval do
      # row.last is the UnboundMethod captured in add_guard.
      define_method name, row.last
    end
    @guards.delete row
  end

  # Restore a guarded class-level method.
  def class_unguard(klass, name)
    unguard(klass, name, true)
  end

  # Restore every guard (called from an after(:each) hook).
  def unguard_all
    @guards ||= []
    @guards.each do |klass, name, is_class, guarded|
      (is_class ? klass.metaclass : klass).class_eval do
        define_method name, guarded
      end
    end
    @guards.clear
  end
end
# prevent the use of `` in tests
Spec::Runner.configure do |configuration|
  # load this here so it's covered by the `` guard
  configuration.prepend_before(:all) do
    module GitHub
      load 'helpers.rb'
      load 'commands.rb'
    end
  end
  configuration.prepend_before(:all) do
    self.class.send :include, Spec::Example::ExampleGroupSubclassMethods
  end
  # Guard every way an example could shell out or spawn a process.
  configuration.prepend_before(:each) do
    add_guard Kernel, :`
    add_guard Kernel, :system
    add_guard Kernel, :fork
    add_guard Kernel, :exec
    add_class_guard Process, :fork
  end
  # Always restore the real methods, even when an example fails.
  configuration.append_after(:each) do
    unguard_all
  end
end
# include this in any example group that defines @helper
module SetupMethods
  # Stub @helper.user_and_branch to yield the given pair.
  def setup_user_and_branch(user = :user, branch = :master)
    @helper.should_receive(:user_and_branch).any_number_of_times.and_return([user, branch])
  end

  # Stub @helper.url_for(remote) to return a GitHub clone URL. When no user
  # is supplied it is derived from the remote name ("user" for :origin).
  def setup_url_for(remote = :origin, user = nil, project = :project)
    if user.nil?
      user = (remote == :origin ? "user" : remote)
    end
    @helper.should_receive(:url_for).any_number_of_times.with(remote).and_return("git://github.com/#{user}/#{project}.git")
  end
end
Implement Class.next_instance to help mock future objects
require 'rubygems'
require 'spec'
require File.dirname(__FILE__) + '/../lib/github'
# Reopen Module so every class/module can expose its singleton class, which
# older Rubies had no public accessor for.
class Module
  # Returns the receiver's singleton (eigen) class.
  def metaclass
    class << self
      self
    end
  end
end
# Records a chain of messages (e.g. should_receive(...).with(...)) so they
# can be replayed later onto an object that does not exist yet.
class Spec::NextInstanceProxy
  def initialize
    @deferred = []
  end

  # Queue the call and hand back a fresh proxy so chained calls keep
  # recording.
  def method_missing(sym, *args)
    proxy = Spec::NextInstanceProxy.new
    @deferred << [sym, args, proxy]
    proxy
  end

  # Explicit definition: should_receive may already exist on Object (added
  # by the mocking library), so method_missing alone would not catch it.
  def should_receive(*args)
    method_missing(:should_receive, *args)
  end
  alias stub! should_receive

  # Replay the recorded chain against a real object, recursing into the
  # proxies returned at each step.
  def invoke(obj)
    @deferred.each do |(sym, args, proxy)|
      result = obj.send(sym, *args)
      proxy.invoke(result)
    end
  end
end
# Reopen Class so specs can set expectations on the *next* instance a class
# will create: `Klass.next_instance.should_receive(...)`.
class Class
  def next_instance
    # Save the real .new, then install a one-shot replacement that builds
    # the instance, replays the recorded expectations onto it, restores the
    # original .new and returns the instance.
    meth = metaclass.instance_method(:new)
    proxy = Spec::NextInstanceProxy.new
    metaclass.send :define_method, :new do |*args|
      instance = meth.bind(self).call(*args)
      proxy.invoke(instance)
      # Put the original .new back so only the first instantiation is hooked.
      metaclass.send :define_method, :new, meth
      instance
    end
    proxy
  end
end
# Per-example "guard" support: temporarily replaces a method with one that
# raises, so specs cannot shell out, fork or exec by accident. Guards are
# recorded in @guards and restored via unguard/unguard_all.
module Spec::Example::ExampleGroupSubclassMethods
  # Replace klass#name (or klass.name when is_class) with a raising stub and
  # remember the original implementation so it can be restored later.
  def add_guard(klass, name, is_class = false)
    guarded = nil # define variable now for scoping
    target = (is_class ? klass.metaclass : klass)
    sep = (is_class ? "." : "#")
    target.class_eval do
      # Capture the original method before overwriting it.
      guarded = instance_method(name)
      define_method name do |*args|
        raise "Testing guards violated: Cannot call #{klass}#{sep}#{name}"
      end
    end
    @guards ||= []
    @guards << [klass, name, is_class, guarded]
  end

  # Guard a class-level (singleton) method.
  def add_class_guard(klass, name)
    add_guard(klass, name, true)
  end

  # Restore one guarded method; raises if it was never guarded.
  def unguard(klass, name, is_class = false)
    row = @guards.find { |(k,n,i)| k == klass and n == name and i == is_class }
    raise "#{klass}#{is_class ? "." : "#"}#{name} is not guarded" if row.nil?
    (is_class ? klass.metaclass : klass).class_eval do
      # row.last is the UnboundMethod captured in add_guard.
      define_method name, row.last
    end
    @guards.delete row
  end

  # Restore a guarded class-level method.
  def class_unguard(klass, name)
    unguard(klass, name, true)
  end

  # Restore every guard (called from an after(:each) hook).
  def unguard_all
    @guards ||= []
    @guards.each do |klass, name, is_class, guarded|
      (is_class ? klass.metaclass : klass).class_eval do
        define_method name, guarded
      end
    end
    @guards.clear
  end
end
# prevent the use of `` in tests
Spec::Runner.configure do |configuration|
  # load this here so it's covered by the `` guard
  configuration.prepend_before(:all) do
    module GitHub
      load 'helpers.rb'
      load 'commands.rb'
    end
  end
  configuration.prepend_before(:all) do
    self.class.send :include, Spec::Example::ExampleGroupSubclassMethods
  end
  # Guard every way an example could shell out or spawn a process.
  configuration.prepend_before(:each) do
    add_guard Kernel, :`
    add_guard Kernel, :system
    add_guard Kernel, :fork
    add_guard Kernel, :exec
    add_class_guard Process, :fork
  end
  # Always restore the real methods, even when an example fails.
  configuration.append_after(:each) do
    unguard_all
  end
end
# include this in any example group that defines @helper
module SetupMethods
  # Stub @helper.user_and_branch to yield the given pair.
  def setup_user_and_branch(user = :user, branch = :master)
    @helper.should_receive(:user_and_branch).any_number_of_times.and_return([user, branch])
  end

  # Stub @helper.url_for(remote) to return a GitHub clone URL. When no user
  # is supplied it is derived from the remote name ("user" for :origin).
  def setup_url_for(remote = :origin, user = nil, project = :project)
    if user.nil?
      user = (remote == :origin ? "user" : remote)
    end
    @helper.should_receive(:url_for).any_number_of_times.with(remote).and_return("git://github.com/#{user}/#{project}.git")
  end
end
|
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
# Run the suite in the test environment. RACK_ENV is set *before* anything is
# required, so spec_helper_database and the app do not boot in development
# mode (previously it was set after the requires below).
ENV['RACK_ENV'] = 'test'

# Make app/, lib/ and config/ requirable.
%w(app lib config).each do |dir|
  path = File.expand_path("../../#{dir}", __FILE__)
  $LOAD_PATH.unshift(path) unless $LOAD_PATH.include?(path)
end

require 'capybara/rspec'
require 'spec_helper_database'

# Coverage: always on for CI (reports to Codecov), opt-in locally via COV.
# NOTE(review): SimpleCov starts after the requires above, so code loaded by
# them is not instrumented -- confirm whether that is intended.
if ENV['CI'] || ENV['COV']
  require 'simplecov'
  if ENV['CI']
    require 'codecov'
    SimpleCov.formatter = SimpleCov::Formatter::Codecov
  end
  SimpleCov.start do
    add_filter '/spec/'
  end
end
RSpec.configure do |config|
  # rspec-expectations config goes here. You can use an alternate
  # assertion/expectation library such as wrong or the stdlib/minitest
  # assertions if you prefer.
  config.expect_with :rspec do |expectations|
    # This option will default to `true` in RSpec 4. It makes the `description`
    # and `failure_message` of custom matchers include text for helper methods
    # defined using `chain`, e.g.:
    # be_bigger_than(2).and_smaller_than(4).description
    # # => "be bigger than 2 and smaller than 4"
    # ...rather than:
    # # => "be bigger than 2"
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  # rspec-mocks config goes here. You can use an alternate test double
  # library (such as bogus or mocha) by changing the `mock_with` option here.
  config.mock_with :rspec do |mocks|
    # Prevents you from mocking or stubbing a method that does not exist on
    # a real object. This is generally recommended, and will default to
    # `true` in RSpec 4.
    mocks.verify_partial_doubles = true
  end
  # These two settings work together to allow you to limit a spec run
  # to individual examples or groups you care about by tagging them with
  # `:focus` metadata. When nothing is tagged with `:focus`, all examples
  # get run.
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  # Allows RSpec to persist some state between runs in order to support
  # the `--only-failures` and `--next-failure` CLI options. We recommend
  # you configure your source control system to ignore this file.
  config.example_status_persistence_file_path = 'spec/examples.txt'
  # Limits the available syntax to the non-monkey patched syntax that is
  # recommended. For more details, see:
  # - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
  # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
  # - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
  config.disable_monkey_patching!
  # This setting enables warnings. It's recommended, but in some cases may
  # be too noisy due to issues in dependencies.
  # config.warnings = true
  # Many RSpec users commonly either run the entire suite or an individual
  # file, and it's useful to allow more verbose output when running an
  # individual spec file.
  if config.files_to_run.one?
    # Use the documentation formatter for detailed output,
    # unless a formatter has already been configured
    # (e.g. via a command-line flag).
    config.default_formatter = 'doc'
  end
  # Print the 3 slowest examples and example groups at the
  # end of the spec run, to help surface which specs are running
  # particularly slow.
  config.profile_examples = 3
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  # --seed 1234
  config.order = :random
  # Seed global randomization in this process using the `--seed` CLI option.
  # Setting this allows you to use `--seed` to deterministically reproduce
  # test failures related to randomization by passing the same `--seed` value
  # as the one that triggered the failure.
  Kernel.srand config.seed
end
Add SWCert
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
ENV['RACK_ENV'] = 'test'
# Put the application's app/, lib/ and config/ directories on the load path
# (skipping any that are already present).
%w(app lib config).each do |dir|
  path = File.expand_path("../../#{dir}", __FILE__)
  $LOAD_PATH.unshift(path) unless $LOAD_PATH.include?(path)
end
require 'capybara/rspec'
require 'spec_helper_database'
require 'environment'
# Collect coverage when on CI (reported to Codecov) or when COV is set locally.
if ENV['CI'] || ENV['COV']
  require 'simplecov'
  if ENV['CI']
    require 'codecov'
    SimpleCov.formatter = SimpleCov::Formatter::Codecov
  end
  SimpleCov.start do
    add_filter '/spec/'
  end
end
RSpec.configure do |config|
  # rspec-expectations config goes here. You can use an alternate
  # assertion/expectation library such as wrong or the stdlib/minitest
  # assertions if you prefer.
  config.expect_with :rspec do |expectations|
    # This option will default to `true` in RSpec 4. It makes the `description`
    # and `failure_message` of custom matchers include text for helper methods
    # defined using `chain`, e.g.:
    #     be_bigger_than(2).and_smaller_than(4).description
    #     # => "be bigger than 2 and smaller than 4"
    # ...rather than:
    #     # => "be bigger than 2"
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  # rspec-mocks config goes here. You can use an alternate test double
  # library (such as bogus or mocha) by changing the `mock_with` option here.
  config.mock_with :rspec do |mocks|
    # Prevents you from mocking or stubbing a method that does not exist on
    # a real object. This is generally recommended, and will default to
    # `true` in RSpec 4.
    mocks.verify_partial_doubles = true
  end
  # These two settings work together to allow you to limit a spec run
  # to individual examples or groups you care about by tagging them with
  # `:focus` metadata. When nothing is tagged with `:focus`, all examples
  # get run.
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  # Allows RSpec to persist some state between runs in order to support
  # the `--only-failures` and `--next-failure` CLI options. We recommend
  # you configure your source control system to ignore this file.
  config.example_status_persistence_file_path = 'spec/examples.txt'
  # Limits the available syntax to the non-monkey patched syntax that is
  # recommended. For more details, see:
  #   - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
  #   - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
  #   - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
  config.disable_monkey_patching!
  # This setting enables warnings. It's recommended, but in some cases may
  # be too noisy due to issues in dependencies.
  # config.warnings = true
  # Many RSpec users commonly either run the entire suite or an individual
  # file, and it's useful to allow more verbose output when running an
  # individual spec file.
  if config.files_to_run.one?
    # Use the documentation formatter for detailed output,
    # unless a formatter has already been configured
    # (e.g. via a command-line flag).
    config.default_formatter = 'doc'
  end
  # Print the 3 slowest examples and example groups at the
  # end of the spec run, to help surface which specs are running
  # particularly slow.
  config.profile_examples = 3
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  #     --seed 1234
  config.order = :random
  # Seed global randomization in this process using the `--seed` CLI option.
  # Setting this allows you to use `--seed` to deterministically reproduce
  # test failures related to randomization by passing the same `--seed` value
  # as the one that triggered the failure.
  Kernel.srand config.seed
end
|
# Set up coverage analysis
#-----------------------------------------------------------------------------#
if ENV['CI'] || ENV['GENERATE_COVERAGE']
  require 'simplecov'
  require 'coveralls'
  # On CI, report to Coveralls; locally (GENERATE_COVERAGE) emit an HTML report.
  if ENV['CI']
    SimpleCov.formatter = Coveralls::SimpleCov::Formatter
  elsif ENV['GENERATE_COVERAGE']
    SimpleCov.formatter = SimpleCov::Formatter::HTMLFormatter
  end
  SimpleCov.start do
    add_filter '/travis_bundle_dir'
  end
end
# General Setup
#-----------------------------------------------------------------------------#
require 'pathname'
# Repository root; used below to put lib/ and spec/ on the load path and by
# Bacon::Context#fixture to resolve fixture files.
ROOT = Pathname.new(File.expand_path('../../', __FILE__))
$LOAD_PATH.unshift((ROOT + 'lib').to_s)
$LOAD_PATH.unshift((ROOT + 'spec').to_s)
require 'bundler/setup'
require 'bacon'
require 'mocha-on-bacon'
require 'pretty_bacon'
require 'webmock'
include WebMock::API
require 'cocoapods'
require 'cocoapods_plugin'
#-----------------------------------------------------------------------------#
# The CocoaPods namespace
#
module Pod
  # Disable the wrapping so the output is deterministic in the tests.
  #
  UI.disable_wrap = true

  # Captures UI messages in memory instead of printing them, so specs can
  # assert on the produced output and warnings.
  #
  module UI
    @output = ''
    @warnings = ''
    class << self
      attr_accessor :output, :warnings

      # Records a regular output line (mirrors Kernel#puts).
      def puts(message = '')
        output << "#{message}\n"
      end

      # Records a warning line; `actions` is accepted for signature
      # compatibility with the real UI and is ignored here.
      def warn(message = '', actions = [])
        warnings << "#{message}\n"
      end

      # Records raw output without appending a newline.
      def print(message)
        output << message
      end
    end
  end
end
#-----------------------------------------------------------------------------#
# Bacon namespace
#
module Bacon
  # Extends every Bacon spec context with a helper for resolving fixtures.
  class Context
    # Root directory that holds the spec fixtures.
    ROOT = ::ROOT + 'spec/fixtures'

    # Returns the Pathname of the fixture called +name+.
    def fixture(name)
      ROOT.join(name)
    end
  end
end
#-----------------------------------------------------------------------------#
# SpecHelper namespace
#
module SpecHelper
  # Add this as an extension into the Search and List specs to help stub the plugins.json request
  module PluginsStubs
    # NOTE(review): this require looks redundant — `cocoapods_plugin` is loaded
    # above and presumably pulls in the plugins helper already; confirm before
    # depending on it being here.
    require File.expand_path '../lib/pod/plugins_helper', File.dirname(__FILE__)
    # Stubs the HTTP GET for the plugins list, returning +json+ (or the
    # plugins.json fixture when nil) with the given HTTP +status+.
    def stub_plugins_json_request(json = nil, status = 200)
      body = json || File.read(fixture('plugins.json'))
      stub_request(:get, Pod::PluginsHelper::PLUGINS_URL).to_return(:status => status, :body => body, :headers => {})
    end
  end
  # Add this as an extension into the Create specs
  module PluginsCreateCommand
    # Builds a `pod plugins create` command from the given CLI arguments.
    def create_command(*args)
      Pod::Command::Plugins::Create.new CLAide::ARGV.new(args)
    end
  end
  # Add this as an extension into the Search specs
  module PluginsSearchCommand
    # Builds a `pod plugins search` command from the given CLI arguments.
    def search_command(*args)
      Pod::Command::Plugins::Search.new CLAide::ARGV.new(args)
    end
  end
end
Removed useless `require` statement
# Set up coverage analysis
#-----------------------------------------------------------------------------#
if ENV['CI'] || ENV['GENERATE_COVERAGE']
  require 'simplecov'
  require 'coveralls'
  # On CI, report to Coveralls; locally (GENERATE_COVERAGE) emit an HTML report.
  if ENV['CI']
    SimpleCov.formatter = Coveralls::SimpleCov::Formatter
  elsif ENV['GENERATE_COVERAGE']
    SimpleCov.formatter = SimpleCov::Formatter::HTMLFormatter
  end
  SimpleCov.start do
    add_filter '/travis_bundle_dir'
  end
end
# General Setup
#-----------------------------------------------------------------------------#
require 'pathname'
# Repository root; used below to put lib/ and spec/ on the load path and by
# Bacon::Context#fixture to resolve fixture files.
ROOT = Pathname.new(File.expand_path('../../', __FILE__))
$LOAD_PATH.unshift((ROOT + 'lib').to_s)
$LOAD_PATH.unshift((ROOT + 'spec').to_s)
require 'bundler/setup'
require 'bacon'
require 'mocha-on-bacon'
require 'pretty_bacon'
require 'webmock'
include WebMock::API
require 'cocoapods'
require 'cocoapods_plugin'
#-----------------------------------------------------------------------------#
# The CocoaPods namespace
#
module Pod
  # Disable the wrapping so the output is deterministic in the tests.
  #
  UI.disable_wrap = true
  # Redirects the messages to an internal store so specs can assert on them.
  #
  module UI
    @output = ''
    @warnings = ''
    class << self
      attr_accessor :output
      attr_accessor :warnings
      # Records a regular output line (mirrors Kernel#puts).
      def puts(message = '')
        @output << "#{message}\n"
      end
      # Records a warning line; `actions` is accepted for signature
      # compatibility with the real UI and is ignored here.
      def warn(message = '', actions = [])
        @warnings << "#{message}\n"
      end
      # Records raw output without appending a newline.
      def print(message)
        @output << message
      end
    end
  end
end
#-----------------------------------------------------------------------------#
# Bacon namespace
#
module Bacon
  # Add a fixture helper to the Bacon Context
  class Context
    # Root directory that holds the spec fixtures.
    ROOT = ::ROOT + 'spec/fixtures'
    # Returns the Pathname of the fixture called +name+.
    def fixture(name)
      ROOT + name
    end
  end
end
#-----------------------------------------------------------------------------#
# SpecHelper namespace
#
module SpecHelper
  # Add this as an extension into the Search and List specs to help stub the plugins.json request
  module PluginsStubs
    # Stubs the HTTP GET for the plugins list, returning +json+ (or the
    # plugins.json fixture when nil) with the given HTTP +status+.
    def stub_plugins_json_request(json = nil, status = 200)
      body = json || File.read(fixture('plugins.json'))
      stub_request(:get, Pod::PluginsHelper::PLUGINS_URL).to_return(:status => status, :body => body, :headers => {})
    end
  end
  # Add this as an extension into the Create specs
  module PluginsCreateCommand
    # Builds a `pod plugins create` command from the given CLI arguments.
    def create_command(*args)
      Pod::Command::Plugins::Create.new CLAide::ARGV.new(args)
    end
  end
  # Add this as an extension into the Search specs
  module PluginsSearchCommand
    # Builds a `pod plugins search` command from the given CLI arguments.
    def search_command(*args)
      Pod::Command::Plugins::Search.new CLAide::ARGV.new(args)
    end
  end
end
|
# First include simplecov to track code coverage
require 'simplecov'
SimpleCov.start do
  add_filter '/spec/'
end
require 'rspec'
require 'webmock/rspec'
require 'rack/test'
require 'sinatra'
require 'haml'
# Load the eventkalender library files relative to this spec helper.
%w{parser scraper fixnum}.each do |file|
  require File.join(File.dirname(__FILE__), '..', 'lib', 'eventkalender', "#{file}.rb")
end
%w{event meeting conference}.each do |file|
  require File.join(File.dirname(__FILE__), '..', 'lib', 'eventkalender', 'event', "#{file}.rb")
end
require File.join(File.dirname(__FILE__), '..', 'webapp.rb')
RSpec.configure do |config|
  config.include Rack::Test::Methods
  config.run_all_when_everything_filtered = true
  config.filter_run :focus
  config.color = true
  config.order = 'random'
  # Stub the wiki pages the scraper fetches so every request is served
  # locally from the fixture files instead of hitting the network.
  config.before(:each) do
    project_root = File.expand_path('..', __FILE__)
    fixtures = {
      'http://c3voc.de/wiki/events' => 'events.htm',
      'http://c3voc.de/wiki/meetings' => 'meetings.htm'
    }
    fixtures.each do |url, file|
      stub_request(:get, url).to_return(body: File.read("#{project_root}/fixtures/#{file}"),
                                        code: 200,
                                        headers: { 'Content-Type' =>
                                                   'text/html; charset=utf-8'} )
    end
  end
end
# Rack::Test entry point: the Sinatra application under test.
def app
  Sinatra::Application
end
specs: added new classes, tests are still broken
# First include simplecov to track code coverage
require 'simplecov'
SimpleCov.start do
  add_filter '/spec/'
end
require 'rspec'
require 'webmock/rspec'
require 'rack/test'
require 'sinatra'
require 'haml'
# Load the eventkalender library files relative to this spec helper.
%w{scraper fixnum}.each do |file|
  require File.join(File.dirname(__FILE__), '..', 'lib', 'eventkalender', "#{file}.rb")
end
%w{event meeting conference}.each do |file|
  require File.join(File.dirname(__FILE__), '..', 'lib', 'eventkalender', 'event', "#{file}.rb")
end
%w{parser meetings conferences}.each do |file|
  require File.join(File.dirname(__FILE__), '..', 'lib', 'eventkalender', 'parser', "#{file}.rb")
end
require File.join(File.dirname(__FILE__), '..', 'webapp.rb')
RSpec.configure do |config|
  config.include Rack::Test::Methods
  config.run_all_when_everything_filtered = true
  config.filter_run :focus
  config.color = true
  config.order = 'random'
  # Stub the wiki pages the scraper fetches so every request is served
  # locally from the fixture files instead of hitting the network.
  config.before(:each) do
    project_root = File.expand_path('..', __FILE__)
    # BUG FIX: these URLs previously read '.../eventz' and '.../meetingz',
    # which never match the pages actually requested, so any example that
    # triggered a fetch failed with WebMock::NetConnectNotAllowedError.
    fixtures = {
      'http://c3voc.de/wiki/events' => 'events.htm',
      'http://c3voc.de/wiki/meetings' => 'meetings.htm'
    }
    fixtures.each do |url, file|
      stub_request(:get, url).to_return(body: File.read("#{project_root}/fixtures/#{file}"),
                                        code: 200,
                                        headers: { 'Content-Type' =>
                                                   'text/html; charset=utf-8' })
    end
  end
end
# Rack::Test entry point: the Sinatra application under test.
def app
  Sinatra::Application
end
|
# Report test coverage to Code Climate.
require "codeclimate-test-reporter"
CodeClimate::TestReporter.start
require 'oneview'
require 'webmock/rspec'
RSpec.configure do |config|
  config.mock_with :rspec
end
Configuration to allow codeclimate to work with webmock
# Report test coverage to Code Climate.
require "codeclimate-test-reporter"
CodeClimate::TestReporter.start
require 'oneview'
require 'webmock/rspec'
# WebMock blocks all real HTTP by default; allow the Code Climate reporter
# to post the coverage results.
WebMock.disable_net_connect!(:allow => "codeclimate.com")
RSpec.configure do |config|
  config.mock_with :rspec
end
# frozen_string_literal: true
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), "..", "lib"))
$LOAD_PATH.unshift(File.dirname(__FILE__))
ENV["RAILS_ENV"] ||= "test"
require "rspec"
require "webmock/rspec"
# pry-byebug is optional: only needed for interactive local debugging.
begin
  require "pry-byebug"
rescue LoadError # rubocop:disable Lint/HandleExceptions
end
require "timecop"
require "active_record"
require "sqlite3"
require "influxer"
class Rails
class << self
def cache
@cache ||= ActiveSupport::Cache::MemoryStore.new
end
def logger
@logger ||= Logger.new(nil)
end
def env
"test"
end
end
end
require "influxer/rails/client"
ActiveRecord::Base.send :include, Influxer::Model
# In-memory SQLite keeps the ActiveRecord-dependent specs self-contained.
ActiveRecord::Base.establish_connection(adapter: "sqlite3", database: ":memory:")
# Load the metric definitions first, then the remaining support files.
Dir["#{File.dirname(__FILE__)}/support/metrics/*.rb"].sort.each { |f| require f }
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].sort.each { |f| require f }
WebMock.disable_net_connect!
RSpec.configure do |config|
  config.mock_with :rspec
  config.example_status_persistence_file_path = "tmp/rspec_examples.txt"
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  config.order = :random
  Kernel.srand config.seed
  # Reset gem state and any frozen time after every example.
  config.after(:each) { Influxer.reset! }
  config.after(:each) { Timecop.return }
end
Raise on deprecation
# frozen_string_literal: true
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), "..", "lib"))
$LOAD_PATH.unshift(File.dirname(__FILE__))
ENV["RAILS_ENV"] ||= "test"
require "rspec"
require "webmock/rspec"
# pry-byebug is optional: only needed for interactive local debugging.
begin
  require "pry-byebug"
rescue LoadError # rubocop:disable Lint/HandleExceptions
end
require "timecop"
require "active_record"
require "sqlite3"
# Surface any use of deprecated ActiveSupport APIs as hard test failures.
ActiveSupport::Deprecation.behavior = :raise
require "influxer"
# Rails stub: minimal stand-in so the gem's Rails integration can be
# exercised without booting a full Rails application.
class Rails
  class << self
    # Memoized in-memory cache store (same API surface as Rails.cache).
    def cache
      @cache ||= ActiveSupport::Cache::MemoryStore.new
    end
    # Memoized null logger: accepts log calls and discards the output.
    def logger
      @logger ||= Logger.new(nil)
    end
    # The stubbed environment name is always "test".
    def env
      "test"
    end
  end
end
require "influxer/rails/client"
ActiveRecord::Base.send :include, Influxer::Model
# In-memory SQLite keeps the ActiveRecord-dependent specs self-contained.
ActiveRecord::Base.establish_connection(adapter: "sqlite3", database: ":memory:")
# Load the metric definitions first, then the remaining support files.
Dir["#{File.dirname(__FILE__)}/support/metrics/*.rb"].sort.each { |f| require f }
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].sort.each { |f| require f }
WebMock.disable_net_connect!
RSpec.configure do |config|
  config.mock_with :rspec
  config.example_status_persistence_file_path = "tmp/rspec_examples.txt"
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  config.order = :random
  Kernel.srand config.seed
  # Reset gem state and any frozen time after every example.
  config.after(:each) { Influxer.reset! }
  config.after(:each) { Timecop.return }
end
|
# frozen_string_literal: true
$VERBOSE = nil unless ENV['RUBY_LOUD'] # silence loud Ruby 2.7 deprecations
ENV['RAILS_ENV'] = 'test'
# Allow a dedicated test database URL to override the default one.
ENV['DATABASE_URL'] = ENV['DATABASE_TEST_URL'] if ENV['DATABASE_TEST_URL']
require "bundler/setup"
# Truthy when running on a CI server: returns the value of the CI
# environment variable, or nil when it is unset.
def ci_build?
  ENV.fetch('CI', nil)
end
require 'simplecov'
SimpleCov.root(File.expand_path('../..', __FILE__))
SimpleCov.start('rails') do
  add_filter '/.internal_test_app'
  add_filter '/lib/generators'
  add_filter '/spec'
  add_filter '/tasks'
  add_filter '/lib/hyrax/version.rb'
  add_filter '/lib/hyrax/engine.rb'
end
require 'factory_bot'
# Inside Docker the host app is prebuilt, so load and migrate it directly;
# otherwise EngineCart generates and loads the internal test application.
if ENV['IN_DOCKER']
  require File.expand_path("config/environment", '../hyrax-webapp')
  db_config = ActiveRecord::Base.configurations[ENV['RAILS_ENV']]
  ActiveRecord::Tasks::DatabaseTasks.create(db_config)
  ActiveRecord::Migrator.migrations_paths = [Pathname.new(ENV['RAILS_ROOT']).join('db', 'migrate').to_s]
  ActiveRecord::Tasks::DatabaseTasks.migrate
  ActiveRecord::Base.descendants.each(&:reset_column_information)
else
  require 'engine_cart'
  EngineCart.load_application!
end
ActiveRecord::Migration.maintain_test_schema!
require 'active_fedora/cleaner'
require 'devise'
require 'devise/version'
require 'mida'
require 'rails-controller-testing'
require 'rspec/rails'
require 'rspec/its'
require 'rspec/matchers'
require 'rspec/active_model/mocks'
require 'equivalent-xml'
require 'equivalent-xml/rspec_matchers'
require 'database_cleaner'
require 'hyrax/specs/capybara'
require 'hyrax/specs/clamav'
require 'hyrax/specs/engine_routes'
# ensure Hyrax::Schema gets loaded is resolvable for `support/` models
Hyrax::Schema # rubocop:disable Lint/Void
Valkyrie::MetadataAdapter
  .register(Valkyrie::Persistence::Memory::MetadataAdapter.new, :test_adapter)
# Require supporting ruby files from spec/support/ and subdirectories. Note: engine, not Rails.root context.
Dir[File.join(File.dirname(__FILE__), "support/**/*.rb")].each { |f| require f }
require 'webmock/rspec'
# Hosts the suite may reach over the network; everything else is blocked.
allowed_hosts = %w[chrome chromedriver.storage.googleapis.com fcrepo solr]
WebMock.disable_net_connect!(allow_localhost: true, allow: allowed_hosts)
require 'i18n/debug' if ENV['I18N_DEBUG']
require 'byebug' unless ci_build?
require 'hyrax/specs/shared_specs/factories/strategies/json_strategy'
require 'hyrax/specs/shared_specs/factories/strategies/valkyrie_resource'
FactoryBot.register_strategy(:valkyrie_create, ValkyrieCreateStrategy)
FactoryBot.register_strategy(:create_using_test_adapter, ValkyrieTestAdapterCreateStrategy)
FactoryBot.register_strategy(:json, JsonStrategy)
FactoryBot.definition_file_paths = [File.expand_path("../factories", __FILE__)]
FactoryBot.find_definitions
require 'shoulda/matchers'
require 'shoulda/callback/matchers'
Shoulda::Matchers.configure do |config|
  config.integrate do |with|
    with.test_framework :rspec
    with.library :rails
  end
end
# Register Hyrax's custom queries with the in-memory test adapter.
query_registration_target =
  Valkyrie::MetadataAdapter.find(:test_adapter).query_service.custom_queries
[Hyrax::CustomQueries::Navigators::CollectionMembers,
 Hyrax::CustomQueries::Navigators::ChildWorksNavigator,
 Hyrax::CustomQueries::FindAccessControl,
 Hyrax::CustomQueries::FindCollectionsByType,
 Hyrax::CustomQueries::FindManyByAlternateIds,
 Hyrax::CustomQueries::FindFileMetadata,
 Hyrax::CustomQueries::Navigators::FindFiles].each do |handler|
  query_registration_target.register_query_handler(handler)
end
ActiveJob::Base.queue_adapter = :test
# Wipes every object from the Fedora repository, then re-initializes the
# repository's base path.
def clean_active_fedora_repository
  ActiveFedora::Cleaner.clean!
  # The JS is executed in a different thread, so that other thread
  # may think the root path has already been created:
  ActiveFedora.fedora.connection.send(:init_base_path)
end
# Main RSpec configuration: matcher/helper inclusions, database and Fedora
# cleaning strategy per example type, WebMock tightening for view specs, and
# ActiveJob/Valkyrie toggles driven by example metadata.
RSpec.configure do |config|
  config.disable_monkey_patching!
  config.include Shoulda::Matchers::ActiveRecord, type: :model
  config.include Shoulda::Matchers::ActiveModel, type: :form
  config.include Shoulda::Callback::Matchers::ActiveModel
  config.include Hyrax::Matchers
  config.full_backtrace = true if ci_build?
  config.expect_with :rspec do |c|
    c.syntax = :expect
  end
  config.fixture_path = File.expand_path("../fixtures", __FILE__)
  config.use_transactional_fixtures = false
  config.before :suite do
    DatabaseCleaner.clean_with(:truncation)
    # Noid minting causes extra LDP requests which slow the test suite.
    Hyrax.config.enable_noids = false
    # Don't use the nested relationship reindexer. Null is much faster
    Hyrax.config.nested_relationship_reindexer = ->(id:, extent:) {}
  end
  config.before do |example|
    # Many of the specs push data into Redis. This ensures that we
    # have a clean slate when processing. It is possible that we
    # could narrow the call for this method to be done for clean_repo
    # or feature specs.
    Hyrax::RedisEventStore.instance.redis.flushdb
    if example.metadata[:type] == :feature && Capybara.current_driver != :rack_test
      DatabaseCleaner.strategy = :truncation
    else
      DatabaseCleaner.strategy = :transaction
      DatabaseCleaner.start
    end
    # using :workflow is preferable to :clean_repo, use the former if possible
    # It's important that this comes after DatabaseCleaner.start
    ensure_deposit_available_for(user) if example.metadata[:workflow]
  end
  config.include(ControllerLevelHelpers, type: :view)
  config.before(:each, type: :view) do
    initialize_controller_helpers(view)
    # disallow network connections to services within the stack for view specs;
    # no db/metadata/index calls
    WebMock.disable_net_connect!(allow_localhost: false, allow: 'chromedriver.storage.googleapis.com')
    allow(Hyrax)
      .to receive(:metadata_adapter)
      .and_return(Valkyrie::MetadataAdapter.find(:test_adapter))
  end
  # Restore the default (more permissive) network policy after view specs.
  config.after(:each, type: :view) do
    WebMock.disable_net_connect!(allow_localhost: true, allow: allowed_hosts)
  end
  config.before(:all, type: :feature) do
    # Assets take a long time to compile. This causes two problems:
    # 1) the profile will show the first feature test taking much longer than it
    #    normally would.
    # 2) The first feature test will trigger rack-timeout
    #
    # Precompile the assets to prevent these issues.
    visit "/assets/application.css"
    visit "/assets/application.js"
  end
  config.after do
    DatabaseCleaner.clean
    # Ensuring we have a clear queue between each spec.
    ActiveJob::Base.queue_adapter.enqueued_jobs = []
    ActiveJob::Base.queue_adapter.performed_jobs = []
  end
  # If true, the base class of anonymous controllers will be inferred
  # automatically. This will be the default behavior in future versions of
  # rspec-rails.
  config.infer_base_class_for_anonymous_controllers = false
  config.include Shoulda::Matchers::Independent
  config.include Devise::Test::ControllerHelpers, type: :controller
  config.include EngineRoutes, type: :controller
  config.include Warden::Test::Helpers, type: :request
  config.include Warden::Test::Helpers, type: :feature
  config.before(:each, type: :feature) do |example|
    clean_active_fedora_repository unless
      example.metadata[:clean_repo] # trust clean_repo if present
  end
  config.after(:each, type: :feature) do
    Warden.test_reset!
    Capybara.reset_sessions!
    page.driver.reset!
  end
  config.include Capybara::RSpecMatchers, type: :input
  config.include InputSupport, type: :input
  config.include FactoryBot::Syntax::Methods
  config.include OptionalExample
  config.infer_spec_type_from_file_location!
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  config.formatter = 'LoggingFormatter'
  config.default_formatter = 'doc' if config.files_to_run.one?
  config.order = :random
  Kernel.srand config.seed
  config.shared_context_metadata_behavior = :apply_to_host_groups
  config.filter_run_when_matching :focus
  config.example_status_persistence_file_path = 'spec/examples.txt'
  config.profile_examples = 10
  config.before(:example, :clean_repo) do
    clean_active_fedora_repository
  end
  # Use this example metadata when you want to perform jobs inline during testing.
  #
  #   describe '#my_method`, :perform_enqueued do
  #     ...
  #   end
  #
  # If you pass an `Array` of job classes, they will be treated as the filter list.
  #
  #   describe '#my_method`, perform_enqueued: [MyJobClass] do
  #     ...
  #   end
  #
  # Limit to specific job classes with:
  #
  #   ActiveJob::Base.queue_adapter.filter = [JobClass]
  #
  config.around(:example, :perform_enqueued) do |example|
    ActiveJob::Base.queue_adapter.filter =
      example.metadata[:perform_enqueued].try(:to_a)
    ActiveJob::Base.queue_adapter.perform_enqueued_jobs = true
    ActiveJob::Base.queue_adapter.perform_enqueued_at_jobs = true
    example.run
    ActiveJob::Base.queue_adapter.filter = nil
    ActiveJob::Base.queue_adapter.perform_enqueued_jobs = false
    ActiveJob::Base.queue_adapter.perform_enqueued_at_jobs = false
  end
  config.before(:example, :valkyrie_adapter) do |example|
    adapter_name = example.metadata[:valkyrie_adapter]
    allow(Hyrax)
      .to receive(:metadata_adapter)
      .and_return(Valkyrie::MetadataAdapter.find(adapter_name))
  end
  # turn on the default nested reindexer; we use a null implementation for most
  # tests because it's (supposedly?) much faster. why is it faster but doesn't
  # impact most tests? maybe we should fix this in the implementation instead?
  config.around(:example, :with_nested_reindexing) do |example|
    original_indexer = Hyrax.config.nested_relationship_reindexer
    Hyrax.config.nested_relationship_reindexer =
      Hyrax.config.default_nested_relationship_reindexer
    example.run
    Hyrax.config.nested_relationship_reindexer = original_indexer
  end
end
Don't clean Redis quite so aggressively in test
The test environment doesn't need to flush Redis before every example; doing so
is probably costing us a fair amount of time, so skip it.
# frozen_string_literal: true
$VERBOSE = nil unless ENV['RUBY_LOUD'] # silence loud Ruby 2.7 deprecations
ENV['RAILS_ENV'] = 'test'
ENV['DATABASE_URL'] = ENV['DATABASE_TEST_URL'] if ENV['DATABASE_TEST_URL']
require "bundler/setup"
def ci_build?
ENV['CI']
end
require 'simplecov'
SimpleCov.root(File.expand_path('../..', __FILE__))
SimpleCov.start('rails') do
add_filter '/.internal_test_app'
add_filter '/lib/generators'
add_filter '/spec'
add_filter '/tasks'
add_filter '/lib/hyrax/version.rb'
add_filter '/lib/hyrax/engine.rb'
end
require 'factory_bot'
if ENV['IN_DOCKER']
require File.expand_path("config/environment", '../hyrax-webapp')
db_config = ActiveRecord::Base.configurations[ENV['RAILS_ENV']]
ActiveRecord::Tasks::DatabaseTasks.create(db_config)
ActiveRecord::Migrator.migrations_paths = [Pathname.new(ENV['RAILS_ROOT']).join('db', 'migrate').to_s]
ActiveRecord::Tasks::DatabaseTasks.migrate
ActiveRecord::Base.descendants.each(&:reset_column_information)
else
require 'engine_cart'
EngineCart.load_application!
end
ActiveRecord::Migration.maintain_test_schema!
require 'active_fedora/cleaner'
require 'devise'
require 'devise/version'
require 'mida'
require 'rails-controller-testing'
require 'rspec/rails'
require 'rspec/its'
require 'rspec/matchers'
require 'rspec/active_model/mocks'
require 'equivalent-xml'
require 'equivalent-xml/rspec_matchers'
require 'database_cleaner'
require 'hyrax/specs/capybara'
require 'hyrax/specs/clamav'
require 'hyrax/specs/engine_routes'
# ensure Hyrax::Schema gets loaded is resolvable for `support/` models
Hyrax::Schema # rubocop:disable Lint/Void
Valkyrie::MetadataAdapter
.register(Valkyrie::Persistence::Memory::MetadataAdapter.new, :test_adapter)
# Require supporting ruby files from spec/support/ and subdirectories. Note: engine, not Rails.root context.
Dir[File.join(File.dirname(__FILE__), "support/**/*.rb")].each { |f| require f }
require 'webmock/rspec'
allowed_hosts = %w[chrome chromedriver.storage.googleapis.com fcrepo solr]
WebMock.disable_net_connect!(allow_localhost: true, allow: allowed_hosts)
require 'i18n/debug' if ENV['I18N_DEBUG']
require 'byebug' unless ci_build?
require 'hyrax/specs/shared_specs/factories/strategies/json_strategy'
require 'hyrax/specs/shared_specs/factories/strategies/valkyrie_resource'
FactoryBot.register_strategy(:valkyrie_create, ValkyrieCreateStrategy)
FactoryBot.register_strategy(:create_using_test_adapter, ValkyrieTestAdapterCreateStrategy)
FactoryBot.register_strategy(:json, JsonStrategy)
FactoryBot.definition_file_paths = [File.expand_path("../factories", __FILE__)]
FactoryBot.find_definitions
require 'shoulda/matchers'
require 'shoulda/callback/matchers'
Shoulda::Matchers.configure do |config|
config.integrate do |with|
with.test_framework :rspec
with.library :rails
end
end
query_registration_target =
Valkyrie::MetadataAdapter.find(:test_adapter).query_service.custom_queries
[Hyrax::CustomQueries::Navigators::CollectionMembers,
Hyrax::CustomQueries::Navigators::ChildWorksNavigator,
Hyrax::CustomQueries::FindAccessControl,
Hyrax::CustomQueries::FindCollectionsByType,
Hyrax::CustomQueries::FindManyByAlternateIds,
Hyrax::CustomQueries::FindFileMetadata,
Hyrax::CustomQueries::Navigators::FindFiles].each do |handler|
query_registration_target.register_query_handler(handler)
end
ActiveJob::Base.queue_adapter = :test
# Wipes the test Fedora repository and eagerly re-creates its root container.
# Used before feature specs and :clean_repo examples (see hooks below).
def clean_active_fedora_repository
  ActiveFedora::Cleaner.clean!
  # The JS is executed in a different thread, so that other thread
  # may think the root path has already been created:
  ActiveFedora.fedora.connection.send(:init_base_path)
end
RSpec.configure do |config|
  config.disable_monkey_patching!
  config.include Shoulda::Matchers::ActiveRecord, type: :model
  config.include Shoulda::Matchers::ActiveModel, type: :form
  config.include Shoulda::Callback::Matchers::ActiveModel
  config.include Hyrax::Matchers
  config.full_backtrace = true if ci_build?
  config.expect_with :rspec do |c|
    c.syntax = :expect
  end
  config.fixture_path = File.expand_path("../fixtures", __FILE__)
  # DatabaseCleaner manages transactions/truncation itself (per-example hook
  # below), so Rails' own transactional fixtures are disabled.
  config.use_transactional_fixtures = false
  config.before :suite do
    Hyrax::RedisEventStore.instance.redis.flushdb
    DatabaseCleaner.clean_with(:truncation)
    # Noid minting causes extra LDP requests which slow the test suite.
    Hyrax.config.enable_noids = false
    # Don't use the nested relationship reindexer. Null is much faster
    Hyrax.config.nested_relationship_reindexer = ->(id:, extent:) {}
  end
  config.before do |example|
    # JS-driven feature specs hit the app from another thread, where a
    # transaction rollback would be invisible; truncate for those instead.
    if example.metadata[:type] == :feature && Capybara.current_driver != :rack_test
      DatabaseCleaner.strategy = :truncation
    else
      DatabaseCleaner.strategy = :transaction
      DatabaseCleaner.start
    end
    # using :workflow is preferable to :clean_repo, use the former if possible
    # It's important that this comes after DatabaseCleaner.start
    ensure_deposit_available_for(user) if example.metadata[:workflow]
  end
  config.include(ControllerLevelHelpers, type: :view)
  config.before(:each, type: :view) do
    initialize_controller_helpers(view)
    # disallow network connections to services within the stack for view specs;
    # no db/metadata/index calls
    WebMock.disable_net_connect!(allow_localhost: false, allow: 'chromedriver.storage.googleapis.com')
    allow(Hyrax)
      .to receive(:metadata_adapter)
      .and_return(Valkyrie::MetadataAdapter.find(:test_adapter))
  end
  config.after(:each, type: :view) do
    # Restore the default (looser) WebMock policy set at file load time.
    WebMock.disable_net_connect!(allow_localhost: true, allow: allowed_hosts)
  end
  config.before(:all, type: :feature) do
    # Assets take a long time to compile. This causes two problems:
    # 1) the profile will show the first feature test taking much longer than it
    #    normally would.
    # 2) The first feature test will trigger rack-timeout
    #
    # Precompile the assets to prevent these issues.
    visit "/assets/application.css"
    visit "/assets/application.js"
  end
  config.after do
    DatabaseCleaner.clean
    # Ensuring we have a clear queue between each spec.
    ActiveJob::Base.queue_adapter.enqueued_jobs = []
    ActiveJob::Base.queue_adapter.performed_jobs = []
  end
  # If true, the base class of anonymous controllers will be inferred
  # automatically. This will be the default behavior in future versions of
  # rspec-rails.
  config.infer_base_class_for_anonymous_controllers = false
  config.include Shoulda::Matchers::Independent
  config.include Devise::Test::ControllerHelpers, type: :controller
  config.include EngineRoutes, type: :controller
  config.include Warden::Test::Helpers, type: :request
  config.include Warden::Test::Helpers, type: :feature
  config.before(:each, type: :feature) do |example|
    clean_active_fedora_repository unless
      example.metadata[:clean_repo] # trust clean_repo if present
  end
  config.after(:each, type: :feature) do
    Warden.test_reset!
    Capybara.reset_sessions!
    page.driver.reset!
  end
  config.include Capybara::RSpecMatchers, type: :input
  config.include InputSupport, type: :input
  config.include FactoryBot::Syntax::Methods
  config.include OptionalExample
  config.infer_spec_type_from_file_location!
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  config.formatter = 'LoggingFormatter'
  config.default_formatter = 'doc' if config.files_to_run.one?
  config.order = :random
  Kernel.srand config.seed
  config.shared_context_metadata_behavior = :apply_to_host_groups
  config.filter_run_when_matching :focus
  config.example_status_persistence_file_path = 'spec/examples.txt'
  config.profile_examples = 10
  config.before(:example, :clean_repo) do
    clean_active_fedora_repository
    Hyrax::RedisEventStore.instance.redis.flushdb
  end
  # Use this example metadata when you want to perform jobs inline during testing.
  #
  #   describe '#my_method`, :perform_enqueued do
  #     ...
  #   end
  #
  # If you pass an `Array` of job classes, they will be treated as the filter list.
  #
  #   describe '#my_method`, perform_enqueued: [MyJobClass] do
  #     ...
  #   end
  #
  # Limit to specific job classes with:
  #
  #   ActiveJob::Base.queue_adapter.filter = [JobClass]
  #
  config.around(:example, :perform_enqueued) do |example|
    ActiveJob::Base.queue_adapter.filter =
      example.metadata[:perform_enqueued].try(:to_a)
    ActiveJob::Base.queue_adapter.perform_enqueued_jobs = true
    ActiveJob::Base.queue_adapter.perform_enqueued_at_jobs = true
    example.run
    # Restore the queue-only behaviour for subsequent examples.
    ActiveJob::Base.queue_adapter.filter = nil
    ActiveJob::Base.queue_adapter.perform_enqueued_jobs = false
    ActiveJob::Base.queue_adapter.perform_enqueued_at_jobs = false
  end
  config.before(:example, :valkyrie_adapter) do |example|
    adapter_name = example.metadata[:valkyrie_adapter]
    allow(Hyrax)
      .to receive(:metadata_adapter)
      .and_return(Valkyrie::MetadataAdapter.find(adapter_name))
  end
  # turn on the default nested reindexer; we use a null implementation for most
  # tests because it's (supposedly?) much faster. why is it faster but doesn't
  # impact most tests? maybe we should fix this in the implementation instead?
  config.around(:example, :with_nested_reindexing) do |example|
    original_indexer = Hyrax.config.nested_relationship_reindexer
    Hyrax.config.nested_relationship_reindexer =
      Hyrax.config.default_nested_relationship_reindexer
    example.run
    Hyrax.config.nested_relationship_reindexer = original_indexer
  end
end
|
require 'factory_girl_rails'
require 'database_cleaner'
require "email_spec"
# Load shared helpers, matchers and macros from spec/support.
Dir["./spec/support/**/*.rb"].sort.each { |f| require f}
RSpec.configure do |config|
  # DatabaseCleaner drives transactions/truncation itself (hooks below).
  config.use_transactional_fixtures = false
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  config.include FactoryGirl::Syntax::Methods
  config.include(EmailSpec::Helpers)
  config.include(EmailSpec::Matchers)
  config.include(CommonActions)
  config.before(:suite) do
    DatabaseCleaner.clean_with :truncation
  end
  config.before(:each) do |example|
    # JS specs exercise the app from another process, where a transaction
    # rollback would be invisible; truncate for those instead.
    DatabaseCleaner.strategy= example.metadata[:js] ? :truncation : :transaction
    DatabaseCleaner.start
  end
  config.after(:each) do
    DatabaseCleaner.clean
  end
  # Allows RSpec to persist some state between runs in order to support
  # the `--only-failures` and `--next-failure` CLI options.
  config.example_status_persistence_file_path = "spec/examples.txt"
  # Many RSpec users commonly either run the entire suite or an individual
  # file, and it's useful to allow more verbose output when running an
  # individual spec file.
  if config.files_to_run.one?
    # Use the documentation formatter for detailed output,
    # unless a formatter has already been configured
    # (e.g. via a command-line flag).
    config.default_formatter = 'doc'
  end
  # Print the 10 slowest examples and example groups at the
  # end of the spec run, to help surface which specs are running
  # particularly slow.
  config.profile_examples = 10
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  #     --seed 1234
  config.order = :random
  # Seed global randomization in this process using the `--seed` CLI option.
  # Setting this allows you to use `--seed` to deterministically reproduce
  # test failures related to randomization by passing the same `--seed` value
  # as the one that triggered the failure.
  Kernel.srand config.seed
end
Include devise helpers in controller specs
require 'factory_girl_rails'
require 'database_cleaner'
require "email_spec"
require 'devise'
# Load shared helpers, matchers and macros from spec/support.
Dir["./spec/support/**/*.rb"].sort.each { |f| require f}
RSpec.configure do |config|
  # DatabaseCleaner drives transactions/truncation itself (hooks below).
  config.use_transactional_fixtures = false
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  # Devise sign-in helpers for controller examples.
  config.include Devise::TestHelpers, :type => :controller
  config.include FactoryGirl::Syntax::Methods
  config.include(EmailSpec::Helpers)
  config.include(EmailSpec::Matchers)
  config.include(CommonActions)
  config.before(:suite) do
    DatabaseCleaner.clean_with :truncation
  end
  config.before(:each) do |example|
    # JS specs exercise the app from another process, where a transaction
    # rollback would be invisible; truncate for those instead.
    DatabaseCleaner.strategy= example.metadata[:js] ? :truncation : :transaction
    DatabaseCleaner.start
  end
  config.after(:each) do
    DatabaseCleaner.clean
  end
  # Allows RSpec to persist some state between runs in order to support
  # the `--only-failures` and `--next-failure` CLI options.
  config.example_status_persistence_file_path = "spec/examples.txt"
  # Many RSpec users commonly either run the entire suite or an individual
  # file, and it's useful to allow more verbose output when running an
  # individual spec file.
  if config.files_to_run.one?
    # Use the documentation formatter for detailed output,
    # unless a formatter has already been configured
    # (e.g. via a command-line flag).
    config.default_formatter = 'doc'
  end
  # Print the 10 slowest examples and example groups at the
  # end of the spec run, to help surface which specs are running
  # particularly slow.
  config.profile_examples = 10
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  #     --seed 1234
  config.order = :random
  # Seed global randomization in this process using the `--seed` CLI option.
  # Setting this allows you to use `--seed` to deterministically reproduce
  # test failures related to randomization by passing the same `--seed` value
  # as the one that triggered the failure.
  Kernel.srand config.seed
end
|
require 'geometry'
require 'robot'
require 'dispatcher'
require 'board'
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
# Boilerplate generated by `rspec --init`, condensed. Only the recommended
# baseline settings are active; the optional extras suggested by the
# generator are kept commented out at the bottom.
RSpec.configure do |config|
  # Make custom-matcher descriptions include `chain`-defined clauses,
  # e.g. `be_bigger_than(2).and_smaller_than(4).description`
  # => "be bigger than 2 and smaller than 4" (the RSpec 4 default).
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end

  # Disallow mocking/stubbing methods that don't exist on the real object
  # (the RSpec 4 default).
  config.mock_with :rspec do |mocks|
    mocks.verify_partial_doubles = true
  end

  # Propagate shared-context metadata into host groups/examples instead of
  # auto-including on matching metadata (the RSpec 4 default).
  config.shared_context_metadata_behavior = :apply_to_host_groups

  # Optional settings suggested by the generator, left disabled:
  #   config.filter_run_when_matching :focus
  #   config.example_status_persistence_file_path = "spec/examples.txt"
  #   config.disable_monkey_patching!
  #   config.warnings = true
  #   config.default_formatter = 'doc' if config.files_to_run.one?
  #   config.profile_examples = 10
  #   config.order = :random
  #   Kernel.srand config.seed
end
chore(simplecov): enable for rspec
require 'simplecov'
# Start coverage collection before any application code is required.
SimpleCov.start
require 'geometry'
require 'robot'
require 'dispatcher'
require 'board'
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
# Boilerplate generated by `rspec --init`, condensed. Only the recommended
# baseline settings are active; the optional extras suggested by the
# generator are kept commented out at the bottom.
RSpec.configure do |config|
  # Make custom-matcher descriptions include `chain`-defined clauses,
  # e.g. `be_bigger_than(2).and_smaller_than(4).description`
  # => "be bigger than 2 and smaller than 4" (the RSpec 4 default).
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end

  # Disallow mocking/stubbing methods that don't exist on the real object
  # (the RSpec 4 default).
  config.mock_with :rspec do |mocks|
    mocks.verify_partial_doubles = true
  end

  # Propagate shared-context metadata into host groups/examples instead of
  # auto-including on matching metadata (the RSpec 4 default).
  config.shared_context_metadata_behavior = :apply_to_host_groups

  # Optional settings suggested by the generator, left disabled:
  #   config.filter_run_when_matching :focus
  #   config.example_status_persistence_file_path = "spec/examples.txt"
  #   config.disable_monkey_patching!
  #   config.warnings = true
  #   config.default_formatter = 'doc' if config.files_to_run.one?
  #   config.profile_examples = 10
  #   config.order = :random
  #   Kernel.srand config.seed
end
|
require 'simplecov'
require 'simplecov-gem-adapter'
require 'simplecov-rcov'
# Emit rcov-formatted coverage (for CI tooling); only collect coverage when
# explicitly requested via the COVERAGE environment variable.
SimpleCov.formatter = SimpleCov::Formatter::RcovFormatter
SimpleCov.start 'gem' if ENV['COVERAGE']
require 'awesome_print'
require 'pathname'
require 'douglas/ne/checks'
RSpec.configure do |config|
  config.treat_symbols_as_metadata_keys_with_true_values = true
  config.run_all_when_everything_filtered = true
  config.filter_run :focus
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  #     --seed 1234
  config.order = 'random'
end
# root from spec/data
# Absolute path of a fixture file under spec/data.
def data_file(name)
  File.expand_path(File.join(File.dirname(__FILE__), 'data', name))
end
# Regular files under spec/data (directories excluded).
#
# Bug fixed: the original body read `dir = "...#{dir}"`, interpolating the
# still-unassigned local `dir` (which Ruby treats as nil, expanding to an
# empty string). The interpolation was dead code and the method could only
# ever list spec/data itself. It now takes an optional subdirectory, with
# the default preserving the legacy behaviour.
#
# @param subdir [String, nil] optional subdirectory below spec/data
# @return [Array<String>] paths of regular files in that directory
def data_files(subdir = nil)
  dir = File.join(File.dirname(__FILE__), 'data', *Array(subdir))
  Dir.glob("#{dir}/*").select { |e| File.file? e }
end
Add support/ to spec_helper for shared examples
require 'simplecov'
require 'simplecov-gem-adapter'
require 'simplecov-rcov'
# Emit rcov-formatted coverage (for CI tooling); only collect coverage when
# explicitly requested via the COVERAGE environment variable.
SimpleCov.formatter = SimpleCov::Formatter::RcovFormatter
SimpleCov.start 'gem' if ENV['COVERAGE']
require 'awesome_print'
require 'pathname'
require 'douglas/ne/checks'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f }
RSpec.configure do |config|
  config.treat_symbols_as_metadata_keys_with_true_values = true
  config.run_all_when_everything_filtered = true
  config.filter_run :focus
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  #     --seed 1234
  config.order = 'random'
end
# root from spec/data
# Absolute path of a fixture file under spec/data.
def data_file(name)
  File.expand_path(File.join(File.dirname(__FILE__), 'data', name))
end
# Regular files under spec/data (directories excluded).
#
# Bug fixed: the original body read `dir = "...#{dir}"`, interpolating the
# still-unassigned local `dir` (which Ruby treats as nil, expanding to an
# empty string). The interpolation was dead code and the method could only
# ever list spec/data itself. It now takes an optional subdirectory, with
# the default preserving the legacy behaviour.
#
# @param subdir [String, nil] optional subdirectory below spec/data
# @return [Array<String>] paths of regular files in that directory
def data_files(subdir = nil)
  dir = File.join(File.dirname(__FILE__), 'data', *Array(subdir))
  Dir.glob("#{dir}/*").select { |e| File.file? e }
end
|
require 'coveralls'
Coveralls.wear!
RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  config.mock_with :rspec do |mocks|
    mocks.verify_partial_doubles = true
  end
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  config.disable_monkey_patching!
  # config.warnings = true
  config.default_formatter = 'doc' if config.files_to_run.one?
  # config.profile_examples = 10
  config.order = :random
  Kernel.srand config.seed
  # Force every ENV access in the code under test to be explicitly stubbed;
  # an unstubbed read or write fails loudly with the offending key.
  config.before do
    allow(ENV).to receive(:[]) do |key|
      fail "stub me: ENV[#{key.inspect}]"
    end
    allow(ENV).to receive(:[]=) do |key, value|
      fail "stub me: ENV[#{key.inspect}] = #{value.inspect}"
    end
  end
  config.after do
    # Best-effort reset of Nenv's cached environment between examples;
    # NameError is ignored when the Nenv constant has not been loaded.
    begin
      Nenv.method(:reset).call
    rescue NameError
    end
  end
end
Unstub some ENV values so that Pry works
require 'coveralls'
Coveralls.wear!
RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  config.mock_with :rspec do |mocks|
    mocks.verify_partial_doubles = true
  end
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  config.disable_monkey_patching!
  # config.warnings = true
  config.default_formatter = 'doc' if config.files_to_run.one?
  # config.profile_examples = 10
  config.order = :random
  Kernel.srand config.seed
  # Force every ENV access in the code under test to be explicitly stubbed;
  # an unstubbed read or write fails loudly with the offending key.
  config.before do
    allow(ENV).to receive(:[]) do |key|
      fail "stub me: ENV[#{key.inspect}]"
    end
    allow(ENV).to receive(:[]=) do |key, value|
      fail "stub me: ENV[#{key.inspect}] = #{value.inspect}"
    end
    # Pry reads these variables for its own setup and terminal detection,
    # so let genuine ENV lookups through for them.
    allow(ENV).to receive(:[]).with('PRYRC').and_call_original
    allow(ENV).to receive(:[]).with('DISABLE_PRY').and_call_original
    allow(ENV).to receive(:[]).with('ANSICON').and_call_original
    allow(ENV).to receive(:[]).with('TERM').and_call_original
  end
  config.after do
    # Best-effort reset of Nenv's cached environment between examples;
    # NameError is ignored when the Nenv constant has not been loaded.
    begin
      Nenv.method(:reset).call
    rescue NameError
    end
  end
end
|
# Configure Rails Environment
ENV["RAILS_ENV"] = "test"
require 'simplecov'
# Coverage grouped by app layer; config/ is filtered out.
SimpleCov.start do
  add_filter '/config/'
  add_group 'Controllers', 'app/controllers'
  add_group 'Models', 'app/models'
  add_group 'Helpers', 'app/helpers'
  add_group 'Libraries', 'lib'
  add_group 'Specs', 'spec'
end
require File.expand_path('../dummy_app/config/environment', __FILE__)
require 'generator_spec/test_case'
require 'generators/rails_admin/install_migrations_generator'
require File.dirname(__FILE__) + '/../lib/rails_admin/tasks/install'
require File.dirname(__FILE__) + '/../lib/rails_admin/tasks/uninstall'
require 'generators/rails_admin/uninstall_migrations_generator'
require 'rspec/rails'
require 'factory_girl'
require 'factories'
require 'database_helpers'
require 'generator_helpers'
ActionMailer::Base.delivery_method = :test
ActionMailer::Base.perform_deliveries = true
ActionMailer::Base.default_url_options[:host] = "example.com"
Rails.backtrace_cleaner.remove_silencers!
# NOTE(review): top-level `include` mixes DatabaseHelpers into Object so the
# bare calls below resolve.
include DatabaseHelpers
# Run any available migration
puts 'Setting up database...'
drop_all_tables
migrate_database
# Load support files
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each{|f| require f}
# Don't need passwords in test DB to be secure, but we would like 'em to be
# fast -- and the stretches mechanism is intended to make passwords
# computationally expensive.
module Devise
  module Models
    module DatabaseAuthenticatable
      protected

      # Identity "digest": store/compare the plain password so test logins
      # skip the expensive hashing entirely. Test-only monkey-patch.
      def password_digest(password)
        password
      end
    end
  end
end
Devise.setup do |config|
  # Zero key-stretching iterations, for the same speed reason as above.
  config.stretches = 0
end
RSpec.configure do |config|
  require 'rspec/expectations'
  config.include RSpec::Matchers
  config.include DatabaseHelpers
  config.include GeneratorHelpers
  config.include RailsAdmin::Engine.routes.url_helpers
  config.include Warden::Test::Helpers
  config.before(:each) do
    RailsAdmin.setup
    RailsAdmin::Config.excluded_models = [RelTest, FieldTest]
    # Clear the memoised model lists so discovery re-runs per example.
    RailsAdmin::AbstractModel.all_models = nil
    RailsAdmin::AbstractModel.all_abstract_models = nil
    # Purge all persisted records so every example starts clean.
    RailsAdmin::AbstractModel.new("Division").destroy_all!
    RailsAdmin::AbstractModel.new("Draft").destroy_all!
    RailsAdmin::AbstractModel.new("Fan").destroy_all!
    RailsAdmin::AbstractModel.new("League").destroy_all!
    RailsAdmin::AbstractModel.new("Player").destroy_all!
    RailsAdmin::AbstractModel.new("Team").destroy_all!
    RailsAdmin::AbstractModel.new("User").destroy_all!
    RailsAdmin::History.destroy_all
    # Every example runs logged in as a freshly created user.
    user = RailsAdmin::AbstractModel.new("User").create(
      :email => "username@example.com",
      :password => "password"
    )
    login_as user
  end
  config.after(:each) do
    RailsAdmin.test_reset!
    Warden.test_reset!
  end
end
Use SimpleCov's built-in Rails adapter
# Configure Rails Environment
ENV["RAILS_ENV"] = "test"
require 'simplecov'
# Use SimpleCov's built-in Rails profile for filtering and grouping.
SimpleCov.start 'rails'
require File.expand_path('../dummy_app/config/environment', __FILE__)
require 'generator_spec/test_case'
require 'generators/rails_admin/install_migrations_generator'
require File.dirname(__FILE__) + '/../lib/rails_admin/tasks/install'
require File.dirname(__FILE__) + '/../lib/rails_admin/tasks/uninstall'
require 'generators/rails_admin/uninstall_migrations_generator'
require 'rspec/rails'
require 'factory_girl'
require 'factories'
require 'database_helpers'
require 'generator_helpers'
ActionMailer::Base.delivery_method = :test
ActionMailer::Base.perform_deliveries = true
ActionMailer::Base.default_url_options[:host] = "example.com"
Rails.backtrace_cleaner.remove_silencers!
# NOTE(review): top-level `include` mixes DatabaseHelpers into Object so the
# bare calls below resolve.
include DatabaseHelpers
# Run any available migration
puts 'Setting up database...'
drop_all_tables
migrate_database
# Load support files
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each{|f| require f}
# Don't need passwords in test DB to be secure, but we would like 'em to be
# fast -- and the stretches mechanism is intended to make passwords
# computationally expensive.
module Devise
  module Models
    module DatabaseAuthenticatable
      protected

      # Identity "digest": store/compare the plain password so test logins
      # skip the expensive hashing entirely. Test-only monkey-patch.
      def password_digest(password)
        password
      end
    end
  end
end
Devise.setup do |config|
  # Zero key-stretching iterations, for the same speed reason as above.
  config.stretches = 0
end
RSpec.configure do |config|
  require 'rspec/expectations'
  config.include RSpec::Matchers
  config.include DatabaseHelpers
  config.include GeneratorHelpers
  config.include RailsAdmin::Engine.routes.url_helpers
  config.include Warden::Test::Helpers
  config.before(:each) do
    RailsAdmin.setup
    RailsAdmin::Config.excluded_models = [RelTest, FieldTest]
    # Clear the memoised model lists so discovery re-runs per example.
    RailsAdmin::AbstractModel.all_models = nil
    RailsAdmin::AbstractModel.all_abstract_models = nil
    # Purge all persisted records so every example starts clean.
    RailsAdmin::AbstractModel.new("Division").destroy_all!
    RailsAdmin::AbstractModel.new("Draft").destroy_all!
    RailsAdmin::AbstractModel.new("Fan").destroy_all!
    RailsAdmin::AbstractModel.new("League").destroy_all!
    RailsAdmin::AbstractModel.new("Player").destroy_all!
    RailsAdmin::AbstractModel.new("Team").destroy_all!
    RailsAdmin::AbstractModel.new("User").destroy_all!
    RailsAdmin::History.destroy_all
    # Every example runs logged in as a freshly created user.
    user = RailsAdmin::AbstractModel.new("User").create(
      :email => "username@example.com",
      :password => "password"
    )
    login_as user
  end
  config.after(:each) do
    RailsAdmin.test_reset!
    Warden.test_reset!
  end
end
|
# -*- encoding : utf-8 -*-
require 'rubygems'
require 'spork'
# uncomment the following line to use spork with the debugger
# require 'spork/ext/ruby-debug'
require 'simplecov'
require 'coveralls'
# Generate coverage locally in html as well as in coveralls.io
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
  SimpleCov::Formatter::HTMLFormatter,
  Coveralls::SimpleCov::Formatter
]
# Exclude vendored/bundled code paths from coverage.
SimpleCov.start('rails') do
  add_filter 'commonlib'
  add_filter 'vendor/plugins'
  add_filter 'lib/attachment_to_html'
  add_filter 'lib/has_tag_string'
  add_filter 'lib/acts_as_xapian'
  add_filter 'lib/themes'
  add_filter '.bundle'
end
Spork.prefork do
# Loading more in this block will cause your tests to run faster. However,
# if you change any configuration or code from libraries loaded here, you'll
# need to restart spork for it to take effect.
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each {|f| require f}
# Use test-specific translations
AlaveteliLocalization.set_default_text_domain('app', File.join(File.dirname(__FILE__), 'fixtures', 'locale'))
RSpec.configure do |config|
  # ## Mock Framework
  #
  # If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
  #
  # config.mock_with :mocha
  # config.mock_with :flexmock
  # config.mock_with :rr
  config.infer_spec_type_from_file_location!
  config.include Capybara::DSL, :type => :request
  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  config.fixture_path = "#{::Rails.root}/spec/fixtures"
  # The order (!) of this is important thanks to foreign keys
  config.global_fixtures = :users,
                           :public_bodies,
                           :public_body_translations,
                           :public_body_versions,
                           :info_requests,
                           :raw_emails,
                           :incoming_messages,
                           :outgoing_messages,
                           :comments,
                           :info_request_events,
                           :track_things,
                           :has_tag_string_tags,
                           :holidays
  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = true
  # If true, the base class of anonymous controllers will be inferred
  # automatically. This will be the default behavior in future versions of
  # rspec-rails.
  config.infer_base_class_for_anonymous_controllers = false
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  #     --seed 1234
  config.order = "random"
  # This is a workaround for a strange thing where ActionMailer::Base.deliveries isn't being
  # cleared out correctly in controller specs. So, do it here for everything.
  config.before(:each) do
    ActionMailer::Base.deliveries = []
  end
  # Any test that messes with the locale needs to restore the state afterwards so that it
  # doesn't interfere with any subsequent tests. This is made more complicated by the
  # ApplicationController#set_gettext_locale which sets the locale and so you may be setting
  # the locale in your tests and not even realising it. So, let's make things easier for
  # ourselves and just always restore the locale for all tests.
  config.after(:each) do
    AlaveteliLocalization.set_locales(AlaveteliConfiguration::available_locales,
                                      AlaveteliConfiguration::default_locale)
  end
  # Turn routing-filter off in functional and unit tests as per
  # https://github.com/svenfuchs/routing-filter/blob/master/README.markdown#testing
  config.before(:each) do |example|
    RoutingFilter.active = false if [:controller, :helper, :model].include? example.metadata[:type]
  end
  config.after(:each) do |example|
    RoutingFilter.active = true if [:controller, :helper, :model].include? example.metadata[:type]
  end
  # This section makes the garbage collector run less often to speed up tests
  last_gc_run = Time.now
  config.before(:each) do
    GC.disable
  end
  config.after(:each) do
    # Re-enable and run the GC only when more than 4 seconds have passed
    # since the last collection (the hooks close over `last_gc_run`).
    if Time.now - last_gc_run > 4
      GC.enable
      GC.start
      last_gc_run = Time.now
    end
  end
end
# Use the before create job hook to simulate a race condition with
# another process by creating an acts_as_xapian_job record for the
# same model:
def with_duplicate_xapian_job_creation
  # Replace the hook so that creating the real indexing job ALSO creates a
  # duplicate ActsAsXapianJob row for the same model, simulating a second
  # process racing to index the same record while the block runs.
  InfoRequestEvent.module_eval do
    def xapian_before_create_job_hook(action, model, model_id)
      ActsAsXapian::ActsAsXapianJob.create!(:model => model,
                                            :model_id => model_id,
                                            :action => action)
    end
  end
  yield
ensure
  # Restore a no-op hook so later tests are unaffected, even if the block raised.
  InfoRequestEvent.module_eval do
    def xapian_before_create_job_hook(action, model, model_id)
    end
  end
end
# Runs the block with ENV['TZ'] set to new_tz, restoring the previous value
# (or deleting the variable if it was unset) even when the block raises.
def with_env_tz(new_tz = 'US/Eastern')
  previous_tz = ENV['TZ']
  ENV['TZ'] = new_tz
  yield
ensure
  if previous_tz
    ENV['TZ'] = previous_tz
  else
    ENV.delete('TZ')
  end
end
# Runs the block with ActiveRecord::Base.default_timezone set to zone,
# restoring the original setting afterwards even when the block raises.
def with_active_record_default_timezone(zone)
  previous_zone = ActiveRecord::Base.default_timezone
  ActiveRecord::Base.default_timezone = zone
  yield
ensure
  ActiveRecord::Base.default_timezone = previous_zone
end
# To test the statistics calculations, it's helpful to have the
# request fixtures in different states, but changing the fixtures
# themselves disrupts many other tests. This function takes a
# block, and runs that block with the info requests for the
# Geraldine Quango altered so that one is hidden and there's a
# successful one.
def with_hidden_and_successful_requests
  # Capture the current fixture state so it can be restored afterwards.
  external = info_requests(:external_request)
  chicken = info_requests(:naughty_chicken_request)
  old_external_prominence = external.prominence
  old_chicken_described_state = chicken.described_state
  begin
    # Hide one Geraldine Quango request and mark another successful so the
    # statistics calculations can be exercised without editing the fixtures
    # themselves (which would disrupt many other tests).
    external.prominence = 'hidden'
    external.save!
    chicken.described_state = 'successful'
    chicken.save!
    yield
  ensure
    # Put the fixtures back the way they were, even if the block raised.
    external.prominence = old_external_prominence
    external.save!
    chicken.described_state = old_chicken_described_state
    chicken.save!
  end
end
# Reset the default locale, making sure that the previous default locale
# is also cleared from the fallbacks
# Runs the block with I18n.default_locale switched to locale. Clears the
# fallbacks first so the previous default locale does not linger in them,
# then restores both fallbacks and default locale afterwards.
def with_default_locale(locale)
  saved_default_locale = I18n.default_locale
  saved_fallbacks = I18n.fallbacks
  I18n.fallbacks = nil
  I18n.default_locale = locale
  yield
ensure
  I18n.fallbacks = saved_fallbacks
  I18n.default_locale = saved_default_locale
end
# Sets the HTTP Basic Authorization header on request. Credentials default
# to the configured admin username/password when not supplied.
def basic_auth_login(request, username = nil, password = nil)
  username = AlaveteliConfiguration::admin_username if username.nil?
  password = AlaveteliConfiguration::admin_password if password.nil?
  credentials = Base64::encode64("#{username}:#{password}")
  request.env["HTTP_AUTHORIZATION"] = "Basic #{credentials}"
end
end
Spork.each_run do
FactoryGirl.definition_file_paths = [ Rails.root.join('spec', 'factories') ]
FactoryGirl.reload
# This code will be run each time you run your specs.
end
# Strips leading/trailing whitespace and collapses every internal run of
# whitespace to a single space, so strings can be compared loosely.
def normalise_whitespace(s)
  s.gsub(/\A\s+|\s+\Z/, "").gsub(/\s+/, " ")
end
# Returns the most recently created PostRedirect so specs can inspect its
# token (and therefore the redirect URL).
def get_last_post_redirect
  # TODO: yeuch - no other easy way of getting the token so we can check
  # the redirect URL, as it is by definition opaque to the controller
  # apart from in the place that it redirects to.
  # The useless `post_redirects =` local assignment has been removed; the
  # query result is the method's implicit return value.
  PostRedirect.order("id DESC").first
end
# Custom matcher: two strings match when they are equal after both have been
# run through normalise_whitespace (leading/trailing whitespace stripped,
# internal whitespace runs collapsed to single spaces).
RSpec::Matchers.define :be_equal_modulo_whitespace_to do |expected|
  match do |actual|
    normalise_whitespace(actual) == normalise_whitespace(expected)
  end
end
Prevent test-unit from interfering with rspec
Fix from https://github.com/rspec/rspec-rails/issues/1171
# -*- encoding : utf-8 -*-
require 'rubygems'
require 'spork'
#uncomment the following line to use spork with the debugger
#require 'spork/ext/ruby-debug'
require 'simplecov'
require 'coveralls'
# Generate coverage locally in html as well as in coveralls.io
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start('rails') do
add_filter 'commonlib'
add_filter 'vendor/plugins'
add_filter 'lib/attachment_to_html'
add_filter 'lib/has_tag_string'
add_filter 'lib/acts_as_xapian'
add_filter 'lib/themes'
add_filter '.bundle'
end
Spork.prefork do
# Loading more in this block will cause your tests to run faster. However,
# if you change any configuration or code from libraries loaded here, you'll
# need to restart spork for it take effect.
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
# prevent Test::Unit's AutoRunner from executing during RSpec's rake task on JRuby
Test::Unit.run = true if defined?(Test::Unit) && Test::Unit.respond_to?(:run=)
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each {|f| require f}
# Use test-specific translations
AlaveteliLocalization.set_default_text_domain('app', File.join(File.dirname(__FILE__), 'fixtures', 'locale'))
RSpec.configure do |config|
# ## Mock Framework
#
# If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
#
# config.mock_with :mocha
# config.mock_with :flexmock
# config.mock_with :rr
config.infer_spec_type_from_file_location!
config.include Capybara::DSL, :type => :request
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# The order (!) of this is important thanks to foreign keys
config.global_fixtures = :users,
:public_bodies,
:public_body_translations,
:public_body_versions,
:info_requests,
:raw_emails,
:incoming_messages,
:outgoing_messages,
:comments,
:info_request_events,
:track_things,
:has_tag_string_tags,
:holidays
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# If true, the base class of anonymous controllers will be inferred
# automatically. This will be the default behavior in future versions of
# rspec-rails.
config.infer_base_class_for_anonymous_controllers = false
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = "random"
# This is a workaround for a strange thing where ActionMailer::Base.deliveries isn't being
# cleared out correctly in controller specs. So, do it here for everything.
config.before(:each) do
ActionMailer::Base.deliveries = []
end
# Any test that messes with the locale needs to restore the state afterwards so that it
# doesn't interfere with any subsequent tests. This is made more complicated by the
# ApplicationController#set_gettext_locale which sets the locale and so you may be setting
# the locale in your tests and not even realising it. So, let's make things easier for
# ourselves and just always restore the locale for all tests.
config.after(:each) do
AlaveteliLocalization.set_locales(AlaveteliConfiguration::available_locales,
AlaveteliConfiguration::default_locale)
end
# Turn routing-filter off in functional and unit tests as per
# https://github.com/svenfuchs/routing-filter/blob/master/README.markdown#testing
config.before(:each) do |example|
RoutingFilter.active = false if [:controller, :helper, :model].include? example.metadata[:type]
end
config.after(:each) do |example|
RoutingFilter.active = true if [:controller, :helper, :model].include? example.metadata[:type]
end
# This section makes the garbage collector run less often to speed up tests
last_gc_run = Time.now
config.before(:each) do
GC.disable
end
config.after(:each) do
if Time.now - last_gc_run > 4
GC.enable
GC.start
last_gc_run = Time.now
end
end
end
# Use the before create job hook to simulate a race condition with
# another process by creating an acts_as_xapian_job record for the
# same model:
def with_duplicate_xapian_job_creation
  # Replace the hook so that creating the real indexing job ALSO creates a
  # duplicate ActsAsXapianJob row for the same model, simulating a second
  # process racing to index the same record while the block runs.
  InfoRequestEvent.module_eval do
    def xapian_before_create_job_hook(action, model, model_id)
      ActsAsXapian::ActsAsXapianJob.create!(:model => model,
                                            :model_id => model_id,
                                            :action => action)
    end
  end
  yield
ensure
  # Restore a no-op hook so later tests are unaffected, even if the block raised.
  InfoRequestEvent.module_eval do
    def xapian_before_create_job_hook(action, model, model_id)
    end
  end
end
# Runs the block with ENV['TZ'] set to new_tz, restoring the previous value
# (or deleting the variable if it was unset) even when the block raises.
def with_env_tz(new_tz = 'US/Eastern')
  previous_tz = ENV['TZ']
  ENV['TZ'] = new_tz
  yield
ensure
  if previous_tz
    ENV['TZ'] = previous_tz
  else
    ENV.delete('TZ')
  end
end
# Runs the block with ActiveRecord::Base.default_timezone set to zone,
# restoring the original setting afterwards even when the block raises.
def with_active_record_default_timezone(zone)
  previous_zone = ActiveRecord::Base.default_timezone
  ActiveRecord::Base.default_timezone = zone
  yield
ensure
  ActiveRecord::Base.default_timezone = previous_zone
end
# To test the statistics calculations, it's helpful to have the
# request fixtures in different states, but changing the fixtures
# themselves disrupts many other tests. This function takes a
# block, and runs that block with the info requests for the
# Geraldine Quango altered so that one is hidden and there's a
# successful one.
def with_hidden_and_successful_requests
  # Capture the current fixture state so it can be restored afterwards.
  external = info_requests(:external_request)
  chicken = info_requests(:naughty_chicken_request)
  old_external_prominence = external.prominence
  old_chicken_described_state = chicken.described_state
  begin
    # Hide one Geraldine Quango request and mark another successful so the
    # statistics calculations can be exercised without editing the fixtures
    # themselves (which would disrupt many other tests).
    external.prominence = 'hidden'
    external.save!
    chicken.described_state = 'successful'
    chicken.save!
    yield
  ensure
    # Put the fixtures back the way they were, even if the block raised.
    external.prominence = old_external_prominence
    external.save!
    chicken.described_state = old_chicken_described_state
    chicken.save!
  end
end
# Reset the default locale, making sure that the previous default locale
# is also cleared from the fallbacks
# Runs the block with I18n.default_locale switched to locale. Clears the
# fallbacks first so the previous default locale does not linger in them,
# then restores both fallbacks and default locale afterwards.
def with_default_locale(locale)
  saved_default_locale = I18n.default_locale
  saved_fallbacks = I18n.fallbacks
  I18n.fallbacks = nil
  I18n.default_locale = locale
  yield
ensure
  I18n.fallbacks = saved_fallbacks
  I18n.default_locale = saved_default_locale
end
# Sets the HTTP Basic Authorization header on request. Credentials default
# to the configured admin username/password when not supplied.
def basic_auth_login(request, username = nil, password = nil)
  username = AlaveteliConfiguration::admin_username if username.nil?
  password = AlaveteliConfiguration::admin_password if password.nil?
  credentials = Base64::encode64("#{username}:#{password}")
  request.env["HTTP_AUTHORIZATION"] = "Basic #{credentials}"
end
end
Spork.each_run do
FactoryGirl.definition_file_paths = [ Rails.root.join('spec', 'factories') ]
FactoryGirl.reload
# This code will be run each time you run your specs.
end
# Strips leading/trailing whitespace and collapses every internal run of
# whitespace to a single space, so strings can be compared loosely.
def normalise_whitespace(s)
  s.gsub(/\A\s+|\s+\Z/, "").gsub(/\s+/, " ")
end
# Returns the most recently created PostRedirect so specs can inspect its
# token (and therefore the redirect URL).
def get_last_post_redirect
  # TODO: yeuch - no other easy way of getting the token so we can check
  # the redirect URL, as it is by definition opaque to the controller
  # apart from in the place that it redirects to.
  # The useless `post_redirects =` local assignment has been removed; the
  # query result is the method's implicit return value.
  PostRedirect.order("id DESC").first
end
# Custom matcher: two strings match when they are equal after both have been
# run through normalise_whitespace (leading/trailing whitespace stripped,
# internal whitespace runs collapsed to single spaces).
RSpec::Matchers.define :be_equal_modulo_whitespace_to do |expected|
  match do |actual|
    normalise_whitespace(actual) == normalise_whitespace(expected)
  end
end
|
require_relative '../lib/octonore'
require 'webmock/rspec'
require 'vcr'
require 'coveralls'
Coveralls.wear!
VCR.configure do |c|
c.cassette_library_dir = 'spec/fixtures/octonore_cassettes'
c.hook_into :webmock
end
Move Coveralls.wear! to the very top of spec_helper.rb.
require 'coveralls'
Coveralls.wear!
require_relative '../lib/octonore'
require 'webmock/rspec'
require 'vcr'
VCR.configure do |c|
c.cassette_library_dir = 'spec/fixtures/octonore_cassettes'
c.hook_into :webmock
end
|
# Coverage
if ENV['CI']
require 'coveralls'
Coveralls.wear!
else
require 'simplecov'
SimpleCov.start do
add_filter "/spec"
end
end
# Load testing libraries
require 'minitest'
require 'minitest/autorun'
require 'minitest/spec'
require 'minitest/pride'
require 'minitest-spec-context'
require 'mocha/setup'
# Load utility libraries
require 'pry'
require 'yaml'
# Load myself
require 'resque_ring'
# Mock out all Redis calls
require 'mock_redis'
MOCK_REDIS = MockRedis.new
Redis.stubs(:new).returns(MOCK_REDIS)
add /spec filter for Coveralls too
# Coverage
if ENV['CI']
require 'coveralls'
Coveralls.wear! do
add_filter "/spec"
end
else
require 'simplecov'
SimpleCov.start do
add_filter "/spec"
end
end
# Load testing libraries
require 'minitest'
require 'minitest/autorun'
require 'minitest/spec'
require 'minitest/pride'
require 'minitest-spec-context'
require 'mocha/setup'
# Load utility libraries
require 'pry'
require 'yaml'
# Load myself
require 'resque_ring'
# Mock out all Redis calls
require 'mock_redis'
MOCK_REDIS = MockRedis.new
Redis.stubs(:new).returns(MOCK_REDIS)
|
require 'simplecov'
require 'mountain_berry_fields'
require 'mountain_berry_fields/command_line_interaction'
require 'stringio'
require 'surrogate/rspec'
require 'mountain_berry_fields/interface'
RSpec::Matchers.define :pass do
match { |matcher| matcher.pass? }
end
MountainBerryFields.override(:file_class) { MountainBerryFields::Interface::File.clone }
MountainBerryFields.override(:dir_class) { MountainBerryFields::Interface::Dir.clone }
MountainBerryFields.override(:interaction) { MountainBerryFields::Interface::Interaction.new }
MountainBerryFields.override(:evaluator_class) { MountainBerryFields::Interface::Evaluator.clone }
MountainBerryFields.override(:parser_class) { MountainBerryFields::Interface::Parser.clone }
MountainBerryFields::CommandLineInteraction.override(:stderr) { StringIO.new }
MountainBerryFields::Test::RSpec.override(:syntax_checker_class) { MountainBerryFields::Interface::SyntaxChecker }
MountainBerryFields::Test::MagicComments.override(:syntax_checker_class) { MountainBerryFields::Interface::SyntaxChecker }
Easier to read namespacing in dependency injection
require 'simplecov'
require 'mountain_berry_fields'
require 'mountain_berry_fields/command_line_interaction'
require 'stringio'
require 'surrogate/rspec'
require 'mountain_berry_fields/interface'
RSpec::Matchers.define :pass do
match { |matcher| matcher.pass? }
end
# dependency injection
class MountainBerryFields
override(:file_class) { Interface::File.clone }
override(:dir_class) { Interface::Dir.clone }
override(:interaction) { Interface::Interaction.new }
override(:evaluator_class) { Interface::Evaluator.clone }
override(:parser_class) { Interface::Parser.clone }
CommandLineInteraction.override(:stderr) { StringIO.new }
Test::RSpec.override(:syntax_checker_class) { Interface::SyntaxChecker }
Test::MagicComments.override(:syntax_checker_class) { Interface::SyntaxChecker }
end
|
ENV['RACK_ENV'] = 'test'
require 'bundler/setup'
Bundler.require :default
require 'timecop'
require 'webmock/rspec'
SimpleCov.start
require 'opbeat'
# Reopen Opbeat's Configuration to swap in test-friendly defaults: listing
# the "test" environment lets HTTP requests actually fire so WebMock can
# intercept them.
module Opbeat
  class Configuration
    # Override defaults to enable http (caught by WebMock) in test env.
    # NOTE(review): %{test} is a String literal ("test"), not %w{test}
    # (an Array) — confirm enabled_environments really expects a string.
    defaults = DEFAULTS.dup.merge enabled_environments: %{test}
    remove_const(:DEFAULTS)
    const_set(:DEFAULTS, defaults.freeze)
  end
end
RSpec.configure do |config|
config.before :each do
@request_stub = stub_request(:post, /intake\.opbeat\.com/)
end
config.around :each, mock_time: true do |example|
@date = Time.local(1992, 1, 1)
def travel distance
Timecop.freeze(@date += distance / 1_000.0)
end
travel 0
example.run
Timecop.return
end
def build_config attrs = {}
Opbeat::Configuration.new({
app_id: 'x',
organization_id: 'y',
secret_token: 'z'
}.merge(attrs))
end
config.around :each, start: true do |example|
Opbeat.start! build_config
example.call
Opbeat.stop!
end
config.around :each, start_without_worker: true do |example|
Opbeat.start! build_config(disable_worker: true)
example.call
Opbeat.stop!
end
end
# Custom matcher: asserts that `delegator` forwards `method` to the object
# given in opts[:to], optionally with the exact arguments in opts[:args]
# (no arguments expected when opts[:args] is absent).
RSpec::Matchers.define :delegate do |method, opts|
  to = opts[:to]
  args = opts[:args]
  match do |delegator|
    # Guard against stubbing a method the target does not actually define.
    unless to.respond_to?(method)
      raise NoMethodError.new("no method :#{method} on #{to}")
    end
    if args
      allow(to).to receive(method).with(*args) { true }
    else
      allow(to).to receive(method).with(no_args) { true }
    end
    # The stubbed expectation fails the example if the call is not forwarded.
    delegator.send method, *args
  end
  description do
    "delegate :#{method} to #{to}"
  end
end
Always release the current transaction after examples
ENV['RACK_ENV'] = 'test'
require 'bundler/setup'
Bundler.require :default
require 'timecop'
require 'webmock/rspec'
SimpleCov.start
require 'opbeat'
# Reopen Opbeat's Configuration to swap in test-friendly defaults: listing
# the "test" environment lets HTTP requests actually fire so WebMock can
# intercept them.
module Opbeat
  class Configuration
    # Override defaults to enable http (caught by WebMock) in test env.
    # NOTE(review): %{test} is a String literal ("test"), not %w{test}
    # (an Array) — confirm enabled_environments really expects a string.
    defaults = DEFAULTS.dup.merge enabled_environments: %{test}
    remove_const(:DEFAULTS)
    const_set(:DEFAULTS, defaults.freeze)
  end
end
RSpec.configure do |config|
config.before :each do
@request_stub = stub_request(:post, /intake\.opbeat\.com/)
end
config.around :each, mock_time: true do |example|
@date = Time.local(1992, 1, 1)
def travel distance
Timecop.freeze(@date += distance / 1_000.0)
end
travel 0
example.run
Timecop.return
end
def build_config attrs = {}
Opbeat::Configuration.new({
app_id: 'x',
organization_id: 'y',
secret_token: 'z'
}.merge(attrs))
end
config.around :each, start: true do |example|
Opbeat.start! build_config
example.call
Opbeat::Client.inst.current_transaction = nil
Opbeat.stop!
end
config.around :each, start_without_worker: true do |example|
Opbeat.start! build_config(disable_worker: true)
example.call
Opbeat::Client.inst.current_transaction = nil
Opbeat.stop!
end
end
# Custom matcher: asserts that `delegator` forwards `method` to the object
# given in opts[:to], optionally with the exact arguments in opts[:args]
# (no arguments expected when opts[:args] is absent).
RSpec::Matchers.define :delegate do |method, opts|
  to = opts[:to]
  args = opts[:args]
  match do |delegator|
    # Guard against stubbing a method the target does not actually define.
    unless to.respond_to?(method)
      raise NoMethodError.new("no method :#{method} on #{to}")
    end
    if args
      allow(to).to receive(method).with(*args) { true }
    else
      allow(to).to receive(method).with(no_args) { true }
    end
    # The stubbed expectation fails the example if the call is not forwarded.
    delegator.send method, *args
  end
  description do
    "delegate :#{method} to #{to}"
  end
end
|
if ENV["CI"]
require "simplecov"
require "coveralls"
SimpleCov.formatter = Coveralls::SimpleCov::Formatter
SimpleCov.start do
%w[spec].each do |ignore_path|
add_filter(ignore_path)
end
end
end
Bundler.require(:default, :test)
ENV["RACK_ENV"] = "test"
require_relative "../app"
require_relative "../lib/birthday_bot"
require_relative "../lib/on_air_bot"
require_relative "../lib/program_manager"
require_relative "../lib/today_on_air_bot"
require "webmock/rspec"
Time.zone = "Tokyo"
Dir["#{__dir__}/support/**/*.rb"].each {|f| require f }
def spec_dir
Pathname(__dir__)
end
Global.configure do |config|
config.environment = ENV["RACK_ENV"]
config.config_directory = "#{__dir__}/../config/global"
end
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# # This allows you to limit a spec run to individual examples or groups
# # you care about by tagging them with `:focus` metadata. When nothing
# # is tagged with `:focus`, all examples get run. RSpec also provides
# # aliases for `it`, `describe`, and `context` that include `:focus`
# # metadata: `fit`, `fdescribe` and `fcontext`, respectively.
# config.filter_run_when_matching :focus
#
# # Allows RSpec to persist some state between runs in order to support
# # the `--only-failures` and `--next-failure` CLI options. We recommend
# # you configure your source control system to ignore this file.
# config.example_status_persistence_file_path = "spec/examples.txt"
#
# # Limits the available syntax to the non-monkey patched syntax that is
# # recommended. For more details, see:
# # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
# config.disable_monkey_patching!
#
# # This setting enables warnings. It's recommended, but in some cases may
# # be too noisy due to issues in dependencies.
# config.warnings = true
#
# # Many RSpec users commonly either run the entire suite or an individual
# # file, and it's useful to allow more verbose output when running an
# # individual spec file.
# if config.files_to_run.one?
# # Use the documentation formatter for detailed output,
# # unless a formatter has already been configured
# # (e.g. via a command-line flag).
# config.default_formatter = 'doc'
# end
#
# # Print the 10 slowest examples and example groups at the
# # end of the spec run, to help surface which specs are running
# # particularly slow.
# config.profile_examples = 10
#
# # Run specs in random order to surface order dependencies. If you find an
# # order dependency and want to debug it, you can fix the order by providing
# # the seed, which is printed after each run.
# # --seed 1234
# config.order = :random
#
# # Seed global randomization in this process using the `--seed` CLI option.
# # Setting this allows you to use `--seed` to deterministically reproduce
# # test failures related to randomization by passing the same `--seed` value
# # as the one that triggered the failure.
# Kernel.srand config.seed
config.include StubUtil
config.include Rack::Test::Methods
config.after do
Timecop.return
end
include CacheUtils
config.before do
cache_client.flush_all
end
end
:cop: rubocop --auto-correct --only Lint/NonDeterministicRequireOrder
if ENV["CI"]
require "simplecov"
require "coveralls"
SimpleCov.formatter = Coveralls::SimpleCov::Formatter
SimpleCov.start do
%w[spec].each do |ignore_path|
add_filter(ignore_path)
end
end
end
Bundler.require(:default, :test)
ENV["RACK_ENV"] = "test"
require_relative "../app"
require_relative "../lib/birthday_bot"
require_relative "../lib/on_air_bot"
require_relative "../lib/program_manager"
require_relative "../lib/today_on_air_bot"
require "webmock/rspec"
Time.zone = "Tokyo"
Dir["#{__dir__}/support/**/*.rb"].sort.each {|f| require f }
def spec_dir
Pathname(__dir__)
end
Global.configure do |config|
config.environment = ENV["RACK_ENV"]
config.config_directory = "#{__dir__}/../config/global"
end
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# # This allows you to limit a spec run to individual examples or groups
# # you care about by tagging them with `:focus` metadata. When nothing
# # is tagged with `:focus`, all examples get run. RSpec also provides
# # aliases for `it`, `describe`, and `context` that include `:focus`
# # metadata: `fit`, `fdescribe` and `fcontext`, respectively.
# config.filter_run_when_matching :focus
#
# # Allows RSpec to persist some state between runs in order to support
# # the `--only-failures` and `--next-failure` CLI options. We recommend
# # you configure your source control system to ignore this file.
# config.example_status_persistence_file_path = "spec/examples.txt"
#
# # Limits the available syntax to the non-monkey patched syntax that is
# # recommended. For more details, see:
# # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
# config.disable_monkey_patching!
#
# # This setting enables warnings. It's recommended, but in some cases may
# # be too noisy due to issues in dependencies.
# config.warnings = true
#
# # Many RSpec users commonly either run the entire suite or an individual
# # file, and it's useful to allow more verbose output when running an
# # individual spec file.
# if config.files_to_run.one?
# # Use the documentation formatter for detailed output,
# # unless a formatter has already been configured
# # (e.g. via a command-line flag).
# config.default_formatter = 'doc'
# end
#
# # Print the 10 slowest examples and example groups at the
# # end of the spec run, to help surface which specs are running
# # particularly slow.
# config.profile_examples = 10
#
# # Run specs in random order to surface order dependencies. If you find an
# # order dependency and want to debug it, you can fix the order by providing
# # the seed, which is printed after each run.
# # --seed 1234
# config.order = :random
#
# # Seed global randomization in this process using the `--seed` CLI option.
# # Setting this allows you to use `--seed` to deterministically reproduce
# # test failures related to randomization by passing the same `--seed` value
# # as the one that triggered the failure.
# Kernel.srand config.seed
config.include StubUtil
config.include Rack::Test::Methods
config.after do
Timecop.return
end
include CacheUtils
config.before do
cache_client.flush_all
end
end
|
Add missing spec helper file
require 'protocolist'
require 'protocolist/model_additions'
require 'supermodel'
Bundler.require(:default)
RSpec.configure do |config|
end
|
require 'bundler'
require 'active_support/core_ext/object/blank'
require 'securerandom'
Bundler.require(:default, :development)
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[File.expand_path("../support/**/*.rb", __FILE__)].each {|f| require f }
if ENV['TOKEN']
Paperlex.token = ENV['TOKEN']
elsif ENV['CONTRACTABLE_TOKEN']
# we'll set Paperlex.token where needed in the specs
else
FakeWeb.allow_net_connect = false
end
if ENV['PAPERLEX_URL']
Paperlex.base_url = ENV['PAPERLEX_URL']
end
RSpec.configure do |config|
# == Mock Framework
#
# If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
#
# config.mock_with :mocha
# config.mock_with :flexmock
# config.mock_with :rr
config.mock_with :rspec
end
Correct env variable reference in the specs
require 'bundler'
require 'active_support/core_ext/object/blank'
require 'securerandom'
Bundler.require(:default, :development)
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[File.expand_path("../support/**/*.rb", __FILE__)].each {|f| require f }
if ENV['TOKEN']
Paperlex.token = ENV['TOKEN']
elsif ENV['REMOTE_SIGNATURE_TOKEN']
# we'll set Paperlex.token where needed in the specs
else
FakeWeb.allow_net_connect = false
end
if ENV['PAPERLEX_URL']
Paperlex.base_url = ENV['PAPERLEX_URL']
end
RSpec.configure do |config|
# == Mock Framework
#
# If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
#
# config.mock_with :mocha
# config.mock_with :flexmock
# config.mock_with :rr
config.mock_with :rspec
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.