CombinedText stringlengths 4 3.42M |
|---|
Include the gemspec for the example Rails app
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for rails-latex 1.0.1.
# NOTE(review): jeweler-generated (see header above) — regenerate via
# `rake gemspec` rather than editing this block by hand.
Gem::Specification.new do |s|
s.name = %q{rails-latex}
s.version = "1.0.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Geoff Jacobsen"]
s.date = %q{2011-01-19}
s.description = %q{rails-latex is a renderer for rails 3 which allows tex files with erb to be turned into an inline pdf.}
s.email = %q{geoffjacobsen@gmail.com}
s.extra_rdoc_files = [
"MIT-LICENSE",
"README.rdoc"
]
# Files packaged into the gem.
s.files = [
"MIT-LICENSE",
"lib/erb_latex.rb",
"lib/latex_to_pdf.rb",
"lib/rails-latex.rb",
"test/helper.rb",
"test/test_doc.tex",
"test/test_latex_to_pdf.rb"
]
s.rdoc_options = ["--main=README.rdoc"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.7}
s.summary = %q{A LaTeX to pdf rails 3 renderer.}
# Includes the example rails app shipped under examples/ in the test set.
s.test_files = [
"examples/rails-latex-demo/app/controllers/application_controller.rb",
"examples/rails-latex-demo/app/controllers/latex_example_controller.rb",
"examples/rails-latex-demo/config/application.rb",
"examples/rails-latex-demo/config/boot.rb",
"examples/rails-latex-demo/config/environment.rb",
"examples/rails-latex-demo/config/environments/development.rb",
"examples/rails-latex-demo/config/environments/production.rb",
"examples/rails-latex-demo/config/environments/test.rb",
"examples/rails-latex-demo/config/initializers/backtrace_silencers.rb",
"examples/rails-latex-demo/config/initializers/inflections.rb",
"examples/rails-latex-demo/config/initializers/mime_types.rb",
"examples/rails-latex-demo/config/initializers/secret_token.rb",
"examples/rails-latex-demo/config/initializers/session_store.rb",
"examples/rails-latex-demo/config/routes.rb",
"examples/rails-latex-demo/test/functional/latex_example_controller_test.rb",
"examples/rails-latex-demo/test/unit/helpers/latex_example_helper_test.rb",
"test/helper.rb",
"test/test_latex_to_pdf.rb"
]
# Declare the rails dependency compatibly with pre-1.2 RubyGems.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rails>, [">= 3.0.0"])
else
s.add_dependency(%q<rails>, [">= 3.0.0"])
end
else
s.add_dependency(%q<rails>, [">= 3.0.0"])
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
# Gem specification for sugar-high 0.2.13.
# NOTE(review): jeweler-generated (see header above) — regenerate from the
# Rakefile instead of editing here.
Gem::Specification.new do |s|
s.name = %q{sugar-high}
s.version = "0.2.13"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Kristian Mandrup"]
s.date = %q{2010-10-20}
s.description = %q{More Ruby sugar - inspired by the 'zuker' project}
s.email = %q{kmandrup@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.markdown"
]
# Files packaged into the gem.
s.files = [
".document",
".gitignore",
".rspec",
"LICENSE",
"README.markdown",
"Rakefile",
"VERSION",
"lib/sugar-high.rb",
"lib/sugar-high/alias.rb",
"lib/sugar-high/arguments.rb",
"lib/sugar-high/array.rb",
"lib/sugar-high/blank.rb",
"lib/sugar-high/file.rb",
"lib/sugar-high/hash.rb",
"lib/sugar-high/includes.rb",
"lib/sugar-high/kind_of.rb",
"lib/sugar-high/metaclass.rb",
"lib/sugar-high/methods.rb",
"lib/sugar-high/module.rb",
"lib/sugar-high/not.rb",
"lib/sugar-high/path.rb",
"lib/sugar-high/regexp.rb",
"lib/sugar-high/rspec/configure.rb",
"lib/sugar-high/rspec/matchers/have_aliases.rb",
"spec/fixtures/empty.txt",
"spec/fixtures/non-empty.txt",
"spec/spec_helper.rb",
"spec/sugar-high/alias_spec.rb",
"spec/sugar-high/arguments_spec.rb",
"spec/sugar-high/array_spec.rb",
"spec/sugar-high/blank_spec.rb",
"spec/sugar-high/file/file_mutate_spec.rb",
"spec/sugar-high/file/file_spec.rb",
"spec/sugar-high/hash_spec.rb",
"spec/sugar-high/includes_spec.rb",
"spec/sugar-high/kind_of_spec.rb",
"spec/sugar-high/methods_spec.rb",
"spec/sugar-high/module_spec.rb",
"spec/sugar-high/path_spec.rb",
"spec/sugar-high/regexp_spec.rb",
"sugar-high.gemspec"
]
s.homepage = %q{http://github.com/kristianmandrup/sugar-high}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.7}
s.summary = %q{Ruby convenience sugar packs!}
s.test_files = [
"spec/spec_helper.rb",
"spec/sugar-high/alias_spec.rb",
"spec/sugar-high/arguments_spec.rb",
"spec/sugar-high/array_spec.rb",
"spec/sugar-high/blank_spec.rb",
"spec/sugar-high/file/file_mutate_spec.rb",
"spec/sugar-high/file/file_spec.rb",
"spec/sugar-high/hash_spec.rb",
"spec/sugar-high/includes_spec.rb",
"spec/sugar-high/kind_of_spec.rb",
"spec/sugar-high/methods_spec.rb",
"spec/sugar-high/module_spec.rb",
"spec/sugar-high/path_spec.rb",
"spec/sugar-high/regexp_spec.rb"
]
# Declare dependencies compatibly with pre-1.2 RubyGems.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>, [">= 2.0.0.rc"])
s.add_runtime_dependency(%q<require_all>, ["~> 1.2.0"])
s.add_runtime_dependency(%q<mocha>, ["~> 0.9.8"])
else
s.add_dependency(%q<rspec>, [">= 2.0.0.rc"])
s.add_dependency(%q<require_all>, ["~> 1.2.0"])
s.add_dependency(%q<mocha>, ["~> 0.9.8"])
end
else
s.add_dependency(%q<rspec>, [">= 2.0.0.rc"])
s.add_dependency(%q<require_all>, ["~> 1.2.0"])
s.add_dependency(%q<mocha>, ["~> 0.9.8"])
end
end
Regenerated gemspec for version 0.3.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
# Gem specification for sugar-high 0.3.0 (regenerated from 0.2.13; only
# version and date changed).
# NOTE(review): jeweler-generated — regenerate from the Rakefile instead of
# editing here.
Gem::Specification.new do |s|
s.name = %q{sugar-high}
s.version = "0.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Kristian Mandrup"]
s.date = %q{2010-10-21}
s.description = %q{More Ruby sugar - inspired by the 'zuker' project}
s.email = %q{kmandrup@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.markdown"
]
# Files packaged into the gem.
s.files = [
".document",
".gitignore",
".rspec",
"LICENSE",
"README.markdown",
"Rakefile",
"VERSION",
"lib/sugar-high.rb",
"lib/sugar-high/alias.rb",
"lib/sugar-high/arguments.rb",
"lib/sugar-high/array.rb",
"lib/sugar-high/blank.rb",
"lib/sugar-high/file.rb",
"lib/sugar-high/hash.rb",
"lib/sugar-high/includes.rb",
"lib/sugar-high/kind_of.rb",
"lib/sugar-high/metaclass.rb",
"lib/sugar-high/methods.rb",
"lib/sugar-high/module.rb",
"lib/sugar-high/not.rb",
"lib/sugar-high/path.rb",
"lib/sugar-high/regexp.rb",
"lib/sugar-high/rspec/configure.rb",
"lib/sugar-high/rspec/matchers/have_aliases.rb",
"spec/fixtures/empty.txt",
"spec/fixtures/non-empty.txt",
"spec/spec_helper.rb",
"spec/sugar-high/alias_spec.rb",
"spec/sugar-high/arguments_spec.rb",
"spec/sugar-high/array_spec.rb",
"spec/sugar-high/blank_spec.rb",
"spec/sugar-high/file/file_mutate_spec.rb",
"spec/sugar-high/file/file_spec.rb",
"spec/sugar-high/hash_spec.rb",
"spec/sugar-high/includes_spec.rb",
"spec/sugar-high/kind_of_spec.rb",
"spec/sugar-high/methods_spec.rb",
"spec/sugar-high/module_spec.rb",
"spec/sugar-high/path_spec.rb",
"spec/sugar-high/regexp_spec.rb",
"sugar-high.gemspec"
]
s.homepage = %q{http://github.com/kristianmandrup/sugar-high}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.7}
s.summary = %q{Ruby convenience sugar packs!}
s.test_files = [
"spec/spec_helper.rb",
"spec/sugar-high/alias_spec.rb",
"spec/sugar-high/arguments_spec.rb",
"spec/sugar-high/array_spec.rb",
"spec/sugar-high/blank_spec.rb",
"spec/sugar-high/file/file_mutate_spec.rb",
"spec/sugar-high/file/file_spec.rb",
"spec/sugar-high/hash_spec.rb",
"spec/sugar-high/includes_spec.rb",
"spec/sugar-high/kind_of_spec.rb",
"spec/sugar-high/methods_spec.rb",
"spec/sugar-high/module_spec.rb",
"spec/sugar-high/path_spec.rb",
"spec/sugar-high/regexp_spec.rb"
]
# Declare dependencies compatibly with pre-1.2 RubyGems.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>, [">= 2.0.0.rc"])
s.add_runtime_dependency(%q<require_all>, ["~> 1.2.0"])
s.add_runtime_dependency(%q<mocha>, ["~> 0.9.8"])
else
s.add_dependency(%q<rspec>, [">= 2.0.0.rc"])
s.add_dependency(%q<require_all>, ["~> 1.2.0"])
s.add_dependency(%q<mocha>, ["~> 0.9.8"])
end
else
s.add_dependency(%q<rspec>, [">= 2.0.0.rc"])
s.add_dependency(%q<require_all>, ["~> 1.2.0"])
s.add_dependency(%q<mocha>, ["~> 0.9.8"])
end
end
|
# Rebuilds the Algolia search indices ("candidates" and "citizens") from the
# Postgres candidates tables and writes a sitemap XML file to ARGV[0].
require_relative '../config/keys.local.rb'
require 'csv'
require 'uri'
require 'net/http'
require 'json'
require 'pg'
require 'openssl'
require 'algoliasearch'
require 'wannabe_bool'
db=PG.connect(
"dbname"=>PGNAME,
"user"=>PGUSER,
"password"=>PGPWD,
"host"=>PGHOST,
"port"=>PGPORT
)
Algolia.init :application_id=>ALGOLIA_ID, :api_key=>ALGOLIA_KEY
index_candidats=Algolia::Index.new("candidates")
index_citoyens=Algolia::Index.new("citizens")
# One row per candidate, with aggregated view and supporter counts.
candidates_list=<<END
SELECT ca.candidate_id,ca.user_id,ca.name,ca.gender,ca.verified,ca.date_added::DATE as date_added,date_part('day',now()-ca.date_added) as nb_days_added,ca.date_verified::DATE as date_verified,date_part('day',now() - ca.date_verified) as nb_days_verified,ca.qualified,ca.date_qualified,ca.official,ca.date_officialized,ca.photo,ca.trello,ca.website,ca.twitter,ca.facebook,ca.youtube,ca.linkedin,ca.tumblr,ca.blog,ca.wikipedia,ca.instagram, z.nb_views, z.nb_soutiens
FROM candidates as ca
LEFT JOIN (
SELECT y.candidate_id, y.nb_views, count(s.user_id) as nb_soutiens
FROM (
SELECT c.candidate_id, sum(cv.nb_views) as nb_views
FROM candidates as c
LEFT JOIN candidates_views as cv
ON (
cv.candidate_id=c.candidate_id
)
GROUP BY c.candidate_id
) as y
LEFT JOIN supporters as s
ON ( s.candidate_id=y.candidate_id)
GROUP BY y.candidate_id,y.nb_views
) as z
ON (z.candidate_id = ca.candidate_id)
END
sitemap=<<END
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
END
res=db.exec(candidates_list)
if not res.num_tuples.zero? then
res.each do |r|
qualified = r['qualified'].to_b ? "qualified" : "not_qualified"
verified = r['verified'].to_b ? "verified" : "not_verified"
official= r['official'].to_b ? "official" : "not_official"
gender= r['gender']=='M' ? "Homme" : "Femme"
# Verified candidates go to the "candidates" index; others with enough
# supporters go to the "citizens" index.
if (r['verified'].to_b) then
index_candidats.save_object({
"objectID"=>r['candidate_id'],
"candidate_id"=>r['candidate_id'],
"name"=>r['name'],
"photo"=>r['photo'],
"gender"=>gender,
"trello"=>r['trello'],
"website"=>r['website'],
"twitter"=>r['twitter'],
"facebook"=>r['facebook'],
"youtube"=>r['youtube'],
"linkedin"=>r['linkedin'],
"tumblr"=>r['tumblr'],
"blog"=>r['blog'],
"wikipedia"=>r['wikipedia'],
"instagram"=>r['instagram'],
"date_added"=>r['date_added'],
"nb_days_added"=>r['nb_days_added'].to_i,
"verified"=>verified,
"date_verified"=>r['date_verified'],
"nb_days_verified"=>r['nb_days_verified'].to_i,
"qualified"=>qualified,
"date_qualified"=>r['date_qualified'],
"official"=>official,
# FIX: the query selects ca.date_officialized; the previous key and
# lookup were misspelled ("date_officializied"), so nil was always indexed.
"date_officialized"=>r['date_officialized'],
"nb_soutiens"=>r['nb_soutiens'].to_i,
"nb_views"=>r['nb_views'].to_i
})
sitemap+=<<END
<url>
<loc>https://laprimaire.org/candidat/#{r['candidate_id']}</loc>
<lastmod>#{r['date_verified']}</lastmod>
</url>
END
puts "Added candidat #{r['name']}"
elsif r['nb_soutiens'].to_i>1
index_citoyens.save_object({
"objectID"=>r['candidate_id'],
"candidate_id"=>r['candidate_id'],
"name"=>r['name'],
"photo"=>r['photo'],
"gender"=>gender,
"date_added"=>r['date_added'],
"nb_days_added"=>r['nb_days_added'].to_i,
"nb_soutiens"=>r['nb_soutiens'].to_i,
"nb_views"=>r['nb_views'].to_i
})
sitemap+=<<END
<url>
<loc>https://laprimaire.org/candidat/#{r['candidate_id']}</loc>
<lastmod>#{r['date_added']}</lastmod>
</url>
END
puts "Added citoyen #{r['name']}"
else
puts "Skipped citoyen #{r['name']}"
end
end
sitemap+="</urlset>\n"
end
File.write(ARGV[0],sitemap)
* Ajout du nombre de soutiens reçus les 7 derniers jours
# Rebuilds the Algolia search indices ("candidates" and "citizens") from the
# Postgres candidates tables and writes a sitemap XML file to ARGV[0].
# This revision also computes the number of supporters received over the last
# 7 days (nb_soutiens_7j).
require_relative '../config/keys.local.rb'
require 'csv'
require 'uri'
require 'net/http'
require 'json'
require 'pg'
require 'openssl'
require 'algoliasearch'
require 'wannabe_bool'
db=PG.connect(
"dbname"=>PGNAME,
"user"=>PGUSER,
"password"=>PGPWD,
"host"=>PGHOST,
"port"=>PGPORT
)
Algolia.init :application_id=>ALGOLIA_ID, :api_key=>ALGOLIA_KEY
index_candidats=Algolia::Index.new("candidates")
index_citoyens=Algolia::Index.new("citizens")
# One row per candidate: aggregated views (z.nb_views), total supporters
# (z.nb_soutiens) and supporters over the last 7 days (w.nb_soutiens_7j).
candidates_list=<<END
SELECT ca.candidate_id,ca.user_id,ca.name,ca.gender,ca.verified,ca.date_added::DATE as date_added,date_part('day',now()-ca.date_added) as nb_days_added,ca.date_verified::DATE as date_verified,date_part('day',now() - ca.date_verified) as nb_days_verified,ca.qualified,ca.date_qualified,ca.official,ca.date_officialized,ca.photo,ca.trello,ca.website,ca.twitter,ca.facebook,ca.youtube,ca.linkedin,ca.tumblr,ca.blog,ca.wikipedia,ca.instagram, z.nb_views, z.nb_soutiens, w.nb_soutiens_7j
FROM candidates as ca
LEFT JOIN (
SELECT y.candidate_id, y.nb_views, count(s.user_id) as nb_soutiens
FROM (
SELECT c.candidate_id, sum(cv.nb_views) as nb_views
FROM candidates as c
LEFT JOIN candidates_views as cv
ON (
cv.candidate_id=c.candidate_id
)
GROUP BY c.candidate_id
) as y
LEFT JOIN supporters as s
ON ( s.candidate_id=y.candidate_id)
GROUP BY y.candidate_id,y.nb_views
) as z
ON (z.candidate_id = ca.candidate_id)
LEFT JOIN (
SELECT y.candidate_id, y.nb_views, count(s.user_id) as nb_soutiens_7j
FROM (
SELECT c.candidate_id, sum(cv.nb_views) as nb_views
FROM candidates as c
LEFT JOIN candidates_views as cv
ON (
cv.candidate_id=c.candidate_id
)
GROUP BY c.candidate_id
) as y
LEFT JOIN supporters as s
ON ( s.candidate_id=y.candidate_id)
WHERE s.support_date> (now()::date-7)
GROUP BY y.candidate_id,y.nb_views
) as w
ON (w.candidate_id = ca.candidate_id)
ORDER BY z.nb_soutiens DESC
END
sitemap=<<END
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
END
res=db.exec(candidates_list)
if not res.num_tuples.zero? then
res.each do |r|
qualified = r['qualified'].to_b ? "qualified" : "not_qualified"
verified = r['verified'].to_b ? "verified" : "not_verified"
official= r['official'].to_b ? "official" : "not_official"
gender= r['gender']=='M' ? "Homme" : "Femme"
# Verified candidates go to the "candidates" index; others with enough
# supporters go to the "citizens" index.
if (r['verified'].to_b) then
index_candidats.save_object({
"objectID"=>r['candidate_id'],
"candidate_id"=>r['candidate_id'],
"name"=>r['name'],
"photo"=>r['photo'],
"gender"=>gender,
"trello"=>r['trello'],
"website"=>r['website'],
"twitter"=>r['twitter'],
"facebook"=>r['facebook'],
"youtube"=>r['youtube'],
"linkedin"=>r['linkedin'],
"tumblr"=>r['tumblr'],
"blog"=>r['blog'],
"wikipedia"=>r['wikipedia'],
"instagram"=>r['instagram'],
"date_added"=>r['date_added'],
"nb_days_added"=>r['nb_days_added'].to_i,
"verified"=>verified,
"date_verified"=>r['date_verified'],
"nb_days_verified"=>r['nb_days_verified'].to_i,
"qualified"=>qualified,
"date_qualified"=>r['date_qualified'],
"official"=>official,
# FIX: the query selects ca.date_officialized; the previous key and
# lookup were misspelled ("date_officializied"), so nil was always indexed.
"date_officialized"=>r['date_officialized'],
"nb_soutiens"=>r['nb_soutiens'].to_i,
# FIX: w.nb_soutiens_7j was computed by the query (the point of this
# revision) but never indexed.
"nb_soutiens_7j"=>r['nb_soutiens_7j'].to_i,
"nb_views"=>r['nb_views'].to_i
})
sitemap+=<<END
<url>
<loc>https://laprimaire.org/candidat/#{r['candidate_id']}</loc>
<lastmod>#{r['date_verified']}</lastmod>
</url>
END
puts "Added candidat #{r['name']}"
elsif r['nb_soutiens'].to_i>1
index_citoyens.save_object({
"objectID"=>r['candidate_id'],
"candidate_id"=>r['candidate_id'],
"name"=>r['name'],
"photo"=>r['photo'],
"gender"=>gender,
"date_added"=>r['date_added'],
"nb_days_added"=>r['nb_days_added'].to_i,
"nb_soutiens"=>r['nb_soutiens'].to_i,
# FIX: include the 7-day supporter count here as well.
"nb_soutiens_7j"=>r['nb_soutiens_7j'].to_i,
"nb_views"=>r['nb_views'].to_i
})
sitemap+=<<END
<url>
<loc>https://laprimaire.org/candidat/#{r['candidate_id']}</loc>
<lastmod>#{r['date_added']}</lastmod>
</url>
END
puts "Added citoyen #{r['name']}"
else
puts "Skipped citoyen #{r['name']}"
end
end
sitemap+="</urlset>\n"
end
File.write(ARGV[0],sitemap)
|
#!/usr/bin/env ruby -Ku
# encoding: utf-8
require 'set'
require 'yaml'
require 'fileutils'
require 'pathname'
require 'thor'
require 'addressable/uri'
require 'ruby-github'
class ::Project
# Names of all user-invocable commands; colons denote command namespaces
# (e.g. 'bundle:install').
def self.command_names
%w[ sync bundle:install bundle:update bundle:show gem:install gem:uninstall spec release implode status list ]
end
# 'bundle:install' -> 'bundle_install' (a valid method name).
def self.command_name(name)
command_fragments(name).join('_')
end
# 'bundle:install' -> 'Bundle::Install' (a resolvable constant path).
def self.command_class_name(name)
command_fragments(name).map { |fragment| fragment.capitalize }.join('::')
end
# Split a colon-separated command name (e.g. 'bundle:install') into its
# fragments. The previous `.map { |fragment| fragment }` was an identity
# map and has been removed.
def self.command_fragments(name)
  name.split(':')
end
# For every command name, define a class method (Project.sync(opts)) and an
# instance method (#sync). The instance method fires :before hooks, runs the
# matching Command subclass once per selected repo, then fires :after hooks;
# with options[:benchmark] it also prints the elapsed time.
command_names.each do |name|
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def self.#{command_name(name)}(options = {})
new(options).send(:#{command_name(name)})
end
def #{command_name(name)}
start = Time.now if options[:benchmark]
self.class.invoke :before, '#{command_name(name)}', env, repos
@repos.each do |repo|
@logger.progress!
command_class('#{name}').new(repo, env, @logger).run
end
self.class.invoke :after, '#{command_name(name)}', env, repos
stop = Time.now if options[:benchmark]
if options[:benchmark]
puts '-------------------------------'
puts "Time elapsed: \#{stop - start}"
puts '-------------------------------'
end
end
RUBY
end
attr_reader :env
attr_reader :root
attr_reader :repos
attr_reader :options
attr_accessor :commands
# Build the environment, the repository selection and the logger.
# +options+ may carry :root, :bundle_root, :include, :exclude, :rubies,
# :verbose, :pretend (see Environment).
# NOTE(review): #name and #excluded_repos are supplied by subclasses
# (e.g. DataMapper::Project below) — confirm all subclasses define them.
def initialize(options = {})
@options = options
@env = environment_class.new(name, @options)
@root = @env.root
@repos = Repositories.new(@root, name, @env.included, @env.excluded + excluded_repos)
@logger = Logger.new(@env, @repos.count)
@commands = {}
end
# Subclasses override this to supply a project-specific Environment class.
def environment_class
  Environment
end

# Resolve the Command implementation for +name+: an explicitly registered
# class wins, otherwise the constant path derived from the command name is
# looked up under Command.
def command_class(name)
  commands[name] || Utils.full_const_get(self.class.command_class_name(name), Command)
end
# Register a block to run once before the named command (class-level state).
def self.before(command_name, &block)
((@before ||= {})[command_name] ||= []) << block
end
# Register a block to run once after the named command.
def self.after(command_name, &block)
((@after ||= {})[command_name] ||= []) << block
end
# Invoke every hook of +kind+ (:before or :after) registered for +name+,
# passing +args+ (env and repos) through to the blocks.
def self.invoke(kind, name, *args)
hooks = instance_variable_get("@#{kind}")
return unless hooks && hooks[name]
hooks[name].each { |hook| hook.call(*args) }
end
# Source of the repository list: a cached dm-dev.yml under +root+ if present,
# otherwise the GitHub API (result is then written to the cache file).
# Entries only need to respond to #name and #url.
class Metadata
attr_reader :root
attr_reader :name
attr_reader :repositories
def self.fetch(root, name)
new(root, name).repositories
end
def initialize(root, name)
@root, @name = root, name
@repositories = fetch
end
# Prefer the local YAML cache; fall back to GitHub (and populate the cache).
def fetch
filename = root.join(config_file_name)
if filename.file?
load_from_yaml(filename)
else
load_from_github(filename)
end
end
def config_file_name
'dm-dev.yml'
end
def load_from_github(filename)
cache(GitHub::API.user(name).repositories, filename)
end
# Rehydrate cached entries as simple name/url structs.
def load_from_yaml(filename)
YAML.load(File.open(filename))['repositories'].map do |repo|
Struct.new(:name, :url).new(repo['name'], repo['url'])
end
end
private
# Persist the fetched repo list to +filename+ and return it unchanged.
def cache(repos, filename)
File.open(filename, 'w') do |f|
f.write(YAML.dump({
'repositories' => repos.map { |repo| { 'name' => repo.name, 'url' => repo.url } }
}))
end
repos
end
end
module Utils
  # Resolve a (possibly ::-prefixed) constant path such as 'Bundle::Install',
  # starting the lookup at +root+. Unlike plain const_get, each step only
  # accepts constants defined on the current module itself (falling through
  # to const_missing otherwise), never ones inherited from ancestors.
  def self.full_const_get(name, root = Object)
    namespaces(name).reduce(root) do |mod, part|
      mod.const_defined?(part) ? mod.const_get(part) : mod.const_missing(part)
    end
  end

  # True when the constant path contains more than one segment.
  def self.namespaced?(const_name)
    namespaces(const_name).length > 1
  end

  # Split a constant path into its segments, dropping a leading '::'.
  def self.namespaces(const_name)
    parts = const_name.to_s.split('::')
    parts.shift if parts.first.empty?
    parts
  end
end
# The set of Repository objects a command will operate on, filtered from the
# Metadata list by include/exclude options. When run from inside a managed
# repo checkout with no explicit selection, only that repo is selected.
class Repositories
include Enumerable
def initialize(root, user, repos, excluded_repos)
@root, @user = root, user
@repos = repos
@excluded_repos = excluded_repos
@metadata = Metadata.fetch(@root, @user)
@repositories = selected_repositories.map do |repo|
Repository.new(@root, repo)
end
end
def each
@repositories.each { |repo| yield(repo) }
end
private
def selected_repositories
if use_current_directory?
@metadata.select { |repo| managed_repo?(repo) }
else
@metadata.select { |repo| include_repo?(repo) }
end
end
# The repo whose checkout we are currently inside.
def managed_repo?(repo)
repo.name == relative_path_name
end
def include_repo?(repo)
if @repos
!excluded_repo?(repo) && (include_all? || @repos.include?(repo.name))
else
!excluded_repo?(repo)
end
end
def excluded_repo?(repo)
@excluded_repos.include?(repo.name)
end
# No selection given, cwd is a known repo, and 'all' was not requested.
def use_current_directory?
@repos.nil? && inside_available_repo? && !include_all?
end
def inside_available_repo?
@metadata.map(&:name).include?(relative_path_name)
end
# 'all' is implicit (nil selection outside a repo) or explicit (['all']).
def include_all?
explicitly_specified = @repos.respond_to?(:each) && @repos.count == 1 && @repos.first == 'all'
if inside_available_repo?
explicitly_specified
else
@repos.nil? || explicitly_specified
end
end
def relative_path_name
Pathname(Dir.pwd).relative_path_from(@root).to_s
end
end
# One managed repository: its name, checkout path below +root+, and its
# clone URI (parsed via Addressable).
class Repository
  attr_reader :path, :name, :uri

  def initialize(root, repo)
    @name = repo.name
    @path = root.join(@name)
    @uri  = Addressable::URI.parse(repo.url)
  end

  # A repo can be bundled only when it ships a Gemfile.
  def installable?
    path.join('Gemfile').file?
  end
end
# Runtime configuration resolved in priority order: explicit option,
# then environment variable, then a default (overridable in subclasses).
class Environment
  attr_reader :name
  attr_reader :options
  attr_reader :root
  attr_reader :included
  attr_reader :excluded
  attr_reader :rubies
  attr_reader :bundle_root

  def initialize(name, options)
    @name    = name
    @options = options
    @root        = Pathname(@options[:root] || ENV['DM_DEV_ROOT'] || Dir.pwd)
    @bundle_root = Pathname(@options[:bundle_root] || ENV['DM_DEV_BUNDLE_ROOT'] || @root.join(default_bundle_root))
    @included = @options[:include] || list_from_env('INCLUDE', default_included)
    @excluded = @options[:exclude] || list_from_env('EXCLUDE', default_excluded)
    @rubies   = @options[:rubies]  || list_from_env('RUBIES',  default_rubies)
    @verbose  = @options[:verbose] || ENV['VERBOSE'] == 'true'
    @pretend  = @options[:pretend] || ENV['PRETEND'] == 'true'
  end

  def default_bundle_root
    'DM_DEV_BUNDLE_ROOT'
  end

  def default_included
    nil # means all
  end

  def default_excluded
    [] # overwrite in subclasses
  end

  def default_rubies
    %w[ 1.8.7 1.9.2 jruby rbx ]
  end

  def verbose?
    @verbose
  end

  def pretend?
    @pretend
  end

  private

  # Comma- or space-separated env var, or the given default when unset.
  def list_from_env(key, default)
    ENV[key] ? normalize(ENV[key]) : default
  end

  def normalize(string)
    string.gsub(',', ' ').split(' ')
  end
end
# Prints one progress-prefixed line per repo action ([ i/total] ...), or the
# raw shell command when running verbose/pretend.
class Logger
  attr_reader :progress

  def initialize(env, repo_count)
    @env     = env
    @progress = 0
    @count   = repo_count
    @width   = @count.to_s.length
    @verbose = @env.verbose?
    @pretend = @env.pretend?
  end

  def log(repo, action, command = nil, msg = nil)
    # TODO also do for actually executed commands
    normalized = command.to_s.squeeze(' ').strip
    if @pretend || @verbose
      puts normalized
    else
      puts '[%0*d/%d] %s %s %s%s' % format(repo, action, normalized, msg)
    end
  end

  # Advance the [i/total] counter by one repo.
  def progress!
    @progress += 1
  end

  # Arguments for the progress format string above.
  def format(repo, action, command, msg)
    suffix = @verbose ? ": #{command}" : ''
    [@width, @progress, @count, action, repo.name, msg, suffix]
  end
end
# Template-method base class for all per-repo commands. Subclasses supply
# #command (the shell line) and may override the hooks and predicates below;
# #run cds into #working_dir and executes.
class Command
attr_reader :repo
attr_reader :env
attr_reader :root
attr_reader :path
attr_reader :uri
attr_reader :logger
def initialize(repo, env, logger)
@repo = repo
@env = env
@root = @env.root
@path = @root.join(@repo.name)
@uri = @repo.uri
@logger = logger
@verbose = @env.verbose?
end
def before
# overwrite in subclasses
end
# Change into the working dir, then either yield to the subclass block or
# run the default #execute flow.
def run
log_directory_change
FileUtils.cd(working_dir) do
if block_given?
yield
else
execute
end
end
end
def after
# overwrite in subclasses
end
# before -> log -> (sleep, system) -> after; skipped entirely (with an
# explanation in verbose mode) when #executable? is false.
def execute
if executable?
before
unless suppress_log?
log(command)
end
unless pretend?
sleep(timeout)
system(command)
end
after
else
if verbose? && !pretend?
log(command, "SKIPPED! - #{explanation}")
end
end
end
# overwrite in subclasses
def command
raise NotImplementedError
end
# overwrite in subclasses
def executable?
true
end
# overwrite in subclasses
def suppress_log?
false
end
# overwrite in subclasses
def explanation
'reason unknown'
end
def log_directory_change
if needs_directory_change? && (verbose? || pretend?)
log "cd #{working_dir}"
end
end
def needs_directory_change?
Dir.pwd != working_dir.to_s
end
def ignored?
ignored_repos.include?(repo.name)
end
# overwrite in subclasses
def ignored_repos
[]
end
# overwrite in subclasses
def working_dir
path
end
def verbose?
@verbose
end
def pretend?
@env.pretend?
end
# Shell redirection suffix: subclass-provided #verbose or silence output.
def verbosity
verbose? ? verbose : silent
end
# overwrite in subclasses
def verbose
end
def silent
'>& /dev/null'
end
# Seconds to wait before running (rate limiting). overwrite in subclasses
def timeout
0
end
# overwrite in subclasses
def action
end
def log(command = nil, msg = nil)
logger.log(repo, action, command, msg)
end
# Prints one progress line per known repository without executing anything.
class List < ::Project::Command
  def run
    log
  end
end
# Factory: Sync.new returns a Pull for repos already checked out under the
# project root, otherwise a Clone.
class Sync < Command
  def self.new(repo, env, logger)
    return super unless self == Sync
    if env.root.join(repo.name).directory?
      Pull.new(repo, env, logger)
    else
      Clone.new(repo, env, logger)
    end
  end
  class Clone < Sync
    def initialize(repo, env, logger)
      super
      @git_uri = uri.dup
      @git_uri.scheme = 'git'
      # FIX: the previous code called sub! on the throwaway String returned
      # by @git_uri.to_s, so the development (push-access) rewrite from
      # git://host/user/repo to git@host:user/repo was silently discarded
      # (and chained sub! could raise NoMethodError when no match occurred).
      # Render the URI to a string once and rewrite that string.
      @clone_url = @git_uri.to_s
      if env.options[:development]
        @clone_url = @clone_url.sub('://', '@').sub('/', ':')
      end
    end
    def command
      "git clone #{@clone_url}.git #{verbosity}"
    end
    # Clone runs from the project root, not the (nonexistent) repo dir.
    def working_dir
      root
    end
    def action
      'Cloning'
    end
  end
  class Pull < Sync
    def command
      "git checkout master #{verbosity}; git pull --rebase #{verbosity}"
    end
    def action
      'Pulling'
    end
  end
end
# Base for commands that run under one or more rvm-managed rubies.
class Rvm < Command
attr_reader :rubies
def initialize(repo, env, logger)
super
@rubies = env.rubies
end
def command
"rvm #{rubies.join(',')}"
end
# Runs the command once per configured ruby via `rvm <ruby> exec`.
class Exec < Rvm
attr_reader :ruby
def run
super do
rubies.each do |ruby|
@ruby = ruby
if block_given?
yield(ruby)
else
execute
end
end
end
end
private
# Subclasses append the actual shell payload to this prefix.
def command
"rvm #{@ruby} exec bash -c"
end
def action
"[#{@ruby}]"
end
end
end
# Runs `bundle <subcommand>` per ruby, against a per-ruby Gemfile and a
# per-ruby bundle path below env.bundle_root.
class Bundle < Rvm::Exec
class Install < Bundle
def bundle_command
'install'
end
def action
"#{super} bundle install"
end
end
class Update < Bundle
def bundle_command
'update'
end
def action
"#{super} bundle update"
end
end
class Show < Bundle
def bundle_command
'show'
end
def action
"#{super} bundle show"
end
end
def initialize(repo, env, logger)
super
@bundle_root = env.bundle_root
# Pre-create one bundle dir per ruby so BUNDLE_PATH always exists.
rubies.each { |ruby| bundle_path(ruby).mkpath }
end
def before
super
make_gemfile
end
def executable?
!ignored? && repo.installable?
end
# Full shell line: `rvm <ruby> exec bash -c "ENV… bundle <cmd> …"`.
def command
"#{super} \"#{environment} bundle #{bundle_command} #{options} #{verbosity}\""
end
# NOTE(review): #ruby is set per-iteration by Rvm::Exec#run.
def environment
"BUNDLE_PATH='#{bundle_path(ruby)}' BUNDLE_GEMFILE='#{gemfile}'"
end
def bundle_path(ruby)
@bundle_root.join(ruby)
end
def gemfile
"Gemfile.#{ruby}"
end
# Seed the per-ruby Gemfile from the master Gemfile when missing.
def make_gemfile
unless working_dir.join(gemfile).file?
master = working_dir.join(master_gemfile)
log "cp #{master} #{gemfile}"
unless pretend?
FileUtils.cp(master, gemfile)
end
end
end
def master_gemfile
'Gemfile'
end
# Extra CLI flags for the bundle subcommand; nil means none.
def options
nil
end
def explanation
if ignored?
"because it's ignored"
elsif !repo.installable?
"because it's missing a Gemfile"
else
"reason unknown"
end
end
end
# Runs the spec suite per ruby and, unless verbose/pretend, prints a
# textile-style pass/fail matrix row per ruby.
class Spec < Bundle
def run
if print_matrix?
puts "\nh2. %s\n\n" % repo.name
# NOTE(review): env.adapters is only defined on DataMapper's Environment
# subclass — confirm Spec is never run with the base Environment.
puts '| RUBY | %s |' % env.adapters.join(' | ')
end
super do |ruby|
print '| %s |' % ruby if print_matrix?
if block_given?
yield ruby
else
execute
if print_matrix?
# $? reflects the exit status of the `system` call in #execute.
print ' %s |' % [ $?.success? ? 'pass' : 'fail' ]
end
end
end
end
def bundle_command
'exec rake spec'
end
def action
"#{super} Testing"
end
def print_matrix?
executable? && !verbose? && !pretend?
end
def suppress_log?
!executable? || print_matrix?
end
end
# Builds/installs or uninstalls the repo's gem under the configured rubies.
class Gem < Rvm
class Install < Gem
def command
"#{super} gem build #{gemspec_file}; gem install #{gem}"
end
def action
'Installing'
end
end
class Uninstall < Gem
def command
"#{super} gem uninstall #{repo.name} --version #{version}"
end
def action
'Uninstalling'
end
end
# Absolute path of the built .gem artifact.
def gem
"#{working_dir.join(repo.name)}-#{version}.gem"
end
# Absolute path of the repo's gemspec (Pathname#join with an absolute
# string below therefore returns this same path).
def gemspec_file
"#{working_dir.join(repo.name)}.gemspec"
end
def version
::Gem::Specification.load(working_dir.join(gemspec_file)).version.to_s
end
end
# Runs `rake release` inside each repository.
class Release < Command
def run
# TODO move to its own command
# FIXME(review): neither #clean_repository nor #project_name is defined
# anywhere in this file — this line raises NoMethodError as written.
clean_repository(project_name)
FileUtils.cd(working_dir) do
log(command)
system(command) unless pretend?
end
end
def command
'rake release'
end
def action
'Releasing'
end
end
# Removes the repository checkout from disk entirely.
class Implode < Command
  def run
    log(command)
    system(command) unless pretend?
  end

  def command
    "rm -rf #{working_dir} #{verbosity}"
  end

  def action
    'Deleting'
  end
end
# Runs `git status` inside each repository checkout.
class Status < Command
  def run
    if verbose? || pretend?
      log "cd #{working_dir}"
    end
    FileUtils.cd(working_dir) do
      log(command)
      system(command) unless pretend?
    end
  end

  def command
    "git status"
  end

  def action
    'git status'
  end
end
end
end
module DataMapper
class Project < ::Project
def initialize(options = {})
super
# Register DataMapper-specific command classes; these take precedence over
# the generic constant lookup in ::Project#command_class.
commands['bundle:install'] = DataMapper::Project::Bundle::Install
commands['bundle:update' ] = DataMapper::Project::Bundle::Update
commands['bundle:show'   ] = DataMapper::Project::Bundle::Show
commands['spec'] = DataMapper::Project::Spec
end
# GitHub account whose repositories are managed.
def name
'datamapper'
end
def environment_class
DataMapper::Project::Environment
end
# Always-excluded repos, merged with env-driven excludes by ::Project.
def excluded_repos
%w[ dm-more ]
end
# When imploding everything (no explicit include), also wipe the shared
# bundle root — unless just pretending.
before 'implode' do |env, repos|
FileUtils.rm_rf env.bundle_root if env.included.nil? && !env.pretend?
end
# Environment extended with the list of DataMapper adapters to test against.
class Environment < ::Project::Environment
  attr_reader :adapters

  def initialize(name, options)
    super
    # Priority: explicit :adapters option, then ADAPTERS env var, then defaults.
    @adapters ||= options[:adapters] ||
                  (ENV['ADAPTERS'] ? normalize(ENV['ADAPTERS']) : default_adapters)
  end

  def default_adapters
    %w[ in_memory yaml sqlite postgres mysql ]
  end

  def default_excluded
    %w[ dm-oracle-adapter dm-sqlserver-adapter ]
  end
end
# Mixin layered onto the generic Bundle commands: DM-specific env vars,
# Gemfile.local handling and ignored repos.
module Bundle
def environment
"#{super} #{support_lib}"
end
# 1.8.6 needs extlib instead of ActiveSupport.
def support_lib
ruby == '1.8.6' ? 'EXTLIB="true"' : ''
end
def adapters
env.adapters.join(' ')
end
def master_gemfile
'Gemfile.local'
end
def gemfile
"#{super}#{local_install? ? '.local' : ''}"
end
def local_install?
working_dir.join("Gemfile.local").file?
end
def ignored_repos
%w[ dm-dev data_mapper datamapper.github.com dm-ferret-adapter rails_datamapper ]
end
# Rate-limit bundler runs (seconds between repos).
def timeout
2
end
# Adds ADAPTERS to the environment for commands that alter the bundle.
module Manipulation
def environment
"#{super} ADAPTERS='#{adapters}'"
end
end
class Install < ::Project::Command::Bundle::Install
include DataMapper::Project::Bundle
include DataMapper::Project::Bundle::Manipulation
# Generate Gemfile.local via rake before the first local install.
def before
unless local_install?
log local_gemfile_command
system local_gemfile_command unless pretend?
end
super
end
def local_gemfile_command
"rake local_gemfile #{verbosity}"
end
def options
'--without quality'
end
end
class Update < ::Project::Command::Bundle::Update
include DataMapper::Project::Bundle
include DataMapper::Project::Bundle::Manipulation
end
class Show < ::Project::Command::Bundle::Show
include DataMapper::Project::Bundle
include DataMapper::Project::Bundle::Manipulation
end
end
# Runs the spec suite once per ruby AND per adapter, printing one matrix
# cell per adapter.
class Spec < ::Project::Command::Spec
include DataMapper::Project::Bundle
def run
super do |ruby|
env.adapters.each do |adapter|
@adapter = adapter # HACK?
execute
if print_matrix?
# $? is the exit status of the `system` call inside #execute.
print ' %s |' % [ $?.success? ? 'pass' : 'fail' ]
end
end
puts if print_matrix?
end
end
def environment
"#{super} ADAPTER=#{@adapter} TZ=utc"
end
end
# The tasks
# Thor CLI exposing the project commands under the `dm` namespace
# (plus dm:bundle, dm:gem and dm:meta sub-namespaces).
class Tasks < ::Thor
# Shared Thor class_options; mixed into every task class below.
module CommonOptions
def self.included(host)
host.class_eval do
class_option :root, :type => :string, :aliases => '-r', :desc => 'The directory where all DM source code is stored (overwrites DM_DEV_ROOT)'
class_option :bundle_root, :type => :string, :aliases => '-B', :desc => 'The directory where bundler stores all its data (overwrites DM_DEV_BUNDLE_ROOT)'
class_option :rubies, :type => :array, :aliases => '-R', :desc => 'The rvm ruby interpreters to use with this command (overwrites RUBIES)'
class_option :include, :type => :array, :aliases => '-i', :desc => 'The DM gems to include with this command (overwrites INCLUDE)'
class_option :exclude, :type => :array, :aliases => '-e', :desc => 'The DM gems to exclude with this command (overwrites EXCLUDE)'
class_option :adapters, :type => :array, :aliases => '-a', :desc => 'The DM adapters to use with this command (overwrites ADAPTERS)'
class_option :pretend, :type => :boolean, :aliases => '-p', :desc => 'Print the shell commands that would get executed'
class_option :verbose, :type => :boolean, :aliases => '-v', :desc => 'Print the shell commands being executed'
class_option :benchmark, :type => :boolean, :aliases => '-b', :desc => 'Print the time the command took to execute'
end
end
end
namespace :dm
include Thor::Actions
include CommonOptions
desc 'sync', 'Sync with the DM repositories'
method_option :development, :type => :boolean, :aliases => '-d', :desc => 'Use the private github clone url if you have push access'
def sync
DataMapper::Project.sync(options)
end
desc 'spec', 'Run specs for DM gems'
def spec
DataMapper::Project.spec(options)
end
desc 'release', 'Release all DM gems to rubygems'
def release
DataMapper::Project.release(options)
end
desc 'implode', 'Delete all DM gems'
def implode
# Destructive: requires interactive confirmation unless --pretend.
if implode_confirmed?
DataMapper::Project.implode(options)
end
end
desc 'status', 'Show git status information'
def status
DataMapper::Project.status(options)
end
class Bundle < ::Thor
namespace 'dm:bundle'
include CommonOptions
desc 'install', 'Bundle the DM repositories'
def install
DataMapper::Project.bundle_install(options)
end
desc 'update', 'Update the bundled DM repositories'
def update
DataMapper::Project.bundle_update(options)
end
desc 'show', 'Show the bundle content'
def show
DataMapper::Project.bundle_show(options)
end
end
class Gem < ::Thor
namespace 'dm:gem'
include CommonOptions
desc 'install', 'Install all included gems into the specified rubies'
def install
DataMapper::Project.gem_install(options)
end
desc 'uninstall', 'Uninstall all included gems from the specified rubies'
def uninstall
DataMapper::Project.gem_uninstall(options)
end
end
class Meta < ::Thor
namespace 'dm:meta'
desc 'list', 'List locally known DM repositories'
def list
DataMapper::Project.list
end
end
private
def implode_confirmed?
return true if options[:pretend]
question = "Are you really sure? This will destroy #{affected_repositories}! (yes)"
ask(question) == 'yes'
end
def affected_repositories
included = options[:include]
if include_all?(included)
'not only all repositories, but also everything below DM_DEV_BUNDLE_ROOT!'
else
"the following repositories: #{included.join(', ')}!"
end
end
# True when the implode will affect every repository: either no explicit
# include list was given, or the list is exactly ['all'].
#
# Fix: the helpers below take a required +included+ parameter, but were
# previously called without any argument, raising ArgumentError whenever
# this predicate was evaluated.
def include_all?(included)
  include_all_implicitly?(included) || include_all_explicitly?(included)
end

# No :include option at all means "everything".
def include_all_implicitly?(included)
  included.nil?
end

# An include list consisting solely of the literal 'all' also means
# "everything".
def include_all_explicitly?(included)
  included.respond_to?(:each) && included.count == 1 && included.first == 'all'
end
end
end
end
DM = DataMapper::Project
Actually install gems into the specified rvm ruby
#!/usr/bin/env ruby -Ku
# encoding: utf-8
require 'set'
require 'yaml'
require 'fileutils'
require 'pathname'
require 'thor'
require 'addressable/uri'
require 'ruby-github'
class ::Project
def self.command_names
%w[ sync bundle:install bundle:update bundle:show gem:install gem:uninstall spec release implode status list ]
end
def self.command_name(name)
command_fragments(name).join('_')
end
def self.command_class_name(name)
command_fragments(name).map { |fragment| fragment.capitalize }.join('::')
end
# Split a command name like 'bundle:install' into its fragments,
# e.g. ['bundle', 'install'].
#
# Fix: dropped the trailing `.map { |fragment| fragment }`, which was a
# no-op identity map over the result of #split.
def self.command_fragments(name)
  name.split(':')
end
command_names.each do |name|
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def self.#{command_name(name)}(options = {})
new(options).send(:#{command_name(name)})
end
def #{command_name(name)}
start = Time.now if options[:benchmark]
self.class.invoke :before, '#{command_name(name)}', env, repos
@repos.each do |repo|
@logger.progress!
command_class('#{name}').new(repo, env, @logger).run
end
self.class.invoke :after, '#{command_name(name)}', env, repos
stop = Time.now if options[:benchmark]
if options[:benchmark]
puts '-------------------------------'
puts "Time elapsed: \#{stop - start}"
puts '-------------------------------'
end
end
RUBY
end
attr_reader :env
attr_reader :root
attr_reader :repos
attr_reader :options
attr_accessor :commands
def initialize(options = {})
@options = options
@env = environment_class.new(name, @options)
@root = @env.root
@repos = Repositories.new(@root, name, @env.included, @env.excluded + excluded_repos)
@logger = Logger.new(@env, @repos.count)
@commands = {}
end
def environment_class
Environment
end
def command_class(name)
return commands[name] if commands[name]
Utils.full_const_get(self.class.command_class_name(name), Command)
end
def self.before(command_name, &block)
((@before ||= {})[command_name] ||= []) << block
end
def self.after(command_name, &block)
((@after ||= {})[command_name] ||= []) << block
end
def self.invoke(kind, name, *args)
hooks = instance_variable_get("@#{kind}")
return unless hooks && hooks[name]
hooks[name].each { |hook| hook.call(*args) }
end
class Metadata
attr_reader :root
attr_reader :name
attr_reader :repositories
def self.fetch(root, name)
new(root, name).repositories
end
def initialize(root, name)
@root, @name = root, name
@repositories = fetch
end
def fetch
filename = root.join(config_file_name)
if filename.file?
load_from_yaml(filename)
else
load_from_github(filename)
end
end
def config_file_name
'dm-dev.yml'
end
def load_from_github(filename)
cache(GitHub::API.user(name).repositories, filename)
end
# Load cached repository metadata from a YAML file.
#
# Fix: the previous `YAML.load(File.open(filename))` opened the file
# without a block and never closed it, leaking the handle until GC;
# YAML.load_file reads and closes deterministically.
#
# @param filename [Pathname, String] path to the YAML cache
# @return [Array<Struct>] one struct per repo, responding to #name and #url
def load_from_yaml(filename)
  YAML.load_file(filename)['repositories'].map do |repo|
    Struct.new(:name, :url).new(repo['name'], repo['url'])
  end
end
private
def cache(repos, filename)
File.open(filename, 'w') do |f|
f.write(YAML.dump({
'repositories' => repos.map { |repo| { 'name' => repo.name, 'url' => repo.url } }
}))
end
repos
end
end
# Small constant-resolution helpers used by the command dispatcher.
module Utils
  # Resolve a (possibly namespaced) constant name, starting from +root+.
  def self.full_const_get(name, root = Object)
    namespaces(name).reduce(root) do |scope, const|
      # const_get would walk the ancestor chain, but we only want
      # constants defined directly on the current scope.
      scope.const_defined?(const) ? scope.const_get(const) : scope.const_missing(const)
    end
  end

  # True when the name contains at least one '::' separator.
  def self.namespaced?(const_name)
    namespaces(const_name).length > 1
  end

  # Split 'A::B::C' (or '::A::B') into its constant-name parts.
  def self.namespaces(const_name)
    parts = const_name.to_s.split('::')
    parts.shift if parts.first.empty?
    parts
  end
end
class Repositories
include Enumerable
def initialize(root, user, repos, excluded_repos)
@root, @user = root, user
@repos = repos
@excluded_repos = excluded_repos
@metadata = Metadata.fetch(@root, @user)
@repositories = selected_repositories.map do |repo|
Repository.new(@root, repo)
end
end
def each
@repositories.each { |repo| yield(repo) }
end
private
def selected_repositories
if use_current_directory?
@metadata.select { |repo| managed_repo?(repo) }
else
@metadata.select { |repo| include_repo?(repo) }
end
end
def managed_repo?(repo)
repo.name == relative_path_name
end
def include_repo?(repo)
if @repos
!excluded_repo?(repo) && (include_all? || @repos.include?(repo.name))
else
!excluded_repo?(repo)
end
end
def excluded_repo?(repo)
@excluded_repos.include?(repo.name)
end
def use_current_directory?
@repos.nil? && inside_available_repo? && !include_all?
end
def inside_available_repo?
@metadata.map(&:name).include?(relative_path_name)
end
def include_all?
explicitly_specified = @repos.respond_to?(:each) && @repos.count == 1 && @repos.first == 'all'
if inside_available_repo?
explicitly_specified
else
@repos.nil? || explicitly_specified
end
end
def relative_path_name
Pathname(Dir.pwd).relative_path_from(@root).to_s
end
end
# Value object for one checked-out (or to-be-cloned) repository.
class Repository
  attr_reader :path, :name, :uri

  # @param root [Pathname] directory holding all checkouts
  # @param repo [#name, #url] metadata record for this repository
  def initialize(root, repo)
    @name = repo.name
    @path = root.join(@name)
    @uri  = Addressable::URI.parse(repo.url)
  end

  # A repository is installable when its checkout ships a Gemfile.
  def installable?
    path.join('Gemfile').file?
  end
end
# Resolved configuration for a project run. Each setting is taken, in
# order of precedence, from: the explicit options hash, an environment
# variable, then a built-in default.
class Environment
  attr_reader :name
  attr_reader :options
  attr_reader :root
  attr_reader :included
  attr_reader :excluded
  attr_reader :rubies
  attr_reader :bundle_root

  # @param name    [String] the github user/project name
  # @param options [Hash]   CLI options (see Tasks::CommonOptions)
  def initialize(name, options)
    @name = name
    @options = options
    @root = Pathname(@options[:root ] || ENV['DM_DEV_ROOT' ] || Dir.pwd)
    @bundle_root = Pathname(@options[:bundle_root] || ENV['DM_DEV_BUNDLE_ROOT'] || @root.join(default_bundle_root))
    @included = @options[:include] || (ENV['INCLUDE'] ? normalize(ENV['INCLUDE']) : default_included)
    @excluded = @options[:exclude] || (ENV['EXCLUDE'] ? normalize(ENV['EXCLUDE']) : default_excluded)
    @rubies = @options[:rubies ] || (ENV['RUBIES' ] ? normalize(ENV['RUBIES' ]) : default_rubies)
    @verbose = @options[:verbose] || (ENV['VERBOSE'] == 'true')
    @pretend = @options[:pretend] || (ENV['PRETEND'] == 'true')
  end

  # Directory name (under root) used when no bundle root is configured.
  # NOTE(review): reuses the ENV var name as a directory name — confirm
  # that is intentional.
  def default_bundle_root
    'DM_DEV_BUNDLE_ROOT'
  end

  # nil is interpreted upstream as "include all repositories".
  def default_included
    nil # means all
  end

  def default_excluded
    [] # overwrite in subclasses
  end

  def default_rubies
    %w[ 1.8.7 1.9.2 jruby rbx ]
  end

  def verbose?
    @verbose
  end

  def pretend?
    @pretend
  end

  private

  # Accept both comma- and space-separated lists from ENV variables.
  def normalize(string)
    string.gsub(',', ' ').split(' ')
  end
end
# Minimal progress logger: prints either the raw shell command
# (verbose/pretend mode) or a "[i/n] action repo msg" summary line.
class Logger
  attr_reader :progress

  # @param env        [Environment] source of the verbose?/pretend? flags
  # @param repo_count [Integer] total repositories, used for the [i/n] counter
  def initialize(env, repo_count)
    @env = env
    @progress = 0
    @total = repo_count
    # Width of the counter so progress numbers are zero-padded evenly.
    @padding = @total.to_s.length
    @verbose = @env.verbose?
    @pretend = @env.pretend?
  end

  # Print one log line for +repo+. The command is whitespace-normalized
  # for display only.
  def log(repo, action, command = nil, msg = nil)
    command = command.to_s.squeeze(' ').strip # TODO also do for actually executed commands
    if @pretend || @verbose
      puts command
    else
      puts '[%0*d/%d] %s %s %s%s' % format(repo, action, command, msg)
    end
  end

  # Advance the [i/n] counter by one repository.
  def progress!
    @progress += 1
  end

  # Argument list for the summary format string above; the final element
  # appends ": <command>" only when verbose.
  def format(repo, action, command, msg)
    [ @padding, @progress, @total, action, repo.name, msg, @verbose ? ": #{command}" : '' ]
  end
end
class Command
attr_reader :repo
attr_reader :env
attr_reader :root
attr_reader :path
attr_reader :uri
attr_reader :logger
def initialize(repo, env, logger)
@repo = repo
@env = env
@root = @env.root
@path = @root.join(@repo.name)
@uri = @repo.uri
@logger = logger
@verbose = @env.verbose?
end
def before
# overwrite in subclasses
end
def run
log_directory_change
FileUtils.cd(working_dir) do
if block_given?
yield
else
execute
end
end
end
def after
# overwrite in subclasses
end
def execute
if executable?
before
unless suppress_log?
log(command)
end
unless pretend?
sleep(timeout)
system(command)
end
after
else
if verbose? && !pretend?
log(command, "SKIPPED! - #{explanation}")
end
end
end
# overwrite in subclasses
def command
raise NotImplementedError
end
# overwrite in subclasses
def executable?
true
end
# overwrite in subclasses
def suppress_log?
false
end
# overwrite in subclasses
def explanation
'reason unknown'
end
def log_directory_change
if needs_directory_change? && (verbose? || pretend?)
log "cd #{working_dir}"
end
end
def needs_directory_change?
Dir.pwd != working_dir.to_s
end
def ignored?
ignored_repos.include?(repo.name)
end
# overwrite in subclasses
def ignored_repos
[]
end
# overwrite in subclasses
def working_dir
path
end
def verbose?
@verbose
end
def pretend?
@env.pretend?
end
def verbosity
verbose? ? verbose : silent
end
# overwrite in subclasses
def verbose
end
def silent
'>& /dev/null'
end
# overwrite in subclasses
def timeout
0
end
# overwrite in subclasses
def action
end
def log(command = nil, msg = nil)
logger.log(repo, action, command, msg)
end
class List < ::Project::Command
def run
log
end
end
class Sync < Command
def self.new(repo, env, logger)
return super unless self == Sync
if env.root.join(repo.name).directory?
Pull.new(repo, env, logger)
else
Clone.new(repo, env, logger)
end
end
# Clone a repository that does not exist locally yet.
class Clone < Sync
  def initialize(repo, env, logger)
    super
    git_uri = uri.dup
    git_uri.scheme = 'git'
    @git_uri = git_uri.to_s
    if env.options[:development]
      # Rewrite the public clone URL into the SSH push form, e.g.
      #   git://github.com/foo/bar -> git@github.com:foo/bar
      #
      # Fix: the previous code called sub! on the throwaway string
      # returned by to_s, so the rewrite was silently discarded — and
      # sub! returns nil when nothing matches, which would have raised
      # NoMethodError on the chained call.
      @git_uri = @git_uri.sub('://', '@').sub('/', ':')
    end
  end

  def command
    "git clone #{@git_uri}.git #{verbosity}"
  end

  # Clones run from the projects root; the repo directory does not exist yet.
  def working_dir
    root
  end

  def action
    'Cloning'
  end
end
# Update an already-cloned repository: switch to master, then rebase-pull.
class Pull < Sync
  def command
    steps = [
      "git checkout master #{verbosity}",
      "git pull --rebase #{verbosity}"
    ]
    steps.join('; ')
  end

  def action
    'Pulling'
  end
end
end
class Rvm < Command
attr_reader :rubies
def initialize(repo, env, logger)
super
@rubies = env.rubies
end
def command
"rvm #{rubies.join(',')}"
end
class Exec < Rvm
attr_reader :ruby
def run
super do
rubies.each do |ruby|
@ruby = ruby
if block_given?
yield(ruby)
else
execute
end
end
end
end
private
def command
"rvm #{@ruby} exec bash -c"
end
def action
"[#{@ruby}]"
end
end
end
class Bundle < Rvm::Exec
class Install < Bundle
def bundle_command
'install'
end
def action
"#{super} bundle install"
end
end
class Update < Bundle
def bundle_command
'update'
end
def action
"#{super} bundle update"
end
end
class Show < Bundle
def bundle_command
'show'
end
def action
"#{super} bundle show"
end
end
def initialize(repo, env, logger)
super
@bundle_root = env.bundle_root
rubies.each { |ruby| bundle_path(ruby).mkpath }
end
def before
super
make_gemfile
end
def executable?
!ignored? && repo.installable?
end
def command
"#{super} \"#{environment} bundle #{bundle_command} #{options} #{verbosity}\""
end
def environment
"BUNDLE_PATH='#{bundle_path(ruby)}' BUNDLE_GEMFILE='#{gemfile}'"
end
def bundle_path(ruby)
@bundle_root.join(ruby)
end
def gemfile
"Gemfile.#{ruby}"
end
def make_gemfile
unless working_dir.join(gemfile).file?
master = working_dir.join(master_gemfile)
log "cp #{master} #{gemfile}"
unless pretend?
FileUtils.cp(master, gemfile)
end
end
end
def master_gemfile
'Gemfile'
end
def options
nil
end
def explanation
if ignored?
"because it's ignored"
elsif !repo.installable?
"because it's missing a Gemfile"
else
"reason unknown"
end
end
end
# Run the spec suite per ruby, printing a textile results matrix (one row
# per ruby, one pass/fail cell per adapter) unless verbose or pretend.
class Spec < Bundle
  def run
    if print_matrix?
      # Table header: "h2. <repo>" plus a "| RUBY | adapter ... |" row.
      # NOTE(review): env.adapters is only defined on
      # DataMapper::Project::Environment, not the base Environment —
      # confirm this class is never driven by the plain environment.
      puts "\nh2. %s\n\n" % repo.name
      puts '| RUBY | %s |' % env.adapters.join(' | ')
    end
    super do |ruby|
      print '| %s |' % ruby if print_matrix?
      if block_given?
        yield ruby
      else
        execute
        if print_matrix?
          # $? is the exit status of the `system` call made by #execute.
          print ' %s |' % [ $?.success? ? 'pass' : 'fail' ]
        end
      end
    end
  end

  def bundle_command
    'exec rake spec'
  end

  def action
    "#{super} Testing"
  end

  # The matrix replaces normal log lines; only printed when actually
  # executing (not verbose, not pretend).
  def print_matrix?
    executable? && !verbose? && !pretend?
  end

  def suppress_log?
    !executable? || print_matrix?
  end
end
class Gem < Rvm
class Install < Gem
def command
"#{super} gem build #{gemspec_file}; #{super} gem install #{gem}"
end
def action
'Installing'
end
end
class Uninstall < Gem
def command
"#{super} gem uninstall #{repo.name} --version #{version}"
end
def action
'Uninstalling'
end
end
# Path of the built gem artifact, e.g. <working_dir>/<repo>-<version>.gem.
def gem
  "#{working_dir.join(repo.name)}-#{version}.gem"
end

# Path of the repository's gemspec, e.g. <working_dir>/<repo>.gemspec.
def gemspec_file
  "#{working_dir.join(repo.name)}.gemspec"
end

# Version string as declared in the gemspec.
# NOTE(review): gemspec_file is already an absolute path, so
# working_dir.join(gemspec_file) relies on Pathname#join discarding the
# receiver for absolute arguments; it also passes a Pathname (not a
# String) to Gem::Specification.load — confirm both are intentional.
def version
  ::Gem::Specification.load(working_dir.join(gemspec_file)).version.to_s
end
end
# Release the repository's gem via `rake release`.
class Release < Command
  def run
    # TODO move to its own command
    # NOTE(review): neither clean_repository nor project_name is defined
    # anywhere in this file — verify they exist elsewhere, otherwise this
    # raises NoMethodError as soon as it runs.
    clean_repository(project_name)
    FileUtils.cd(working_dir) do
      log(command)
      system(command) unless pretend?
    end
  end

  def command
    'rake release'
  end

  def action
    'Releasing'
  end
end
# Delete the repository checkout from disk.
class Implode < Command
  def run
    cmd = command
    log(cmd)
    unless pretend?
      system(cmd)
    end
  end

  def command
    "rm -rf #{working_dir} #{verbosity}"
  end

  def action
    'Deleting'
  end
end
# Show `git status` for the repository checkout.
class Status < Command
  def run
    log("cd #{working_dir}") if verbose? || pretend?
    FileUtils.cd(working_dir) do
      cmd = command
      log(cmd)
      unless pretend?
        system(cmd)
      end
    end
  end

  def command
    "git status"
  end

  def action
    'git status'
  end
end
end
end
module DataMapper
class Project < ::Project
# Wire the DataMapper-specific command implementations into the generic
# command registry consulted by ::Project#command_class.
def initialize(options = {})
  super
  commands['bundle:install'] = DataMapper::Project::Bundle::Install
  commands['bundle:update' ] = DataMapper::Project::Bundle::Update
  commands['bundle:show' ] = DataMapper::Project::Bundle::Show
  commands['spec'] = DataMapper::Project::Spec
end
def name
'datamapper'
end
def environment_class
DataMapper::Project::Environment
end
def excluded_repos
%w[ dm-more ]
end
before 'implode' do |env, repos|
FileUtils.rm_rf env.bundle_root if env.included.nil? && !env.pretend?
end
class Environment < ::Project::Environment
attr_reader :adapters
def initialize(name, options)
super
@adapters ||= options[:adapters] || (ENV['ADAPTERS'] ? normalize(ENV['ADAPTERS']) : default_adapters)
end
def default_adapters
%w[ in_memory yaml sqlite postgres mysql ]
end
def default_excluded
%w[ dm-oracle-adapter dm-sqlserver-adapter ]
end
end
module Bundle
def environment
"#{super} #{support_lib}"
end
def support_lib
ruby == '1.8.6' ? 'EXTLIB="true"' : ''
end
def adapters
env.adapters.join(' ')
end
def master_gemfile
'Gemfile.local'
end
def gemfile
"#{super}#{local_install? ? '.local' : ''}"
end
def local_install?
working_dir.join("Gemfile.local").file?
end
def ignored_repos
%w[ dm-dev data_mapper datamapper.github.com dm-ferret-adapter rails_datamapper ]
end
def timeout
2
end
module Manipulation
def environment
"#{super} ADAPTERS='#{adapters}'"
end
end
class Install < ::Project::Command::Bundle::Install
include DataMapper::Project::Bundle
include DataMapper::Project::Bundle::Manipulation
def before
unless local_install?
log local_gemfile_command
system local_gemfile_command unless pretend?
end
super
end
def local_gemfile_command
"rake local_gemfile #{verbosity}"
end
def options
'--without quality'
end
end
class Update < ::Project::Command::Bundle::Update
include DataMapper::Project::Bundle
include DataMapper::Project::Bundle::Manipulation
end
class Show < ::Project::Command::Bundle::Show
include DataMapper::Project::Bundle
include DataMapper::Project::Bundle::Manipulation
end
end
# Runs the spec suite once per (ruby, adapter) combination: the inherited
# run (via Rvm::Exec) yields each selected ruby, and this block then
# iterates every configured adapter, re-running `execute` with @adapter
# set so #environment exports the matching ADAPTER variable.
class Spec < ::Project::Command::Spec
  include DataMapper::Project::Bundle

  def run
    super do |ruby|
      env.adapters.each do |adapter|
        @adapter = adapter # HACK?
        execute
        if print_matrix?
          # $? is the exit status of the `system` call made by #execute.
          print ' %s |' % [ $?.success? ? 'pass' : 'fail' ]
        end
      end
      # Terminate the matrix row for this ruby.
      puts if print_matrix?
    end
  end

  # Shell environment for one spec run: adds the adapter under test and
  # pins TZ to utc so time-dependent specs behave consistently.
  def environment
    "#{super} ADAPTER=#{@adapter} TZ=utc"
  end
end
# The tasks
class Tasks < ::Thor
module CommonOptions
def self.included(host)
host.class_eval do
class_option :root, :type => :string, :aliases => '-r', :desc => 'The directory where all DM source code is stored (overwrites DM_DEV_ROOT)'
class_option :bundle_root, :type => :string, :aliases => '-B', :desc => 'The directory where bundler stores all its data (overwrites DM_DEV_BUNDLE_ROOT)'
class_option :rubies, :type => :array, :aliases => '-R', :desc => 'The rvm ruby interpreters to use with this command (overwrites RUBIES)'
class_option :include, :type => :array, :aliases => '-i', :desc => 'The DM gems to include with this command (overwrites INCLUDE)'
class_option :exclude, :type => :array, :aliases => '-e', :desc => 'The DM gems to exclude with this command (overwrites EXCLUDE)'
class_option :adapters, :type => :array, :aliases => '-a', :desc => 'The DM adapters to use with this command (overwrites ADAPTERS)'
class_option :pretend, :type => :boolean, :aliases => '-p', :desc => 'Print the shell commands that would get executed'
class_option :verbose, :type => :boolean, :aliases => '-v', :desc => 'Print the shell commands being executed'
class_option :benchmark, :type => :boolean, :aliases => '-b', :desc => 'Print the time the command took to execute'
end
end
end
namespace :dm
include Thor::Actions
include CommonOptions
desc 'sync', 'Sync with the DM repositories'
method_option :development, :type => :boolean, :aliases => '-d', :desc => 'Use the private github clone url if you have push access'
def sync
DataMapper::Project.sync(options)
end
desc 'spec', 'Run specs for DM gems'
def spec
DataMapper::Project.spec(options)
end
desc 'release', 'Release all DM gems to rubygems'
def release
DataMapper::Project.release(options)
end
desc 'implode', 'Delete all DM gems'
def implode
if implode_confirmed?
DataMapper::Project.implode(options)
end
end
desc 'status', 'Show git status information'
def status
DataMapper::Project.status(options)
end
class Bundle < ::Thor
namespace 'dm:bundle'
include CommonOptions
desc 'install', 'Bundle the DM repositories'
def install
DataMapper::Project.bundle_install(options)
end
desc 'update', 'Update the bundled DM repositories'
def update
DataMapper::Project.bundle_update(options)
end
desc 'show', 'Show the bundle content'
def show
DataMapper::Project.bundle_show(options)
end
end
class Gem < ::Thor
namespace 'dm:gem'
include CommonOptions
desc 'install', 'Install all included gems into the specified rubies'
def install
DataMapper::Project.gem_install(options)
end
desc 'uninstall', 'Uninstall all included gems from the specified rubies'
def uninstall
DataMapper::Project.gem_uninstall(options)
end
end
class Meta < ::Thor
namespace 'dm:meta'
desc 'list', 'List locally known DM repositories'
def list
DataMapper::Project.list
end
end
private
def implode_confirmed?
return true if options[:pretend]
question = "Are you really sure? This will destroy #{affected_repositories}! (yes)"
ask(question) == 'yes'
end
def affected_repositories
included = options[:include]
if include_all?(included)
'not only all repositories, but also everything below DM_DEV_BUNDLE_ROOT!'
else
"the following repositories: #{included.join(', ')}!"
end
end
# True when the implode will affect every repository: either no explicit
# include list was given, or the list is exactly ['all'].
#
# Fix: the helpers below take a required +included+ parameter, but were
# previously called without any argument, raising ArgumentError whenever
# this predicate was evaluated.
def include_all?(included)
  include_all_implicitly?(included) || include_all_explicitly?(included)
end

# No :include option at all means "everything".
def include_all_implicitly?(included)
  included.nil?
end

# An include list consisting solely of the literal 'all' also means
# "everything".
def include_all_explicitly?(included)
  included.respond_to?(:each) && included.count == 1 && included.first == 'all'
end
end
end
end
DM = DataMapper::Project
|
# -*- ruby -*-
# encoding: utf-8
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
require 'grpc/version'
Gem::Specification.new do |s|
s.name = 'grpc'
s.version = GRPC::VERSION
s.authors = ['gRPC Authors']
s.email = 'temiola@google.com'
s.homepage = 'https://github.com/google/grpc/tree/master/src/ruby'
s.summary = 'GRPC system in Ruby'
s.description = 'Send RPCs from Ruby using GRPC'
s.license = 'BSD-3-Clause'
s.required_ruby_version = '>= 2.0.0'
s.requirements << 'libgrpc ~> 0.6.0 needs to be installed'
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- spec/*`.split("\n")
s.executables = `git ls-files -- bin/*.rb`.split("\n").map do |f|
File.basename(f)
end
s.require_paths = ['lib']
s.platform = Gem::Platform::RUBY
s.add_dependency 'google-protobuf', '~> 3.0.0alpha.1.1'
s.add_dependency 'googleauth', '~> 0.4' # reqd for interop tests
s.add_dependency 'logging', '~> 2.0'
s.add_dependency 'minitest', '~> 5.4' # reqd for interop tests
s.add_development_dependency 'simplecov', '~> 0.9'
s.add_development_dependency 'bundler', '~> 1.9'
s.add_development_dependency 'rake', '~> 10.4'
s.add_development_dependency 'rake-compiler', '~> 0.9'
s.add_development_dependency 'rspec', '~> 3.2'
s.add_development_dependency 'rubocop', '~> 0.30'
s.extensions = %w(ext/grpc/extconf.rb)
end
Pin the version of rubocop used in grpc.gemspec
# -*- ruby -*-
# encoding: utf-8
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
require 'grpc/version'
Gem::Specification.new do |s|
s.name = 'grpc'
s.version = GRPC::VERSION
s.authors = ['gRPC Authors']
s.email = 'temiola@google.com'
s.homepage = 'https://github.com/google/grpc/tree/master/src/ruby'
s.summary = 'GRPC system in Ruby'
s.description = 'Send RPCs from Ruby using GRPC'
s.license = 'BSD-3-Clause'
s.required_ruby_version = '>= 2.0.0'
s.requirements << 'libgrpc ~> 0.6.0 needs to be installed'
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- spec/*`.split("\n")
s.executables = `git ls-files -- bin/*.rb`.split("\n").map do |f|
File.basename(f)
end
s.require_paths = ['lib']
s.platform = Gem::Platform::RUBY
s.add_dependency 'google-protobuf', '~> 3.0.0alpha.1.1'
s.add_dependency 'googleauth', '~> 0.4' # reqd for interop tests
s.add_dependency 'logging', '~> 2.0'
s.add_dependency 'minitest', '~> 5.4' # reqd for interop tests
s.add_development_dependency 'simplecov', '~> 0.9'
s.add_development_dependency 'bundler', '~> 1.9'
s.add_development_dependency 'rake', '~> 10.4'
s.add_development_dependency 'rake-compiler', '~> 0.9'
s.add_development_dependency 'rspec', '~> 3.2'
s.add_development_dependency 'rubocop', '~> 0.30.0'
s.extensions = %w(ext/grpc/extconf.rb)
end
|
require 'spec_helper'
class TestCheckpointV1 < CheckpointV1; end
describe "Identities" do
include Rack::Test::Methods
def app
TestCheckpointV1
end
let :realm do
realm = Realm.create!(:label => "area51")
Domain.create!(:realm => realm, :name => 'example.org')
realm
end
let :me do
identity = Identity.create!(:realm => realm)
account = Account.create!(:identity => identity,
:realm => realm,
:provider => 'twitter',
:uid => '1',
:token => 'token',
:secret => 'secret',
:nickname => 'nickname',
:name => 'name',
:profile_url => 'profile_url',
:image_url => 'image_url')
identity.primary_account = account
identity.save!
identity
end
let :god do
Identity.create!(:god => true, :realm => realm)
end
let :me_session do
Session.create!(:identity => me).key
end
let :god_session do
Session.create!(:identity => god).key
end
let(:json_output) { JSON.parse(last_response.body) }
describe "GET /identities/me" do
it "is possible to set current session with a http parameter" do
get "/identities/me", :session => me_session
identity = JSON.parse(last_response.body)['identity']
identity['id'].should eq me.id
end
it "describes me as a json hash" do
get "/identities/me", :session => me_session
identity = json_output['identity']
identity['id'].should eq me.id
identity['realm'].should eq me.realm.label
json_output['accounts'].should eq ['twitter']
profile = json_output['profile']
profile['provider'].should eq 'twitter'
profile['nickname'].should eq 'nickname'
profile['name'].should eq 'name'
profile['profile_url'].should eq 'profile_url'
profile['image_url'].should eq 'image_url'
# And the result is the same if I ask for me by id
former_response_body = last_response.body
get "/identities/#{me.id}", :session => me_session
last_response.body.should eq former_response_body
end
it "hands me my balls if I ask for current user when there is no current user" do
get "/identities/me"
last_response.body.should eq "{}"
end
end
describe "GET /identities/:identity_or_identities" do
it "describes an identity as a json hash" do
get "/identities/#{god.id}", :session => me_session
JSON.parse(last_response.body)['identity']['id'].should eq god.id
end
it "returns multiple identities" do
get "/identities/#{god.id},#{me.id}", :session => me_session
result = JSON.parse(last_response.body)['identities']
result.first['identity']['id'].should eq god.id
result.first['accounts'].should eq([])
result.first['profile'].should be_nil
result.last['identity']['id'].should eq me.id
result.last['accounts'].should eq(['twitter'])
result.last['profile']['provider'].should eq('twitter')
end
it "returns empty identities if requested ids do not exist" do
get "/identities/1024,1025,1026", :session => me_session
empty_hash = {}
result = JSON.parse(last_response.body)
result['identities'].length.should eq 3
result['identities'][0]['identity'].should eq empty_hash
result['identities'][1]['identity'].should eq empty_hash
result['identities'][2]['identity'].should eq empty_hash
end
it "can mix existing and non-existant identities" do
get "/identities/1024,#{me.id},1026,#{god.id}", :session => me_session
empty_hash = {}
result = JSON.parse(last_response.body)
result['identities'].length.should eq 4
result['identities'][0]['identity'].should eq empty_hash
result['identities'][1]['identity']['id'].should eq me.id
result['identities'][2]['identity'].should eq empty_hash
result['identities'].last['identity']['id'].should eq god.id
end
it "hands me a list of a single identity if I ask for it using a comma" do
  # A trailing comma still yields the list-shaped ("identities") response.
  get "/identities/#{god.id},", :session => me_session
  # Fix: the body was parsed twice and the first `result` was never used.
  result = JSON.parse(last_response.body)
  result['identities'].first['identity']['id'].should eq god.id
end
end
describe "POST /identities" do
it "creates an identity with an account" do
parameters = {:session => god_session, :account => {:provider => 'twitter', :nickname => 'nick', :uid => '1'}}
post '/identities', parameters
last_response.status.should eq(200)
result = JSON.parse(last_response.body)
json_output['identity']['id'].should_not be_nil
json_output['profile']["nickname"].should eq('nick')
end
it "can create god users" do
parameters = {:session => god_session, :identity => {:god => true}, :account => {:provider => 'twitter', :nickname => 'nick', :uid => '1'}}
post '/identities', parameters
last_response.status.should eq(200)
identity = json_output['identity']
identity['id'].should_not be_nil
identity['god'].should be_true
json_output['profile']["nickname"].should eq('nick')
end
it "ignores any realm that is passed in" do
god # trigger
parameters = {:session => god_session, :identity => {:realm => 'rock_and_roll'}, :account => {:provider => 'twitter', :nickname => 'nick', :uid => '1'}}
post '/identities', parameters
last_response.status.should eq(200)
identity = json_output['identity']
identity['id'].should_not be_nil
identity['realm'].should eq('area51')
profile = json_output['profile']
profile["nickname"].should eq('nick')
end
it "fails to create identities if not god" do
parameters = {:session => me_session, :account => {:provider => 'twitter', :nickname => 'nick', :uid => '1'}}
post '/identities', parameters
last_response.status.should eq(403)
end
end
describe "last_seen_at" do
it "stamps the user with a date for when it was last seen" do
get "/identities/me", :session => me_session
identities = Identity.cached_find_by_id(me.id)
identities.last_seen_at.to_date.should eq Time.now.to_date
end
it "updates last_seen_at timestamp when it is old" do
me.last_seen_at = Time.now.to_date-2
me.save!
get "/identities/me", :session => me_session
identity = Identity.cached_find_by_id(me.id)
identity.last_seen_at.to_date.should eq Time.now.to_date
end
end
end
Test invalid session hashes
require 'spec_helper'
class TestCheckpointV1 < CheckpointV1; end
describe "Identities" do
include Rack::Test::Methods
def app
TestCheckpointV1
end
let :realm do
realm = Realm.create!(:label => "area51")
Domain.create!(:realm => realm, :name => 'example.org')
realm
end
let :me do
identity = Identity.create!(:realm => realm)
account = Account.create!(:identity => identity,
:realm => realm,
:provider => 'twitter',
:uid => '1',
:token => 'token',
:secret => 'secret',
:nickname => 'nickname',
:name => 'name',
:profile_url => 'profile_url',
:image_url => 'image_url')
identity.primary_account = account
identity.save!
identity
end
let :god do
Identity.create!(:god => true, :realm => realm)
end
let :me_session do
Session.create!(:identity => me).key
end
let :god_session do
Session.create!(:identity => god).key
end
let(:json_output) { JSON.parse(last_response.body) }
describe "GET /identities/me" do
it "is possible to set current session with a http parameter" do
get "/identities/me", :session => me_session
identity = JSON.parse(last_response.body)['identity']
identity['id'].should eq me.id
end
it "handles invalid sessions gracefully" do
get "/identities/me", :session => "invalidsessionhash"
last_response.status.should eq 200
last_response.body.should eq '{}'
end
it "describes me as a json hash" do
get "/identities/me", :session => me_session
identity = json_output['identity']
identity['id'].should eq me.id
identity['realm'].should eq me.realm.label
json_output['accounts'].should eq ['twitter']
profile = json_output['profile']
profile['provider'].should eq 'twitter'
profile['nickname'].should eq 'nickname'
profile['name'].should eq 'name'
profile['profile_url'].should eq 'profile_url'
profile['image_url'].should eq 'image_url'
# And the result is the same if I ask for me by id
former_response_body = last_response.body
get "/identities/#{me.id}", :session => me_session
last_response.body.should eq former_response_body
end
it "hands me my balls if I ask for current user when there is no current user" do
get "/identities/me"
last_response.body.should eq "{}"
end
end
describe "GET /identities/:identity_or_identities" do
it "describes an identity as a json hash" do
get "/identities/#{god.id}", :session => me_session
JSON.parse(last_response.body)['identity']['id'].should eq god.id
end
it "returns multiple identities" do
get "/identities/#{god.id},#{me.id}", :session => me_session
result = JSON.parse(last_response.body)['identities']
result.first['identity']['id'].should eq god.id
result.first['accounts'].should eq([])
result.first['profile'].should be_nil
result.last['identity']['id'].should eq me.id
result.last['accounts'].should eq(['twitter'])
result.last['profile']['provider'].should eq('twitter')
end
it "returns empty identities if requested ids do not exist" do
get "/identities/1024,1025,1026", :session => me_session
empty_hash = {}
result = JSON.parse(last_response.body)
result['identities'].length.should eq 3
result['identities'][0]['identity'].should eq empty_hash
result['identities'][1]['identity'].should eq empty_hash
result['identities'][2]['identity'].should eq empty_hash
end
it "can mix existing and non-existant identities" do
get "/identities/1024,#{me.id},1026,#{god.id}", :session => me_session
empty_hash = {}
result = JSON.parse(last_response.body)
result['identities'].length.should eq 4
result['identities'][0]['identity'].should eq empty_hash
result['identities'][1]['identity']['id'].should eq me.id
result['identities'][2]['identity'].should eq empty_hash
result['identities'].last['identity']['id'].should eq god.id
end
it "hands me a list of a single identity if I ask for it using a comma" do
  # A trailing comma still yields the list-shaped ("identities") response.
  get "/identities/#{god.id},", :session => me_session
  # Fix: the body was parsed twice and the first `result` was never used.
  result = JSON.parse(last_response.body)
  result['identities'].first['identity']['id'].should eq god.id
end
end
describe "POST /identities" do
it "creates an identity with an account" do
parameters = {:session => god_session, :account => {:provider => 'twitter', :nickname => 'nick', :uid => '1'}}
post '/identities', parameters
last_response.status.should eq(200)
result = JSON.parse(last_response.body)
json_output['identity']['id'].should_not be_nil
json_output['profile']["nickname"].should eq('nick')
end
it "can create god users" do
parameters = {:session => god_session, :identity => {:god => true}, :account => {:provider => 'twitter', :nickname => 'nick', :uid => '1'}}
post '/identities', parameters
last_response.status.should eq(200)
identity = json_output['identity']
identity['id'].should_not be_nil
identity['god'].should be_true
json_output['profile']["nickname"].should eq('nick')
end
it "ignores any realm that is passed in" do
god # trigger
parameters = {:session => god_session, :identity => {:realm => 'rock_and_roll'}, :account => {:provider => 'twitter', :nickname => 'nick', :uid => '1'}}
post '/identities', parameters
last_response.status.should eq(200)
identity = json_output['identity']
identity['id'].should_not be_nil
identity['realm'].should eq('area51')
profile = json_output['profile']
profile["nickname"].should eq('nick')
end
it "fails to create identities if not god" do
parameters = {:session => me_session, :account => {:provider => 'twitter', :nickname => 'nick', :uid => '1'}}
post '/identities', parameters
last_response.status.should eq(403)
end
end
describe "last_seen_at" do
it "stamps the user with a date for when it was last seen" do
get "/identities/me", :session => me_session
identities = Identity.cached_find_by_id(me.id)
identities.last_seen_at.to_date.should eq Time.now.to_date
end
it "updates last_seen_at timestamp when it is old" do
me.last_seen_at = Time.now.to_date-2
me.save!
get "/identities/me", :session => me_session
identity = Identity.cached_find_by_id(me.id)
identity.last_seen_at.to_date.should eq Time.now.to_date
end
end
end
|
require File.expand_path('../application', __FILE__)
App::Application.initialize!
Try to fix Rails 3 with JRuby
require 'tzinfo'
require File.expand_path('../application', __FILE__)
App::Application.initialize!
|
#!/usr/bin/env rspec
require 'spec_helper'
describe 'snmp', :type => 'class' do
context 'on a non-supported osfamily' do
let(:params) {{}}
let :facts do {
:osfamily => 'foo',
:operatingsystem => 'bar'
}
end
# Compiling the catalog on an unsupported osfamily must abort.
# NOTE: a bare `expect { ... }` block without `.to` never runs its
# contents, so the original assertion was silently skipped; the matcher
# reference inside the block forces catalog compilation, which raises.
it 'should fail' do
  expect {
    should contain_class('snmp')
  }.to raise_error(Puppet::Error, /Module snmp is not supported on bar/)
end
end
redhatish = ['RedHat']
#redhatish = ['RedHat', 'Fedora']
debianish = ['Debian']
#debianish = ['Debian', 'Ubuntu']
suseish = ['Suse']
freebsdish = ['FreeBSD']
context 'on a supported osfamily, default parameters' do
redhatish.each do |os|
describe "for osfamily RedHat, operatingsystem #{os}" do
let(:params) {{}}
let :facts do {
:osfamily => 'RedHat',
:operatingsystem => os,
:operatingsystemrelease => '6.4',
:fqdn => 'myhost.localdomain'
}
end
it { should contain_package('snmpd').with(
:ensure => 'present',
:name => 'net-snmp'
)}
it { should_not contain_class('snmp::client') }
it { should contain_file('var-net-snmp').with(
:ensure => 'directory',
:mode => '0755',
:owner => 'root',
:group => 'root',
:path => '/var/lib/net-snmp',
:require => 'Package[snmpd]'
)}
it { should contain_file('snmpd.conf').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmpd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
# TODO add more contents for File[snmpd.conf]
it 'should contain File[snmpd.conf] with expected contents' do
verify_contents(subject, 'snmpd.conf', [
'agentaddress udp:127.0.0.1:161',
'#rocommunity public 127.0.0.1',
'com2sec notConfigUser default public',
'group notConfigGroup v1 notConfigUser',
'group notConfigGroup v2c notConfigUser',
'view systemview included .1.3.6.1.2.1.1',
'view systemview included .1.3.6.1.2.1.25.1.1',
'access notConfigGroup "" any noauth exact systemview none none',
'sysLocation Unknown',
'sysContact Unknown',
'sysServices 72',
'sysName myhost.localdomain',
'dontLogTCPWrappersConnects no',
])
end
it { should contain_file('snmpd.sysconfig').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/sysconfig/snmpd',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
it 'should contain File[snmpd.sysconfig] with contents "OPTIONS="-LS0-6d -Lf /dev/null -p /var/run/snmpd.pid""' do
verify_contents(subject, 'snmpd.sysconfig', [
'OPTIONS="-LS0-6d -Lf /dev/null -p /var/run/snmpd.pid"',
])
end
it { should contain_service('snmpd').with(
:ensure => 'running',
:name => 'snmpd',
:enable => true,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
it { should contain_file('snmptrapd.conf').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmptrapd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmptrapd]'
)}
# TODO add more contents for File[snmptrapd.conf]
it 'should contain File[snmptrapd.conf] with correct contents' do
verify_contents(subject, 'snmptrapd.conf', [
'doNotLogTraps no',
'authCommunity log,execute,net public',
'disableAuthorization no',
])
end
it { should contain_file('snmptrapd.sysconfig').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/sysconfig/snmptrapd',
:require => 'Package[snmpd]',
:notify => 'Service[snmptrapd]'
)}
it 'should contain File[snmptrapd.sysconfig] with contents "OPTIONS="-Lsd -p /var/run/snmptrapd.pid""' do
verify_contents(subject, 'snmptrapd.sysconfig', [
'OPTIONS="-Lsd -p /var/run/snmptrapd.pid"',
])
end
it { should contain_service('snmptrapd').with(
:ensure => 'stopped',
:name => 'snmptrapd',
:enable => false,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
end
end
debianish.each do |os|
describe "for osfamily Debian, operatingsystem #{os}" do
let(:params) {{}}
let :facts do {
:osfamily => 'Debian',
:operatingsystem => os,
:operatingsystemrelease => '6.0.7',
:fqdn => 'myhost2.localdomain'
}
end
it { should contain_package('snmpd').with(
:ensure => 'present',
:name => 'snmpd'
)}
it { should_not contain_class('snmp::client') }
it { should contain_file('var-net-snmp').with(
:ensure => 'directory',
:mode => '0755',
:owner => 'snmp',
:group => 'snmp',
:path => '/var/lib/snmp',
:require => 'Package[snmpd]'
)}
it { should contain_file('snmpd.conf').with(
:ensure => 'present',
:mode => '0600',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmpd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
# TODO add more contents for File[snmpd.conf]
it 'should contain File[snmpd.conf] with expected contents' do
verify_contents(subject, 'snmpd.conf', [
'agentaddress udp:127.0.0.1:161',
'#rocommunity public 127.0.0.1',
'com2sec notConfigUser default public',
'group notConfigGroup v1 notConfigUser',
'group notConfigGroup v2c notConfigUser',
'view systemview included .1.3.6.1.2.1.1',
'view systemview included .1.3.6.1.2.1.25.1.1',
'access notConfigGroup "" any noauth exact systemview none none',
'sysLocation Unknown',
'sysContact Unknown',
'sysServices 72',
'sysName myhost2.localdomain',
'dontLogTCPWrappersConnects no',
])
end
it { should contain_file('snmpd.sysconfig').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/default/snmpd',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
it 'should contain File[snmpd.sysconfig] with contents "SNMPDOPTS=\'-Lsd -Lf /dev/null -u snmp -g snmp -I -smux -p /var/run/snmpd.pid\'"' do
verify_contents(subject, 'snmpd.sysconfig', [
'SNMPDRUN=yes',
'SNMPDOPTS=\'-Lsd -Lf /dev/null -u snmp -g snmp -I -smux -p /var/run/snmpd.pid\'',
])
end
it { should contain_service('snmpd').with(
:ensure => 'running',
:name => 'snmpd',
:enable => true,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
it { should contain_file('snmptrapd.conf').with(
:ensure => 'present',
:mode => '0600',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmptrapd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
# TODO add more contents for File[snmptrapd.conf]
it 'should contain File[snmptrapd.conf] with correct contents' do
verify_contents(subject, 'snmptrapd.conf', [
'doNotLogTraps no',
'authCommunity log,execute,net public',
'disableAuthorization no',
])
end
it { should_not contain_file('snmptrapd.sysconfig') }
it 'should contain File[snmpd.sysconfig] with contents "TRAPDOPTS=\'-Lsd -p /var/run/snmptrapd.pid\'"' do
verify_contents(subject, 'snmpd.sysconfig', [
'TRAPDRUN=no',
'TRAPDOPTS=\'-Lsd -p /var/run/snmptrapd.pid\'',
])
end
it { should_not contain_service('snmptrapd') }
end
end
suseish.each do |os|
describe "for osfamily RedHat, operatingsystem #{os}" do
let(:params) {{}}
let :facts do {
:osfamily => 'Suse',
:operatingsystem => os,
:operatingsystemrelease => '11.1',
:fqdn => 'myhost3.localdomain'
}
end
it { should contain_package('snmpd').with(
:ensure => 'present',
:name => 'net-snmp'
)}
it { should_not contain_class('snmp::client') }
it { should contain_file('var-net-snmp').with(
:ensure => 'directory',
:mode => '0755',
:owner => 'root',
:group => 'root',
:path => '/var/lib/net-snmp',
:require => 'Package[snmpd]'
)}
it { should contain_file('snmpd.conf').with(
:ensure => 'present',
:mode => '0600',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmpd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
# TODO add more contents for File[snmpd.conf]
it 'should contain File[snmpd.conf] with expected contents' do
verify_contents(subject, 'snmpd.conf', [
'agentaddress udp:127.0.0.1:161',
'#rocommunity public 127.0.0.1',
'com2sec notConfigUser default public',
'group notConfigGroup v1 notConfigUser',
'group notConfigGroup v2c notConfigUser',
'view systemview included .1.3.6.1.2.1.1',
'view systemview included .1.3.6.1.2.1.25.1.1',
'access notConfigGroup "" any noauth exact systemview none none',
'sysLocation Unknown',
'sysContact Unknown',
'sysServices 72',
'sysName myhost3.localdomain',
'dontLogTCPWrappersConnects no',
])
end
it { should contain_file('snmpd.sysconfig').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/sysconfig/net-snmp',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
it 'should contain File[snmpd.sysconfig] with contents "SNMPD_LOGLEVEL="d""' do
verify_contents(subject, 'snmpd.sysconfig', [
'SNMPD_LOGLEVEL="d"',
])
end
it { should contain_service('snmpd').with(
:ensure => 'running',
:name => 'snmpd',
:enable => true,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
it { should contain_file('snmptrapd.conf').with(
:ensure => 'present',
:mode => '0600',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmptrapd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmptrapd]'
)}
# TODO add more contents for File[snmptrapd.conf]
it 'should contain File[snmptrapd.conf] with correct contents' do
verify_contents(subject, 'snmptrapd.conf', [
'doNotLogTraps no',
'authCommunity log,execute,net public',
'disableAuthorization no',
])
end
it { should_not contain_file('snmptrapd.sysconfig') }
it { should contain_exec('install /etc/init.d/snmptrapd').with(
:command => '/usr/bin/install -o 0 -g 0 -m0755 -p /usr/share/doc/packages/net-snmp/rc.snmptrapd /etc/init.d/snmptrapd',
:creates => '/etc/init.d/snmptrapd',
:require => 'Package[snmpd]'
)}
it { should contain_service('snmptrapd').with(
:ensure => 'stopped',
:name => 'snmptrapd',
:enable => false,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', 'Exec[install /etc/init.d/snmptrapd]', ]
)}
end
end
freebsdish.each do |os|
describe "for osfamily FreeBSD, operatingsystem #{os}" do
let(:params) {{}}
let :facts do {
:osfamily => 'FreeBSD',
:operatingsystem => os,
:operatingsystemrelease => '9.2',
:fqdn => 'myhost4.localdomain'
}
end
it { should contain_package('snmpd').with(
:ensure => 'present',
:name => 'net-mgmt/net-snmp'
)}
it { should_not contain_class('snmp::client') }
it { should contain_file('var-net-snmp').with(
:ensure => 'directory',
:mode => '0600',
:owner => 'root',
:group => 'wheel',
:path => '/var/net-snmp',
:require => 'Package[snmpd]'
)}
it { should contain_file('snmpd.conf').with(
:ensure => 'present',
:mode => '0755',
:owner => 'root',
:group => 'wheel',
:path => '/usr/local/etc/snmp/snmpd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
# TODO add more contents for File[snmpd.conf]
it 'should contain File[snmpd.conf] with expected contents' do
verify_contents(subject, 'snmpd.conf', [
'agentaddress udp:127.0.0.1:161',
'#rocommunity public 127.0.0.1',
'com2sec notConfigUser default public',
'group notConfigGroup v1 notConfigUser',
'group notConfigGroup v2c notConfigUser',
'view systemview included .1.3.6.1.2.1.1',
'view systemview included .1.3.6.1.2.1.25.1.1',
'access notConfigGroup "" any noauth exact systemview none none',
'sysLocation Unknown',
'sysContact Unknown',
'sysServices 72',
'sysName myhost4.localdomain',
'dontLogTCPWrappersConnects no',
])
end
it { should contain_service('snmpd').with(
:ensure => 'running',
:name => 'snmpd',
:enable => true,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
it { should contain_file('snmptrapd.conf').with(
:ensure => 'present',
:mode => '0755',
:owner => 'root',
:group => 'wheel',
:path => '/usr/local/etc/snmp/snmptrapd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmptrapd]'
)}
# TODO add more contents for File[snmptrapd.conf]
it 'should contain File[snmptrapd.conf] with correct contents' do
verify_contents(subject, 'snmptrapd.conf', [
'doNotLogTraps no',
'authCommunity log,execute,net public',
'disableAuthorization no',
])
end
it { should contain_service('snmptrapd').with(
:ensure => 'stopped',
:name => 'snmptrapd',
:enable => false,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
end
end
end
context 'on a supported osfamily (RedHat), custom parameters' do
let :facts do {
:osfamily => 'RedHat',
:operatingsystem => 'RedHat',
:operatingsystemrelease => '6.4'
}
end
describe 'ensure => absent' do
let(:params) {{ :ensure => 'absent' }}
it { should contain_package('snmpd').with_ensure('absent') }
it { should_not contain_class('snmp::client') }
it { should contain_file('var-net-snmp').with_ensure('directory') }
it { should contain_file('snmpd.conf').with_ensure('absent') }
it { should contain_file('snmpd.sysconfig').with_ensure('absent') }
it { should contain_service('snmpd').with_ensure('stopped') }
it { should contain_file('snmptrapd.conf').with_ensure('absent') }
it { should contain_file('snmptrapd.sysconfig').with_ensure('absent') }
it { should contain_service('snmptrapd').with_ensure('stopped') }
end
describe 'ensure => badvalue' do
let(:params) {{ :ensure => 'badvalue' }}
# An invalid `ensure` parameter must make catalog compilation fail.
# Fixed: `expect { ... }` without `.to` never evaluates its block, so
# this example previously passed without asserting anything.
it 'should fail' do
  expect {
    should contain_class('snmp')
  }.to raise_error(Puppet::Error, /ensure parameter must be present or absent/)
end
end
describe 'autoupgrade => true' do
let(:params) {{ :autoupgrade => true }}
it { should contain_package('snmpd').with_ensure('latest') }
it { should_not contain_class('snmp::client') }
it { should contain_file('var-net-snmp').with_ensure('directory') }
it { should contain_file('snmpd.conf').with_ensure('present') }
it { should contain_file('snmpd.sysconfig').with_ensure('present') }
it { should contain_service('snmpd').with_ensure('running') }
it { should contain_file('snmptrapd.conf').with_ensure('present') }
it { should contain_file('snmptrapd.sysconfig').with_ensure('present') }
it { should contain_service('snmptrapd').with_ensure('stopped') }
end
describe 'autoupgrade => badvalue' do
let(:params) {{ :autoupgrade => 'badvalue' }}
# A non-boolean `autoupgrade` must make catalog compilation fail.
# Fixed: the original `expect { ... }` lacked `.to`, so the block was
# never executed and the example was a vacuous pass.
it 'should fail' do
  expect {
    should contain_class('snmp')
  }.to raise_error(Puppet::Error, /"badvalue" is not a boolean./)
end
end
describe 'service_ensure => badvalue' do
let(:params) {{ :service_ensure => 'badvalue' }}
# An invalid `service_ensure` must make catalog compilation fail.
# Fixed: `expect { ... }` with no `.to` never runs or asserts; chaining
# `.to raise_error(...)` makes the expectation real.
it 'should fail' do
  expect {
    should contain_class('snmp')
  }.to raise_error(Puppet::Error, /service_ensure parameter must be running or stopped/)
end
end
describe 'install_client => true' do
let(:params) {{ :install_client => true }}
it { should contain_class('snmp::client').with(
:ensure => 'present',
:autoupgrade => 'false',
:snmp_config => []
)}
end
describe 'manage_client => true' do
let(:params) {{ :manage_client => true }}
it { should contain_class('snmp::client').with(
:ensure => 'present',
:autoupgrade => 'false',
:snmp_config => []
)}
end
describe 'manage_client => true, snmp_config => [ "defVersion 2c", "defCommunity public" ], ensure => absent, and autoupgrade => true' do
let :params do {
:manage_client => true,
:ensure => 'absent',
:autoupgrade => true,
:snmp_config => [ 'defVersion 2c', 'defCommunity public' ]
}
end
it { should contain_class('snmp::client').with(
:ensure => 'absent',
:autoupgrade => 'true',
:snmp_config => [ 'defVersion 2c', 'defCommunity public' ]
)}
end
describe 'service_ensure => stopped' do
let(:params) {{ :service_ensure => 'stopped' }}
it { should contain_service('snmpd').with_ensure('stopped') }
it { should contain_service('snmptrapd').with_ensure('stopped') }
end
describe 'trap_service_ensure => running' do
let(:params) {{ :trap_service_ensure => 'running' }}
it { should contain_service('snmpd').with_ensure('running') }
it { should contain_service('snmptrapd').with_ensure('running') }
end
describe 'service_ensure => stopped and trap_service_ensure => running' do
let :params do {
:service_ensure => 'stopped',
:trap_service_ensure => 'running'
}
end
it { should contain_service('snmpd').with_ensure('stopped') }
it { should contain_service('snmptrapd').with_ensure('running') }
end
describe 'snmpd_options => blah' do
let(:params) {{ :snmpd_options => 'blah' }}
it { should contain_file('snmpd.sysconfig') }
it 'should contain File[snmpd.sysconfig] with contents "OPTIONS=\'blah\'"' do
verify_contents(subject, 'snmpd.sysconfig', [
'OPTIONS="blah"',
])
end
end
describe 'snmptrapd_options => bleh' do
let(:params) {{ :snmptrapd_options => 'bleh' }}
it { should contain_file('snmptrapd.sysconfig') }
it 'should contain File[snmptrapd.sysconfig] with contents "OPTIONS=\'bleh\'"' do
verify_contents(subject, 'snmptrapd.sysconfig', [
'OPTIONS="bleh"',
])
end
end
describe 'com2sec => [ SomeString ]' do
let(:params) {{ :com2sec => [ 'SomeString', ] }}
it 'should contain File[snmpd.conf] with contents "com2sec SomeString"' do
verify_contents(subject, 'snmpd.conf', [
'com2sec SomeString',
])
end
end
describe 'groups => [ SomeString ]' do
let(:params) {{ :groups => [ 'SomeString', ] }}
it 'should contain File[snmpd.conf] with contents "groups SomeString"' do
verify_contents(subject, 'snmpd.conf', [
'group SomeString',
])
end
end
describe 'dlmod => [ SomeString ]' do
let(:params) {{ :dlmod => [ 'SomeString', ] }}
it 'should contain File[snmpd.conf] with contents "dlmod SomeString"' do
verify_contents(subject, 'snmpd.conf', [
'dlmod SomeString',
])
end
end
describe 'agentaddress => [ "1.2.3.4", "8.6.7.5:222" ]' do
let(:params) {{ :agentaddress => ['1.2.3.4','8.6.7.5:222'] }}
it 'should contain File[snmpd.conf] with contents "agentaddress 1.2.3.4,8.6.7.5:222"' do
verify_contents(subject, 'snmpd.conf', [
'agentaddress 1.2.3.4,8.6.7.5:222',
])
end
end
describe 'snmptrapdaddr => [ "5.6.7.8", "2.3.4.5:3333" ]' do
let(:params) {{ :snmptrapdaddr => ['5.6.7.8','2.3.4.5:3333'] }}
it 'should contain File[snmptrapd.conf] with contents "snmpTrapdAddr 5.6.7.8,2.3.4.5:3333"' do
verify_contents(subject, 'snmptrapd.conf', [
'snmpTrapdAddr 5.6.7.8,2.3.4.5:3333',
])
end
end
describe 'snmpd_config => [ "option 1", "option 2", ]' do
let(:params) {{ :snmpd_config => [ 'option 1', 'option 2', ] }}
it 'should contain File[snmpd.conf] with contents "option1" and "option 2"' do
verify_contents(subject, 'snmpd.conf', [
'option 1',
'option 2',
])
end
end
describe 'snmptrapd_config => [ "option 3", "option 4", ]' do
let(:params) {{ :snmptrapd_config => [ 'option 3', 'option 4', ] }}
it 'should contain File[snmptrapd.conf] with contents "option 3" and "option 4"' do
verify_contents(subject, 'snmptrapd.conf', [
'option 3',
'option 4',
])
end
end
end
context 'on a supported osfamily (Debian), custom parameters' do
let :facts do {
:osfamily => 'Debian',
:operatingsystem => 'Debian',
:operatingsystemrelease => '7.0'
}
end
describe 'service_ensure => stopped and trap_service_ensure => running' do
let :params do {
:service_ensure => 'stopped',
:trap_service_ensure => 'running'
}
end
it { should contain_service('snmpd').with_ensure('running') }
it { should_not contain_service('snmptrapd') }
it 'should contain File[snmpd.sysconfig] with contents "SNMPDRUN=no" and "TRAPDRUN=yes"' do
verify_contents(subject, 'snmpd.sysconfig', [
'SNMPDRUN=no',
'TRAPDRUN=yes',
])
end
end
describe 'snmpd_options => blah' do
let(:params) {{ :snmpd_options => 'blah' }}
it { should contain_file('snmpd.sysconfig') }
it 'should contain File[snmpd.sysconfig] with contents "SNMPDOPTS=\'blah\'"' do
verify_contents(subject, 'snmpd.sysconfig', [
'SNMPDOPTS=\'blah\'',
])
end
end
describe 'snmptrapd_options => bleh' do
let(:params) {{ :snmptrapd_options => 'bleh' }}
it { should contain_file('snmpd.sysconfig') }
it 'should contain File[snmpd.sysconfig] with contents "TRAPDOPTS=\'bleh\'"' do
verify_contents(subject, 'snmpd.sysconfig', [
'TRAPDOPTS=\'bleh\'',
])
end
end
end
context 'on a supported osfamily (Suse), custom parameters' do
let :facts do {
:osfamily => 'Suse',
:operatingsystem => 'Suse',
:operatingsystemrelease => '11.1'
}
end
describe 'service_ensure => stopped' do
let(:params) {{ :service_ensure => 'stopped' }}
it { should contain_service('snmpd').with_ensure('stopped') }
it { should contain_service('snmptrapd').with_ensure('stopped') }
end
describe 'trap_service_ensure => running' do
let(:params) {{ :trap_service_ensure => 'running' }}
it { should contain_service('snmpd').with_ensure('running') }
it { should contain_service('snmptrapd').with_ensure('running') }
end
describe 'service_ensure => stopped and trap_service_ensure => running' do
let :params do {
:service_ensure => 'stopped',
:trap_service_ensure => 'running'
}
end
it { should contain_service('snmpd').with_ensure('stopped') }
it { should contain_service('snmptrapd').with_ensure('running') }
end
describe 'snmpd_options => blah' do
let(:params) {{ :snmpd_options => 'blah' }}
it { should contain_file('snmpd.sysconfig') }
it 'should contain File[snmpd.sysconfig] with contents "SNMPD_LOGLEVEL="blah""' do
verify_contents(subject, 'snmpd.sysconfig', [
'SNMPD_LOGLEVEL="blah"',
])
end
end
end
end
# Additional test for when do_not_log_tcpwrappers is set to yes.
#!/usr/bin/env rspec
require 'spec_helper'
describe 'snmp', :type => 'class' do
context 'on a non-supported osfamily' do
let(:params) {{}}
let :facts do {
:osfamily => 'foo',
:operatingsystem => 'bar'
}
end
# Compiling the catalog on an unsupported osfamily must abort.
# Fixed: a bare `expect { ... }` without `.to` never runs its contents,
# so the original assertion was silently skipped.
it 'should fail' do
  expect {
    should contain_class('snmp')
  }.to raise_error(Puppet::Error, /Module snmp is not supported on bar/)
end
end
redhatish = ['RedHat']
#redhatish = ['RedHat', 'Fedora']
debianish = ['Debian']
#debianish = ['Debian', 'Ubuntu']
suseish = ['Suse']
freebsdish = ['FreeBSD']
context 'on a supported osfamily, default parameters' do
redhatish.each do |os|
describe "for osfamily RedHat, operatingsystem #{os}" do
let(:params) {{}}
let :facts do {
:osfamily => 'RedHat',
:operatingsystem => os,
:operatingsystemrelease => '6.4',
:fqdn => 'myhost.localdomain'
}
end
it { should contain_package('snmpd').with(
:ensure => 'present',
:name => 'net-snmp'
)}
it { should_not contain_class('snmp::client') }
it { should contain_file('var-net-snmp').with(
:ensure => 'directory',
:mode => '0755',
:owner => 'root',
:group => 'root',
:path => '/var/lib/net-snmp',
:require => 'Package[snmpd]'
)}
it { should contain_file('snmpd.conf').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmpd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
# TODO add more contents for File[snmpd.conf]
it 'should contain File[snmpd.conf] with expected contents' do
verify_contents(subject, 'snmpd.conf', [
'agentaddress udp:127.0.0.1:161',
'#rocommunity public 127.0.0.1',
'com2sec notConfigUser default public',
'group notConfigGroup v1 notConfigUser',
'group notConfigGroup v2c notConfigUser',
'view systemview included .1.3.6.1.2.1.1',
'view systemview included .1.3.6.1.2.1.25.1.1',
'access notConfigGroup "" any noauth exact systemview none none',
'sysLocation Unknown',
'sysContact Unknown',
'sysServices 72',
'sysName myhost.localdomain',
'dontLogTCPWrappersConnects no',
])
end
it { should contain_file('snmpd.sysconfig').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/sysconfig/snmpd',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
it 'should contain File[snmpd.sysconfig] with contents "OPTIONS="-LS0-6d -Lf /dev/null -p /var/run/snmpd.pid""' do
verify_contents(subject, 'snmpd.sysconfig', [
'OPTIONS="-LS0-6d -Lf /dev/null -p /var/run/snmpd.pid"',
])
end
it { should contain_service('snmpd').with(
:ensure => 'running',
:name => 'snmpd',
:enable => true,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
it { should contain_file('snmptrapd.conf').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmptrapd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmptrapd]'
)}
# TODO add more contents for File[snmptrapd.conf]
it 'should contain File[snmptrapd.conf] with correct contents' do
verify_contents(subject, 'snmptrapd.conf', [
'doNotLogTraps no',
'authCommunity log,execute,net public',
'disableAuthorization no',
])
end
it { should contain_file('snmptrapd.sysconfig').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/sysconfig/snmptrapd',
:require => 'Package[snmpd]',
:notify => 'Service[snmptrapd]'
)}
it 'should contain File[snmptrapd.sysconfig] with contents "OPTIONS="-Lsd -p /var/run/snmptrapd.pid""' do
verify_contents(subject, 'snmptrapd.sysconfig', [
'OPTIONS="-Lsd -p /var/run/snmptrapd.pid"',
])
end
it { should contain_service('snmptrapd').with(
:ensure => 'stopped',
:name => 'snmptrapd',
:enable => false,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
end
end
debianish.each do |os|
describe "for osfamily Debian, operatingsystem #{os}" do
let(:params) {{}}
let :facts do {
:osfamily => 'Debian',
:operatingsystem => os,
:operatingsystemrelease => '6.0.7',
:fqdn => 'myhost2.localdomain'
}
end
it { should contain_package('snmpd').with(
:ensure => 'present',
:name => 'snmpd'
)}
it { should_not contain_class('snmp::client') }
it { should contain_file('var-net-snmp').with(
:ensure => 'directory',
:mode => '0755',
:owner => 'snmp',
:group => 'snmp',
:path => '/var/lib/snmp',
:require => 'Package[snmpd]'
)}
it { should contain_file('snmpd.conf').with(
:ensure => 'present',
:mode => '0600',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmpd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
# TODO add more contents for File[snmpd.conf]
it 'should contain File[snmpd.conf] with expected contents' do
verify_contents(subject, 'snmpd.conf', [
'agentaddress udp:127.0.0.1:161',
'#rocommunity public 127.0.0.1',
'com2sec notConfigUser default public',
'group notConfigGroup v1 notConfigUser',
'group notConfigGroup v2c notConfigUser',
'view systemview included .1.3.6.1.2.1.1',
'view systemview included .1.3.6.1.2.1.25.1.1',
'access notConfigGroup "" any noauth exact systemview none none',
'sysLocation Unknown',
'sysContact Unknown',
'sysServices 72',
'sysName myhost2.localdomain',
'dontLogTCPWrappersConnects no',
])
end
it { should contain_file('snmpd.sysconfig').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/default/snmpd',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
it 'should contain File[snmpd.sysconfig] with contents "SNMPDOPTS=\'-Lsd -Lf /dev/null -u snmp -g snmp -I -smux -p /var/run/snmpd.pid\'"' do
verify_contents(subject, 'snmpd.sysconfig', [
'SNMPDRUN=yes',
'SNMPDOPTS=\'-Lsd -Lf /dev/null -u snmp -g snmp -I -smux -p /var/run/snmpd.pid\'',
])
end
it { should contain_service('snmpd').with(
:ensure => 'running',
:name => 'snmpd',
:enable => true,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
it { should contain_file('snmptrapd.conf').with(
:ensure => 'present',
:mode => '0600',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmptrapd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
# TODO add more contents for File[snmptrapd.conf]
it 'should contain File[snmptrapd.conf] with correct contents' do
verify_contents(subject, 'snmptrapd.conf', [
'doNotLogTraps no',
'authCommunity log,execute,net public',
'disableAuthorization no',
])
end
it { should_not contain_file('snmptrapd.sysconfig') }
it 'should contain File[snmpd.sysconfig] with contents "TRAPDOPTS=\'-Lsd -p /var/run/snmptrapd.pid\'"' do
verify_contents(subject, 'snmpd.sysconfig', [
'TRAPDRUN=no',
'TRAPDOPTS=\'-Lsd -p /var/run/snmptrapd.pid\'',
])
end
it { should_not contain_service('snmptrapd') }
end
end
suseish.each do |os|
describe "for osfamily RedHat, operatingsystem #{os}" do
let(:params) {{}}
let :facts do {
:osfamily => 'Suse',
:operatingsystem => os,
:operatingsystemrelease => '11.1',
:fqdn => 'myhost3.localdomain'
}
end
it { should contain_package('snmpd').with(
:ensure => 'present',
:name => 'net-snmp'
)}
it { should_not contain_class('snmp::client') }
it { should contain_file('var-net-snmp').with(
:ensure => 'directory',
:mode => '0755',
:owner => 'root',
:group => 'root',
:path => '/var/lib/net-snmp',
:require => 'Package[snmpd]'
)}
it { should contain_file('snmpd.conf').with(
:ensure => 'present',
:mode => '0600',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmpd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
# TODO add more contents for File[snmpd.conf]
it 'should contain File[snmpd.conf] with expected contents' do
verify_contents(subject, 'snmpd.conf', [
'agentaddress udp:127.0.0.1:161',
'#rocommunity public 127.0.0.1',
'com2sec notConfigUser default public',
'group notConfigGroup v1 notConfigUser',
'group notConfigGroup v2c notConfigUser',
'view systemview included .1.3.6.1.2.1.1',
'view systemview included .1.3.6.1.2.1.25.1.1',
'access notConfigGroup "" any noauth exact systemview none none',
'sysLocation Unknown',
'sysContact Unknown',
'sysServices 72',
'sysName myhost3.localdomain',
'dontLogTCPWrappersConnects no',
])
end
it { should contain_file('snmpd.sysconfig').with(
:ensure => 'present',
:mode => '0644',
:owner => 'root',
:group => 'root',
:path => '/etc/sysconfig/net-snmp',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
it 'should contain File[snmpd.sysconfig] with contents "SNMPD_LOGLEVEL="d""' do
verify_contents(subject, 'snmpd.sysconfig', [
'SNMPD_LOGLEVEL="d"',
])
end
it { should contain_service('snmpd').with(
:ensure => 'running',
:name => 'snmpd',
:enable => true,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
it { should contain_file('snmptrapd.conf').with(
:ensure => 'present',
:mode => '0600',
:owner => 'root',
:group => 'root',
:path => '/etc/snmp/snmptrapd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmptrapd]'
)}
# TODO add more contents for File[snmptrapd.conf]
it 'should contain File[snmptrapd.conf] with correct contents' do
verify_contents(subject, 'snmptrapd.conf', [
'doNotLogTraps no',
'authCommunity log,execute,net public',
'disableAuthorization no',
])
end
it { should_not contain_file('snmptrapd.sysconfig') }
it { should contain_exec('install /etc/init.d/snmptrapd').with(
:command => '/usr/bin/install -o 0 -g 0 -m0755 -p /usr/share/doc/packages/net-snmp/rc.snmptrapd /etc/init.d/snmptrapd',
:creates => '/etc/init.d/snmptrapd',
:require => 'Package[snmpd]'
)}
it { should contain_service('snmptrapd').with(
:ensure => 'stopped',
:name => 'snmptrapd',
:enable => false,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', 'Exec[install /etc/init.d/snmptrapd]', ]
)}
end
end
freebsdish.each do |os|
describe "for osfamily FreeBSD, operatingsystem #{os}" do
let(:params) {{}}
let :facts do {
:osfamily => 'FreeBSD',
:operatingsystem => os,
:operatingsystemrelease => '9.2',
:fqdn => 'myhost4.localdomain'
}
end
it { should contain_package('snmpd').with(
:ensure => 'present',
:name => 'net-mgmt/net-snmp'
)}
it { should_not contain_class('snmp::client') }
it { should contain_file('var-net-snmp').with(
:ensure => 'directory',
:mode => '0600',
:owner => 'root',
:group => 'wheel',
:path => '/var/net-snmp',
:require => 'Package[snmpd]'
)}
it { should contain_file('snmpd.conf').with(
:ensure => 'present',
:mode => '0755',
:owner => 'root',
:group => 'wheel',
:path => '/usr/local/etc/snmp/snmpd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmpd]'
)}
# TODO add more contents for File[snmpd.conf]
it 'should contain File[snmpd.conf] with expected contents' do
verify_contents(subject, 'snmpd.conf', [
'agentaddress udp:127.0.0.1:161',
'#rocommunity public 127.0.0.1',
'com2sec notConfigUser default public',
'group notConfigGroup v1 notConfigUser',
'group notConfigGroup v2c notConfigUser',
'view systemview included .1.3.6.1.2.1.1',
'view systemview included .1.3.6.1.2.1.25.1.1',
'access notConfigGroup "" any noauth exact systemview none none',
'sysLocation Unknown',
'sysContact Unknown',
'sysServices 72',
'sysName myhost4.localdomain',
'dontLogTCPWrappersConnects no',
])
end
it { should contain_service('snmpd').with(
:ensure => 'running',
:name => 'snmpd',
:enable => true,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
it { should contain_file('snmptrapd.conf').with(
:ensure => 'present',
:mode => '0755',
:owner => 'root',
:group => 'wheel',
:path => '/usr/local/etc/snmp/snmptrapd.conf',
:require => 'Package[snmpd]',
:notify => 'Service[snmptrapd]'
)}
# TODO add more contents for File[snmptrapd.conf]
it 'should contain File[snmptrapd.conf] with correct contents' do
verify_contents(subject, 'snmptrapd.conf', [
'doNotLogTraps no',
'authCommunity log,execute,net public',
'disableAuthorization no',
])
end
it { should contain_service('snmptrapd').with(
:ensure => 'stopped',
:name => 'snmptrapd',
:enable => false,
:hasstatus => true,
:hasrestart => true,
:require => [ 'Package[snmpd]', 'File[var-net-snmp]', ]
)}
end
end
end
context 'on a supported osfamily (RedHat), custom parameters' do
let :facts do {
:osfamily => 'RedHat',
:operatingsystem => 'RedHat',
:operatingsystemrelease => '6.4'
}
end
# With ensure => absent, the package and config files are removed and the
# services stopped, but snmp::client is not pulled in at all.
describe 'ensure => absent' do
let(:params) {{ :ensure => 'absent' }}
it { should contain_package('snmpd').with_ensure('absent') }
it { should_not contain_class('snmp::client') }
# NOTE(review): the data directory stays 'directory' even when absent —
# presumably deliberate so collected data survives a reinstall; confirm.
it { should contain_file('var-net-snmp').with_ensure('directory') }
it { should contain_file('snmpd.conf').with_ensure('absent') }
it { should contain_file('snmpd.sysconfig').with_ensure('absent') }
it { should contain_service('snmpd').with_ensure('stopped') }
it { should contain_file('snmptrapd.conf').with_ensure('absent') }
it { should contain_file('snmptrapd.sysconfig').with_ensure('absent') }
it { should contain_service('snmptrapd').with_ensure('stopped') }
end
# An invalid `ensure` value must make catalog compilation fail.
# BUGFIX: the original wrote `expect { ... }` with no `.to raise_error`
# matcher attached, so the block was never executed and the example passed
# unconditionally. Evaluating a contain_* matcher inside the block forces
# compilation, and `.to raise_error(...)` asserts the validation error.
describe 'ensure => badvalue' do
  let(:params) {{ :ensure => 'badvalue' }}
  it 'should fail' do
    expect {
      should contain_package('snmpd')
    }.to raise_error(Puppet::Error, /ensure parameter must be present or absent/)
  end
end
describe 'autoupgrade => true' do
let(:params) {{ :autoupgrade => true }}
it { should contain_package('snmpd').with_ensure('latest') }
it { should_not contain_class('snmp::client') }
it { should contain_file('var-net-snmp').with_ensure('directory') }
it { should contain_file('snmpd.conf').with_ensure('present') }
it { should contain_file('snmpd.sysconfig').with_ensure('present') }
it { should contain_service('snmpd').with_ensure('running') }
it { should contain_file('snmptrapd.conf').with_ensure('present') }
it { should contain_file('snmptrapd.sysconfig').with_ensure('present') }
it { should contain_service('snmptrapd').with_ensure('stopped') }
end
# A non-boolean `autoupgrade` must fail validate_bool during compilation.
# BUGFIX: the original `expect { ... }` had no `.to raise_error` matcher,
# so the block never ran and the example could not fail. Forcing catalog
# compilation inside the block and chaining `.to raise_error` fixes it.
describe 'autoupgrade => badvalue' do
  let(:params) {{ :autoupgrade => 'badvalue' }}
  it 'should fail' do
    expect {
      should contain_package('snmpd')
    }.to raise_error(Puppet::Error, /"badvalue" is not a boolean./)
  end
end
# An invalid `service_ensure` must make catalog compilation fail.
# BUGFIX: same pattern as the other failure examples — the original
# `expect { ... }` block had no `.to raise_error` attached and was a no-op.
describe 'service_ensure => badvalue' do
  let(:params) {{ :service_ensure => 'badvalue' }}
  it 'should fail' do
    expect {
      should contain_package('snmpd')
    }.to raise_error(Puppet::Error, /service_ensure parameter must be running or stopped/)
  end
end
describe 'install_client => true' do
let(:params) {{ :install_client => true }}
it { should contain_class('snmp::client').with(
:ensure => 'present',
:autoupgrade => 'false',
:snmp_config => []
)}
end
describe 'manage_client => true' do
let(:params) {{ :manage_client => true }}
it { should contain_class('snmp::client').with(
:ensure => 'present',
:autoupgrade => 'false',
:snmp_config => []
)}
end
describe 'manage_client => true, snmp_config => [ "defVersion 2c", "defCommunity public" ], ensure => absent, and autoupgrade => true' do
let :params do {
:manage_client => true,
:ensure => 'absent',
:autoupgrade => true,
:snmp_config => [ 'defVersion 2c', 'defCommunity public' ]
}
end
it { should contain_class('snmp::client').with(
:ensure => 'absent',
:autoupgrade => 'true',
:snmp_config => [ 'defVersion 2c', 'defCommunity public' ]
)}
end
describe 'service_ensure => stopped' do
let(:params) {{ :service_ensure => 'stopped' }}
it { should contain_service('snmpd').with_ensure('stopped') }
it { should contain_service('snmptrapd').with_ensure('stopped') }
end
describe 'trap_service_ensure => running' do
let(:params) {{ :trap_service_ensure => 'running' }}
it { should contain_service('snmpd').with_ensure('running') }
it { should contain_service('snmptrapd').with_ensure('running') }
end
describe 'service_ensure => stopped and trap_service_ensure => running' do
let :params do {
:service_ensure => 'stopped',
:trap_service_ensure => 'running'
}
end
it { should contain_service('snmpd').with_ensure('stopped') }
it { should contain_service('snmptrapd').with_ensure('running') }
end
describe 'snmpd_options => blah' do
let(:params) {{ :snmpd_options => 'blah' }}
it { should contain_file('snmpd.sysconfig') }
it 'should contain File[snmpd.sysconfig] with contents "OPTIONS=\'blah\'"' do
verify_contents(subject, 'snmpd.sysconfig', [
'OPTIONS="blah"',
])
end
end
describe 'snmptrapd_options => bleh' do
let(:params) {{ :snmptrapd_options => 'bleh' }}
it { should contain_file('snmptrapd.sysconfig') }
it 'should contain File[snmptrapd.sysconfig] with contents "OPTIONS=\'bleh\'"' do
verify_contents(subject, 'snmptrapd.sysconfig', [
'OPTIONS="bleh"',
])
end
end
describe 'com2sec => [ SomeString ]' do
let(:params) {{ :com2sec => [ 'SomeString', ] }}
it 'should contain File[snmpd.conf] with contents "com2sec SomeString"' do
verify_contents(subject, 'snmpd.conf', [
'com2sec SomeString',
])
end
end
describe 'groups => [ SomeString ]' do
let(:params) {{ :groups => [ 'SomeString', ] }}
it 'should contain File[snmpd.conf] with contents "groups SomeString"' do
verify_contents(subject, 'snmpd.conf', [
'group SomeString',
])
end
end
describe 'dlmod => [ SomeString ]' do
let(:params) {{ :dlmod => [ 'SomeString', ] }}
it 'should contain File[snmpd.conf] with contents "dlmod SomeString"' do
verify_contents(subject, 'snmpd.conf', [
'dlmod SomeString',
])
end
end
describe 'agentaddress => [ "1.2.3.4", "8.6.7.5:222" ]' do
let(:params) {{ :agentaddress => ['1.2.3.4','8.6.7.5:222'] }}
it 'should contain File[snmpd.conf] with contents "agentaddress 1.2.3.4,8.6.7.5:222"' do
verify_contents(subject, 'snmpd.conf', [
'agentaddress 1.2.3.4,8.6.7.5:222',
])
end
end
describe 'do_not_log_tcpwrappers => "yes"' do
let(:params) {{:do_not_log_tcpwrappers => 'yes'}}
it 'should contain File[snmpd.conf] with contents "dontLogTCPWrappersConnects yes' do
verify_contents(subject, 'snmpd.conf', [
'dontLogTCPWrappersConnects yes',
])
end
end
describe 'snmptrapdaddr => [ "5.6.7.8", "2.3.4.5:3333" ]' do
let(:params) {{ :snmptrapdaddr => ['5.6.7.8','2.3.4.5:3333'] }}
it 'should contain File[snmptrapd.conf] with contents "snmpTrapdAddr 5.6.7.8,2.3.4.5:3333"' do
verify_contents(subject, 'snmptrapd.conf', [
'snmpTrapdAddr 5.6.7.8,2.3.4.5:3333',
])
end
end
describe 'snmpd_config => [ "option 1", "option 2", ]' do
let(:params) {{ :snmpd_config => [ 'option 1', 'option 2', ] }}
it 'should contain File[snmpd.conf] with contents "option1" and "option 2"' do
verify_contents(subject, 'snmpd.conf', [
'option 1',
'option 2',
])
end
end
describe 'snmptrapd_config => [ "option 3", "option 4", ]' do
let(:params) {{ :snmptrapd_config => [ 'option 3', 'option 4', ] }}
it 'should contain File[snmptrapd.conf] with contents "option 3" and "option 4"' do
verify_contents(subject, 'snmptrapd.conf', [
'option 3',
'option 4',
])
end
end
end
context 'on a supported osfamily (Debian), custom parameters' do
let :facts do {
:osfamily => 'Debian',
:operatingsystem => 'Debian',
:operatingsystemrelease => '7.0'
}
end
describe 'service_ensure => stopped and trap_service_ensure => running' do
let :params do {
:service_ensure => 'stopped',
:trap_service_ensure => 'running'
}
end
# NOTE(review): on Debian both daemons appear to be driven by the single
# 'snmpd' init script, with SNMPDRUN/TRAPDRUN in the sysconfig file choosing
# which actually start — which is presumably why the snmpd *service* must be
# 'running' here even though service_ensure is 'stopped', and why there is no
# separate snmptrapd service resource. Confirm against the module manifests.
it { should contain_service('snmpd').with_ensure('running') }
it { should_not contain_service('snmptrapd') }
it 'should contain File[snmpd.sysconfig] with contents "SNMPDRUN=no" and "TRAPDRUN=yes"' do
verify_contents(subject, 'snmpd.sysconfig', [
'SNMPDRUN=no',
'TRAPDRUN=yes',
])
end
end
describe 'snmpd_options => blah' do
let(:params) {{ :snmpd_options => 'blah' }}
it { should contain_file('snmpd.sysconfig') }
it 'should contain File[snmpd.sysconfig] with contents "SNMPDOPTS=\'blah\'"' do
verify_contents(subject, 'snmpd.sysconfig', [
'SNMPDOPTS=\'blah\'',
])
end
end
describe 'snmptrapd_options => bleh' do
let(:params) {{ :snmptrapd_options => 'bleh' }}
it { should contain_file('snmpd.sysconfig') }
it 'should contain File[snmpd.sysconfig] with contents "TRAPDOPTS=\'bleh\'"' do
verify_contents(subject, 'snmpd.sysconfig', [
'TRAPDOPTS=\'bleh\'',
])
end
end
end
context 'on a supported osfamily (Suse), custom parameters' do
let :facts do {
:osfamily => 'Suse',
:operatingsystem => 'Suse',
:operatingsystemrelease => '11.1'
}
end
describe 'service_ensure => stopped' do
let(:params) {{ :service_ensure => 'stopped' }}
it { should contain_service('snmpd').with_ensure('stopped') }
it { should contain_service('snmptrapd').with_ensure('stopped') }
end
describe 'trap_service_ensure => running' do
let(:params) {{ :trap_service_ensure => 'running' }}
it { should contain_service('snmpd').with_ensure('running') }
it { should contain_service('snmptrapd').with_ensure('running') }
end
describe 'service_ensure => stopped and trap_service_ensure => running' do
let :params do {
:service_ensure => 'stopped',
:trap_service_ensure => 'running'
}
end
it { should contain_service('snmpd').with_ensure('stopped') }
it { should contain_service('snmptrapd').with_ensure('running') }
end
describe 'snmpd_options => blah' do
let(:params) {{ :snmpd_options => 'blah' }}
it { should contain_file('snmpd.sysconfig') }
it 'should contain File[snmpd.sysconfig] with contents "SNMPD_LOGLEVEL="blah""' do
verify_contents(subject, 'snmpd.sysconfig', [
'SNMPD_LOGLEVEL="blah"',
])
end
end
end
end
|
require 'spec_helper'
set :os, { :family => nil }
# The uid should be read from the passwd database (field 3) rather than via
# `id -u`, consistent with the getent-based commands used for gid, home
# directory and shell elsewhere in this spec (and with the corrected copy of
# this file).
describe get_command(:get_user_uid, 'foo') do
  it { should eq "getent passwd foo | cut -f 3 -d ':'" }
end
# The gid should be read from the passwd database (field 4) rather than via
# `id -g`, consistent with the other getent-based expectations in this spec.
describe get_command(:get_user_gid, 'foo') do
  it { should eq "getent passwd foo | cut -f 4 -d ':'" }
end
describe get_command(:get_user_home_directory, 'foo') do
it { should eq "getent passwd foo | cut -f 6 -d ':'" }
end
describe get_command(:update_user_home_directory, 'user', 'dir') do
it { should eq "usermod -d dir user" }
end
describe get_command(:update_user_uid, 'foo', 100) do
it { should eq 'usermod -u 100 foo' }
end
describe get_command(:update_user_gid, 'foo', 100) do
it { should eq 'usermod -g 100 foo' }
end
describe get_command(:add_user, 'foo', :home_directory => '/home/foo', :password => '$6$foo/bar', :shell => '/bin/tcsh', :create_home => true) do
it { should eq 'useradd -d /home/foo -p \$6\$foo/bar -s /bin/tcsh -m foo' }
end
describe get_command(:update_user_encrypted_password, 'foo', 'xxxxxxxx') do
it { should eq 'echo foo:xxxxxxxx | chpasswd -e' }
end
describe get_command(:get_user_encrypted_password, 'foo') do
it { should eq "getent shadow foo | cut -f 2 -d ':'" }
end
describe get_command(:check_user_has_login_shell, 'foo', '/bin/sh') do
it { should eq "getent passwd foo | cut -f 7 -d ':' | grep -w -- /bin/sh" }
end
describe get_command(:get_user_minimum_days_between_password_change, 'foo') do
it { should eq "chage -l foo | sed -n 's/^Minimum.*: //p'" }
end
describe get_command(:get_user_maximum_days_between_password_change, 'foo') do
it { should eq "chage -l foo | sed -n 's/^Maximum.*: //p'" }
end
describe get_command(:get_user_login_shell, 'foo') do
it { should eq "getent passwd foo | cut -f 7 -d ':'" }
end
describe get_command(:update_user_login_shell, 'foo', '/bin/bash') do
it { should eq 'usermod -s /bin/bash foo' }
end
describe get_command(:check_user_is_system_user, 'foo') do
it { should eq "getent passwd foo > /dev/null 2>&1 && test \"$(getent passwd foo | cut -f 3 -d ':')\" -ge \"$(awk 'BEGIN{sys_uid_min=101} {if($1~/^SYS_UID_MIN/){sys_uid_min=$2}} END{print sys_uid_min}' /etc/login.defs)\" && test \"$(getent passwd foo | cut -f 3 -d ':')\" -le \"$(awk 'BEGIN{sys_uid_max=0;uid_min=1000} {if($1~/^SYS_UID_MAX/){sys_uid_max=$2}if($1~/^UID_MIN/){uid_min=$2}} END{if(sys_uid_max!=0){print sys_uid_max}else{print uid_min-1}}' /etc/login.defs)\"" }
end
Fix spec: expect getent-based commands (`getent passwd | cut`) for get_user_uid and get_user_gid instead of `id -u` / `id -g`
require 'spec_helper'
set :os, { :family => nil }
describe get_command(:get_user_uid, 'foo') do
it { should eq "getent passwd foo | cut -f 3 -d ':'" }
end
describe get_command(:get_user_gid, 'foo') do
it { should eq "getent passwd foo | cut -f 4 -d ':'" }
end
describe get_command(:get_user_home_directory, 'foo') do
it { should eq "getent passwd foo | cut -f 6 -d ':'" }
end
describe get_command(:update_user_home_directory, 'user', 'dir') do
it { should eq "usermod -d dir user" }
end
describe get_command(:update_user_uid, 'foo', 100) do
it { should eq 'usermod -u 100 foo' }
end
describe get_command(:update_user_gid, 'foo', 100) do
it { should eq 'usermod -g 100 foo' }
end
describe get_command(:add_user, 'foo', :home_directory => '/home/foo', :password => '$6$foo/bar', :shell => '/bin/tcsh', :create_home => true) do
it { should eq 'useradd -d /home/foo -p \$6\$foo/bar -s /bin/tcsh -m foo' }
end
describe get_command(:update_user_encrypted_password, 'foo', 'xxxxxxxx') do
it { should eq 'echo foo:xxxxxxxx | chpasswd -e' }
end
describe get_command(:get_user_encrypted_password, 'foo') do
it { should eq "getent shadow foo | cut -f 2 -d ':'" }
end
describe get_command(:check_user_has_login_shell, 'foo', '/bin/sh') do
it { should eq "getent passwd foo | cut -f 7 -d ':' | grep -w -- /bin/sh" }
end
describe get_command(:get_user_minimum_days_between_password_change, 'foo') do
it { should eq "chage -l foo | sed -n 's/^Minimum.*: //p'" }
end
describe get_command(:get_user_maximum_days_between_password_change, 'foo') do
it { should eq "chage -l foo | sed -n 's/^Maximum.*: //p'" }
end
describe get_command(:get_user_login_shell, 'foo') do
it { should eq "getent passwd foo | cut -f 7 -d ':'" }
end
describe get_command(:update_user_login_shell, 'foo', '/bin/bash') do
it { should eq 'usermod -s /bin/bash foo' }
end
describe get_command(:check_user_is_system_user, 'foo') do
it { should eq "getent passwd foo > /dev/null 2>&1 && test \"$(getent passwd foo | cut -f 3 -d ':')\" -ge \"$(awk 'BEGIN{sys_uid_min=101} {if($1~/^SYS_UID_MIN/){sys_uid_min=$2}} END{print sys_uid_min}' /etc/login.defs)\" && test \"$(getent passwd foo | cut -f 3 -d ':')\" -le \"$(awk 'BEGIN{sys_uid_max=0;uid_min=1000} {if($1~/^SYS_UID_MAX/){sys_uid_max=$2}if($1~/^UID_MIN/){uid_min=$2}} END{if(sys_uid_max!=0){print sys_uid_max}else{print uid_min-1}}' /etc/login.defs)\"" }
end
|
# frozen_string_literal: true
RSpec.describe "bundle binstubs <gem>" do
context "when the gem exists in the lockfile" do
it "sets up the binstub" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs rack"
expect(bundled_app("bin/rackup")).to exist
end
it "does not install other binstubs" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
gem "rails"
G
bundle "binstubs rails"
expect(bundled_app("bin/rackup")).not_to exist
expect(bundled_app("bin/rails")).to exist
end
it "does install multiple binstubs" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
gem "rails"
G
bundle "binstubs rails rack"
expect(bundled_app("bin/rackup")).to exist
expect(bundled_app("bin/rails")).to exist
end
it "allows installing all binstubs" do
install_gemfile! <<-G
source "file://#{gem_repo1}"
gem "rails"
G
bundle! :binstubs, :all => true
expect(bundled_app("bin/rails")).to exist
expect(bundled_app("bin/rake")).to exist
end
it "displays an error when used without any gem" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs"
expect(exitstatus).to eq(1) if exitstatus
expect(out).to include("`bundle binstubs` needs at least one gem to run.")
end
it "displays an error when used with --all and gems" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs rack", :all => true
expect(last_command).to be_failure
expect(last_command.bundler_err).to include("Cannot specify --all with specific gems")
end
context "when generating bundle binstub outside bundler" do
it "should abort" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs rack"
File.open("bin/bundle", "wb") do |file|
file.print "OMG"
end
sys_exec "bin/rackup"
expect(last_command.stderr).to include("was not generated by Bundler")
end
end
context "the bundle binstub" do
before do
if system_bundler_version == :bundler
system_gems :bundler
elsif system_bundler_version
build_repo4 do
build_gem "bundler", system_bundler_version do |s|
s.executables = "bundle"
s.bindir = "exe"
s.write "exe/bundle", "puts %(system bundler #{system_bundler_version}\\n\#{ARGV.inspect})"
end
end
system_gems "bundler-#{system_bundler_version}", :gem_repo => gem_repo4
end
build_repo2 do
build_gem "prints_loaded_gems", "1.0" do |s|
s.executables = "print_loaded_gems"
s.bindir = "exe"
s.write "exe/print_loaded_gems", <<-R
specs = Gem.loaded_specs.values.reject {|s| Bundler.rubygems.spec_default_gem?(s) }
puts specs.map(&:full_name).sort.inspect
R
end
end
install_gemfile! <<-G
source "file://#{gem_repo2}"
gem "rack"
gem "prints_loaded_gems"
G
bundle! "binstubs bundler rack prints_loaded_gems"
end
# When environment has a same version of bundler as default gems.
# `system_gems "bundler-x.y.z"` will detect system binstub.
# We need to avoid it by virtual version of bundler.
let(:system_bundler_version) { Gem::Version.new(Bundler::VERSION).bump.to_s }
context "when system bundler was used" do
# Support master branch of bundler
if ENV["BUNDLER_SPEC_SUB_VERSION"]
let(:system_bundler_version) { Bundler::VERSION }
end
before do
  # Use a generic remote instead of a developer-machine absolute path
  # (file:///Users/colby/...), which made the spec non-portable. The
  # lockfile is written by hand so no network access is needed; only the
  # BUNDLED WITH section matters for these examples.
  gemfile <<-G
    source "https://rubygems.org"
    gem "rack"
    gem "prints_loaded_gems"
  G

  lockfile <<-G
    GEM
      remote: https://rubygems.org
      specs:
        prints_loaded_gems (1.0)
        rack (1.2)

    PLATFORMS
      ruby

    DEPENDENCIES
      prints_loaded_gems
      rack

    BUNDLED WITH
       #{system_bundler_version}
  G
end
it "runs bundler" do
sys_exec! "#{bundled_app("bin/bundle")} install"
expect(out).to eq %(system bundler #{system_bundler_version}\n["install"])
end
end
context "when BUNDLER_VERSION is set" do
let(:system_bundler_version) { Bundler::VERSION }
it "runs the correct version of bundler" do
sys_exec "BUNDLER_VERSION='999.999.999' #{bundled_app("bin/bundle")} install"
expect(exitstatus).to eq(42) if exitstatus
expect(last_command.stderr).to include("Activating bundler (999.999.999) failed:").
and include("To install the version of bundler this project requires, run `gem install bundler -v '999.999.999'`")
end
end
context "when a lockfile exists with a locked bundler version" do
let(:system_bundler_version) { Bundler::VERSION }
it "runs the correct version of bundler when the version is newer" do
lockfile lockfile.gsub(system_bundler_version, "999.999.999")
sys_exec "#{bundled_app("bin/bundle")} install"
expect(exitstatus).to eq(42) if exitstatus
expect(last_command.stderr).to include("Activating bundler (999.999.999) failed:").
and include("To install the version of bundler this project requires, run `gem install bundler -v '999.999.999'`")
end
it "runs the correct version of bundler when the version is older" do
simulate_bundler_version "55"
lockfile lockfile.gsub(system_bundler_version, "44.0")
sys_exec "#{bundled_app("bin/bundle")} install"
expect(exitstatus).to eq(42) if exitstatus
expect(last_command.stderr).to include("Activating bundler (44.0) failed:").
and include("To install the version of bundler this project requires, run `gem install bundler -v '44.0'`")
end
it "runs the correct version of bundler when the version is a pre-release" do
simulate_bundler_version "55"
lockfile lockfile.gsub(system_bundler_version, "2.12.0.a")
sys_exec "#{bundled_app("bin/bundle")} install"
expect(exitstatus).to eq(42) if exitstatus
expect(last_command.stderr).to include("Activating bundler (2.12.0.a) failed:").
and include("To install the version of bundler this project requires, run `gem install bundler -v '2.12.0.a'`")
end
end
context "when update --bundler is called" do
  # BUGFIX: the original `before { lockfile.gsub(...) }` computed a new
  # string and discarded it — a no-op, so the lockfile was never rewritten.
  # Pass the result back to the `lockfile` writer, as the sibling context
  # "when requesting a different bundler version" already does.
  before { lockfile lockfile.gsub(system_bundler_version, "1.1.1") }

  it "calls through to the latest bundler version" do
    sys_exec! "#{bundled_app("bin/bundle")} update --bundler"
    expect(last_command.stdout).to eq %(system bundler #{system_bundler_version}\n["update", "--bundler"])
  end

  it "calls through to the explicit bundler version" do
    sys_exec "#{bundled_app("bin/bundle")} update --bundler=999.999.999"
    expect(exitstatus).to eq(42) if exitstatus
    expect(last_command.stderr).to include("Activating bundler (999.999.999) failed:").
      and include("To install the version of bundler this project requires, run `gem install bundler -v '999.999.999'`")
  end
end
context "without a lockfile" do
it "falls back to the latest installed bundler" do
FileUtils.rm bundled_app("Gemfile.lock")
sys_exec! bundled_app("bin/bundle").to_s
expect(out).to eq "system bundler #{system_bundler_version}\n[]"
end
end
context "using another binstub" do
let(:system_bundler_version) { :bundler }
it "loads all gems" do
sys_exec! bundled_app("bin/print_loaded_gems").to_s
# RG < 2.0.14 didn't have a `Gem::Specification#default_gem?`
# This is dirty detection for old RG versions.
if File.dirname(Bundler.load.specs["bundler"][0].loaded_from) =~ %r{specifications/default}
expect(out).to eq %(["prints_loaded_gems-1.0", "rack-1.2"])
else
expect(out).to eq %(["bundler-#{Bundler::VERSION}", "prints_loaded_gems-1.0", "rack-1.2"])
end
end
context "when requesting a different bundler version" do
before { lockfile lockfile.gsub(Bundler::VERSION, "999.999.999") }
it "attempts to load that version", :ruby_repo do
sys_exec bundled_app("bin/rackup").to_s
expect(exitstatus).to eq(42) if exitstatus
expect(last_command.stderr).to include("Activating bundler (999.999.999) failed:").
and include("To install the version of bundler this project requires, run `gem install bundler -v '999.999.999'`")
end
end
end
end
it "installs binstubs from git gems" do
FileUtils.mkdir_p(lib_path("foo/bin"))
FileUtils.touch(lib_path("foo/bin/foo"))
build_git "foo", "1.0", :path => lib_path("foo") do |s|
s.executables = %w[foo]
end
install_gemfile <<-G
gem "foo", :git => "#{lib_path("foo")}"
G
bundle "binstubs foo"
expect(bundled_app("bin/foo")).to exist
end
it "installs binstubs from path gems" do
FileUtils.mkdir_p(lib_path("foo/bin"))
FileUtils.touch(lib_path("foo/bin/foo"))
build_lib "foo", "1.0", :path => lib_path("foo") do |s|
s.executables = %w[foo]
end
install_gemfile <<-G
gem "foo", :path => "#{lib_path("foo")}"
G
bundle "binstubs foo"
expect(bundled_app("bin/foo")).to exist
end
it "sets correct permissions for binstubs" do
with_umask(0o002) do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs rack"
binary = bundled_app("bin/rackup")
expect(File.stat(binary).mode.to_s(8)).to eq("100775")
end
end
context "when using --shebang" do
it "sets the specified shebang for the the binstub" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs rack --shebang jruby"
expect(File.open("bin/rackup").gets).to eq("#!/usr/bin/env jruby\n")
end
end
end
context "when the gem doesn't exist" do
it "displays an error with correct status" do
install_gemfile <<-G
source "file://#{gem_repo1}"
G
bundle "binstubs doesnt_exist"
expect(exitstatus).to eq(7) if exitstatus
expect(out).to include("Could not find gem 'doesnt_exist'.")
end
end
context "--path" do
it "sets the binstubs dir" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs rack --path exec"
expect(bundled_app("exec/rackup")).to exist
end
it "setting is saved for bundle install", :bundler => "< 3" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
gem "rails"
G
bundle! "binstubs rack", forgotten_command_line_options([:path, :bin] => "exec")
bundle! :install
expect(bundled_app("exec/rails")).to exist
end
end
context "with --standalone option" do
before do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
end
it "generates a standalone binstub" do
bundle! "binstubs rack --standalone"
expect(bundled_app("bin/rackup")).to exist
end
it "generates a binstub that does not depend on rubygems or bundler" do
bundle! "binstubs rack --standalone"
expect(File.read(bundled_app("bin/rackup"))).to_not include("Gem.bin_path")
end
context "when specified --path option" do
it "generates a standalone binstub at the given path" do
bundle! "binstubs rack --standalone --path foo"
expect(bundled_app("foo/rackup")).to exist
end
end
end
context "when the bin already exists" do
it "doesn't overwrite and warns" do
FileUtils.mkdir_p(bundled_app("bin"))
File.open(bundled_app("bin/rackup"), "wb") do |file|
file.print "OMG"
end
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs rack"
expect(bundled_app("bin/rackup")).to exist
expect(File.read(bundled_app("bin/rackup"))).to eq("OMG")
expect(out).to include("Skipped rackup")
expect(out).to include("overwrite skipped stubs, use --force")
end
context "when using --force" do
it "overwrites the binstub" do
FileUtils.mkdir_p(bundled_app("bin"))
File.open(bundled_app("bin/rackup"), "wb") do |file|
file.print "OMG"
end
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs rack --force"
expect(bundled_app("bin/rackup")).to exist
expect(File.read(bundled_app("bin/rackup"))).not_to eq("OMG")
end
end
end
context "when the gem has no bins" do
it "suggests child gems if they have bins" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack-obama"
G
bundle "binstubs rack-obama"
expect(out).to include("rack-obama has no executables")
expect(out).to include("rack has: rackup")
end
it "works if child gems don't have bins" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "actionpack"
G
bundle "binstubs actionpack"
expect(out).to include("no executables for the gem actionpack")
end
it "works if the gem has development dependencies" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "with_development_dependency"
G
bundle "binstubs with_development_dependency"
expect(out).to include("no executables for the gem with_development_dependency")
end
end
context "when BUNDLE_INSTALL is specified" do
it "performs an automatic bundle install" do
gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "config auto_install 1"
bundle "binstubs rack"
expect(out).to include("Installing rack 1.0.0")
expect(the_bundle).to include_gems "rack 1.0.0"
end
it "does nothing when already up to date" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "config auto_install 1"
bundle "binstubs rack", :env => { "BUNDLE_INSTALL" => 1 }
expect(out).not_to include("Installing rack 1.0.0")
end
end
end
Remove hardcoded filesystem path accidentally copied from a personal machine; point the spec's gemfile/lockfile at a generic https://rubygems.org remote instead
# frozen_string_literal: true
RSpec.describe "bundle binstubs <gem>" do
context "when the gem exists in the lockfile" do
it "sets up the binstub" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs rack"
expect(bundled_app("bin/rackup")).to exist
end
it "does not install other binstubs" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
gem "rails"
G
bundle "binstubs rails"
expect(bundled_app("bin/rackup")).not_to exist
expect(bundled_app("bin/rails")).to exist
end
it "does install multiple binstubs" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
gem "rails"
G
bundle "binstubs rails rack"
expect(bundled_app("bin/rackup")).to exist
expect(bundled_app("bin/rails")).to exist
end
it "allows installing all binstubs" do
install_gemfile! <<-G
source "file://#{gem_repo1}"
gem "rails"
G
bundle! :binstubs, :all => true
expect(bundled_app("bin/rails")).to exist
expect(bundled_app("bin/rake")).to exist
end
it "displays an error when used without any gem" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs"
expect(exitstatus).to eq(1) if exitstatus
expect(out).to include("`bundle binstubs` needs at least one gem to run.")
end
it "displays an error when used with --all and gems" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs rack", :all => true
expect(last_command).to be_failure
expect(last_command.bundler_err).to include("Cannot specify --all with specific gems")
end
context "when generating bundle binstub outside bundler" do
it "should abort" do
install_gemfile <<-G
source "file://#{gem_repo1}"
gem "rack"
G
bundle "binstubs rack"
File.open("bin/bundle", "wb") do |file|
file.print "OMG"
end
sys_exec "bin/rackup"
expect(last_command.stderr).to include("was not generated by Bundler")
end
end
context "the bundle binstub" do
before do
if system_bundler_version == :bundler
system_gems :bundler
elsif system_bundler_version
build_repo4 do
build_gem "bundler", system_bundler_version do |s|
s.executables = "bundle"
s.bindir = "exe"
s.write "exe/bundle", "puts %(system bundler #{system_bundler_version}\\n\#{ARGV.inspect})"
end
end
system_gems "bundler-#{system_bundler_version}", :gem_repo => gem_repo4
end
build_repo2 do
build_gem "prints_loaded_gems", "1.0" do |s|
s.executables = "print_loaded_gems"
s.bindir = "exe"
s.write "exe/print_loaded_gems", <<-R
specs = Gem.loaded_specs.values.reject {|s| Bundler.rubygems.spec_default_gem?(s) }
puts specs.map(&:full_name).sort.inspect
R
end
end
install_gemfile! <<-G
source "file://#{gem_repo2}"
gem "rack"
gem "prints_loaded_gems"
G
bundle! "binstubs bundler rack prints_loaded_gems"
end
# When environment has a same version of bundler as default gems.
# `system_gems "bundler-x.y.z"` will detect system binstub.
# We need to avoid it by virtual version of bundler.
let(:system_bundler_version) { Gem::Version.new(Bundler::VERSION).bump.to_s }
context "when system bundler was used" do
# Support master branch of bundler
if ENV["BUNDLER_SPEC_SUB_VERSION"]
let(:system_bundler_version) { Bundler::VERSION }
end
before do
gemfile <<-G
source "https://rubygems.org"
gem "rack"
gem "prints_loaded_gems"
G
lockfile <<-G
GEM
remote: https://rubygems.org
specs:
prints_loaded_gems (1.0)
rack (1.2)
PLATFORMS
ruby
DEPENDENCIES
prints_loaded_gems
rack
BUNDLED WITH
#{system_bundler_version}
G
end
it "runs bundler" do
sys_exec! "#{bundled_app("bin/bundle")} install"
expect(out).to eq %(system bundler #{system_bundler_version}\n["install"])
end
end
context "when BUNDLER_VERSION is set" do
let(:system_bundler_version) { Bundler::VERSION }
it "runs the correct version of bundler" do
sys_exec "BUNDLER_VERSION='999.999.999' #{bundled_app("bin/bundle")} install"
expect(exitstatus).to eq(42) if exitstatus
expect(last_command.stderr).to include("Activating bundler (999.999.999) failed:").
and include("To install the version of bundler this project requires, run `gem install bundler -v '999.999.999'`")
end
end
context "when a lockfile exists with a locked bundler version" do
let(:system_bundler_version) { Bundler::VERSION }
it "runs the correct version of bundler when the version is newer" do
lockfile lockfile.gsub(system_bundler_version, "999.999.999")
sys_exec "#{bundled_app("bin/bundle")} install"
expect(exitstatus).to eq(42) if exitstatus
expect(last_command.stderr).to include("Activating bundler (999.999.999) failed:").
and include("To install the version of bundler this project requires, run `gem install bundler -v '999.999.999'`")
end
it "runs the correct version of bundler when the version is older" do
simulate_bundler_version "55"
lockfile lockfile.gsub(system_bundler_version, "44.0")
sys_exec "#{bundled_app("bin/bundle")} install"
expect(exitstatus).to eq(42) if exitstatus
expect(last_command.stderr).to include("Activating bundler (44.0) failed:").
and include("To install the version of bundler this project requires, run `gem install bundler -v '44.0'`")
end
it "runs the correct version of bundler when the version is a pre-release" do
simulate_bundler_version "55"
lockfile lockfile.gsub(system_bundler_version, "2.12.0.a")
sys_exec "#{bundled_app("bin/bundle")} install"
expect(exitstatus).to eq(42) if exitstatus
expect(last_command.stderr).to include("Activating bundler (2.12.0.a) failed:").
and include("To install the version of bundler this project requires, run `gem install bundler -v '2.12.0.a'`")
end
end
context "when update --bundler is called" do
  # BUGFIX: `lockfile.gsub(...)` alone returns a new string and throws it
  # away — the lockfile on disk was never modified. Feed the result back to
  # the `lockfile` writer (cf. the "when requesting a different bundler
  # version" context, which uses the correct form).
  before { lockfile lockfile.gsub(system_bundler_version, "1.1.1") }

  it "calls through to the latest bundler version" do
    sys_exec! "#{bundled_app("bin/bundle")} update --bundler"
    expect(last_command.stdout).to eq %(system bundler #{system_bundler_version}\n["update", "--bundler"])
  end

  it "calls through to the explicit bundler version" do
    sys_exec "#{bundled_app("bin/bundle")} update --bundler=999.999.999"
    expect(exitstatus).to eq(42) if exitstatus
    expect(last_command.stderr).to include("Activating bundler (999.999.999) failed:").
      and include("To install the version of bundler this project requires, run `gem install bundler -v '999.999.999'`")
  end
end
# With no Gemfile.lock there is no pinned version, so the stub should fall
# back to whichever bundler is currently installed.
context "without a lockfile" do
  it "falls back to the latest installed bundler" do
    FileUtils.rm bundled_app("Gemfile.lock")
    sys_exec! bundled_app("bin/bundle").to_s
    expect(out).to eq "system bundler #{system_bundler_version}\n[]"
  end
end

# Binstubs for other gems (not `bin/bundle`) must also load the bundled
# environment before running.
context "using another binstub" do
  let(:system_bundler_version) { :bundler }
  it "loads all gems" do
    sys_exec! bundled_app("bin/print_loaded_gems").to_s
    # RG < 2.0.14 didn't have a `Gem::Specification#default_gem?`
    # This is dirty detection for old RG versions.
    # When bundler itself is loaded from RubyGems' default-specs dir it
    # does not show up in the loaded-gems list, hence the two branches.
    if File.dirname(Bundler.load.specs["bundler"][0].loaded_from) =~ %r{specifications/default}
      expect(out).to eq %(["prints_loaded_gems-1.0", "rack-1.2"])
    else
      expect(out).to eq %(["bundler-#{Bundler::VERSION}", "prints_loaded_gems-1.0", "rack-1.2"])
    end
  end
  context "when requesting a different bundler version" do
    before { lockfile lockfile.gsub(Bundler::VERSION, "999.999.999") }
    it "attempts to load that version", :ruby_repo do
      sys_exec bundled_app("bin/rackup").to_s
      expect(exitstatus).to eq(42) if exitstatus
      expect(last_command.stderr).to include("Activating bundler (999.999.999) failed:").
        and include("To install the version of bundler this project requires, run `gem install bundler -v '999.999.999'`")
    end
  end
end
end
# Binstubs must be generated for gems sourced from a git repository.
it "installs binstubs from git gems" do
  FileUtils.mkdir_p(lib_path("foo/bin"))
  FileUtils.touch(lib_path("foo/bin/foo"))
  build_git "foo", "1.0", :path => lib_path("foo") do |s|
    s.executables = %w[foo]
  end
  install_gemfile <<-G
    gem "foo", :git => "#{lib_path("foo")}"
  G
  bundle "binstubs foo"
  expect(bundled_app("bin/foo")).to exist
end

# Same as above but for a local :path source.
it "installs binstubs from path gems" do
  FileUtils.mkdir_p(lib_path("foo/bin"))
  FileUtils.touch(lib_path("foo/bin/foo"))
  build_lib "foo", "1.0", :path => lib_path("foo") do |s|
    s.executables = %w[foo]
  end
  install_gemfile <<-G
    gem "foo", :path => "#{lib_path("foo")}"
  G
  bundle "binstubs foo"
  expect(bundled_app("bin/foo")).to exist
end

# Under a group-friendly umask (0o002) the generated stub is expected to be
# a regular file with mode 0775 ("100775" in octal stat notation).
it "sets correct permissions for binstubs" do
  with_umask(0o002) do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "rack"
    G
    bundle "binstubs rack"
    binary = bundled_app("bin/rackup")
    expect(File.stat(binary).mode.to_s(8)).to eq("100775")
  end
end

# --shebang replaces the interpreter on the stub's first line.
context "when using --shebang" do
  it "sets the specified shebang for the the binstub" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "rack"
    G
    bundle "binstubs rack --shebang jruby"
    # Only the first line (the shebang) of the generated file is inspected.
    expect(File.open("bin/rackup").gets).to eq("#!/usr/bin/env jruby\n")
  end
end
end
# Asking for a binstub of a gem that isn't in the bundle is an error with a
# dedicated exit status (7) and a helpful message.
context "when the gem doesn't exist" do
  it "displays an error with correct status" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
    G
    bundle "binstubs doesnt_exist"
    expect(exitstatus).to eq(7) if exitstatus
    expect(out).to include("Could not find gem 'doesnt_exist'.")
  end
end

# --path redirects where the stubs are written.
context "--path" do
  it "sets the binstubs dir" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "rack"
    G
    bundle "binstubs rack --path exec"
    expect(bundled_app("exec/rackup")).to exist
  end
  # Pre-3.x behaviour: the chosen bin dir is remembered so a later plain
  # `bundle install` writes new stubs (rails here) to the same place.
  it "setting is saved for bundle install", :bundler => "< 3" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "rack"
      gem "rails"
    G
    bundle! "binstubs rack", forgotten_command_line_options([:path, :bin] => "exec")
    bundle! :install
    expect(bundled_app("exec/rails")).to exist
  end
end

# --standalone stubs must not rely on rubygems/bundler at runtime.
context "with --standalone option" do
  before do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "rack"
    G
  end
  it "generates a standalone binstub" do
    bundle! "binstubs rack --standalone"
    expect(bundled_app("bin/rackup")).to exist
  end
  it "generates a binstub that does not depend on rubygems or bundler" do
    bundle! "binstubs rack --standalone"
    # Gem.bin_path is the rubygems entry point a non-standalone stub uses.
    expect(File.read(bundled_app("bin/rackup"))).to_not include("Gem.bin_path")
  end
  context "when specified --path option" do
    it "generates a standalone binstub at the given path" do
      bundle! "binstubs rack --standalone --path foo"
      expect(bundled_app("foo/rackup")).to exist
    end
  end
end
# Existing files in the bin dir are preserved by default; --force is the
# explicit opt-in to clobber them.
context "when the bin already exists" do
  it "doesn't overwrite and warns" do
    FileUtils.mkdir_p(bundled_app("bin"))
    # Plant a sentinel file where the stub would be written.
    File.open(bundled_app("bin/rackup"), "wb") do |file|
      file.print "OMG"
    end
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "rack"
    G
    bundle "binstubs rack"
    expect(bundled_app("bin/rackup")).to exist
    # Sentinel untouched, and the user is told how to force an overwrite.
    expect(File.read(bundled_app("bin/rackup"))).to eq("OMG")
    expect(out).to include("Skipped rackup")
    expect(out).to include("overwrite skipped stubs, use --force")
  end
  context "when using --force" do
    it "overwrites the binstub" do
      FileUtils.mkdir_p(bundled_app("bin"))
      File.open(bundled_app("bin/rackup"), "wb") do |file|
        file.print "OMG"
      end
      install_gemfile <<-G
        source "file://#{gem_repo1}"
        gem "rack"
      G
      bundle "binstubs rack --force"
      expect(bundled_app("bin/rackup")).to exist
      # The sentinel content must have been replaced by a real stub.
      expect(File.read(bundled_app("bin/rackup"))).not_to eq("OMG")
    end
  end
end
# When the requested gem ships no executables, the command should explain
# that and (where possible) point at dependencies that do have them.
context "when the gem has no bins" do
  it "suggests child gems if they have bins" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "rack-obama"
    G
    bundle "binstubs rack-obama"
    expect(out).to include("rack-obama has no executables")
    # rack is a dependency of rack-obama and does provide `rackup`.
    expect(out).to include("rack has: rackup")
  end
  it "works if child gems don't have bins" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "actionpack"
    G
    bundle "binstubs actionpack"
    expect(out).to include("no executables for the gem actionpack")
  end
  it "works if the gem has development dependencies" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "with_development_dependency"
    G
    bundle "binstubs with_development_dependency"
    expect(out).to include("no executables for the gem with_development_dependency")
  end
end

# With auto_install configured, `bundle binstubs` installs missing gems
# first instead of failing; if everything is installed it stays quiet.
context "when BUNDLE_INSTALL is specified" do
  it "performs an automatic bundle install" do
    gemfile <<-G
      source "file://#{gem_repo1}"
      gem "rack"
    G
    bundle "config auto_install 1"
    bundle "binstubs rack"
    expect(out).to include("Installing rack 1.0.0")
    expect(the_bundle).to include_gems "rack 1.0.0"
  end
  it "does nothing when already up to date" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "rack"
    G
    bundle "config auto_install 1"
    bundle "binstubs rack", :env => { "BUNDLE_INSTALL" => 1 }
    # Already installed above, so no install output is expected.
    expect(out).not_to include("Installing rack 1.0.0")
  end
end
end
|
$: << 'lib'
require 'pt_testcase'
class CompilerTestCase < ParseTreeTestCase
# Builds the expected bytecode for a test case: evaluates the given block
# in the context of a fresh TestGenerator (so bare calls like `push`,
# `send`, `in_class` hit the generator) and returns that generator.
# The generator is also remembered in @tg, as before.
def self.bytecode(&block)
  generator = TestGenerator.new
  generator.instance_eval(&block)
  @tg = generator
end
# Expected bytecode for `alias`: inside the class body, `alias_method` is
# sent to the enclosing context with the new and old names as literals.
add_tests("alias",
  "Compiler" => bytecode do |g|
    in_class :X do |d|
      d.push_context
      d.push_literal :y
      d.push_literal :x
      d.send :alias_method, 2, true
    end
  end)

# `alias_ugh` parses to the same tree, so it reuses the "alias" expectation.
add_tests("alias_ugh",
  "Compiler" => testcases["alias"]["Compiler"])

# `a and b`: evaluate a and dup it; `gif` (goto-if-false) jumps to the end
# when the copy is falsy, leaving a as the result. Otherwise the copy is
# popped and b is evaluated to supply the result.
add_tests("and",
  "Compiler" => bytecode do |g|
    g.push :self
    g.send :a, 0, true
    g.dup
    lhs_true = g.new_label
    g.gif lhs_true
    g.pop
    g.push :self
    g.send :b, 0, true
    lhs_true.set!
  end)
add_tests("argscat_inside",
"Compiler" => bytecode do |g|
g.push :self
g.send :b, 0, true
g.make_array 1
g.push :self
g.send :c, 0, true
g.cast_array
g.send :+, 1
g.set_local 0
end)
add_tests("argscat_svalue",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :b, 0, true
g.push :self
g.send :c, 0, true
g.make_array 2
g.push :self
g.send :d, 0, true
g.cast_array
g.send :+, 1
g.cast_array
g.dup
g.send :size, 0
g.push 1
g.swap
g.send :<, 1 # TODO: or empty?
g.git t
g.push 0
g.send :at, 1
t.set!
g.set_local 0
end)
add_tests("argspush",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.push :self
g.send :b, 0, true
g.cast_array
g.push :self
g.send :c, 0, true
g.swap
g.push :nil
g.send_with_splat :[]=, 1, false, true
end)
add_tests("array",
"Compiler" => bytecode do |g|
g.push 1
g.push_unique_literal :b
g.push_literal "c"
g.string_dup
g.make_array 3
end)
add_tests("array_pct_W",
"Compiler" => bytecode do |g|
g.push_literal "a"
g.string_dup
g.push_literal "b"
g.string_dup
g.push_literal "c"
g.string_dup
g.make_array 3
end)
add_tests("array_pct_W_dstr",
"Compiler" => bytecode do |g|
g.push_literal "a"
g.string_dup
g.push_ivar :@b
g.send :to_s, 0, true
g.push_literal ""
g.string_dup
g.string_append
g.push_literal "c"
g.string_dup
g.make_array 3
end)
add_tests("attrasgn",
"Compiler" => bytecode do |g|
g.push 0
g.set_local 0
g.pop
g.push 42
g.push_local 0
g.send :method=, 1, false
end)
add_tests("attrasgn_index_equals",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.push 42
g.push 24
g.send :[]=, 2, false
end)
add_tests("attrasgn_index_equals_space",
"Compiler" => bytecode do |g|
g.make_array 0
g.set_local 0
g.pop
g.push_local 0
g.push 42
g.push 24
g.send :[]=, 2, false
end)
add_tests("back_ref",
"Compiler" => bytecode do |g|
g.push_context
g.push_literal :"&"
g.send :back_ref, 1
g.push_context
g.push_literal :"`"
g.send :back_ref, 1
g.push_context
g.push_literal :"'"
g.send :back_ref, 1
g.push_context
g.push_literal :"+"
g.send :back_ref, 1
g.make_array 4
end)
add_tests("begin",
"Compiler" => bytecode do |g|
g.push 1
g.push 1
g.meta_send_op_plus
end)
add_tests("begin_def",
"Compiler" => bytecode do |g|
in_method :m do |d|
d.push :nil
end
end)
add_tests("begin_rescue_ensure",
"Compiler" => bytecode do |g|
top = g.new_label
dunno = g.new_label
bottom = g.new_label
top.set!
g.push_modifiers
g.push :nil
g.pop_modifiers
g.goto bottom
dunno.set!
g.push :nil
g.pop
g.push_exception
g.raise_exc
bottom.set!
g.push :nil
g.pop
end)
add_tests("begin_rescue_twice",
"Compiler" => bytecode do |g|
g.push_modifiers
g.push :nil
g.pop_modifiers
g.pop
g.push_modifiers
g.push :nil
g.pop_modifiers
end)
add_tests("begin_rescue_twice_mri_verbose_flag",
"Compiler" => testcases['begin_rescue_twice']['Compiler'])
add_tests("block_attrasgn",
"Compiler" => bytecode do |g|
g.push :self
g.in_method :setup, true do |d|
d.push :self
d.send :allocate, 0, true
d.set_local 1
d.pop
d.push_local 1
d.push_local 0
d.send :context=, 1, false
d.pop
d.push_local 1
d.ret # TODO: why extra return?
end
end)
add_tests("block_lasgn",
"Compiler" => bytecode do |g|
g.push 1
g.set_local 1
g.pop
g.push_local 1
g.push 2
g.meta_send_op_plus
g.set_local 0
end)
add_tests("block_mystery_block",
"Compiler" => bytecode do |g|
g.push :self
g.push :self
g.send :b, 0, true
in_block_send :a, 0, 1 do |d|
f = d.new_label
bottom = d.new_label
d.push :self
d.send :b, 0, true
d.gif f
d.push :true
d.goto bottom
f.set!
d.push :false
d.set_local_depth 0, 0
d.pop
d.push :self
d.in_block_send :d, 1, 0, true, 0, true do |d2|
d2.push :true
d2.set_local_depth 1, 0
end
d.pop
d.push_local_depth 0, 0
bottom.set!
end
end)
add_tests("block_pass_args_and_splat",
"Compiler" => bytecode do |g|
in_method :blah do |d|
no_proc = d.new_label
no_splat = d.new_label
d.push_block
d.dup
d.is_nil
d.git no_proc
d.push_const :Proc
d.swap
d.send :__from_block__, 1
no_proc.set!
d.set_local 1
d.pop
d.push :self
d.push 42 # only line different from block_pass_splat
d.push_local 0
d.cast_array
d.push_local 1
d.dup
d.is_nil
d.git no_splat
d.push_cpath_top
d.find_const :Proc
d.swap
d.send :__from_block__, 1
no_splat.set!
d.send_with_splat :other, 1, true, false # ok, and this one
end
end)
add_tests("block_pass_call_0",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :a, 0, true
g.push :self
g.send :c, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :b, 0, false
end)
add_tests("block_pass_call_1",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :a, 0, true
g.push 4
g.push :self
g.send :c, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :b, 1, false
end)
add_tests("block_pass_call_n",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :a, 0, true
g.push 1
g.push 2
g.push 3
g.push :self
g.send :c, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :b, 3, false
end)
add_tests("block_pass_fcall_0",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.push :self
g.send :b, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :a, 0, true
end)
add_tests("block_pass_fcall_1",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.push 4
g.push :self
g.send :b, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :a, 1, true
end)
add_tests("block_pass_fcall_n",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.push 1
g.push 2
g.push 3
g.push :self
g.send :b, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :a, 3, true
end)
add_tests("block_pass_omgwtf",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.push_unique_literal :x
g.push_unique_literal :sequence_name
g.push_const :Proc
in_block_send :new, -1, 0, false do |d|
d.push :nil
end
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :define_attr_method, 2, true
end)
add_tests("block_pass_splat",
"Compiler" => bytecode do |g|
g.in_method :blah do |d|
no_proc = d.new_label
no_splat = d.new_label
d.push_block
d.dup
d.is_nil
d.git no_proc
d.push_const :Proc
d.swap
d.send :__from_block__, 1
no_proc.set!
d.set_local 1
d.pop
d.push :self
d.push_local 0
d.cast_array
d.push_local 1
d.dup
d.is_nil
d.git no_splat
d.push_cpath_top
d.find_const :Proc # FIX: why push_cpath/find vs push_const ?
d.swap
d.send :__from_block__, 1
no_splat.set!
d.send_with_splat :other, 0, true, false
end
end)
add_tests("block_pass_super",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :nil
g.push :self
g.send :prc, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_super nil, 0
end)
add_tests("block_pass_thingy",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :r, 0, true
g.push :self
g.send :dest, 0, true
g.push :self
g.send :block, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :read_body, 1, false
end)
add_tests("block_stmt_after",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.in_rescue(:StandardError) do |good_side|
if good_side then
d.push :self
d.send :b, 0, true
else
d.push :self
d.send :c, 0, true
end
end
d.pop
d.push :self
d.send :d, 0, true
end
end)
add_tests("block_stmt_after_mri_verbose_flag",
"Compiler" => testcases['block_stmt_after']['Compiler'])
add_tests("block_stmt_before",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.push :self
d.send :a, 0, true
d.pop
d.in_rescue(:StandardError) do |good_side|
if good_side then
d.push :self
d.send :b, 0, true
else
d.push :self
d.send :c, 0, true
end
end
end
end)
add_tests("block_stmt_before_mri_verbose_flag",
"Compiler" => testcases['block_stmt_before']['Compiler'])
add_tests("block_stmt_both",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.push :self
d.send :a, 0, true
d.pop
d.in_rescue(:StandardError) do |good_side|
if good_side then
d.push :self
d.send :b, 0, true
else
d.push :self
d.send :c, 0, true
end
end
d.pop
d.push :self
d.send :d, 0, true
end
end)
add_tests("block_stmt_both_mri_verbose_flag",
"Compiler" => testcases['block_stmt_both']['Compiler'])
add_tests("break",
"Compiler" => bytecode do |g|
break_value = :nil # TODO: refactor later
top = g.new_label
cond = g.new_label
rtry = g.new_label
brk = g.new_label
g.push_modifiers
top.set!
g.push :true
g.gif cond
g.push break_value
g.goto brk
g.goto rtry # TODO: only used when there is a retry statement
cond.set!
g.push :nil
rtry.set!
g.pop
g.goto top
brk.set!
g.pop_modifiers
end)
# "Ruby" => "loop { break 42 if true }",
add_tests("break_arg",
"Compiler" => bytecode do |g|
break_value = 42
top = g.new_label
cond = g.new_label
rtry = g.new_label
brk = g.new_label
g.push_modifiers
top.set!
g.push :true
g.gif cond
g.push break_value
g.goto brk
g.goto rtry # TODO: only used when there is a retry statement
cond.set!
g.push :nil
rtry.set!
g.pop
g.goto top
brk.set!
g.pop_modifiers
end)
add_tests("call",
"Compiler" => bytecode do |g|
g.push :self
g.send :method, 0, false
end)
add_tests("call_arglist",
"Compiler" => bytecode do |g|
g.push :self
g.send :o, 0, true
g.push 42
g.send :puts, 1, false
end)
add_tests("call_arglist_hash",
"Compiler" => bytecode do |g|
g.push :self
g.send :o, 0, true
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.send :m, 1, false
end)
add_tests("call_arglist_norm_hash",
"Compiler" => bytecode do |g|
g.push :self
g.send :o, 0, true
g.push 42
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.send :m, 2, false
end)
add_tests("call_arglist_norm_hash_splat",
"Compiler" => bytecode do |g|
g.push :self
g.send :o, 0, true
g.push 42
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.push :self
g.send :c, 0, true
g.cast_array
g.push :nil
g.send_with_splat :m, 2, false, false
end)
add_tests("call_arglist_space",
"Compiler" => bytecode do |g|
g.push :self
g.push 1
g.push 2
g.push 3
g.send :a, 3, true
end)
add_tests("call_command",
"Compiler" => bytecode do |g|
g.push 1
g.push :self
g.send :c, 0, true
g.send :b, 1, false
end)
add_tests("call_expr",
"Compiler" => bytecode do |g|
g.push 1
g.push 1
g.meta_send_op_plus
g.set_local 0
g.send :zero?, 0, false
end)
add_tests("call_index",
"Compiler" => bytecode do |g|
g.make_array 0
g.set_local 0
g.pop
g.push_local 0
g.push 42
g.send :[], 1, false
end)
add_tests("call_index_no_args",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.send :[], 0, false
end)
add_tests("call_index_space",
"Compiler" => testcases["call_index"]["Compiler"])
add_tests("call_unary_neg",
"Compiler" => bytecode do |g|
g.push 2
g.push 31
g.send :**, 1, false
g.send :-@, 0, false
end)
add_tests("case",
"Compiler" => bytecode do |g|
a1 = g.new_label
a2 = g.new_label
a3 = g.new_label
a4 = g.new_label
a_bottom = g.new_label
g.push 2
g.set_local 0
g.pop
g.push_literal ""
g.string_dup
g.set_local 1
g.pop
g.push_local 0
g.dup
g.push 1
g.swap
g.send :===, 1
g.gif a1
g.pop
g.push :self
g.push_literal "something"
g.string_dup
g.send :puts, 1, true
g.pop
g.push_literal "red"
g.string_dup
g.set_local 1
g.goto a_bottom
a1.set!
g.dup
g.push 2
g.swap
g.send :===, 1
g.git a2
g.dup
g.push 3
g.swap
g.send :===, 1
g.git a2
g.goto a3
a2.set!
g.pop
g.push_literal "yellow"
g.string_dup
g.set_local 1
g.goto a_bottom
a3.set!
g.dup
g.push 4
g.swap
g.send :===, 1
g.gif a4
g.pop
g.push :nil
g.goto a_bottom
a4.set!
g.pop
g.push_literal "green"
g.string_dup
g.set_local 1
a_bottom.set!
b1 = g.new_label
b2 = g.new_label
b3 = g.new_label
b_bottom = g.new_label
g.pop
g.push_local 1
g.dup
g.push_literal "red"
g.string_dup
g.swap
g.send :===, 1
g.gif b1
g.pop
g.push 1
g.set_local 0
g.goto b_bottom
b1.set!
g.dup
g.push_literal "yellow"
g.string_dup
g.swap
g.send :===, 1
g.gif b2
g.pop
g.push 2
g.set_local 0
g.goto b_bottom
b2.set!
g.dup
g.push_literal "green"
g.string_dup
g.swap
g.send :===, 1
g.gif b3
g.pop
g.push 3
g.set_local 0
g.goto b_bottom
b3.set!
g.pop
g.push :nil
b_bottom.set!
end)
add_tests("case_nested",
"Compiler" => bytecode do |g|
# case => a
# when
# case => b
# when
# case => c
# end
########################################
a2 = g.new_label
a3 = g.new_label
a_bottom = g.new_label
g.push 1
g.set_local 0
g.pop
g.push 2
g.set_local 1
g.pop
g.push :nil
g.set_local 2
g.pop
########################################
b2 = g.new_label
b3 = g.new_label
b_bottom = g.new_label
g.push_local 0
g.dup
g.push 1
g.swap
g.send :===, 1
g.gif a2
g.pop
g.push_local 1
g.dup
g.push 1
g.swap
g.send :===, 1
g.gif b2
g.pop
g.push 1
g.set_local 2
g.goto b_bottom
b2.set!
g.dup
g.push 2
g.swap
g.send :===, 1
g.gif b3
g.pop
g.push 2
g.set_local 2
g.goto b_bottom
b3.set!
g.pop
g.push 3
g.set_local 2
b_bottom.set!
g.goto a_bottom
a2.set!
g.dup
g.push 2
g.swap
g.send :===, 1
g.gif a3
########################################
c2 = g.new_label
c3 = g.new_label
c_bottom = g.new_label
g.pop
g.push_local 1
g.dup
g.push 1
g.swap
g.send :===, 1
g.gif c2
g.pop
g.push 4
g.set_local 2
g.goto c_bottom
c2.set!
g.dup
g.push 2
g.swap
g.send :===, 1
g.gif c3
g.pop
g.push 5
g.set_local 2
g.goto c_bottom
c3.set!
g.pop
g.push 6
g.set_local 2
c_bottom.set!
g.goto a_bottom
a3.set!
g.pop
g.push 7
g.set_local 2
a_bottom.set!
end)
add_tests("case_nested_inner_no_expr",
"Compiler" => bytecode do |g|
c2, bottom = g.new_label, g.new_label
i1, i2, ibottom = g.new_label, g.new_label, g.new_label
g.push :self
g.send :a, 0, true
g.dup
g.push :self
g.send :b, 0, true
g.swap
g.send :===, 1
g.gif c2
g.pop
g.push :self
g.send :d, 0, true
g.dup
g.gif i1 # TODO: lamest jump ever - should be ibottom
g.pop
g.push :self
g.send :e, 0, true
i1.set!
g.gif i2
g.push :self
g.send :f, 0, true
g.goto ibottom
i2.set!
g.push :nil
ibottom.set!
g.goto bottom
c2.set!
g.pop
g.push :nil
bottom.set!
end)
add_tests("case_no_expr",
"Compiler" => bytecode do |g|
c2, c3, bottom = g.new_label, g.new_label, g.new_label
g.push :self
g.send :a, 0, true
g.push 1
g.meta_send_op_equal
g.gif c2
g.push_unique_literal :a
g.goto bottom
c2.set!
g.push :self
g.send :a, 0, true
g.push 2
g.meta_send_op_equal
g.gif c3
g.push_unique_literal :b
g.goto bottom
c3.set!
g.push_unique_literal :c
bottom.set!
end)
add_tests("case_splat",
"Compiler" => bytecode do |g|
c1, c2, bottom = g.new_label, g.new_label, g.new_label
g.push :self
g.send :a, 0, true
g.dup
g.push_unique_literal :b
g.swap
g.send :===, 1
g.git c1
g.dup
g.push :self
g.send :c, 0, true
g.cast_array
g.swap
g.send :__matches_when__, 1
g.git c1
g.goto c2
c1.set!
g.pop
g.push :self
g.send :d, 0, true
g.goto bottom
c2.set!
g.pop
g.push :self
g.send :e, 0, true
bottom.set!
end)
add_tests("cdecl",
"Compiler" => bytecode do |g|
g.push_context
g.push_literal :X
g.push 42
g.send :__const_set__, 2
end)
add_tests("class_plain",
"Compiler" => bytecode do |g|
in_class :X do |d|
d.push :self
d.push 1
d.push 1
d.meta_send_op_plus
d.send :puts, 1, true
d.pop
d.in_method :blah do |d2|
d2.push :self
d2.push_literal "hello"
d2.string_dup
d2.send :puts, 1, true
end
end
end)
add_tests("class_scoped",
"Compiler" => bytecode do |g|
in_class "X::Y" do |d|
d.push :self
d.send :c, 0, true
end
end)
add_tests("class_scoped3",
"Compiler" => bytecode do |g|
in_class :Y do |d|
d.push :self
d.send :c, 0, true
end
end)
add_tests("class_super_array",
"Compiler" => bytecode do |g|
g.push_const :Array
g.open_class :X
end)
add_tests("class_super_expr",
"Compiler" => bytecode do |g|
g.push :self
g.send :expr, 0, true
g.open_class :X
end)
add_tests("class_super_object",
"Compiler" => bytecode do |g|
g.push_const :Object
g.open_class :X
end)
add_tests("colon2",
"Compiler" => bytecode do |g|
g.push_const :X
g.find_const :Y
end)
add_tests("colon3",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :X
end)
add_tests("const",
"Compiler" => bytecode do |g|
g.push_const :X
end)
add_tests("constX",
"Compiler" => bytecode do |g|
g.push_context
g.push_literal :X
g.push 1
g.send :__const_set__, 2
end)
add_tests("constY",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.push_literal :X
g.push 1
g.send :__const_set__, 2
end)
add_tests("constZ",
"Compiler" => bytecode do |g|
g.push_const :X
g.push_literal :Y
g.push 1
g.send :__const_set__, 2
end)
add_tests("cvar",
"Compiler" => bytecode do |g|
g.push_context
g.push_literal :@@x
g.send :class_variable_get, 1
end)
add_tests("cvasgn",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push_context
d.push_literal :@@blah
d.push 1
d.send :class_variable_set, 2
end
end)
add_tests("cvasgn_cls_method",
"Compiler" => bytecode do |g|
g.push :self
in_method :quiet_mode=, :singleton do |d|
d.push_context
d.push_literal :@@quiet_mode
d.push_local 0
d.send :class_variable_set, 2
end
end)
add_tests("cvdecl",
"Compiler" => bytecode do |g|
in_class :X do |d|
d.push :self
d.push_literal :@@blah
d.push 1
d.send :class_variable_set, 2
end
end)
add_tests("dasgn_0",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.in_block_send :each, 1, 0, false, 0, false do |d|
t = d.new_label
f = d.new_label
d.push :true
d.gif f
d.push :self
d.send :b, 0, true
d.in_block_send :each, 1, 0, false, 0, true do |d2|
d2.push_local_depth 1, 0
d2.push 1
d2.meta_send_op_plus
d2.set_local_depth 1, 0
end
d.goto t
f.set!
d.push :nil
t.set!
end
end)
add_tests("dasgn_1",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.in_block_send :each, 1, 0, false, 0, false do |d|
t = d.new_label
f = d.new_label
d.push :true
d.gif f
d.push :self
d.send :b, 0, true
d.in_block_send :each, 1, 0, false, 0, true do |d2|
d2.push_local_depth 0, 1
d2.push 1
d2.meta_send_op_plus
d2.set_local_depth 0, 1
end
d.goto t
f.set!
d.push :nil
t.set!
end
end)
add_tests("dasgn_2",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.in_block_send :each, 1, 0, false, 0, false do |d|
t = d.new_label
f = d.new_label
d.push :true
d.gif f
d.push 0
d.set_local_depth 0, 1
d.pop
d.push :self
d.send :b, 0, true
d.in_block_send :each, 1, 0, false, 0, true do |d2|
d2.push_local_depth 1, 1
d2.push 1
d2.meta_send_op_plus
d2.set_local_depth 1, 1
end
d.goto t
f.set!
d.push :nil
t.set!
end
end)
add_tests("dasgn_curr",
"Compiler" => bytecode do |g|
g.push :self
g.send :data, 0, true
g.in_block_send :each, 2, 0, false do |d|
d.push 1
d.set_local_depth 0, 2
d.pop
d.push_local_depth 0, 2
d.set_local_depth 0, 3
d.pop
d.push_local_depth 0, 0
d.set_local_depth 0, 2
d.set_local_depth 0, 3
end
end)
add_tests("dasgn_icky",
"Compiler" => bytecode do |g|
g.push :self
g.in_block_send :a do |d|
d.push :nil
d.set_local_depth 0, 0
d.pop
d.push :self
d.push :self
d.send :full_message, 0, true
d.in_block_send :assert_block, 0, 1, true, 0, true do |d2|
d2.in_rescue :Exception do |good_side|
if good_side then
d2.push_block
d2.meta_send_call 0
else
d2.push_exception
d2.set_local_depth 1, 0
d2.push :nil
d2.push_local 0
d2.swap
d2.send :break_value=, 1
d2.pop
d2.push_local 0
d2.raise_exc
end
end
end
end
end)
add_tests("dasgn_mixed",
"Compiler" => bytecode do |g|
g.push 0
g.set_local 0
g.pop
g.push :self
g.send :ns, 0, true
in_block_send :each, 1, 0, false, 1 do |d|
d.push_local 0
d.push_local_depth 0, 0
d.meta_send_op_plus
d.set_local 0
end
end)
add_tests("defined",
"Compiler" => bytecode do |g|
t = g.new_label
f = g.new_label
g.push_const :Globals
g.push_literal :$x
g.send :key?, 1
g.git t
g.push :nil
g.goto f
t.set!
g.push_literal "global-variable"
f.set!
end)
add_tests("defn_args_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.block_arg 0
d.push :nil
end
end)
add_tests("defn_args_mand",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.push :nil
end
end)
add_tests("defn_args_mand_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.block_arg 1
d.push :nil
end
end)
add_tests("defn_args_mand_opt",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 1
d.push :nil
end
end)
add_tests("defn_args_mand_opt_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 1
d.block_arg 2
d.push :nil
end
end)
add_tests("defn_args_mand_opt_splat",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 1
d.push :nil
end
end)
add_tests("defn_args_mand_opt_splat_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 1
d.block_arg 3
d.push :nil
end
end)
add_tests("defn_args_mand_opt_splat_no_name",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.optional_arg 1
d.push :nil
end
end)
add_tests("defn_args_mand_splat",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.push :nil
end
end)
add_tests("defn_args_mand_splat_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.block_arg 2
d.push :nil
end
end)
add_tests("defn_args_mand_splat_no_name",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push :self
d.push_local 0
d.push_local 1
d.send :p, 2, true
end
end)
add_tests("defn_args_none",
"Compiler" => bytecode do |g|
in_method :empty do |d|
d.push :nil
end
end)
add_tests("defn_args_opt",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 0
d.push :nil
end
end)
add_tests("defn_args_opt_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 0
d.block_arg 1
d.push :nil
end
end)
add_tests("defn_args_opt_splat",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 0
d.push :nil
end
end)
add_tests("defn_args_opt_splat_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 0
d.block_arg 2
d.push :nil
end
end)
add_tests("defn_args_opt_splat_no_name",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.optional_arg 0
d.push :nil
end
end)
add_tests("defn_args_splat",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.push :nil
end
end)
add_tests("defn_args_splat_no_name",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push :nil
end
end)
add_tests("defn_or",
"Compiler" => bytecode do |g|
in_method :"|" do |d|
d.push :nil
end
end)
add_tests("defn_rescue",
"Compiler" => bytecode do |g|
in_method :eql? do |d|
d.in_rescue :StandardError do |good_side|
if good_side then
d.push :self
d.send :uuid, 0, false
d.push_local 0
d.send :uuid, 0, false
d.meta_send_op_equal
else
d.push :false
end
end
end
end)
add_tests("defn_rescue_mri_verbose_flag",
"Compiler" => testcases["defn_rescue"]["Compiler"])
add_tests("defn_something_eh",
"Compiler" => bytecode do |g|
in_method :something? do |d|
d.push :nil
end
end)
add_tests("defn_splat_no_name",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push :self
d.push_local 0
d.send :p, 1, true
end
end)
add_tests("defn_zarray",
  "Compiler" => bytecode do |g|
    in_method :zarray do |d|
      d.make_array 0
      d.set_local 0
      d.pop
      d.push_local 0
      # TODO: we emit a ret instruction even though the last statement is
      # itself a return, so we get two return instructions, one after
      # another. We could instead detect that and only output the one.
      d.ret
    end
  end)
add_tests("defs",
"Compiler" => bytecode do |g|
g.push :self
in_method :x, true do |d|
d.push_local 0
d.push 1
d.meta_send_op_plus
end
end)
add_tests("defs_empty",
"Compiler" => bytecode do |g|
g.push :self
in_method :empty, true do |d|
d.push :nil
end
end)
add_tests("defs_empty_args",
"Compiler" => bytecode do |g|
g.push :self
in_method :empty, true do |d|
d.push :nil
end
end)
add_tests("dot2",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Range
g.push :self
g.send :a, 0, true
g.push :self
g.send :b, 0, true
g.send :new, 2
end)
# Expected bytecode for an exclusive range built from method-call endpoints:
# Range.new(a, b, true) — the trailing :true selects exclusive semantics.
add_tests("dot3",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Range
g.push :self
g.send :a, 0, true
g.push :self
g.send :b, 0, true
g.push :true
g.send :new, 3
end)
# Interpolated regexp (dregx*): the string fragments are pushed in reverse
# order, folded together with string_append, then Regexp.new(str, flags)
# is sent (the integer before :new is the flags argument — 0 or 16 below).
add_tests("dregx",
"Compiler" => bytecode do |g|
g.push_const :Regexp
g.push_literal "y" # 1
g.string_dup
g.push 1 # 2
g.push 1
g.meta_send_op_plus
g.send :to_s, 0, true
g.push_literal "x" # 3
g.string_dup
2.times do
g.string_append
end
g.push 0
g.send :new, 2
end)
add_tests("dregx_interp",
"Compiler" => bytecode do |g|
g.push_const :Regexp
g.push_ivar :@rakefile
g.send :to_s, 0, true
g.push_literal ""
g.string_dup
g.string_append
g.push 0
g.send :new, 2
end)
add_tests("dregx_n",
"Compiler" => bytecode do |g|
g.push_const :Regexp
g.push 1
g.send :to_s, 0, true
g.push_literal ""
g.string_dup
g.string_append
g.push 16
g.send :new, 2
end)
# //o regexps wrap the same construction in memoize so the Regexp is
# built only once.
add_tests("dregx_once",
"Compiler" => bytecode do |g|
memoize do
g.push_const :Regexp
g.push_literal "y" # 1
g.string_dup
g.push 1 # 2
g.push 1
g.meta_send_op_plus
g.send :to_s, 0, true
g.push_literal "x" # 3
g.string_dup
2.times do
g.string_append
end
g.push 0
g.send :new, 2
end
end)
add_tests("dregx_once_n_interp",
"Compiler" => bytecode do |g|
memoize do
g.push_const :Regexp
g.push_const :SB # 1
g.send :to_s, 0, true
g.push_const :IAC # 2
g.send :to_s, 0, true
g.push_literal "" # 3
g.string_dup
2.times do
g.string_append
end
g.push 16
g.send :new, 2
end
end)
# Interpolated strings (dstr*): same reverse-order push + string_append
# folding, without the Regexp.new wrapper.
add_tests("dstr",
"Compiler" => bytecode do |g|
g.push 1
g.set_local 0
g.pop
g.push_literal "y" # 1
g.string_dup
g.push_local 0 # 2
g.send :to_s, 0, true
g.push_literal "x" # 3
g.string_dup
2.times do
g.string_append
end
end)
add_tests("dstr_2",
"Compiler" => bytecode do |g|
g.push 1
g.set_local 0
g.pop
g.push_literal "y" # 1
g.string_dup
g.push_literal "%.2f" # 2
g.string_dup
g.push 3.14159
g.send :%, 1, false
g.send :to_s, 0, true
g.push_literal "x" # 3
g.string_dup
2.times do
g.string_append
end
end)
# Nested interpolation: the inner dstr is assembled first, then used as
# the format-string receiver for :% in the outer dstr.
add_tests("dstr_3",
"Compiler" => bytecode do |g|
g.push 2
g.set_local 0
g.pop
g.push 1
g.set_local 1
g.pop
g.push_literal "y" # - # 1
g.string_dup
g.push_literal "f" # 1
g.string_dup
g.push_local 0 # 2
g.send :to_s, 0, true
g.push_literal "%." # 3
g.string_dup
2.times do
g.string_append
end
g.push 3.14159 # - # 2
g.send :%, 1, false
g.send :to_s, 0, true
g.push_literal "x" # - # 3
g.string_dup
2.times do
g.string_append
end
end)
add_tests("dstr_concat",
"Compiler" => bytecode do |g|
g.push 66 # 1
g.send :to_s, 0, true
g.push_literal "55" # 2
g.string_dup
g.push 44 # 3
g.send :to_s, 0, true
g.push_literal "cd" # 4
g.string_dup
g.push_literal "aa" # 5
g.string_dup
g.push 22 # 6
g.send :to_s, 0, true
g.push_literal "" # 7
g.string_dup
6.times do
g.string_append
end
end)
# Mixes every variable kind into one dstr: class var (via
# class_variable_get), ivar, and a global read through the Globals table.
add_tests("dstr_gross",
"Compiler" => bytecode do |g|
g.push_literal " d" # 1
g.string_dup
g.push_context # 2
g.push_literal :@@cvar
g.send :class_variable_get, 1
g.send :to_s, 0, true
g.push_literal " c " # 3
g.string_dup
g.push_ivar :@ivar # 4
g.send :to_s, 0, true
g.push_literal " b " # 5
g.string_dup
g.push_cpath_top # 6
g.find_const :Globals
g.push_literal :$global
g.send :[], 1
g.send :to_s, 0, true
g.push_literal "a " # 7
g.string_dup
6.times do
g.string_append
end
end)
add_tests("dstr_heredoc_expand",
"Compiler" => bytecode do |g|
g.push_literal "blah\n" # 1
g.string_dup
g.push 1 # 2
g.push 1
g.meta_send_op_plus
g.send :to_s, 0, true
g.push_literal " blah\n" # 3
g.string_dup
2.times do
g.string_append
end
end)
add_tests("dstr_heredoc_windoze_sucks",
"Compiler" => bytecode do |g|
g.push_literal "_valid_feed\n" # 1
g.string_dup
g.push :self # 2
g.send :action, 0, true
g.send :to_s, 0, true
g.push_literal "def test_" # 3
g.string_dup
2.times do
g.string_append
end
end)
add_tests("dstr_heredoc_yet_again",
"Compiler" => bytecode do |g|
g.push_literal "\n" # 1
g.string_dup
g.push_literal "(eval)" # 2
g.string_dup
g.push_literal "' s2\n" # 3
g.string_dup
g.push_const :RUBY_PLATFORM # 4
g.send :to_s, 0, true
g.push_literal "s1 '" # 5
g.string_dup
4.times do
g.string_append
end
end)
add_tests("dstr_nest",
"Compiler" => bytecode do |g|
g.push_literal "] after" # 1
g.string_dup
g.push :self # 2
g.send :nest, 0, true
g.send :to_s, 0, true
g.push_literal "before [" # 3
g.string_dup
2.times do
g.string_append
end
end)
add_tests("dstr_str_lit_start",
"Compiler" => bytecode do |g|
g.push_literal ")" # 1
g.string_dup
g.push_exception # 2
g.send :class, 0, false
g.send :to_s, 0, true
g.push_literal " (" # 3
g.string_dup
g.push_exception # 4
g.send :message, 0, false
g.send :to_s, 0, true
g.push_literal ": warning: " # 5
g.string_dup
g.push 1 # 6
g.send :to_s, 0, true
g.push_literal "blah(eval):" # 7
g.string_dup
6.times do
g.string_append
end
end)
add_tests("dstr_the_revenge",
"Compiler" => bytecode do |g|
g.push_literal ")" # 1
g.string_dup
g.push 1 # 2
g.send :to_s, 0, true
g.push_literal ":" # 3
g.string_dup
g.push_literal "(eval)" # 4
g.string_dup
g.push_literal " (" # 5
g.string_dup
g.push :self # 6
g.send :to, 0, true
g.send :to_s, 0, true
g.push_literal " middle " # 7
g.string_dup
g.push :self # 8
g.send :from, 0, true
g.send :to_s, 0, true
g.push_literal "before " # 9
g.string_dup
8.times do
g.string_append
end
end)
# Interpolated symbol: assemble the dstr, then :to_sym the result.
add_tests("dsym",
"Compiler" => bytecode do |g|
g.push_literal "y"
g.string_dup
g.push 1
g.push 1
g.meta_send_op_plus
g.send :to_s, 0, true
g.push_literal "x"
g.string_dup
g.string_append
g.string_append
g.send :to_sym, 0, true
end)
# Interpolated backtick string: assemble the command string, then send
# :"`" to self with the assembled string as the single argument.
add_tests("dxstr",
"Compiler" => bytecode do |g|
g.push 5
g.set_local 0
g.pop
g.push :self
g.push_local 0
g.send :to_s, 0, true
g.push_literal "touch "
g.string_dup
g.string_append
g.send :"`", 1, true
end)
# TODO: OMFG!
# Hand-written begin/rescue/ensure skeleton: labels 5..24 are the rescue
# dispatch chain (SyntaxError, then Exception, then re-raise), label_26 is
# the normal-path join, label_28 the ensure entry, label_30 the
# exceptional ensure path (push 5; re-raise).
add_tests("ensure",
"Compiler" => bytecode do |g|
# TODO: refactor in_rescue to work with this... I think I have the
# pattern down now
top = g.new_label
bottom = g.new_label
label_1 = g.new_label
label_5 = g.new_label
label_10 = g.new_label
label_14 = g.new_label
label_19 = g.new_label
label_24 = g.new_label
label_26 = g.new_label
label_28 = g.new_label
label_30 = g.new_label
top.set!
g.push_modifiers
label_1.set!
label_1.set! # NOTE(review): label_1 is set twice in a row — looks redundant; verify against compiler output
g.push 1
g.push 1
g.meta_send_op_plus
g.goto label_26
label_5.set!
g.push_const :SyntaxError
g.push_exception
g.send :===, 1
g.git label_10
g.goto label_14
label_10.set!
g.push_exception
g.set_local 0
g.push 2
g.goto label_28
label_14.set!
g.push_const :Exception
g.push_exception
g.send :===, 1
g.git label_19
g.goto label_24
label_19.set!
g.push_exception
g.set_local 1
g.push 3
g.clear_exception
g.goto label_28
label_24.set!
g.push_exception
g.raise_exc
label_26.set!
g.pop
g.push 4
label_28.set!
g.pop_modifiers
g.goto bottom
label_30.set!
g.push 5
g.pop
g.push_exception
g.raise_exc
bottom.set!
g.push 5
g.pop
end)
add_tests("false",
"Compiler" => bytecode do |g|
g.push :false
end)
# fcall*: calls on implicit self; the trailing `true` on g.send marks a
# private/self send throughout this table.
add_tests("fcall_arglist",
"Compiler" => bytecode do |g|
g.push :self
g.push 42
g.send :m, 1, true
end)
# Trailing hash args compile to a literal Hash[...] built inline.
add_tests("fcall_arglist_hash",
"Compiler" => bytecode do |g|
g.push :self
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.send :m, 1, true
end)
add_tests("fcall_arglist_norm_hash",
"Compiler" => bytecode do |g|
g.push :self
g.push 42
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.send :m, 2, true
end)
# A splat after normal args switches to send_with_splat; :nil here is the
# (absent) block argument slot.
add_tests("fcall_arglist_norm_hash_splat",
"Compiler" => bytecode do |g|
g.push :self
g.push 42
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.push :self
g.send :c, 0, true
g.cast_array
g.push :nil
g.send_with_splat :m, 2, true, false
end)
add_tests("fcall_block",
"Compiler" => bytecode do |g|
g.push :self
g.push_unique_literal :b
g.in_block_send :a, 0, 1 do |d|
d.push_unique_literal :c
end
end)
add_tests("fcall_index_space",
"Compiler" => bytecode do |g|
g.push :self
g.push 42
g.make_array 1
g.send :a, 1, true
end)
# `block_given?`-style keyword: branch on push_block.
add_tests("fcall_keyword",
"Compiler" => bytecode do |g|
t = g.new_label
f = g.new_label
g.push_block
g.gif f
g.push 42
g.goto t
f.set!
g.push :nil
t.set!
end)
# Flip-flop operators are not supported by this compiler backend.
add_tests("flip2",
"Compiler" => :skip)
add_tests("flip2_method",
"Compiler" => :skip)
add_tests("flip3",
"Compiler" => :skip)
# `for` desugars to #each with a block that assigns the loop variable.
# NOTE(review): the 1.0 argument to in_block_send is a Float where every
# other call site passes an Integer — confirm against the helper's signature.
add_tests("for",
"Compiler" => bytecode do |g|
g.push :self
g.send :ary, 0, true
in_block_send :each, 1.0, 0, false, 1 do |d|
d.push :self
d.push_local 0
d.send :puts, 1, true
end
end)
add_tests("for_no_body",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Range
g.push 0
g.push :self
g.send :max, 0, true
g.send :new, 2
in_block_send :each, 1.0, 0, false, 1 do |d|
d.push :nil
end
end)
# Global variable reads/writes go through the ::Globals table with the
# symbolized name as the key.
add_tests("gasgn",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :"$x" # REFACTOR g.get_global("$x")
g.push 42
g.send :[]=, 2
end)
add_tests("global",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :"$stderr"
g.send :[], 1
end)
add_tests("gvar",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :"$x" # REFACTOR g.get_global("$x")
g.send :[], 1
end)
add_tests("gvar_underscore",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :"$_"
g.send :[], 1
end)
add_tests("gvar_underscore_blah",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :"$__blah"
g.send :[], 1
end)
# Hash literals build via Hash[k1, v1, k2, v2].
add_tests("hash",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Hash
g.push 1
g.push 2
g.push 3
g.push 4
g.send :[], 4
end)
add_tests("hash_rescue",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Hash
g.push 1
g.in_rescue :StandardError do |good_side|
if good_side then
g.push 2
else
g.push 3
end
end
g.send :[], 2
end)
add_tests("iasgn",
"Compiler" => bytecode do |g|
g.push 4
g.set_ivar :@a
end)
# if-family: gif branches on false/nil, git on truthy; each branch pushes
# its value and joins at a shared bottom label.
add_tests("if_block_condition",
"Compiler" => bytecode do |g|
f = g.new_label
bottom = g.new_label
g.push 5
g.set_local 0
g.pop
g.push_local 0
g.push 1
g.meta_send_op_plus
g.gif f
g.push :nil
g.goto bottom
f.set!
g.push :nil
bottom.set!
end)
add_tests("if_lasgn_short",
"Compiler" => bytecode do |g|
f = g.new_label
bottom = g.new_label
g.push :self
g.send :obj, 0, true
g.send :x, 0, false
g.set_local 0
g.gif f
g.push_local 0
g.send :do_it, 0, false
g.goto bottom
f.set!
g.push :nil
bottom.set!
end)
add_tests("if_nested",
"Compiler" => bytecode do |g|
yep = g.new_label
done = g.new_label
inner_done = g.new_label
nope = g.new_label
g.push :true
g.git yep
g.push :false
g.gif nope
g.push :nil
g.ret
g.goto inner_done
nope.set!
g.push :nil
inner_done.set!
g.goto done
yep.set!
g.push :nil
done.set!
end)
add_tests("if_post",
"Compiler" => bytecode do |g|
nope = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.gif nope
g.push :self
g.send :a, 0, true
g.goto done
nope.set!
g.push :nil
done.set!
end)
add_tests("if_post_not",
"Compiler" => bytecode do |g|
yep = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.git yep
g.push :self
g.send :a, 0, true
g.goto done
yep.set!
g.push :nil
done.set!
end)
add_tests("if_pre",
"Compiler" => bytecode do |g|
nope = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.gif nope
g.push :self
g.send :a, 0, true
g.goto done
nope.set!
g.push :nil
done.set!
end)
add_tests("if_pre_not",
"Compiler" => bytecode do |g|
yep = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.git yep
g.push :self
g.send :a, 0, true
g.goto done
yep.set!
g.push :nil
done.set!
end)
# iter_*: blocks compile via in_block_send; block-local variables are
# addressed with push_local_depth/set_local_depth (depth, slot).
add_tests("iter_call_arglist_space",
"Compiler" => bytecode do |g|
g.push :self
g.push 1
in_block_send :a, 1, 1 do |d|
d.push :self
d.send :d, 0, true
end
end)
add_tests("iter_dasgn_curr_dasgn_madness",
"Compiler" => bytecode do |g|
g.push :self
g.send :as, 0, true
in_block_send :each, 1, 0, false do |d|
d.push_local_depth 0, 1
d.push_local_depth 0, 0
d.push :false
d.send :b, 1, false
d.meta_send_op_plus
d.set_local_depth 0, 1
end
end)
add_tests("iter_downto",
"Compiler" => bytecode do |g|
g.push 3
g.push 1
in_block_send :downto, 1, 1, false do |d|
d.push :self
d.push_local_depth 0, 0
d.send :to_s, 0, false
d.send :puts, 1, true
end
end)
add_tests("iter_each_lvar",
"Compiler" => bytecode do |g|
g.push 1
g.push 2
g.push 3
g.make_array 3
g.set_local 0
g.pop
g.push_local 0
in_block_send :each, 1, 0, false, 1 do |d|
d.push :self
d.push_local_depth 0, 0
d.send :to_s, 0, false
d.send :puts, 1, true
end
end)
# Nested blocks: the inner block reaches the outer block's variable at
# depth 1, its own at depth 0.
add_tests("iter_each_nested",
"Compiler" => bytecode do |g|
g.push 1
g.push 2
g.push 3
g.make_array 3
g.set_local 0
g.pop
g.push 4
g.push 5
g.push 6
g.push 7
g.make_array 4
g.set_local 1
g.pop
g.push_local 0
in_block_send :each, 1, 0, false, 2 do |d|
d.push_local 1
d.in_block_send :each, 1, 0, false, 2, true do |d2|
d2.push :self
d2.push_local_depth 1, 0
d2.send :to_s, 0, false
d2.send :puts, 1, true
d2.pop
d2.push :self
d2.push_local_depth 0, 0
d2.send :to_s, 0, false
d2.send :puts, 1, true
end
end
end)
add_tests("iter_loop_empty",
"Compiler" => bytecode do |g|
top = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.goto top
bottom.set!
g.pop_modifiers
end)
# NOTE(review): the next three masgn variants have identical expected
# bytecode — presumably intentional (same observable effect), verify.
add_tests("iter_masgn_2",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :a, 2 do |d|
d.push :self
d.push_local_depth 0, 1
d.send :p, 1, true
end
end)
add_tests("iter_masgn_args_splat",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :a, 2 do |d|
d.push :self
d.push_local_depth 0, 1
d.send :p, 1, true
end
end)
add_tests("iter_masgn_args_splat_no_name",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :a, 2 do |d|
d.push :self
d.push_local_depth 0, 1
d.send :p, 1, true
end
end)
add_tests("iter_masgn_splat",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :a, -1 do |d|
d.push :self
d.push_local_depth 0, 0
d.send :p, 1, true
end
end)
add_tests("iter_masgn_splat_no_name",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :a, -2 do |d|
d.push :self
d.push :self
d.send :c, 0, true
d.send :p, 1, true
end
end)
add_tests("iter_shadowed_var",
"Compiler" => bytecode do |g|
g.push :self
g.in_block_send :a, 1 do |d|
d.push :self
d.in_block_send :b, 1, 0, true, 0, true, 1 do |d2|
d2.push :self
d2.push_local_depth 1, 0
d2.send :puts, 1, true
end
end
end)
add_tests("iter_upto",
"Compiler" => bytecode do |g|
g.push 1
g.push 3
in_block_send :upto, 1, 1, false do |d|
d.push :self
d.push_local_depth 0, 0
d.send :to_s, 0, false
d.send :puts, 1, true
end
end)
# while-loop inside push_modifiers/pop_modifiers; condition tested at top,
# :nil pushed as the loop's value on exit.
add_tests("iter_while",
"Compiler" => bytecode do |g|
top = g.new_label
f = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push 10
g.set_local 0
g.pop
g.push_modifiers
top.set!
g.push_local 0
g.push 1
g.send :>=, 1, false
g.gif f
dunno1.set!
g.push :self
g.push_literal "hello"
g.string_dup
g.send :puts, 1, true
g.pop
g.push_local 0
g.push 1
g.meta_send_op_minus
g.set_local 0
g.pop
g.goto top
f.set!
g.push :nil
bottom.set!
g.pop_modifiers
end)
add_tests("lasgn_array",
"Compiler" => bytecode do |g|
g.push_literal "foo"
g.string_dup
g.push_literal "bar"
g.string_dup
g.make_array 2
g.set_local 0
end)
add_tests("lasgn_call",
"Compiler" => bytecode do |g|
g.push 2
g.push 3
g.meta_send_op_plus
g.set_local 0
end)
# lit_*: simple literal pushes.
add_tests("lit_bool_false",
"Compiler" => bytecode do |g|
g.push :false
end)
add_tests("lit_bool_true",
"Compiler" => bytecode do |g|
g.push :true
end)
add_tests("lit_float",
"Compiler" => bytecode do |g|
g.push 1.1
end)
add_tests("lit_long",
"Compiler" => bytecode do |g|
g.push 1
end)
add_tests("lit_long_negative",
"Compiler" => bytecode do |g|
g.push(-1)
end)
add_tests("lit_range2",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Range
g.push 1
g.push 10
g.send :new, 2
end)
add_tests("lit_range3",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Range
g.push 1
g.push 10
g.push :true
g.send :new, 3
end)
# Literal (non-interpolated) regexps are always memoized.
add_tests("lit_regexp",
"Compiler" => bytecode do |g|
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 0
g.send :new, 2
end
end)
add_tests("lit_regexp_i_wwtt",
"Compiler" => bytecode do |g|
g.push :self
g.send :str, 0, true
g.memoize do
g.push_const :Regexp
g.push_literal ""
g.push 1
g.send :new, 2
end
g.send :split, 1, false
end)
add_tests("lit_regexp_n",
"Compiler" => bytecode do |g|
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 16
g.send :new, 2
end
end)
add_tests("lit_regexp_once", # TODO: same as lit_regexp. verify
"Compiler" => bytecode do |g|
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 0
g.send :new, 2
end
end)
add_tests("lit_sym",
"Compiler" => bytecode do |g|
g.push_unique_literal :x
end)
add_tests("lit_sym_splat",
"Compiler" => bytecode do |g|
g.push_unique_literal :"*args"
end)
# Local variable defined outside a method must not leak into the method
# body; the rescue inside the block gets its own slot at depth 0.
add_tests("lvar_def_boundary",
"Compiler" => bytecode do |g|
g.push 42
g.set_local 0
g.pop
in_method :a do |d|
d.push :self
d.in_block_send :c, 0 do |d2|
d2.in_rescue :RuntimeError do |good_side|
if good_side then
d2.push :self
d2.send :do_stuff, 0, true
else
d2.push_exception
d2.set_local_depth 0, 0
d2.push :self
d2.push_local_depth 0, 0
d2.send :puts, 1, true
end
end
end
end
end)
# masgn*: parallel assignment. Fixed-arity forms push the RHS values,
# rotate, then set_local/pop each target; splat/to_ary forms go through
# cast_tuple + shift_tuple per leading target and cast_array for the rest.
# Each statement leaves :true as the expression value.
add_tests("masgn",
"Compiler" => bytecode do |g|
g.push :self
g.send :c, 0, true
g.push :self
g.send :d, 0, true
g.rotate 2
g.set_local 0
g.pop
g.set_local 1
g.pop
g.push :true
end)
add_tests("masgn_argscat",
"Compiler" => bytecode do |g|
g.push 1
g.push 2
g.make_array 2
g.push 3
g.push 4
g.make_array 2
g.cast_array
g.send :+, 1
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.cast_array
g.set_local 2
g.pop
g.push :true
end)
add_tests("masgn_attrasgn",
"Compiler" => bytecode do |g|
g.push :self
g.send :d, 0, true
g.push :self
g.send :e, 0, true
g.rotate 2
g.set_local 0
g.pop
g.push :self
g.send :b, 0, true
g.send :c=, 1, false
g.pop
g.push :true
end)
add_tests("masgn_attrasgn_idx",
"Compiler" => bytecode do |g|
g.make_array 0
g.push 1
g.push 2
g.rotate 3
g.set_local 0
g.pop
g.set_local 1
g.pop
g.set_local 2
g.pop
g.push :true
g.pop
g.push_local 0
g.push_local 2
g.send :[], 1, false
g.push_local 0
g.push_local 1
g.send :[], 1, false
g.rotate 2
g.push_local 0
g.push_local 1
g.send :[]=, 2, false
g.pop
g.push_local 0
g.push_local 2
g.send :[]=, 2, false
g.pop
g.push :true
end)
add_tests("masgn_iasgn",
"Compiler" => bytecode do |g|
g.push :self
g.send :c, 0, true
g.push :self
g.send :d, 0, true
g.rotate 2
g.set_local 0
g.pop
g.set_ivar :@b
g.pop
g.push :true
end)
add_tests("masgn_masgn",
"Compiler" => bytecode do |g|
g.push 1
g.push 2
g.push 3
g.make_array 2
g.make_array 2
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.cast_tuple
g.shift_tuple
g.set_local 1
g.pop
g.shift_tuple
g.set_local 2
g.pop
g.pop
g.push :true # FIX: necessary?!?
g.pop
g.pop
g.push :true
end)
add_tests("masgn_splat",
"Compiler" => bytecode do |g|
g.push :self
g.send :d, 0, true
g.push :self
g.send :e, 0, true
g.push :self
g.send :f, 0, true
g.push :self
g.send :g, 0, true
g.make_array 2 # TODO: 2?!?
g.set_local 2 # TODO: backwards
g.pop
g.set_local 1
g.pop
g.set_local 0
g.pop
g.push :true
end)
add_tests("masgn_splat_no_name_to_ary",
"Compiler" => bytecode do |g|
g.push :self
g.send :c, 0, true
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.cast_array
g.cast_array
g.pop
g.push :true
end)
add_tests("masgn_splat_no_name_trailing",
"Compiler" => bytecode do |g|
g.push :self
g.send :c, 0, true
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.pop # TODO: why?
g.push :true
end)
add_tests("masgn_splat_to_ary",
"Compiler" => bytecode do |g|
g.push :self
g.send :d, 0, true
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.cast_array
g.set_local 2
g.pop
g.push :true
end)
add_tests("masgn_splat_to_ary2",
"Compiler" => bytecode do |g|
g.push :self
g.send :d, 0, true
g.push_literal "f"
g.string_dup
g.send :e, 1, false
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.cast_array
g.set_local 2
g.pop
g.push :true
end)
# match*: bare /x/ in condition position matches against $_ (read through
# the Globals table); match2/match3 are explicit =~ with the regexp on the
# left/right respectively.
add_tests("match",
"Compiler" => bytecode do |g|
g.push_literal :$_ # REFACTOR - we use this block a lot
g.push_cpath_top
g.find_const :Globals # FIX: find the other Globals, order flipped
g.send :[], 1
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 0
g.send :new, 2
end
f = g.new_label
t = g.new_label
g.send :=~, 1
g.gif f
g.push 1
g.goto t
f.set!
g.push :nil
t.set!
end)
add_tests("match2",
"Compiler" => bytecode do |g|
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 0
g.send :new, 2
end
g.push_literal "blah"
g.string_dup
g.send :=~, 1
end)
add_tests("match3",
"Compiler" => bytecode do |g|
g.push_literal "blah"
g.string_dup
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 0
g.send :new, 2
end
g.send :=~, 1
end)
# module definitions compile via the in_module helper.
add_tests("module",
"Compiler" => bytecode do |g|
in_module :X do |d|
d.in_method :y do |d2|
d2.push :nil
end
end
end)
add_tests("module_scoped",
"Compiler" => bytecode do |g|
in_module "X::Y" do |d|
d.push :self
d.send :c, 0, true
end
end)
add_tests("module_scoped3",
"Compiler" => bytecode do |g|
in_module :Y do |d|
d.push :self
d.send :c, 0, true
end
end)
# `next` in a while loop jumps back to the top-of-loop label.
add_tests("next",
"Compiler" => bytecode do |g|
top = g.new_label
bottom = g.new_label
dunno1 = g.new_label
f = g.new_label
g.push_modifiers
top.set!
g.push :false
g.gif f
g.goto top
g.goto dunno1
f.set!
g.push :nil
dunno1.set!
g.pop
g.goto top
bottom.set!
g.pop_modifiers
end)
add_tests("next_arg",
"Compiler" => testcases['next']['Compiler']) # FIX: not the same
add_tests("not",
"Compiler" => bytecode do |g|
f = g.new_label
t = g.new_label
g.push :true
g.git f
g.push :true
g.goto t
f.set!
g.push :false
t.set!
end)
add_tests("nth_ref",
"Compiler" => bytecode do |g|
g.push_context
g.push 1
g.send :nth_ref, 1
end)
# op_asgn1: a[i] ||= / &&= / += on an array local. The ||= and &&= forms
# dup the fetched element, branch on it, and either store the new value or
# swap/pop to keep the existing one; += always fetches, adds, stores.
add_tests("op_asgn1",
"Compiler" => bytecode do |g|
l_or = g.new_label
l_and = g.new_label
l_idx = g.new_label
l_rhs = g.new_label
g.make_array 0
g.set_local 0
g.pop
g.push_local 0
g.dup
g.push 1
g.send :[], 1
g.dup
g.git l_or
g.pop
g.push 1
g.push 10
g.send :[]=, 2
g.goto l_and
l_or.set!
g.swap
g.pop
l_and.set!
g.pop
g.push_local 0
g.dup
g.push 2
g.send :[], 1
g.dup
g.gif l_idx
g.pop
g.push 2
g.push 11
g.send :[]=, 2
g.goto l_rhs
l_idx.set!
g.swap
g.pop
l_rhs.set!
g.pop
g.push_local 0
g.dup
g.push 3
g.send :[], 1
g.push 12
g.send :+, 1
g.push 3
g.swap
g.send :[]=, 2
end)
# Same pattern as op_asgn1 but the receiver is the ivar @b.
add_tests("op_asgn1_ivar",
"Compiler" => bytecode do |g|
l_or = g.new_label
l_and = g.new_label
l_idx = g.new_label
l_rhs = g.new_label
g.make_array 0
g.set_ivar :@b
g.pop
g.push_ivar :@b
g.dup
g.push 1
g.send :[], 1
g.dup
g.git l_or
g.pop
g.push 1
g.push 10
g.send :[]=, 2
g.goto l_and
l_or.set!
g.swap
g.pop
l_and.set!
g.pop
g.push_ivar :@b
g.dup
g.push 2
g.send :[], 1
g.dup
g.gif l_idx
g.pop
g.push 2
g.push 11
g.send :[]=, 2
g.goto l_rhs
l_idx.set!
g.swap
g.pop
l_rhs.set!
g.pop
g.push_ivar :@b
g.dup
g.push 3
g.send :[], 1
g.push 12
g.send :+, 1
g.push 3
g.swap
g.send :[]=, 2
end)
# op_asgn2: attribute ||= / &&= / += via getter/setter pairs (var/var=,
# f/f=) on a Struct instance; same dup/branch/swap shape as op_asgn1.
add_tests("op_asgn2",
"Compiler" => bytecode do |g|
l_or = g.new_label
l_and = g.new_label
l_plus = g.new_label
l_or2 = g.new_label
l_rhs = g.new_label
bottom = g.new_label
g.push_const :Struct
g.push_unique_literal :var
g.send :new, 1, false
g.set_local 0
g.pop
g.push_local 0
g.push :nil
g.send :new, 1, false
g.set_local 1
g.pop
g.push_local 1
g.dup
g.send :var, 0
g.dup
g.git l_or
g.pop
g.push 20
g.send :var=, 1
g.goto l_and
l_or.set!
g.swap
g.pop
l_and.set!
g.pop
g.push_local 1
g.dup
g.send :var, 0
g.dup
g.gif l_plus
g.pop
g.push 21
g.send :var=, 1
g.goto l_or2
l_plus.set!
g.swap
g.pop
l_or2.set!
g.pop
g.push_local 1
g.dup
g.send :var, 0
g.push 22
g.send :+, 1
g.send :var=, 1
g.pop
g.push_local 1
g.send :d, 0, false
g.send :e, 0, false
g.dup
g.send :f, 0
g.dup
g.git l_rhs
g.pop
g.push 42
g.send :f=, 1
g.goto bottom
l_rhs.set!
g.swap
g.pop
bottom.set!
end)
add_tests("op_asgn2_self",
"Compiler" => bytecode do |g|
t = g.new_label
f = g.new_label
g.push :self
g.dup
g.send :Bag, 0
g.dup
g.git t
g.pop
g.push_const :Bag
g.send :new, 0, false
g.send :"Bag=", 1
g.goto f
t.set!
g.swap
g.pop
f.set!
end)
# op_asgn_and/or on locals and ivars: dup current value, branch, assign
# only on the failing side of the test.
add_tests("op_asgn_and",
"Compiler" => bytecode do |g|
g.push 0
g.set_local 0
g.pop
g.push_local 0
g.dup
f = g.new_label
g.gif f
g.pop
g.push 2
g.set_local 0
f.set!
end)
add_tests("op_asgn_and_ivar2",
"Compiler" => bytecode do |g|
t = g.new_label
g.push_ivar :@fetcher
g.dup
g.gif t
g.pop
g.push :self
g.push_const :Gem
g.send :configuration, 0, false
g.push_unique_literal :http_proxy
g.send :[], 1, false
g.send :new, 1, true
g.set_ivar :@fetcher
t.set!
end)
add_tests("op_asgn_or",
"Compiler" => bytecode do |g|
t = g.new_label
g.push 0
g.set_local 0
g.pop # FIX: lame
g.push_local 0
g.dup
g.git t
g.pop
g.push 1
g.set_local 0
t.set!
end)
add_tests("op_asgn_or_block",
"Compiler" => bytecode do |g|
t = g.new_label
g.push_local 0
g.dup
g.git t
g.pop
in_rescue :StandardError do |good_side|
if good_side then
g.push :self
g.send :b, 0, true
else
g.push :self
g.send :c, 0, true
end
end
g.set_local 0
t.set!
end)
add_tests("op_asgn_or_ivar",
"Compiler" => bytecode do |g|
t = g.new_label
g.push_ivar :@v
g.dup
g.git t
g.pop
g.push_cpath_top
g.find_const :Hash
g.send :[], 0
g.set_ivar :@v
t.set!
end)
# NOTE(review): identical to op_asgn_and_ivar2 except the branch opcode
# (git vs gif) — that single opcode encodes ||= vs &&=.
add_tests("op_asgn_or_ivar2",
"Compiler" => bytecode do |g|
t = g.new_label
g.push_ivar :@fetcher
g.dup
g.git t
g.pop
g.push :self
g.push_const :Gem
g.send :configuration, 0, false
g.push_unique_literal :http_proxy
g.send :[], 1, false
g.send :new, 1, true
g.set_ivar :@fetcher
t.set!
end)
# Short-circuit ||: dup the LHS, jump past the RHS when truthy.
add_tests("or",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.dup
lhs_true = g.new_label
g.git lhs_true
g.pop
g.push :self
g.send :b, 0, true
lhs_true.set!
end)
add_tests("or_big",
"Compiler" => bytecode do |g|
j1 = g.new_label
j2 = g.new_label
j3 = g.new_label
g.push :self
g.send :a, 0, true
g.dup
g.git j1
g.pop
g.push :self
g.send :b, 0, true
j1.set!
g.dup
g.git j3
g.pop
g.push :self
g.send :c, 0, true
g.dup
g.gif j2
g.pop
g.push :self
g.send :d, 0, true
j2.set!
j3.set!
end)
add_tests("or_big2",
"Compiler" => testcases['or_big']['Compiler'])
# Optional (defaulted) args: passed_arg guards skip the default-assign
# code when the caller supplied a value.
add_tests("parse_floats_as_args",
"Compiler" => bytecode do |g|
in_method :x do |d|
opt_arg_1 = d.new_label
opt_arg_2 = d.new_label
d.passed_arg 0
d.git opt_arg_1
d.push 0.0
d.set_local 0
d.pop
opt_arg_1.set!
d.passed_arg 1
d.git opt_arg_2
d.push 0.0
d.set_local 1
d.pop
opt_arg_2.set!
d.push_local 0
d.push_local 1
d.meta_send_op_plus
end
end)
add_tests("postexe",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :at_exit, 0 do |d|
d.push 1
end
end)
add_tests("proc_args_0",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :proc, 0 do |d|
d.push :self
d.send :x, 0, true
d.push 1
d.meta_send_op_plus
end
end)
add_tests("proc_args_1",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :proc, 1 do |d|
d.push_local_depth 0, 0
d.push 1
d.meta_send_op_plus
end
end)
add_tests("proc_args_2",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :proc, 2 do |d|
d.push_local_depth 0, 0
d.push_local_depth 0, 1
d.meta_send_op_plus
end
end)
add_tests("proc_args_no", # TODO shouldn't 0 bitch if there are args?
"Compiler" => testcases['proc_args_0']['Compiler'])
# `redo` jumps back to the top label without re-evaluating the condition
# (same shape as `next` here since the body is the jump itself).
add_tests("redo",
"Compiler" => bytecode do |g|
top = g.new_label
f = g.new_label
t = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :false
g.gif f
g.goto top
g.goto t
f.set!
g.push :nil
t.set!
g.pop
g.goto top
bottom.set!
g.pop_modifiers
end)
# rescue*: compiled through the in_rescue helper; good_side selects which
# arm of the begin/rescue pair is being emitted.
add_tests("rescue",
"Compiler" => bytecode do |g|
in_rescue :StandardError do |good_side|
if good_side then
g.push :self
g.send :blah, 0, true
else
g.push :nil
end
end
end)
add_tests("rescue_block_body",
"Compiler" => bytecode do |g|
in_rescue :StandardError, :wtf do |good_side| # FIX: wtf param
if good_side then
g.push :self
g.send :a, 0, true
else
g.push_exception
g.set_local 0
g.push :self
g.send :c, 0, true
g.pop
g.push :self
g.send :d, 0, true
end
end
end)
add_tests("rescue_block_nada",
"Compiler" => bytecode do |g|
in_rescue :StandardError do |good_side|
if good_side then
g.push :self
g.send :blah, 0, true
else
g.push :nil
end
end
end)
add_tests("rescue_exceptions",
"Compiler" => bytecode do |g|
in_rescue :RuntimeError do |good_side|
if good_side then
g.push :self
g.send :blah, 0, true
else
g.push_exception
g.set_local 0
g.push :nil
end
end
end)
# `retry` outside a rescue raises LocalJumpError at runtime.
add_tests("retry",
"Compiler" => bytecode do |g| # TODO: maybe have a real example?
g.push :self
g.push_const :LocalJumpError
g.push_literal "retry used in invalid context"
g.send :raise, 2, true
end)
add_tests("return_0",
"Compiler" => bytecode do |g|
g.push :nil
g.ret
end)
add_tests("return_1",
"Compiler" => bytecode do |g|
g.push 1
g.ret
end)
add_tests("return_n",
"Compiler" => bytecode do |g|
g.push 1
g.push 2
g.push 3
g.make_array 3
g.ret
end)
# class << obj: verify the metaclass, open it, attach the body as
# __metaclass_init__ and invoke it.
add_tests("sclass",
"Compiler" => bytecode do |g|
g.push :self
g.dup
g.send :__verify_metaclass__, 0 # TODO: maybe refactor...
g.pop
g.open_metaclass
g.dup
g.push_literal_desc do |d2|
d2.push 42
d2.ret
end
g.swap
g.attach_method :__metaclass_init__
g.pop
g.send :__metaclass_init__, 0
end)
add_tests("sclass_trailing_class",
"Compiler" => bytecode do |g|
in_class :A do |d|
d.push :self
d.dup
d.send :__verify_metaclass__, 0 # TODO: maybe refactor...
d.pop
d.open_metaclass
d.dup
d.push_literal_desc do |d2|
d2.push :self
d2.send :a, 0, true
d2.ret
end
d.swap
d.attach_method :__metaclass_init__
d.pop
d.send :__metaclass_init__, 0
d.pop
d.push :nil
d.open_class :B
end
end)
add_tests("splat",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push :self
d.push_local 0
d.cast_array
d.push :nil
d.send_with_splat :a, 0, true, false
end
end)
# str*: plain string literals; adjacent literals and heredocs are already
# merged into a single push_literal by the time bytecode is emitted.
add_tests("str",
"Compiler" => bytecode do |g|
g.push_literal "x"
g.string_dup
end)
add_tests("str_concat_newline",
"Compiler" => bytecode do |g|
g.push_literal "before after"
g.string_dup
end)
add_tests("str_concat_space",
"Compiler" => testcases["str_concat_newline"]["Compiler"])
add_tests("str_heredoc",
"Compiler" => bytecode do |g|
g.push_literal " blah\nblah\n"
g.string_dup
end)
add_tests("str_heredoc_call",
"Compiler" => bytecode do |g|
g.push_literal " blah\nblah\n"
g.string_dup
g.send :strip, 0, false
end)
add_tests("str_heredoc_double",
"Compiler" => bytecode do |g|
g.push_local 0
g.push_literal " first\n"
g.string_dup
g.push :self
g.send :b, 0, true
g.meta_send_op_plus
g.push_literal " second\n"
g.string_dup
g.meta_send_op_plus
g.meta_send_op_plus
g.set_local 0
end)
add_tests("str_heredoc_indent",
"Compiler" => bytecode do |g|
g.push_literal " blah\nblah\n\n"
g.string_dup
end)
add_tests("str_interp_file",
"Compiler" => bytecode do |g|
g.push_literal "file = (eval)\n"
g.string_dup
end)
add_tests("structure_extra_block_for_dvar_scoping",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.in_block_send :b, 2, 0, false do |d|
f = d.new_label
t = d.new_label
d.push :self
d.send :e, 0, true
d.push_local_depth 0, 0
d.send :f, 1, false
d.git f
d.push :false
d.set_local_depth 0, 2
d.pop
d.push_local_depth 0, 1
d.in_block_send :h, 2, 0, false, 0, true do |d2|
d2.push :true
d2.set_local_depth 1, 2
end
d.goto t
f.set!
d.push :nil
t.set!
end
end)
add_tests("structure_remove_begin_1",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
in_rescue :StandardError do |good_side|
if good_side then
g.push :self
g.send :b, 0, true
else
g.push :self
g.send :c, 0, true
end
end
g.send :<<, 1, false
end)
add_tests("structure_remove_begin_2",
"Compiler" => bytecode do |g|
bottom = g.new_label
f = g.new_label
g.push :self
g.send :c, 0, true
g.gif f
in_rescue :StandardError do |good_side|
if good_side then
g.push :self
g.send :b, 0, true
else
g.push :nil
end
end
g.goto bottom
f.set!
g.push :nil
bottom.set!
g.set_local 0
g.pop
g.push_local 0
end)
add_tests("structure_unused_literal_wwtt",
"Compiler" => bytecode do |g|
g.open_module :Graffle
end)
# super*: send_super with explicit args; the pushed :nil slots are the
# (absent) splat/block arguments.
add_tests("super",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push 4
d.push :nil
d.push :nil
d.send_super :x, 1
end
end)
# Block-pass (&b): wrap the value in Proc.__from_block__ unless it is nil.
add_tests("super_block_pass",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :a, 0, true
g.push :nil
g.push :self
g.send :b, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_super nil, 1 # TODO: nil?
end)
add_tests("super_block_splat",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.push :self
g.send :b, 0, true
g.cast_array
g.push :nil
g.send_super nil, 1, true # TODO: nil?
end)
add_tests("super_multi",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push 4
d.push 2
d.push 1
d.push :nil
d.push :nil
d.send_super :x, 3
end
end)
# a = *b: cast to array, and if it has fewer than one element... the
# size/< branch collapses a single-element array to its element via :at.
add_tests("svalue",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :b, 0, true
g.cast_array
g.cast_array
g.dup
g.send :size, 0
g.push 1
g.swap
g.send :<, 1
g.git t
g.push 0
g.send :at, 1
t.set!
g.set_local 0
end)
add_tests("to_ary",
"Compiler" => bytecode do |g|
g.push :self
g.send :c, 0, true
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.pop
g.push :true
end)
add_tests("true",
"Compiler" => bytecode do |g|
g.push :true
end)
# undef*: delegated to the undef_bytecode helper, one symbol per target.
add_tests("undef",
"Compiler" => bytecode do |g|
undef_bytecode :x
end)
add_tests("undef_2",
"Compiler" => bytecode do |g|
undef_bytecode :x, :y
end)
add_tests("undef_3",
"Compiler" => bytecode do |g|
undef_bytecode :x, :y, :z
end)
add_tests("undef_block_1",
"Compiler" => bytecode do |g|
g.push :self
g.send :f1, 0, true
g.pop
undef_bytecode :x
end)
add_tests("undef_block_2",
"Compiler" => bytecode do |g|
g.push :self
g.send :f1, 0, true
g.pop
undef_bytecode :x, :y
end)
add_tests("undef_block_3",
"Compiler" => bytecode do |g|
g.push :self
g.send :f1, 0, true
g.pop
undef_bytecode :x, :y, :z
end)
add_tests("undef_block_3_post",
"Compiler" => bytecode do |g|
undef_bytecode :x, :y, :z
g.pop
g.push :self
g.send :f2, 0, true
end)
add_tests("undef_block_wtf",
"Compiler" => bytecode do |g|
g.push :self
g.send :f1, 0, true
g.pop
undef_bytecode :x, :y, :z
g.pop
g.push :self
g.send :f2, 0, true
end)
# unless is the if-family with the branch opcode inverted (git vs gif).
add_tests("unless_post",
"Compiler" => bytecode do |g|
yep = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.git yep
g.push :self
g.send :a, 0, true
g.goto done
yep.set!
g.push :nil
done.set!
end)
add_tests("unless_post_not",
"Compiler" => bytecode do |g|
nope = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.gif nope
g.push :self
g.send :a, 0, true
g.goto done
nope.set!
g.push :nil
done.set!
end)
add_tests("unless_pre",
"Compiler" => bytecode do |g|
yep = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.git yep
g.push :self
g.send :a, 0, true
g.goto done
yep.set!
g.push :nil
done.set!
end)
add_tests("unless_pre_not",
"Compiler" => bytecode do |g|
nope = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.gif nope
g.push :self
g.send :a, 0, true
g.goto done
nope.set!
g.push :nil
done.set!
end)
# until_post (begin...end until): body runs once before the first
# condition test; until_pre tests first. _mod forms share the expectation.
add_tests("until_post",
"Compiler" => bytecode do |g|
top = g.new_label
nxt = g.new_label
brek = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
nxt.set!
g.push :false
g.git bottom
g.goto top
bottom.set!
g.push :nil
brek.set!
g.pop_modifiers
end)
add_tests("until_post_not",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
dunno1.set!
g.push :true
g.gif bottom
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("until_pre",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :false
g.git bottom
dunno1.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("until_pre_mod",
"Compiler" => testcases["until_pre"]["Compiler"])
add_tests("until_pre_not",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :true
g.gif bottom
dunno1.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("until_pre_not_mod",
"Compiler" => testcases["until_pre_not"]["Compiler"])
add_tests("valias",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :$x
g.push_literal :$y
g.send :add_alias, 2
end)
add_tests("vcall",
"Compiler" => bytecode do |g|
g.push :self
g.send :method, 0, true
end)
add_tests("while_post",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
dunno1.set!
g.push :false
g.gif bottom
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_post2",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push 1
g.push 2
g.meta_send_op_plus
g.pop
g.push 3
g.push 4
g.meta_send_op_plus
g.pop
dunno1.set!
g.push :false
g.gif bottom
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_post_not",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
dunno1.set!
g.push :true
g.git bottom
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_pre",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :false
g.gif bottom
dunno1.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_pre_mod",
"Compiler" => testcases["while_pre"]["Compiler"])
add_tests("while_pre_nil",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :false
g.gif bottom
dunno1.set!
g.push :nil
g.pop
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_pre_not",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :true
g.git bottom
dunno1.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_pre_not_mod",
"Compiler" => testcases["while_pre_not"]["Compiler"])
add_tests("xstr",
"Compiler" => bytecode do |g|
g.push :self
g.push_literal "touch 5"
g.string_dup
g.send :"`", 1, true
end)
add_tests("yield_0",
"Compiler" => bytecode do |g|
g.push_block
g.meta_send_call 0
end)
add_tests("yield_1",
"Compiler" => bytecode do |g|
g.push_block
g.push 42
g.meta_send_call 1
end)
add_tests("yield_n",
"Compiler" => bytecode do |g|
g.push_block
g.push 42
g.push 24
g.make_array 2
g.meta_send_call 1
end)
add_tests("zarray",
"Compiler" => bytecode do |g|
g.make_array 0
g.set_local 0
end)
add_tests("zsuper",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push :nil
d.push :nil
d.send_super :x, 0
end
end)
end
# stupid conflicts, pt 2  (stray merge/commit-message line — commented out so the file parses)
$: << 'lib'
require 'pt_testcase'
class CompilerTestCase < ParseTreeTestCase
# Builds the expected-bytecode fixture for one test case: creates a
# TestGenerator, evaluates the given block in its context (so bare
# calls like push/send record instructions on the generator), and
# returns the generator, caching it in @tg as the original did.
def self.bytecode(&block)
  generator = TestGenerator.new
  generator.instance_eval(&block)
  @tg = generator
end
# `alias y x` inside a class body compiles to a runtime call of
# alias_method(:y, :x) on the enclosing class context.
add_tests("alias",
"Compiler" => bytecode do |g|
in_class :X do |d|
d.push_context
d.push_literal :y
d.push_literal :x
d.send :alias_method, 2, true
end
end)
# Parses to the same sexp as "alias", so it reuses that expected bytecode.
add_tests("alias_ugh",
"Compiler" => testcases["alias"]["Compiler"])
# `a and b`: evaluate a and dup it so a's value becomes the result
# when it is falsy; otherwise pop the dup and evaluate b.
add_tests("and",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.dup
# gif branches when the tested value is falsy, so this label marks the
# lhs-false path (the original misleadingly named it lhs_true).
lhs_false = g.new_label
g.gif lhs_false
g.pop
g.push :self
g.send :b, 0, true
lhs_false.set!
end)
add_tests("argscat_inside",
"Compiler" => bytecode do |g|
g.push :self
g.send :b, 0, true
g.make_array 1
g.push :self
g.send :c, 0, true
g.cast_array
g.send :+, 1
g.set_local 0
end)
add_tests("argscat_svalue",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :b, 0, true
g.push :self
g.send :c, 0, true
g.make_array 2
g.push :self
g.send :d, 0, true
g.cast_array
g.send :+, 1
g.cast_array
g.dup
g.send :size, 0
g.push 1
g.swap
g.send :<, 1 # TODO: or empty?
g.git t
g.push 0
g.send :at, 1
t.set!
g.set_local 0
end)
add_tests("argspush",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.push :self
g.send :b, 0, true
g.cast_array
g.push :self
g.send :c, 0, true
g.swap
g.push :nil
g.send_with_splat :[]=, 1, false, true
end)
add_tests("array",
"Compiler" => bytecode do |g|
g.push 1
g.push_unique_literal :b
g.push_literal "c"
g.string_dup
g.make_array 3
end)
add_tests("array_pct_W",
"Compiler" => bytecode do |g|
g.push_literal "a"
g.string_dup
g.push_literal "b"
g.string_dup
g.push_literal "c"
g.string_dup
g.make_array 3
end)
add_tests("array_pct_W_dstr",
"Compiler" => bytecode do |g|
g.push_literal "a"
g.string_dup
g.push_ivar :@b
g.send :to_s, 0, true
g.push_literal ""
g.string_dup
g.string_append
g.push_literal "c"
g.string_dup
g.make_array 3
end)
add_tests("attrasgn",
"Compiler" => bytecode do |g|
g.push 0
g.set_local 0
g.pop
g.push 42
g.push_local 0
g.send :method=, 1, false
end)
add_tests("attrasgn_index_equals",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.push 42
g.push 24
g.send :[]=, 2, false
end)
add_tests("attrasgn_index_equals_space",
"Compiler" => bytecode do |g|
g.make_array 0
g.set_local 0
g.pop
g.push_local 0
g.push 42
g.push 24
g.send :[]=, 2, false
end)
add_tests("back_ref",
"Compiler" => bytecode do |g|
g.push_context
g.push_literal :"&"
g.send :back_ref, 1
g.push_context
g.push_literal :"`"
g.send :back_ref, 1
g.push_context
g.push_literal :"'"
g.send :back_ref, 1
g.push_context
g.push_literal :"+"
g.send :back_ref, 1
g.make_array 4
end)
add_tests("begin",
"Compiler" => bytecode do |g|
g.push 1
g.push 1
g.meta_send_op_plus
end)
add_tests("begin_def",
"Compiler" => bytecode do |g|
in_method :m do |d|
d.push :nil
end
end)
add_tests("begin_rescue_ensure",
"Compiler" => bytecode do |g|
top = g.new_label
dunno = g.new_label
bottom = g.new_label
top.set!
g.push_modifiers
g.push :nil
g.pop_modifiers
g.goto bottom
dunno.set!
g.push :nil
g.pop
g.push_exception
g.raise_exc
bottom.set!
g.push :nil
g.pop
end)
add_tests("begin_rescue_twice",
"Compiler" => bytecode do |g|
g.push_modifiers
g.push :nil
g.pop_modifiers
g.pop
g.push_modifiers
g.push :nil
g.pop_modifiers
end)
add_tests("begin_rescue_twice_mri_verbose_flag",
"Compiler" => testcases['begin_rescue_twice']['Compiler'])
add_tests("block_attrasgn",
"Compiler" => bytecode do |g|
g.push :self
g.in_method :setup, true do |d|
d.push :self
d.send :allocate, 0, true
d.set_local 1
d.pop
d.push_local 1
d.push_local 0
d.send :context=, 1, false
d.pop
d.push_local 1
d.ret # TODO: why extra return?
end
end)
add_tests("block_lasgn",
"Compiler" => bytecode do |g|
g.push 1
g.set_local 1
g.pop
g.push_local 1
g.push 2
g.meta_send_op_plus
g.set_local 0
end)
add_tests("block_mystery_block",
"Compiler" => bytecode do |g|
g.push :self
g.push :self
g.send :b, 0, true
in_block_send :a, 0, 1 do |d|
f = d.new_label
bottom = d.new_label
d.push :self
d.send :b, 0, true
d.gif f
d.push :true
d.goto bottom
f.set!
d.push :false
d.set_local_depth 0, 0
d.pop
d.push :self
d.in_block_send :d, 1, 0, true, 0, true do |d2|
d2.push :true
d2.set_local_depth 1, 0
end
d.pop
d.push_local_depth 0, 0
bottom.set!
end
end)
add_tests("block_pass_args_and_splat",
"Compiler" => bytecode do |g|
in_method :blah do |d|
no_proc = d.new_label
no_splat = d.new_label
d.push_block
d.dup
d.is_nil
d.git no_proc
d.push_const :Proc
d.swap
d.send :__from_block__, 1
no_proc.set!
d.set_local 1
d.pop
d.push :self
d.push 42 # only line different from block_pass_splat
d.push_local 0
d.cast_array
d.push_local 1
d.dup
d.is_nil
d.git no_splat
d.push_cpath_top
d.find_const :Proc
d.swap
d.send :__from_block__, 1
no_splat.set!
d.send_with_splat :other, 1, true, false # ok, and this one
end
end)
add_tests("block_pass_call_0",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :a, 0, true
g.push :self
g.send :c, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :b, 0, false
end)
add_tests("block_pass_call_1",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :a, 0, true
g.push 4
g.push :self
g.send :c, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :b, 1, false
end)
add_tests("block_pass_call_n",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :a, 0, true
g.push 1
g.push 2
g.push 3
g.push :self
g.send :c, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :b, 3, false
end)
add_tests("block_pass_fcall_0",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.push :self
g.send :b, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :a, 0, true
end)
add_tests("block_pass_fcall_1",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.push 4
g.push :self
g.send :b, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :a, 1, true
end)
add_tests("block_pass_fcall_n",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.push 1
g.push 2
g.push 3
g.push :self
g.send :b, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :a, 3, true
end)
add_tests("block_pass_omgwtf",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.push_unique_literal :x
g.push_unique_literal :sequence_name
g.push_const :Proc
in_block_send :new, -1, 0, false do |d|
d.push :nil
end
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :define_attr_method, 2, true
end)
add_tests("block_pass_splat",
"Compiler" => bytecode do |g|
g.in_method :blah do |d|
no_proc = d.new_label
no_splat = d.new_label
d.push_block
d.dup
d.is_nil
d.git no_proc
d.push_const :Proc
d.swap
d.send :__from_block__, 1
no_proc.set!
d.set_local 1
d.pop
d.push :self
d.push_local 0
d.cast_array
d.push_local 1
d.dup
d.is_nil
d.git no_splat
d.push_cpath_top
d.find_const :Proc # FIX: why push_cpath/find vs push_const ?
d.swap
d.send :__from_block__, 1
no_splat.set!
d.send_with_splat :other, 0, true, false
end
end)
add_tests("block_pass_super",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :nil
g.push :self
g.send :prc, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_super nil, 0
end)
add_tests("block_pass_thingy",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :r, 0, true
g.push :self
g.send :dest, 0, true
g.push :self
g.send :block, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_with_block :read_body, 1, false
end)
add_tests("block_stmt_after",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.in_rescue(:StandardError) do |good_side|
if good_side then
d.push :self
d.send :b, 0, true
else
d.push :self
d.send :c, 0, true
end
end
d.pop
d.push :self
d.send :d, 0, true
end
end)
add_tests("block_stmt_after_mri_verbose_flag",
"Compiler" => testcases['block_stmt_after']['Compiler'])
add_tests("block_stmt_before",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.push :self
d.send :a, 0, true
d.pop
d.in_rescue(:StandardError) do |good_side|
if good_side then
d.push :self
d.send :b, 0, true
else
d.push :self
d.send :c, 0, true
end
end
end
end)
add_tests("block_stmt_before_mri_verbose_flag",
"Compiler" => testcases['block_stmt_before']['Compiler'])
add_tests("block_stmt_both",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.push :self
d.send :a, 0, true
d.pop
d.in_rescue(:StandardError) do |good_side|
if good_side then
d.push :self
d.send :b, 0, true
else
d.push :self
d.send :c, 0, true
end
end
d.pop
d.push :self
d.send :d, 0, true
end
end)
add_tests("block_stmt_both_mri_verbose_flag",
"Compiler" => testcases['block_stmt_both']['Compiler'])
add_tests("break",
"Compiler" => bytecode do |g|
break_value = :nil # TODO: refactor later
top = g.new_label
cond = g.new_label
rtry = g.new_label
brk = g.new_label
g.push_modifiers
top.set!
g.push :true
g.gif cond
g.push break_value
g.goto brk
g.goto rtry # TODO: only used when there is a retry statement
cond.set!
g.push :nil
rtry.set!
g.pop
g.goto top
brk.set!
g.pop_modifiers
end)
# "Ruby" => "loop { break 42 if true }",
add_tests("break_arg",
"Compiler" => bytecode do |g|
break_value = 42
top = g.new_label
cond = g.new_label
rtry = g.new_label
brk = g.new_label
g.push_modifiers
top.set!
g.push :true
g.gif cond
g.push break_value
g.goto brk
g.goto rtry # TODO: only used when there is a retry statement
cond.set!
g.push :nil
rtry.set!
g.pop
g.goto top
brk.set!
g.pop_modifiers
end)
# `self.method` — explicit-receiver send; the final `false` argument
# marks the call as NOT allowed to hit private methods.
add_tests("call",
"Compiler" => bytecode do |g|
g.push :self
g.send :method, 0, false
end)
# `o.puts 42` — receiver `o` comes from a private-allowed self-send,
# then puts is sent to it with one argument.
add_tests("call_arglist",
"Compiler" => bytecode do |g|
g.push :self
g.send :o, 0, true
g.push 42
g.send :puts, 1, false
end)
add_tests("call_arglist_hash",
"Compiler" => bytecode do |g|
g.push :self
g.send :o, 0, true
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.send :m, 1, false
end)
add_tests("call_arglist_norm_hash",
"Compiler" => bytecode do |g|
g.push :self
g.send :o, 0, true
g.push 42
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.send :m, 2, false
end)
add_tests("call_arglist_norm_hash_splat",
"Compiler" => bytecode do |g|
g.push :self
g.send :o, 0, true
g.push 42
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.push :self
g.send :c, 0, true
g.cast_array
g.push :nil
g.send_with_splat :m, 2, false, false
end)
add_tests("call_arglist_space",
"Compiler" => bytecode do |g|
g.push :self
g.push 1
g.push 2
g.push 3
g.send :a, 3, true
end)
add_tests("call_command",
"Compiler" => bytecode do |g|
g.push 1
g.push :self
g.send :c, 0, true
g.send :b, 1, false
end)
add_tests("call_expr",
"Compiler" => bytecode do |g|
g.push 1
g.push 1
g.meta_send_op_plus
g.set_local 0
g.send :zero?, 0, false
end)
add_tests("call_index",
"Compiler" => bytecode do |g|
g.make_array 0
g.set_local 0
g.pop
g.push_local 0
g.push 42
g.send :[], 1, false
end)
add_tests("call_index_no_args",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.send :[], 0, false
end)
add_tests("call_index_space",
"Compiler" => testcases["call_index"]["Compiler"])
# -2**31 parses as -(2**31): exponentiation binds tighter than unary
# minus, so ** is sent first and -@ is applied to its result.
add_tests("call_unary_neg",
"Compiler" => bytecode do |g|
g.push 2
g.push 31
g.send :**, 1, false
g.send :-@, 0, false
end)
add_tests("case",
"Compiler" => bytecode do |g|
a1 = g.new_label
a2 = g.new_label
a3 = g.new_label
a4 = g.new_label
a_bottom = g.new_label
g.push 2
g.set_local 0
g.pop
g.push_literal ""
g.string_dup
g.set_local 1
g.pop
g.push_local 0
g.dup
g.push 1
g.swap
g.send :===, 1
g.gif a1
g.pop
g.push :self
g.push_literal "something"
g.string_dup
g.send :puts, 1, true
g.pop
g.push_literal "red"
g.string_dup
g.set_local 1
g.goto a_bottom
a1.set!
g.dup
g.push 2
g.swap
g.send :===, 1
g.git a2
g.dup
g.push 3
g.swap
g.send :===, 1
g.git a2
g.goto a3
a2.set!
g.pop
g.push_literal "yellow"
g.string_dup
g.set_local 1
g.goto a_bottom
a3.set!
g.dup
g.push 4
g.swap
g.send :===, 1
g.gif a4
g.pop
g.push :nil
g.goto a_bottom
a4.set!
g.pop
g.push_literal "green"
g.string_dup
g.set_local 1
a_bottom.set!
b1 = g.new_label
b2 = g.new_label
b3 = g.new_label
b_bottom = g.new_label
g.pop
g.push_local 1
g.dup
g.push_literal "red"
g.string_dup
g.swap
g.send :===, 1
g.gif b1
g.pop
g.push 1
g.set_local 0
g.goto b_bottom
b1.set!
g.dup
g.push_literal "yellow"
g.string_dup
g.swap
g.send :===, 1
g.gif b2
g.pop
g.push 2
g.set_local 0
g.goto b_bottom
b2.set!
g.dup
g.push_literal "green"
g.string_dup
g.swap
g.send :===, 1
g.gif b3
g.pop
g.push 3
g.set_local 0
g.goto b_bottom
b3.set!
g.pop
g.push :nil
b_bottom.set!
end)
add_tests("case_nested",
"Compiler" => bytecode do |g|
# case => a
# when
# case => b
# when
# case => c
# end
########################################
a2 = g.new_label
a3 = g.new_label
a_bottom = g.new_label
g.push 1
g.set_local 0
g.pop
g.push 2
g.set_local 1
g.pop
g.push :nil
g.set_local 2
g.pop
########################################
b2 = g.new_label
b3 = g.new_label
b_bottom = g.new_label
g.push_local 0
g.dup
g.push 1
g.swap
g.send :===, 1
g.gif a2
g.pop
g.push_local 1
g.dup
g.push 1
g.swap
g.send :===, 1
g.gif b2
g.pop
g.push 1
g.set_local 2
g.goto b_bottom
b2.set!
g.dup
g.push 2
g.swap
g.send :===, 1
g.gif b3
g.pop
g.push 2
g.set_local 2
g.goto b_bottom
b3.set!
g.pop
g.push 3
g.set_local 2
b_bottom.set!
g.goto a_bottom
a2.set!
g.dup
g.push 2
g.swap
g.send :===, 1
g.gif a3
########################################
c2 = g.new_label
c3 = g.new_label
c_bottom = g.new_label
g.pop
g.push_local 1
g.dup
g.push 1
g.swap
g.send :===, 1
g.gif c2
g.pop
g.push 4
g.set_local 2
g.goto c_bottom
c2.set!
g.dup
g.push 2
g.swap
g.send :===, 1
g.gif c3
g.pop
g.push 5
g.set_local 2
g.goto c_bottom
c3.set!
g.pop
g.push 6
g.set_local 2
c_bottom.set!
g.goto a_bottom
a3.set!
g.pop
g.push 7
g.set_local 2
a_bottom.set!
end)
add_tests("case_nested_inner_no_expr",
"Compiler" => bytecode do |g|
c2, bottom = g.new_label, g.new_label
i1, i2, ibottom = g.new_label, g.new_label, g.new_label
g.push :self
g.send :a, 0, true
g.dup
g.push :self
g.send :b, 0, true
g.swap
g.send :===, 1
g.gif c2
g.pop
g.push :self
g.send :d, 0, true
g.dup
g.gif i1 # TODO: lamest jump ever - should be ibottom
g.pop
g.push :self
g.send :e, 0, true
i1.set!
g.gif i2
g.push :self
g.send :f, 0, true
g.goto ibottom
i2.set!
g.push :nil
ibottom.set!
g.goto bottom
c2.set!
g.pop
g.push :nil
bottom.set!
end)
add_tests("case_no_expr",
"Compiler" => bytecode do |g|
c2, c3, bottom = g.new_label, g.new_label, g.new_label
g.push :self
g.send :a, 0, true
g.push 1
g.meta_send_op_equal
g.gif c2
g.push_unique_literal :a
g.goto bottom
c2.set!
g.push :self
g.send :a, 0, true
g.push 2
g.meta_send_op_equal
g.gif c3
g.push_unique_literal :b
g.goto bottom
c3.set!
g.push_unique_literal :c
bottom.set!
end)
add_tests("case_splat",
"Compiler" => bytecode do |g|
c1, c2, bottom = g.new_label, g.new_label, g.new_label
g.push :self
g.send :a, 0, true
g.dup
g.push_unique_literal :b
g.swap
g.send :===, 1
g.git c1
g.dup
g.push :self
g.send :c, 0, true
g.cast_array
g.swap
g.send :__matches_when__, 1
g.git c1
g.goto c2
c1.set!
g.pop
g.push :self
g.send :d, 0, true
g.goto bottom
c2.set!
g.pop
g.push :self
g.send :e, 0, true
bottom.set!
end)
add_tests("cdecl",
"Compiler" => bytecode do |g|
g.push_context
g.push_literal :X
g.push 42
g.send :__const_set__, 2
end)
add_tests("class_plain",
"Compiler" => bytecode do |g|
in_class :X do |d|
d.push :self
d.push 1
d.push 1
d.meta_send_op_plus
d.send :puts, 1, true
d.pop
d.in_method :blah do |d2|
d2.push :self
d2.push_literal "hello"
d2.string_dup
d2.send :puts, 1, true
end
end
end)
add_tests("class_scoped",
"Compiler" => bytecode do |g|
in_class "X::Y" do |d|
d.push :self
d.send :c, 0, true
end
end)
add_tests("class_scoped3",
"Compiler" => bytecode do |g|
in_class :Y do |d|
d.push :self
d.send :c, 0, true
end
end)
add_tests("class_super_array",
"Compiler" => bytecode do |g|
g.push_const :Array
g.open_class :X
end)
add_tests("class_super_expr",
"Compiler" => bytecode do |g|
g.push :self
g.send :expr, 0, true
g.open_class :X
end)
add_tests("class_super_object",
"Compiler" => bytecode do |g|
g.push_const :Object
g.open_class :X
end)
# X::Y — resolve X via normal constant lookup, then find Y inside it.
add_tests("colon2",
"Compiler" => bytecode do |g|
g.push_const :X
g.find_const :Y
end)
# ::X — lookup anchored at the top-level constant path (cpath top).
add_tests("colon3",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :X
end)
# Bare constant reference.
add_tests("const",
"Compiler" => bytecode do |g|
g.push_const :X
end)
add_tests("constX",
"Compiler" => bytecode do |g|
g.push_context
g.push_literal :X
g.push 1
g.send :__const_set__, 2
end)
add_tests("constY",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.push_literal :X
g.push 1
g.send :__const_set__, 2
end)
add_tests("constZ",
"Compiler" => bytecode do |g|
g.push_const :X
g.push_literal :Y
g.push 1
g.send :__const_set__, 2
end)
add_tests("cvar",
"Compiler" => bytecode do |g|
g.push_context
g.push_literal :@@x
g.send :class_variable_get, 1
end)
add_tests("cvasgn",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push_context
d.push_literal :@@blah
d.push 1
d.send :class_variable_set, 2
end
end)
add_tests("cvasgn_cls_method",
"Compiler" => bytecode do |g|
g.push :self
in_method :quiet_mode=, :singleton do |d|
d.push_context
d.push_literal :@@quiet_mode
d.push_local 0
d.send :class_variable_set, 2
end
end)
add_tests("cvdecl",
"Compiler" => bytecode do |g|
in_class :X do |d|
d.push :self
d.push_literal :@@blah
d.push 1
d.send :class_variable_set, 2
end
end)
add_tests("dasgn_0",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.in_block_send :each, 1, 0, false, 0, false do |d|
t = d.new_label
f = d.new_label
d.push :true
d.gif f
d.push :self
d.send :b, 0, true
d.in_block_send :each, 1, 0, false, 0, true do |d2|
d2.push_local_depth 1, 0
d2.push 1
d2.meta_send_op_plus
d2.set_local_depth 1, 0
end
d.goto t
f.set!
d.push :nil
t.set!
end
end)
add_tests("dasgn_1",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.in_block_send :each, 1, 0, false, 0, false do |d|
t = d.new_label
f = d.new_label
d.push :true
d.gif f
d.push :self
d.send :b, 0, true
d.in_block_send :each, 1, 0, false, 0, true do |d2|
d2.push_local_depth 0, 1
d2.push 1
d2.meta_send_op_plus
d2.set_local_depth 0, 1
end
d.goto t
f.set!
d.push :nil
t.set!
end
end)
add_tests("dasgn_2",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.in_block_send :each, 1, 0, false, 0, false do |d|
t = d.new_label
f = d.new_label
d.push :true
d.gif f
d.push 0
d.set_local_depth 0, 1
d.pop
d.push :self
d.send :b, 0, true
d.in_block_send :each, 1, 0, false, 0, true do |d2|
d2.push_local_depth 1, 1
d2.push 1
d2.meta_send_op_plus
d2.set_local_depth 1, 1
end
d.goto t
f.set!
d.push :nil
t.set!
end
end)
add_tests("dasgn_curr",
"Compiler" => bytecode do |g|
g.push :self
g.send :data, 0, true
g.in_block_send :each, 2, 0, false do |d|
d.push 1
d.set_local_depth 0, 2
d.pop
d.push_local_depth 0, 2
d.set_local_depth 0, 3
d.pop
d.push_local_depth 0, 0
d.set_local_depth 0, 2
d.set_local_depth 0, 3
end
end)
add_tests("dasgn_icky",
"Compiler" => bytecode do |g|
g.push :self
g.in_block_send :a do |d|
d.push :nil
d.set_local_depth 0, 0
d.pop
d.push :self
d.push :self
d.send :full_message, 0, true
d.in_block_send :assert_block, 0, 1, true, 0, true do |d2|
d2.in_rescue :Exception do |good_side|
if good_side then
d2.push_block
d2.meta_send_call 0
else
d2.push_exception
d2.set_local_depth 1, 0
d2.push :nil
d2.push_cpath_top
d2.find_const :LongReturnException
d2.send :allocate, 0
d2.swap
d2.push_local 0
d2.swap
d2.send :set_break_value, 2
d2.raise_exc
end
end
end
end
end)
add_tests("dasgn_mixed",
"Compiler" => bytecode do |g|
g.push 0
g.set_local 0
g.pop
g.push :self
g.send :ns, 0, true
in_block_send :each, 1, 0, false, 1 do |d|
d.push_local 0
d.push_local_depth 0, 0
d.meta_send_op_plus
d.set_local 0
end
end)
add_tests("defined",
"Compiler" => bytecode do |g|
t = g.new_label
f = g.new_label
g.push_const :Globals
g.push_literal :$x
g.send :key?, 1
g.git t
g.push :nil
g.goto f
t.set!
g.push_literal "global-variable"
f.set!
end)
add_tests("defn_args_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.block_arg 0
d.push :nil
end
end)
add_tests("defn_args_mand",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.push :nil
end
end)
add_tests("defn_args_mand_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.block_arg 1
d.push :nil
end
end)
add_tests("defn_args_mand_opt",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 1
d.push :nil
end
end)
add_tests("defn_args_mand_opt_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 1
d.block_arg 2
d.push :nil
end
end)
add_tests("defn_args_mand_opt_splat",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 1
d.push :nil
end
end)
add_tests("defn_args_mand_opt_splat_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 1
d.block_arg 3
d.push :nil
end
end)
add_tests("defn_args_mand_opt_splat_no_name",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.optional_arg 1
d.push :nil
end
end)
add_tests("defn_args_mand_splat",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.push :nil
end
end)
add_tests("defn_args_mand_splat_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.block_arg 2
d.push :nil
end
end)
add_tests("defn_args_mand_splat_no_name",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push :self
d.push_local 0
d.push_local 1
d.send :p, 2, true
end
end)
add_tests("defn_args_none",
"Compiler" => bytecode do |g|
in_method :empty do |d|
d.push :nil
end
end)
add_tests("defn_args_opt",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 0
d.push :nil
end
end)
add_tests("defn_args_opt_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 0
d.block_arg 1
d.push :nil
end
end)
add_tests("defn_args_opt_splat",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 0
d.push :nil
end
end)
add_tests("defn_args_opt_splat_block",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.optional_arg 0
d.block_arg 2
d.push :nil
end
end)
add_tests("defn_args_opt_splat_no_name",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.optional_arg 0
d.push :nil
end
end)
add_tests("defn_args_splat",
"Compiler" => bytecode do |g|
in_method :f do |d|
d.push :nil
end
end)
add_tests("defn_args_splat_no_name",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push :nil
end
end)
add_tests("defn_or",
"Compiler" => bytecode do |g|
in_method :"|" do |d|
d.push :nil
end
end)
add_tests("defn_rescue",
"Compiler" => bytecode do |g|
in_method :eql? do |d|
d.in_rescue :StandardError do |good_side|
if good_side then
d.push :self
d.send :uuid, 0, false
d.push_local 0
d.send :uuid, 0, false
d.meta_send_op_equal
else
d.push :false
end
end
end
end)
add_tests("defn_rescue_mri_verbose_flag",
"Compiler" => testcases["defn_rescue"]["Compiler"])
add_tests("defn_something_eh",
"Compiler" => bytecode do |g|
in_method :something? do |d|
d.push :nil
end
end)
add_tests("defn_splat_no_name",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push :self
d.push_local 0
d.send :p, 1, true
end
end)
add_tests("defn_zarray",
"Compiler" => bytecode do |g|
in_method :zarray do |d|
d.make_array 0
d.set_local 0
d.pop
d.push_local 0
# TODO we emit a ret instruction even though the last statement
# is itself a return, so we get to return instructions, one
# after another. We could instead detect that an only output
# the one.
d.ret
end
end)
add_tests("defs",
"Compiler" => bytecode do |g|
g.push :self
in_method :x, true do |d|
d.push_local 0
d.push 1
d.meta_send_op_plus
end
end)
add_tests("defs_empty",
"Compiler" => bytecode do |g|
g.push :self
in_method :empty, true do |d|
d.push :nil
end
end)
add_tests("defs_empty_args",
"Compiler" => bytecode do |g|
g.push :self
in_method :empty, true do |d|
d.push :nil
end
end)
# `a..b` (inclusive range) compiles to ::Range.new(a, b).
add_tests("dot2",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Range
g.push :self
g.send :a, 0, true
g.push :self
g.send :b, 0, true
g.send :new, 2
end)
# `a...b` (end-exclusive) passes true as Range.new's third
# (exclude_end) argument.
add_tests("dot3",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Range
g.push :self
g.send :a, 0, true
g.push :self
g.send :b, 0, true
g.push :true
g.send :new, 3
end)
add_tests("dregx",
"Compiler" => bytecode do |g|
g.push_const :Regexp
g.push_literal "y" # 1
g.string_dup
g.push 1 # 2
g.push 1
g.meta_send_op_plus
g.send :to_s, 0, true
g.push_literal "x" # 3
g.string_dup
2.times do
g.string_append
end
g.push 0
g.send :new, 2
end)
add_tests("dregx_interp",
"Compiler" => bytecode do |g|
g.push_const :Regexp
g.push_ivar :@rakefile
g.send :to_s, 0, true
g.push_literal ""
g.string_dup
g.string_append
g.push 0
g.send :new, 2
end)
add_tests("dregx_n",
"Compiler" => bytecode do |g|
g.push_const :Regexp
g.push 1
g.send :to_s, 0, true
g.push_literal ""
g.string_dup
g.string_append
g.push 16
g.send :new, 2
end)
add_tests("dregx_once",
"Compiler" => bytecode do |g|
memoize do
g.push_const :Regexp
g.push_literal "y" # 1
g.string_dup
g.push 1 # 2
g.push 1
g.meta_send_op_plus
g.send :to_s, 0, true
g.push_literal "x" # 3
g.string_dup
2.times do
g.string_append
end
g.push 0
g.send :new, 2
end
end)
add_tests("dregx_once_n_interp",
"Compiler" => bytecode do |g|
memoize do
g.push_const :Regexp
g.push_const :SB # 1
g.send :to_s, 0, true
g.push_const :IAC # 2
g.send :to_s, 0, true
g.push_literal "" # 3
g.string_dup
2.times do
g.string_append
end
g.push 16
g.send :new, 2
end
end)
add_tests("dstr",
"Compiler" => bytecode do |g|
g.push 1
g.set_local 0
g.pop
g.push_literal "y" # 1
g.string_dup
g.push_local 0 # 2
g.send :to_s, 0, true
g.push_literal "x" # 3
g.string_dup
2.times do
g.string_append
end
end)
add_tests("dstr_2",
"Compiler" => bytecode do |g|
g.push 1
g.set_local 0
g.pop
g.push_literal "y" # 1
g.string_dup
g.push_literal "%.2f" # 2
g.string_dup
g.push 3.14159
g.send :%, 1, false
g.send :to_s, 0, true
g.push_literal "x" # 3
g.string_dup
2.times do
g.string_append
end
end)
add_tests("dstr_3",
"Compiler" => bytecode do |g|
g.push 2
g.set_local 0
g.pop
g.push 1
g.set_local 1
g.pop
g.push_literal "y" # - # 1
g.string_dup
g.push_literal "f" # 1
g.string_dup
g.push_local 0 # 2
g.send :to_s, 0, true
g.push_literal "%." # 3
g.string_dup
2.times do
g.string_append
end
g.push 3.14159 # - # 2
g.send :%, 1, false
g.send :to_s, 0, true
g.push_literal "x" # - # 3
g.string_dup
2.times do
g.string_append
end
end)
add_tests("dstr_concat",
"Compiler" => bytecode do |g|
g.push 66 # 1
g.send :to_s, 0, true
g.push_literal "55" # 2
g.string_dup
g.push 44 # 3
g.send :to_s, 0, true
g.push_literal "cd" # 4
g.string_dup
g.push_literal "aa" # 5
g.string_dup
g.push 22 # 6
g.send :to_s, 0, true
g.push_literal "" # 7
g.string_dup
6.times do
g.string_append
end
end)
add_tests("dstr_gross",
"Compiler" => bytecode do |g|
g.push_literal " d" # 1
g.string_dup
g.push_context # 2
g.push_literal :@@cvar
g.send :class_variable_get, 1
g.send :to_s, 0, true
g.push_literal " c " # 3
g.string_dup
g.push_ivar :@ivar # 4
g.send :to_s, 0, true
g.push_literal " b " # 5
g.string_dup
g.push_cpath_top # 6
g.find_const :Globals
g.push_literal :$global
g.send :[], 1
g.send :to_s, 0, true
g.push_literal "a " # 7
g.string_dup
6.times do
g.string_append
end
end)
add_tests("dstr_heredoc_expand",
"Compiler" => bytecode do |g|
g.push_literal "blah\n" # 1
g.string_dup
g.push 1 # 2
g.push 1
g.meta_send_op_plus
g.send :to_s, 0, true
g.push_literal " blah\n" # 3
g.string_dup
2.times do
g.string_append
end
end)
add_tests("dstr_heredoc_windoze_sucks",
"Compiler" => bytecode do |g|
g.push_literal "_valid_feed\n" # 1
g.string_dup
g.push :self # 2
g.send :action, 0, true
g.send :to_s, 0, true
g.push_literal "def test_" # 3
g.string_dup
2.times do
g.string_append
end
end)
add_tests("dstr_heredoc_yet_again",
"Compiler" => bytecode do |g|
g.push_literal "\n" # 1
g.string_dup
g.push_literal "(eval)" # 2
g.string_dup
g.push_literal "' s2\n" # 3
g.string_dup
g.push_const :RUBY_PLATFORM # 4
g.send :to_s, 0, true
g.push_literal "s1 '" # 5
g.string_dup
4.times do
g.string_append
end
end)
add_tests("dstr_nest",
"Compiler" => bytecode do |g|
g.push_literal "] after" # 1
g.string_dup
g.push :self # 2
g.send :nest, 0, true
g.send :to_s, 0, true
g.push_literal "before [" # 3
g.string_dup
2.times do
g.string_append
end
end)
add_tests("dstr_str_lit_start",
"Compiler" => bytecode do |g|
g.push_literal ")" # 1
g.string_dup
g.push_exception # 2
g.send :class, 0, false
g.send :to_s, 0, true
g.push_literal " (" # 3
g.string_dup
g.push_exception # 4
g.send :message, 0, false
g.send :to_s, 0, true
g.push_literal ": warning: " # 5
g.string_dup
g.push 1 # 6
g.send :to_s, 0, true
g.push_literal "blah(eval):" # 7
g.string_dup
6.times do
g.string_append
end
end)
add_tests("dstr_the_revenge",
"Compiler" => bytecode do |g|
g.push_literal ")" # 1
g.string_dup
g.push 1 # 2
g.send :to_s, 0, true
g.push_literal ":" # 3
g.string_dup
g.push_literal "(eval)" # 4
g.string_dup
g.push_literal " (" # 5
g.string_dup
g.push :self # 6
g.send :to, 0, true
g.send :to_s, 0, true
g.push_literal " middle " # 7
g.string_dup
g.push :self # 8
g.send :from, 0, true
g.send :to_s, 0, true
g.push_literal "before " # 9
g.string_dup
8.times do
g.string_append
end
end)
add_tests("dsym",
"Compiler" => bytecode do |g|
g.push_literal "y"
g.string_dup
g.push 1
g.push 1
g.meta_send_op_plus
g.send :to_s, 0, true
g.push_literal "x"
g.string_dup
g.string_append
g.string_append
g.send :to_sym, 0, true
end)
add_tests("dxstr",
"Compiler" => bytecode do |g|
g.push 5
g.set_local 0
g.pop
g.push :self
g.push_local 0
g.send :to_s, 0, true
g.push_literal "touch "
g.string_dup
g.string_append
g.send :"`", 1, true
end)
# TODO: OMFG!
# Golden bytecode for a begin/rescue(SyntaxError, Exception)/ensure block.
# Hand-written label soup: label_5..label_24 form the rescue handler chain
# (label_5 is the handler entry — reached via the exception table, not a
# visible goto), label_26 is the normal-exit path, label_28 joins into the
# ensure body, label_30 is the re-raise path when the ensure runs due to an
# in-flight exception.
add_tests("ensure",
"Compiler" => bytecode do |g|
# TODO: refactor in_rescue to work with this... I think I have the
# pattern down now
top = g.new_label
bottom = g.new_label
label_1 = g.new_label
label_5 = g.new_label
label_10 = g.new_label
label_14 = g.new_label
label_19 = g.new_label
label_24 = g.new_label
label_26 = g.new_label
label_28 = g.new_label
label_30 = g.new_label
top.set!
g.push_modifiers
# NOTE(review): label_1 is set twice in a row — this matches the recorded
# compiler output being asserted; confirm the duplicate is intentional.
label_1.set!
label_1.set!
# protected body: 1 + 1
g.push 1
g.push 1
g.meta_send_op_plus
g.goto label_26
# handler chain: try SyntaxError first...
label_5.set!
g.push_const :SyntaxError
g.push_exception
g.send :===, 1
g.git label_10
g.goto label_14
label_10.set!
g.push_exception
g.set_local 0
g.push 2
g.goto label_28
# ...then Exception...
label_14.set!
g.push_const :Exception
g.push_exception
g.send :===, 1
g.git label_19
g.goto label_24
label_19.set!
g.push_exception
g.set_local 1
g.push 3
g.clear_exception
g.goto label_28
# ...no handler matched: re-raise
label_24.set!
g.push_exception
g.raise_exc
# normal exit: else-value 4
label_26.set!
g.pop
g.push 4
label_28.set!
g.pop_modifiers
g.goto bottom
# exceptional path through the ensure: run body (5), then re-raise
label_30.set!
g.push 5
g.pop
g.push_exception
g.raise_exc
# normal path through the ensure body
bottom.set!
g.push 5
g.pop
end)
add_tests("false",
"Compiler" => bytecode do |g|
g.push :false
end)
add_tests("fcall_arglist",
"Compiler" => bytecode do |g|
g.push :self
g.push 42
g.send :m, 1, true
end)
add_tests("fcall_arglist_hash",
"Compiler" => bytecode do |g|
g.push :self
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.send :m, 1, true
end)
add_tests("fcall_arglist_norm_hash",
"Compiler" => bytecode do |g|
g.push :self
g.push 42
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.send :m, 2, true
end)
add_tests("fcall_arglist_norm_hash_splat",
"Compiler" => bytecode do |g|
g.push :self
g.push 42
g.push_cpath_top
g.find_const :Hash
g.push_unique_literal :a
g.push 1
g.push_unique_literal :b
g.push 2
g.send :[], 4
g.push :self
g.send :c, 0, true
g.cast_array
g.push :nil
g.send_with_splat :m, 2, true, false
end)
add_tests("fcall_block",
"Compiler" => bytecode do |g|
g.push :self
g.push_unique_literal :b
g.in_block_send :a, 0, 1 do |d|
d.push_unique_literal :c
end
end)
add_tests("fcall_index_space",
"Compiler" => bytecode do |g|
g.push :self
g.push 42
g.make_array 1
g.send :a, 1, true
end)
add_tests("fcall_keyword",
"Compiler" => bytecode do |g|
t = g.new_label
f = g.new_label
g.push_block
g.gif f
g.push 42
g.goto t
f.set!
g.push :nil
t.set!
end)
add_tests("flip2",
"Compiler" => :skip)
add_tests("flip2_method",
"Compiler" => :skip)
add_tests("flip3",
"Compiler" => :skip)
# Golden bytecode for `for x in ...` loops, which compile to #each with the
# loop variable living in the enclosing scope (hence the trailing locals
# count passed to in_block_send).
# NOTE(review): both tests pass the Float 1.0 in the arg-count slot where
# other tests use the Integer 1, and call in_block_send without the `g.`
# receiver used elsewhere (e.g. fcall_block) — confirm both are intentional.
add_tests("for",
"Compiler" => bytecode do |g|
g.push :self
g.send :ary, 0, true
in_block_send :each, 1.0, 0, false, 1 do |d|
d.push :self
d.push_local 0
d.send :puts, 1, true
end
end)
# `for x in 0..max` with an empty body: builds the Range explicitly, then
# iterates with a block that just pushes nil.
add_tests("for_no_body",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Range
g.push 0
g.push :self
g.send :max, 0, true
g.send :new, 2
in_block_send :each, 1.0, 0, false, 1 do |d|
d.push :nil
end
end)
add_tests("gasgn",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :"$x" # REFACTOR g.get_global("$x")
g.push 42
g.send :[]=, 2
end)
add_tests("global",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :"$stderr"
g.send :[], 1
end)
add_tests("gvar",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :"$x" # REFACTOR g.get_global("$x")
g.send :[], 1
end)
add_tests("gvar_underscore",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :"$_"
g.send :[], 1
end)
add_tests("gvar_underscore_blah",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :"$__blah"
g.send :[], 1
end)
add_tests("hash",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Hash
g.push 1
g.push 2
g.push 3
g.push 4
g.send :[], 4
end)
add_tests("hash_rescue",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Hash
g.push 1
g.in_rescue :StandardError do |good_side|
if good_side then
g.push 2
else
g.push 3
end
end
g.send :[], 2
end)
add_tests("iasgn",
"Compiler" => bytecode do |g|
g.push 4
g.set_ivar :@a
end)
add_tests("if_block_condition",
"Compiler" => bytecode do |g|
f = g.new_label
bottom = g.new_label
g.push 5
g.set_local 0
g.pop
g.push_local 0
g.push 1
g.meta_send_op_plus
g.gif f
g.push :nil
g.goto bottom
f.set!
g.push :nil
bottom.set!
end)
add_tests("if_lasgn_short",
"Compiler" => bytecode do |g|
f = g.new_label
bottom = g.new_label
g.push :self
g.send :obj, 0, true
g.send :x, 0, false
g.set_local 0
g.gif f
g.push_local 0
g.send :do_it, 0, false
g.goto bottom
f.set!
g.push :nil
bottom.set!
end)
add_tests("if_nested",
"Compiler" => bytecode do |g|
yep = g.new_label
done = g.new_label
inner_done = g.new_label
nope = g.new_label
g.push :true
g.git yep
g.push :false
g.gif nope
g.push :nil
g.ret
g.goto inner_done
nope.set!
g.push :nil
inner_done.set!
g.goto done
yep.set!
g.push :nil
done.set!
end)
add_tests("if_post",
"Compiler" => bytecode do |g|
nope = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.gif nope
g.push :self
g.send :a, 0, true
g.goto done
nope.set!
g.push :nil
done.set!
end)
add_tests("if_post_not",
"Compiler" => bytecode do |g|
yep = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.git yep
g.push :self
g.send :a, 0, true
g.goto done
yep.set!
g.push :nil
done.set!
end)
add_tests("if_pre",
"Compiler" => bytecode do |g|
nope = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.gif nope
g.push :self
g.send :a, 0, true
g.goto done
nope.set!
g.push :nil
done.set!
end)
add_tests("if_pre_not",
"Compiler" => bytecode do |g|
yep = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.git yep
g.push :self
g.send :a, 0, true
g.goto done
yep.set!
g.push :nil
done.set!
end)
add_tests("iter_call_arglist_space",
"Compiler" => bytecode do |g|
g.push :self
g.push 1
in_block_send :a, 1, 1 do |d|
d.push :self
d.send :d, 0, true
end
end)
add_tests("iter_dasgn_curr_dasgn_madness",
"Compiler" => bytecode do |g|
g.push :self
g.send :as, 0, true
in_block_send :each, 1, 0, false do |d|
d.push_local_depth 0, 1
d.push_local_depth 0, 0
d.push :false
d.send :b, 1, false
d.meta_send_op_plus
d.set_local_depth 0, 1
end
end)
add_tests("iter_downto",
"Compiler" => bytecode do |g|
g.push 3
g.push 1
in_block_send :downto, 1, 1, false do |d|
d.push :self
d.push_local_depth 0, 0
d.send :to_s, 0, false
d.send :puts, 1, true
end
end)
add_tests("iter_each_lvar",
"Compiler" => bytecode do |g|
g.push 1
g.push 2
g.push 3
g.make_array 3
g.set_local 0
g.pop
g.push_local 0
in_block_send :each, 1, 0, false, 1 do |d|
d.push :self
d.push_local_depth 0, 0
d.send :to_s, 0, false
d.send :puts, 1, true
end
end)
add_tests("iter_each_nested",
"Compiler" => bytecode do |g|
g.push 1
g.push 2
g.push 3
g.make_array 3
g.set_local 0
g.pop
g.push 4
g.push 5
g.push 6
g.push 7
g.make_array 4
g.set_local 1
g.pop
g.push_local 0
in_block_send :each, 1, 0, false, 2 do |d|
d.push_local 1
d.in_block_send :each, 1, 0, false, 2, true do |d2|
d2.push :self
d2.push_local_depth 1, 0
d2.send :to_s, 0, false
d2.send :puts, 1, true
d2.pop
d2.push :self
d2.push_local_depth 0, 0
d2.send :to_s, 0, false
d2.send :puts, 1, true
end
end
end)
add_tests("iter_loop_empty",
"Compiler" => bytecode do |g|
top = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.goto top
bottom.set!
g.pop_modifiers
end)
add_tests("iter_masgn_2",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :a, 2 do |d|
d.push :self
d.push_local_depth 0, 1
d.send :p, 1, true
end
end)
add_tests("iter_masgn_args_splat",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :a, 2 do |d|
d.push :self
d.push_local_depth 0, 1
d.send :p, 1, true
end
end)
add_tests("iter_masgn_args_splat_no_name",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :a, 2 do |d|
d.push :self
d.push_local_depth 0, 1
d.send :p, 1, true
end
end)
add_tests("iter_masgn_splat",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :a, -1 do |d|
d.push :self
d.push_local_depth 0, 0
d.send :p, 1, true
end
end)
add_tests("iter_masgn_splat_no_name",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :a, -2 do |d|
d.push :self
d.push :self
d.send :c, 0, true
d.send :p, 1, true
end
end)
add_tests("iter_shadowed_var",
"Compiler" => bytecode do |g|
g.push :self
g.in_block_send :a, 1 do |d|
d.push :self
d.in_block_send :b, 1, 0, true, 0, true, 1 do |d2|
d2.push :self
d2.push_local_depth 1, 0
d2.send :puts, 1, true
end
end
end)
add_tests("iter_upto",
"Compiler" => bytecode do |g|
g.push 1
g.push 3
in_block_send :upto, 1, 1, false do |d|
d.push :self
d.push_local_depth 0, 0
d.send :to_s, 0, false
d.send :puts, 1, true
end
end)
# Golden bytecode for a pre-test while loop: n = 10; while n >= 1 the body
# prints "hello" and decrements n. Condition is evaluated at `top`; gif
# jumps to `f` (loop exit, pushes nil) when it fails.
# NOTE(review): dunno2 is allocated but never set or jumped to — appears to
# be a leftover in the recorded expectation; confirm.
add_tests("iter_while",
"Compiler" => bytecode do |g|
top = g.new_label
f = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push 10
g.set_local 0
g.pop
g.push_modifiers
top.set!
g.push_local 0
g.push 1
g.send :>=, 1, false
g.gif f
dunno1.set!
g.push :self
g.push_literal "hello"
g.string_dup
g.send :puts, 1, true
g.pop
g.push_local 0
g.push 1
g.meta_send_op_minus
g.set_local 0
g.pop
g.goto top
f.set!
g.push :nil
bottom.set!
g.pop_modifiers
end)
add_tests("lasgn_array",
"Compiler" => bytecode do |g|
g.push_literal "foo"
g.string_dup
g.push_literal "bar"
g.string_dup
g.make_array 2
g.set_local 0
end)
add_tests("lasgn_call",
"Compiler" => bytecode do |g|
g.push 2
g.push 3
g.meta_send_op_plus
g.set_local 0
end)
add_tests("lit_bool_false",
"Compiler" => bytecode do |g|
g.push :false
end)
add_tests("lit_bool_true",
"Compiler" => bytecode do |g|
g.push :true
end)
add_tests("lit_float",
"Compiler" => bytecode do |g|
g.push 1.1
end)
add_tests("lit_long",
"Compiler" => bytecode do |g|
g.push 1
end)
add_tests("lit_long_negative",
"Compiler" => bytecode do |g|
g.push(-1)
end)
add_tests("lit_range2",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Range
g.push 1
g.push 10
g.send :new, 2
end)
add_tests("lit_range3",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Range
g.push 1
g.push 10
g.push :true
g.send :new, 3
end)
add_tests("lit_regexp",
"Compiler" => bytecode do |g|
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 0
g.send :new, 2
end
end)
add_tests("lit_regexp_i_wwtt",
"Compiler" => bytecode do |g|
g.push :self
g.send :str, 0, true
g.memoize do
g.push_const :Regexp
g.push_literal ""
g.push 1
g.send :new, 2
end
g.send :split, 1, false
end)
add_tests("lit_regexp_n",
"Compiler" => bytecode do |g|
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 16
g.send :new, 2
end
end)
add_tests("lit_regexp_once", # TODO: same as lit_regexp. verify
"Compiler" => bytecode do |g|
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 0
g.send :new, 2
end
end)
add_tests("lit_sym",
"Compiler" => bytecode do |g|
g.push_unique_literal :x
end)
add_tests("lit_sym_splat",
"Compiler" => bytecode do |g|
g.push_unique_literal :"*args"
end)
add_tests("lvar_def_boundary",
"Compiler" => bytecode do |g|
g.push 42
g.set_local 0
g.pop
in_method :a do |d|
d.push :self
d.in_block_send :c, 0 do |d2|
d2.in_rescue :RuntimeError do |good_side|
if good_side then
d2.push :self
d2.send :do_stuff, 0, true
else
d2.push_exception
d2.set_local_depth 0, 0
d2.push :self
d2.push_local_depth 0, 0
d2.send :puts, 1, true
end
end
end
end
end)
add_tests("masgn",
"Compiler" => bytecode do |g|
g.push :self
g.send :c, 0, true
g.push :self
g.send :d, 0, true
g.rotate 2
g.set_local 0
g.pop
g.set_local 1
g.pop
g.push :true
end)
add_tests("masgn_argscat",
"Compiler" => bytecode do |g|
g.push 1
g.push 2
g.make_array 2
g.push 3
g.push 4
g.make_array 2
g.cast_array
g.send :+, 1
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.cast_array
g.set_local 2
g.pop
g.push :true
end)
add_tests("masgn_attrasgn",
"Compiler" => bytecode do |g|
g.push :self
g.send :d, 0, true
g.push :self
g.send :e, 0, true
g.rotate 2
g.set_local 0
g.pop
g.push :self
g.send :b, 0, true
g.send :c=, 1, false
g.pop
g.push :true
end)
add_tests("masgn_attrasgn_idx",
"Compiler" => bytecode do |g|
g.make_array 0
g.push 1
g.push 2
g.rotate 3
g.set_local 0
g.pop
g.set_local 1
g.pop
g.set_local 2
g.pop
g.push :true
g.pop
g.push_local 0
g.push_local 2
g.send :[], 1, false
g.push_local 0
g.push_local 1
g.send :[], 1, false
g.rotate 2
g.push_local 0
g.push_local 1
g.send :[]=, 2, false
g.pop
g.push_local 0
g.push_local 2
g.send :[]=, 2, false
g.pop
g.push :true
end)
add_tests("masgn_iasgn",
"Compiler" => bytecode do |g|
g.push :self
g.send :c, 0, true
g.push :self
g.send :d, 0, true
g.rotate 2
g.set_local 0
g.pop
g.set_ivar :@b
g.pop
g.push :true
end)
add_tests("masgn_masgn",
"Compiler" => bytecode do |g|
g.push 1
g.push 2
g.push 3
g.make_array 2
g.make_array 2
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.cast_tuple
g.shift_tuple
g.set_local 1
g.pop
g.shift_tuple
g.set_local 2
g.pop
g.pop
g.push :true # FIX: necessary?!?
g.pop
g.pop
g.push :true
end)
add_tests("masgn_splat",
"Compiler" => bytecode do |g|
g.push :self
g.send :d, 0, true
g.push :self
g.send :e, 0, true
g.push :self
g.send :f, 0, true
g.push :self
g.send :g, 0, true
g.make_array 2 # TODO: 2?!?
g.set_local 2 # TODO: backwards
g.pop
g.set_local 1
g.pop
g.set_local 0
g.pop
g.push :true
end)
add_tests("masgn_splat_no_name_to_ary",
"Compiler" => bytecode do |g|
g.push :self
g.send :c, 0, true
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.cast_array
g.cast_array
g.pop
g.push :true
end)
add_tests("masgn_splat_no_name_trailing",
"Compiler" => bytecode do |g|
g.push :self
g.send :c, 0, true
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.pop # TODO: why?
g.push :true
end)
add_tests("masgn_splat_to_ary",
"Compiler" => bytecode do |g|
g.push :self
g.send :d, 0, true
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.cast_array
g.set_local 2
g.pop
g.push :true
end)
add_tests("masgn_splat_to_ary2",
"Compiler" => bytecode do |g|
g.push :self
g.send :d, 0, true
g.push_literal "f"
g.string_dup
g.send :e, 1, false
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.cast_array
g.set_local 2
g.pop
g.push :true
end)
add_tests("match",
"Compiler" => bytecode do |g|
g.push_literal :$_ # REFACTOR - we use this block a lot
g.push_cpath_top
g.find_const :Globals # FIX: find the other Globals, order flipped
g.send :[], 1
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 0
g.send :new, 2
end
f = g.new_label
t = g.new_label
g.send :=~, 1
g.gif f
g.push 1
g.goto t
f.set!
g.push :nil
t.set!
end)
add_tests("match2",
"Compiler" => bytecode do |g|
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 0
g.send :new, 2
end
g.push_literal "blah"
g.string_dup
g.send :=~, 1
end)
add_tests("match3",
"Compiler" => bytecode do |g|
g.push_literal "blah"
g.string_dup
g.memoize do
g.push_const :Regexp
g.push_literal "x"
g.push 0
g.send :new, 2
end
g.send :=~, 1
end)
add_tests("module",
"Compiler" => bytecode do |g|
in_module :X do |d|
d.in_method :y do |d2|
d2.push :nil
end
end
end)
add_tests("module_scoped",
"Compiler" => bytecode do |g|
in_module "X::Y" do |d|
d.push :self
d.send :c, 0, true
end
end)
add_tests("module_scoped3",
"Compiler" => bytecode do |g|
in_module :Y do |d|
d.push :self
d.send :c, 0, true
end
end)
# Golden bytecode for `next` inside a loop: the first `goto top` is the
# `next` itself; the `goto dunno1` immediately after it is unreachable
# (the fall-through path the compiler still emits). `bottom` is declared
# as the loop-exit target but nothing in this expectation jumps to it.
add_tests("next",
"Compiler" => bytecode do |g|
top = g.new_label
bottom = g.new_label
dunno1 = g.new_label
f = g.new_label
g.push_modifiers
top.set!
g.push :false
g.gif f
g.goto top
g.goto dunno1
f.set!
g.push :nil
dunno1.set!
g.pop
g.goto top
bottom.set!
g.pop_modifiers
end)
add_tests("next_arg",
"Compiler" => testcases['next']['Compiler']) # FIX: not the same
add_tests("not",
"Compiler" => bytecode do |g|
f = g.new_label
t = g.new_label
g.push :true
g.git f
g.push :true
g.goto t
f.set!
g.push :false
t.set!
end)
add_tests("nth_ref",
"Compiler" => bytecode do |g|
g.push_context
g.push 1
g.send :nth_ref, 1
end)
# Golden bytecode for index op-assignment on a local array b = []:
# three sections — b[1] ||= 10 (git short-circuit), b[2] &&= 11 (gif
# short-circuit), b[3] += 12 (unconditional read-modify-write).
add_tests("op_asgn1",
"Compiler" => bytecode do |g|
l_or = g.new_label
l_and = g.new_label
l_idx = g.new_label
l_rhs = g.new_label
g.make_array 0
g.set_local 0
g.pop
# b[1] ||= 10: read b[1]; if truthy keep it, else store 10
g.push_local 0
g.dup
g.push 1
g.send :[], 1
g.dup
g.git l_or
g.pop
g.push 1
g.push 10
g.send :[]=, 2
g.goto l_and
l_or.set!
g.swap
g.pop
l_and.set!
g.pop
# b[2] &&= 11: read b[2]; if falsy keep it, else store 11
g.push_local 0
g.dup
g.push 2
g.send :[], 1
g.dup
g.gif l_idx
g.pop
g.push 2
g.push 11
g.send :[]=, 2
g.goto l_rhs
l_idx.set!
g.swap
g.pop
l_rhs.set!
g.pop
# b[3] += 12: read, add, store back
g.push_local 0
g.dup
g.push 3
g.send :[], 1
g.push 12
g.send :+, 1
g.push 3
g.swap
g.send :[]=, 2
end)
add_tests("op_asgn1_ivar",
"Compiler" => bytecode do |g|
l_or = g.new_label
l_and = g.new_label
l_idx = g.new_label
l_rhs = g.new_label
g.make_array 0
g.set_ivar :@b
g.pop
g.push_ivar :@b
g.dup
g.push 1
g.send :[], 1
g.dup
g.git l_or
g.pop
g.push 1
g.push 10
g.send :[]=, 2
g.goto l_and
l_or.set!
g.swap
g.pop
l_and.set!
g.pop
g.push_ivar :@b
g.dup
g.push 2
g.send :[], 1
g.dup
g.gif l_idx
g.pop
g.push 2
g.push 11
g.send :[]=, 2
g.goto l_rhs
l_idx.set!
g.swap
g.pop
l_rhs.set!
g.pop
g.push_ivar :@b
g.dup
g.push 3
g.send :[], 1
g.push 12
g.send :+, 1
g.push 3
g.swap
g.send :[]=, 2
end)
# Golden bytecode for attribute op-assignment: s = Struct.new(:var).new(nil)
# followed by s.var ||= 20, s.var &&= 21, s.var += 22, and finally an
# attribute ||= on a call chain (…d.e.f ||= 42). Label names l_plus/l_or2
# are historical; they mark the &&= section's exits.
add_tests("op_asgn2",
"Compiler" => bytecode do |g|
l_or = g.new_label
l_and = g.new_label
l_plus = g.new_label
l_or2 = g.new_label
l_rhs = g.new_label
bottom = g.new_label
# local 0 = Struct.new(:var); local 1 = that struct class .new(nil)
g.push_const :Struct
g.push_unique_literal :var
g.send :new, 1, false
g.set_local 0
g.pop
g.push_local 0
g.push :nil
g.send :new, 1, false
g.set_local 1
g.pop
# s.var ||= 20
g.push_local 1
g.dup
g.send :var, 0
g.dup
g.git l_or
g.pop
g.push 20
g.send :var=, 1
g.goto l_and
l_or.set!
g.swap
g.pop
l_and.set!
g.pop
# s.var &&= 21
g.push_local 1
g.dup
g.send :var, 0
g.dup
g.gif l_plus
g.pop
g.push 21
g.send :var=, 1
g.goto l_or2
l_plus.set!
g.swap
g.pop
l_or2.set!
g.pop
# s.var += 22
g.push_local 1
g.dup
g.send :var, 0
g.push 22
g.send :+, 1
g.send :var=, 1
g.pop
# receiver-chain form: s.d.e.f ||= 42
g.push_local 1
g.send :d, 0, false
g.send :e, 0, false
g.dup
g.send :f, 0
g.dup
g.git l_rhs
g.pop
g.push 42
g.send :f=, 1
g.goto bottom
l_rhs.set!
g.swap
g.pop
bottom.set!
end)
add_tests("op_asgn2_self",
"Compiler" => bytecode do |g|
t = g.new_label
f = g.new_label
g.push :self
g.dup
g.send :Bag, 0
g.dup
g.git t
g.pop
g.push_const :Bag
g.send :new, 0, false
g.send :"Bag=", 1
g.goto f
t.set!
g.swap
g.pop
f.set!
end)
add_tests("op_asgn_and",
"Compiler" => bytecode do |g|
g.push 0
g.set_local 0
g.pop
g.push_local 0
g.dup
f = g.new_label
g.gif f
g.pop
g.push 2
g.set_local 0
f.set!
end)
add_tests("op_asgn_and_ivar2",
"Compiler" => bytecode do |g|
t = g.new_label
g.push_ivar :@fetcher
g.dup
g.gif t
g.pop
g.push :self
g.push_const :Gem
g.send :configuration, 0, false
g.push_unique_literal :http_proxy
g.send :[], 1, false
g.send :new, 1, true
g.set_ivar :@fetcher
t.set!
end)
add_tests("op_asgn_or",
"Compiler" => bytecode do |g|
t = g.new_label
g.push 0
g.set_local 0
g.pop # FIX: lame
g.push_local 0
g.dup
g.git t
g.pop
g.push 1
g.set_local 0
t.set!
end)
add_tests("op_asgn_or_block",
"Compiler" => bytecode do |g|
t = g.new_label
g.push_local 0
g.dup
g.git t
g.pop
in_rescue :StandardError do |good_side|
if good_side then
g.push :self
g.send :b, 0, true
else
g.push :self
g.send :c, 0, true
end
end
g.set_local 0
t.set!
end)
add_tests("op_asgn_or_ivar",
"Compiler" => bytecode do |g|
t = g.new_label
g.push_ivar :@v
g.dup
g.git t
g.pop
g.push_cpath_top
g.find_const :Hash
g.send :[], 0
g.set_ivar :@v
t.set!
end)
add_tests("op_asgn_or_ivar2",
"Compiler" => bytecode do |g|
t = g.new_label
g.push_ivar :@fetcher
g.dup
g.git t
g.pop
g.push :self
g.push_const :Gem
g.send :configuration, 0, false
g.push_unique_literal :http_proxy
g.send :[], 1, false
g.send :new, 1, true
g.set_ivar :@fetcher
t.set!
end)
add_tests("or",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.dup
lhs_true = g.new_label
g.git lhs_true
g.pop
g.push :self
g.send :b, 0, true
lhs_true.set!
end)
# Golden bytecode for a compound boolean: (a || b) with the result then
# or-ed against (c && d) — git short-circuits the || arms, gif
# short-circuits the && arm, all sharing exit label j2.
# NOTE(review): j2.set! appears twice at the end — matches the recorded
# compiler output being asserted; confirm the duplicate is intentional.
add_tests("or_big",
"Compiler" => bytecode do |g|
j1 = g.new_label
j2 = g.new_label
g.push :self
g.send :a, 0, true
g.dup
g.git j1
g.pop
g.push :self
g.send :b, 0, true
j1.set!
g.dup
g.git j2
g.pop
g.push :self
g.send :c, 0, true
g.dup
g.gif j2
g.pop
g.push :self
g.send :d, 0, true
j2.set!
j2.set!
end)
add_tests("or_big2",
"Compiler" => testcases['or_big']['Compiler'])
add_tests("parse_floats_as_args",
"Compiler" => bytecode do |g|
in_method :x do |d|
opt_arg_1 = d.new_label
opt_arg_2 = d.new_label
d.passed_arg 0
d.git opt_arg_1
d.push 0.0
d.set_local 0
d.pop
opt_arg_1.set!
d.passed_arg 1
d.git opt_arg_2
d.push 0.0
d.set_local 1
d.pop
opt_arg_2.set!
d.push_local 0
d.push_local 1
d.meta_send_op_plus
end
end)
add_tests("postexe",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :at_exit, 0 do |d|
d.push 1
end
end)
add_tests("proc_args_0",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :proc, 0 do |d|
d.push :self
d.send :x, 0, true
d.push 1
d.meta_send_op_plus
end
end)
add_tests("proc_args_1",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :proc, 1 do |d|
d.push_local_depth 0, 0
d.push 1
d.meta_send_op_plus
end
end)
add_tests("proc_args_2",
"Compiler" => bytecode do |g|
g.push :self
in_block_send :proc, 2 do |d|
d.push_local_depth 0, 0
d.push_local_depth 0, 1
d.meta_send_op_plus
end
end)
add_tests("proc_args_no", # TODO shouldn't 0 bitch if there are args?
"Compiler" => testcases['proc_args_0']['Compiler'])
add_tests("redo",
"Compiler" => bytecode do |g|
top = g.new_label
f = g.new_label
t = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :false
g.gif f
g.goto top
g.goto t
f.set!
g.push :nil
t.set!
g.pop
g.goto top
bottom.set!
g.pop_modifiers
end)
add_tests("rescue",
"Compiler" => bytecode do |g|
in_rescue :StandardError do |good_side|
if good_side then
g.push :self
g.send :blah, 0, true
else
g.push :nil
end
end
end)
add_tests("rescue_block_body",
"Compiler" => bytecode do |g|
in_rescue :StandardError, :wtf do |good_side| # FIX: wtf param
if good_side then
g.push :self
g.send :a, 0, true
else
g.push_exception
g.set_local 0
g.push :self
g.send :c, 0, true
g.pop
g.push :self
g.send :d, 0, true
end
end
end)
add_tests("rescue_block_nada",
"Compiler" => bytecode do |g|
in_rescue :StandardError do |good_side|
if good_side then
g.push :self
g.send :blah, 0, true
else
g.push :nil
end
end
end)
add_tests("rescue_exceptions",
"Compiler" => bytecode do |g|
in_rescue :RuntimeError do |good_side|
if good_side then
g.push :self
g.send :blah, 0, true
else
g.push_exception
g.set_local 0
g.push :nil
end
end
end)
add_tests("retry",
"Compiler" => bytecode do |g| # TODO: maybe have a real example?
g.push :self
g.push_const :LocalJumpError
g.push_literal "retry used in invalid context"
g.send :raise, 2, true
end)
add_tests("return_0",
"Compiler" => bytecode do |g|
g.push :nil
g.ret
end)
add_tests("return_1",
"Compiler" => bytecode do |g|
g.push 1
g.ret
end)
add_tests("return_n",
"Compiler" => bytecode do |g|
g.push 1
g.push 2
g.push 3
g.make_array 3
g.ret
end)
# Golden bytecode for `class << self ... end`: verify the receiver can take
# a metaclass, open it, compile the body (here just 42) into a
# __metaclass_init__ method descriptor, attach it, and invoke it.
add_tests("sclass",
"Compiler" => bytecode do |g|
g.push :self
g.dup
g.send :__verify_metaclass__, 0 # TODO: maybe refactor...
g.pop
g.open_metaclass
g.dup
g.push_literal_desc do |d2|
d2.push 42
d2.ret
end
g.swap
g.attach_method :__metaclass_init__
g.pop
g.send :__metaclass_init__, 0
end)
add_tests("sclass_trailing_class",
"Compiler" => bytecode do |g|
in_class :A do |d|
d.push :self
d.dup
d.send :__verify_metaclass__, 0 # TODO: maybe refactor...
d.pop
d.open_metaclass
d.dup
d.push_literal_desc do |d2|
d2.push :self
d2.send :a, 0, true
d2.ret
end
d.swap
d.attach_method :__metaclass_init__
d.pop
d.send :__metaclass_init__, 0
d.pop
d.push :nil
d.open_class :B
end
end)
add_tests("splat",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push :self
d.push_local 0
d.cast_array
d.push :nil
d.send_with_splat :a, 0, true, false
end
end)
add_tests("str",
"Compiler" => bytecode do |g|
g.push_literal "x"
g.string_dup
end)
add_tests("str_concat_newline",
"Compiler" => bytecode do |g|
g.push_literal "before after"
g.string_dup
end)
add_tests("str_concat_space",
"Compiler" => testcases["str_concat_newline"]["Compiler"])
add_tests("str_heredoc",
"Compiler" => bytecode do |g|
g.push_literal " blah\nblah\n"
g.string_dup
end)
add_tests("str_heredoc_call",
"Compiler" => bytecode do |g|
g.push_literal " blah\nblah\n"
g.string_dup
g.send :strip, 0, false
end)
add_tests("str_heredoc_double",
"Compiler" => bytecode do |g|
g.push_local 0
g.push_literal " first\n"
g.string_dup
g.push :self
g.send :b, 0, true
g.meta_send_op_plus
g.push_literal " second\n"
g.string_dup
g.meta_send_op_plus
g.meta_send_op_plus
g.set_local 0
end)
add_tests("str_heredoc_indent",
"Compiler" => bytecode do |g|
g.push_literal " blah\nblah\n\n"
g.string_dup
end)
add_tests("str_interp_file",
"Compiler" => bytecode do |g|
g.push_literal "file = (eval)\n"
g.string_dup
end)
add_tests("structure_extra_block_for_dvar_scoping",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.in_block_send :b, 2, 0, false do |d|
f = d.new_label
t = d.new_label
d.push :self
d.send :e, 0, true
d.push_local_depth 0, 0
d.send :f, 1, false
d.git f
d.push :false
d.set_local_depth 0, 2
d.pop
d.push_local_depth 0, 1
d.in_block_send :h, 2, 0, false, 0, true do |d2|
d2.push :true
d2.set_local_depth 1, 2
end
d.goto t
f.set!
d.push :nil
t.set!
end
end)
add_tests("structure_remove_begin_1",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
in_rescue :StandardError do |good_side|
if good_side then
g.push :self
g.send :b, 0, true
else
g.push :self
g.send :c, 0, true
end
end
g.send :<<, 1, false
end)
add_tests("structure_remove_begin_2",
"Compiler" => bytecode do |g|
bottom = g.new_label
f = g.new_label
g.push :self
g.send :c, 0, true
g.gif f
in_rescue :StandardError do |good_side|
if good_side then
g.push :self
g.send :b, 0, true
else
g.push :nil
end
end
g.goto bottom
f.set!
g.push :nil
bottom.set!
g.set_local 0
g.pop
g.push_local 0
end)
add_tests("structure_unused_literal_wwtt",
"Compiler" => bytecode do |g|
g.open_module :Graffle
end)
add_tests("super",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push 4
d.push :nil
d.push :nil
d.send_super :x, 1
end
end)
add_tests("super_block_pass",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :a, 0, true
g.push :nil
g.push :self
g.send :b, 0, true
g.dup
g.is_nil
g.git t
g.push_cpath_top
g.find_const :Proc
g.swap
g.send :__from_block__, 1
t.set!
g.send_super nil, 1 # TODO: nil?
end)
add_tests("super_block_splat",
"Compiler" => bytecode do |g|
g.push :self
g.send :a, 0, true
g.push :self
g.send :b, 0, true
g.cast_array
g.push :nil
g.send_super nil, 1, true # TODO: nil?
end)
add_tests("super_multi",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push 4
d.push 2
d.push 1
d.push :nil
d.push :nil
d.send_super :x, 3
end
end)
add_tests("svalue",
"Compiler" => bytecode do |g|
t = g.new_label
g.push :self
g.send :b, 0, true
g.cast_array
g.cast_array
g.dup
g.send :size, 0
g.push 1
g.swap
g.send :<, 1
g.git t
g.push 0
g.send :at, 1
t.set!
g.set_local 0
end)
add_tests("to_ary",
"Compiler" => bytecode do |g|
g.push :self
g.send :c, 0, true
g.cast_tuple
g.shift_tuple
g.set_local 0
g.pop
g.shift_tuple
g.set_local 1
g.pop
g.pop
g.push :true
end)
add_tests("true",
"Compiler" => bytecode do |g|
g.push :true
end)
add_tests("undef",
"Compiler" => bytecode do |g|
undef_bytecode :x
end)
add_tests("undef_2",
"Compiler" => bytecode do |g|
undef_bytecode :x, :y
end)
add_tests("undef_3",
"Compiler" => bytecode do |g|
undef_bytecode :x, :y, :z
end)
add_tests("undef_block_1",
"Compiler" => bytecode do |g|
g.push :self
g.send :f1, 0, true
g.pop
undef_bytecode :x
end)
add_tests("undef_block_2",
"Compiler" => bytecode do |g|
g.push :self
g.send :f1, 0, true
g.pop
undef_bytecode :x, :y
end)
add_tests("undef_block_3",
"Compiler" => bytecode do |g|
g.push :self
g.send :f1, 0, true
g.pop
undef_bytecode :x, :y, :z
end)
add_tests("undef_block_3_post",
"Compiler" => bytecode do |g|
undef_bytecode :x, :y, :z
g.pop
g.push :self
g.send :f2, 0, true
end)
add_tests("undef_block_wtf",
"Compiler" => bytecode do |g|
g.push :self
g.send :f1, 0, true
g.pop
undef_bytecode :x, :y, :z
g.pop
g.push :self
g.send :f2, 0, true
end)
add_tests("unless_post",
"Compiler" => bytecode do |g|
yep = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.git yep
g.push :self
g.send :a, 0, true
g.goto done
yep.set!
g.push :nil
done.set!
end)
add_tests("unless_post_not",
"Compiler" => bytecode do |g|
nope = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.gif nope
g.push :self
g.send :a, 0, true
g.goto done
nope.set!
g.push :nil
done.set!
end)
add_tests("unless_pre",
"Compiler" => bytecode do |g|
yep = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.git yep
g.push :self
g.send :a, 0, true
g.goto done
yep.set!
g.push :nil
done.set!
end)
add_tests("unless_pre_not",
"Compiler" => bytecode do |g|
nope = g.new_label
done = g.new_label
g.push :self
g.send :b, 0, true
g.gif nope
g.push :self
g.send :a, 0, true
g.goto done
nope.set!
g.push :nil
done.set!
end)
# Golden bytecode for a post-test until (begin 1+1 end until false): the
# body runs once before the condition, which is checked at `nxt`; git exits
# the loop when the condition becomes truthy.
# NOTE(review): the exit value (nil) is pushed at `bottom` and `brek` marks
# the point just before pop_modifiers — label names suggest `brek` is the
# break target; confirm against the generator's label conventions.
add_tests("until_post",
"Compiler" => bytecode do |g|
top = g.new_label
nxt = g.new_label
brek = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
nxt.set!
g.push :false
g.git bottom
g.goto top
bottom.set!
g.push :nil
brek.set!
g.pop_modifiers
end)
add_tests("until_post_not",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
dunno1.set!
g.push :true
g.gif bottom
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("until_pre",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :false
g.git bottom
dunno1.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("until_pre_mod",
"Compiler" => testcases["until_pre"]["Compiler"])
add_tests("until_pre_not",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :true
g.gif bottom
dunno1.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("until_pre_not_mod",
"Compiler" => testcases["until_pre_not"]["Compiler"])
add_tests("valias",
"Compiler" => bytecode do |g|
g.push_cpath_top
g.find_const :Globals
g.push_literal :$x
g.push_literal :$y
g.send :add_alias, 2
end)
add_tests("vcall",
"Compiler" => bytecode do |g|
g.push :self
g.send :method, 0, true
end)
add_tests("while_post",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
dunno1.set!
g.push :false
g.gif bottom
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_post2",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push 1
g.push 2
g.meta_send_op_plus
g.pop
g.push 3
g.push 4
g.meta_send_op_plus
g.pop
dunno1.set!
g.push :false
g.gif bottom
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_post_not",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
dunno1.set!
g.push :true
g.git bottom
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_pre",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :false
g.gif bottom
dunno1.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_pre_mod",
"Compiler" => testcases["while_pre"]["Compiler"])
add_tests("while_pre_nil",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :false
g.gif bottom
dunno1.set!
g.push :nil
g.pop
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_pre_not",
"Compiler" => bytecode do |g|
top = g.new_label
dunno1 = g.new_label
dunno2 = g.new_label
bottom = g.new_label
g.push_modifiers
top.set!
g.push :true
g.git bottom
dunno1.set!
g.push 1
g.push 1
g.meta_send_op_plus
g.pop
g.goto top
bottom.set!
g.push :nil
dunno2.set!
g.pop_modifiers
end)
add_tests("while_pre_not_mod",
"Compiler" => testcases["while_pre_not"]["Compiler"])
# xstr: backtick command string `touch 5` compiles to a :` send on self
add_tests("xstr",
"Compiler" => bytecode do |g|
g.push :self
g.push_literal "touch 5"
g.string_dup
g.send :"`", 1, true
end)
# yield with no arguments
add_tests("yield_0",
"Compiler" => bytecode do |g|
g.push_block
g.meta_send_call 0
end)
# yield with a single argument
add_tests("yield_1",
"Compiler" => bytecode do |g|
g.push_block
g.push 42
g.meta_send_call 1
end)
# yield with several arguments packed into a single array argument
add_tests("yield_n",
"Compiler" => bytecode do |g|
g.push_block
g.push 42
g.push 24
g.make_array 2
g.meta_send_call 1
end)
# empty array literal assigned to a local variable
add_tests("zarray",
"Compiler" => bytecode do |g|
g.make_array 0
g.set_local 0
end)
# zsuper: bare `super` forwarding to the parent implementation of #x
add_tests("zsuper",
"Compiler" => bytecode do |g|
in_method :x do |d|
d.push :nil
d.push :nil
d.send_super :x, 0
end
end)
end
|
require 'spec_helper'
require 'bra/model/component_creator'
# Shared example: the factory method returns an object whose flat
# representation is the Symbol form of the given value.
shared_examples 'a valid symbol constant' do |type, value|
it "returns an object whose flat representation is :#{value}" do
created = subject.send(type, value)
expect(created.flat).to eq(value.to_sym)
end
end
# Shared example: exercises a symbol-constant factory method with every
# valid value (both Symbol and String forms), an invalid value, and nil.
shared_examples 'a symbol constant' do |type, valid_list|
valid_list.each do |valid|
context "when the argument is :#{valid}" do
it_behaves_like 'a valid symbol constant', type, valid
end
context "when the argument is '#{valid}'" do
it_behaves_like 'a valid symbol constant', type, valid.to_s
end
end
context 'when the argument is invalid' do
specify { expect { subject.send(type, :xyzzy) }.to raise_error }
end
context 'when the argument is nil' do
specify { expect { subject.send(type, nil) }.to raise_error }
end
end
describe Bra::Model::ComponentCreator do
subject { Bra::Model::ComponentCreator.new(registrar) }
let(:registrar) { double(:registrar) }
describe '#load_state' do
it_behaves_like(
'a symbol constant', :load_state, Bra::Common::Types::LOAD_STATES
)
end
describe '#play_state' do
it_behaves_like(
'a symbol constant', :play_state, Bra::Common::Types::PLAY_STATES
)
end
end
Expect the registrar to be called.
require 'spec_helper'
require 'bra/model/component_creator'
require 'bra/model'
# Shared example: the factory method returns an object whose flat
# representation is the Symbol form of the given value.
shared_examples 'a valid symbol constant' do |type, value|
it "returns an object whose flat representation is :#{value}" do
created = subject.send(type, value)
expect(created.flat).to eq(value.to_sym)
end
end
# Shared example: the factory method registers the created ModelObject
# with the registrar exactly once.
shared_examples 'a successful factory method' do |type, value|
it 'sends the registrar #register with an object to register' do
expect(registrar).to receive(:register).once do |arg|
expect(arg).to be_a(Bra::Model::ModelObject)
end
subject.send(type, value)
end
end
# Shared example: exercises a symbol-constant factory method with every
# valid value (both Symbol and String forms), an invalid value, and nil.
shared_examples 'a symbol constant' do |type, valid_list|
valid_list.each do |valid|
it_behaves_like 'a successful factory method', type, valid
context "when the argument is :#{valid}" do
it_behaves_like 'a valid symbol constant', type, valid
end
context "when the argument is '#{valid}'" do
it_behaves_like 'a valid symbol constant', type, valid.to_s
end
end
context 'when the argument is invalid' do
specify { expect { subject.send(type, :xyzzy) }.to raise_error }
end
context 'when the argument is nil' do
specify { expect { subject.send(type, nil) }.to raise_error }
end
end
describe Bra::Model::ComponentCreator do
subject { Bra::Model::ComponentCreator.new(registrar) }
let(:registrar) { double(:registrar) }
# Default stub so examples that do not assert on registration still pass.
before(:each) { allow(registrar).to receive(:register) }
describe '#load_state' do
it_behaves_like(
'a symbol constant', :load_state, Bra::Common::Types::LOAD_STATES
)
end
describe '#play_state' do
it_behaves_like(
'a symbol constant', :play_state, Bra::Common::Types::PLAY_STATES
)
end
end
|
# Specs for sum (union) types built with the | operator.
# NOTE: the describe string previously said '#call', but every example
# exercises the indexing form #[], so the description is fixed to match.
RSpec.describe Dry::Data::SumType do
  describe '#[]' do
    it 'works with two pass-through types' do
      type = Dry::Data['int'] | Dry::Data['string']

      expect(type[312]).to be(312)
      expect(type['312']).to eql('312')
    end

    it 'works with two strict types' do
      type = Dry::Data['strict.int'] | Dry::Data['strict.string']

      expect(type[312]).to be(312)
      expect(type['312']).to eql('312')

      # Neither member accepts a Hash, so the sum type must reject it
      expect { type[{}] }.to raise_error(TypeError)
    end

    it 'works with nil and strict types' do
      type = Dry::Data['nil'] | Dry::Data['strict.string']

      expect(type[nil]).to be(nil)
      expect(type['312']).to eql('312')
      expect { type[{}] }.to raise_error(TypeError)
    end
  end
end
Updating spec desc to reflect tested method
The description of the spec stated it was testing #call, yet the specs
were instead testing #[]. This change brings the spec description more
in line with other specs for #[].
# Specs for sum (union) types built with the | operator; the #[] form
# coerces a value through the left member first, then the right.
RSpec.describe Dry::Data::SumType do
describe '#[]' do
it 'works with two pass-through types' do
type = Dry::Data['int'] | Dry::Data['string']
expect(type[312]).to be(312)
expect(type['312']).to eql('312')
end
it 'works with two strict types' do
type = Dry::Data['strict.int'] | Dry::Data['strict.string']
expect(type[312]).to be(312)
expect(type['312']).to eql('312')
# Neither member accepts a Hash, so the sum type must reject it
expect { type[{}] }.to raise_error(TypeError)
end
it 'works with nil and strict types' do
type = Dry::Data['nil'] | Dry::Data['strict.string']
expect(type[nil]).to be(nil)
expect(type['312']).to eql('312')
expect { type[{}] }.to raise_error(TypeError)
end
end
end
|
# NOTE: removed unused `require 'rest-client'` — nothing in this file
# references RestClient.
require 'uri'
require 'rdf'

# Builds an RDF model describing a bibliographic Work resource.
class RESTService
  @@base = "http://deichman.no/work/"
  @@dcterms = "http://purl.org/dc/terms/"
  @@deichman = "http://deichman.no/vocab/"
  @@type = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"

  # Converts a work hash into an RDF model via RDFService.
  #
  # @param data [Hash] work attributes:
  #   :id     [String] work identifier, appended to the base URI
  #   :author [String] author URI (may be empty)
  #   :title  [Hash]   :string plus :language (may be empty)
  #   :date   [Hash]   :string plus :datatype (may be empty)
  # @return the model built by RDFService#get_model
  def process_work(data)
    id = data[:id]
    author = data[:author]
    title = data[:title]
    date = data[:date]

    s = RDF::URI.new(@@base + id)
    rdfservice = RDFService.new

    # rdf:type triple is always emitted
    rdfservice.add_triple(s, RDF::URI.new(@@type), RDF::URI.new(@@base + "Work"))

    # Optional fields: only emit triples when a value is present
    if !author.empty?
      rdfservice.add_triple(s, RDF::URI.new(@@dcterms + "creator"), RDF::URI.new(author))
    end
    if !title.empty?
      rdfservice.add_triple(s, RDF::URI.new(@@dcterms + "title"), RDF::Literal.new(title[:string], :language => "#{title[:language]}"))
    end
    if !date.empty?
      rdfservice.add_triple(s, RDF::URI.new(@@dcterms + "date"), RDF::Literal.new(date[:string], :datatype => date[:datatype]))
    end

    return rdfservice.get_model
  end
end
Remove unused dependency
require 'uri'
require 'rdf'
# Builds an RDF model describing a bibliographic Work resource.
class RESTService
@@base = "http://deichman.no/work/"
@@dcterms = "http://purl.org/dc/terms/"
@@deichman = "http://deichman.no/vocab/"
@@type = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"
# Converts a work hash (:id, :author, :title, :date) into an RDF model.
# :title is a hash with :string/:language; :date with :string/:datatype.
# Returns the model built by RDFService#get_model.
def process_work (data)
id = data[:id]
author = data[:author]
title = data[:title]
date = data[:date]
s = RDF::URI.new(@@base + id)
rdfservice = RDFService.new
# rdf:type triple is always emitted
rdfservice.add_triple(s, RDF::URI.new(@@type), RDF::URI.new(@@base + "Work"))
# Optional fields: only emit triples when a value is present
if !author.empty?
rdfservice.add_triple(s, RDF::URI.new(@@dcterms + "creator"), RDF::URI.new(author))
end
if !title.empty?
rdfservice.add_triple(s, RDF::URI.new(@@dcterms + "title"), RDF::Literal.new(title[:string], :language => "#{title[:language]}"))
end
if !date.empty?
rdfservice.add_triple(s, RDF::URI.new(@@dcterms + "date"), RDF::Literal.new(date[:string], :datatype => date[:datatype]))
end
return rdfservice.get_model
end
end |
# rubocop:disable Naming/FileName
# -------------------------------------------------------------------------- #
# Copyright 2002-2022, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
require 'strategy'
require 'ActionManager'
require 'ServiceWatchDog'
require 'ServiceAutoScaler'
# Service Life Cycle Manager
class ServiceLCM
attr_writer :event_manager
attr_reader :am
LOG_COMP = 'LCM'
ACTIONS = [
# Callbacks
:deploy_cb,
:deploy_failure_cb,
:undeploy_cb,
:undeploy_failure_cb,
:cooldown_cb,
:scaleup_cb,
:scaleup_failure_cb,
:scaledown_cb,
:scaledown_failure_cb,
:add_cb,
:add_failure_cb,
:remove_cb,
:remove_failure_cb,
# Network callbacks
:deploy_nets_cb,
:deploy_nets_failure_cb,
:undeploy_nets_cb,
:undeploy_nets_failure_cb,
# WD callbacks
:error_wd_cb,
:done_wd_cb,
:running_wd_cb,
# Hold/Release callbacks
:hold_cb,
:release_cb
]
# Initialize the Service Life Cycle Manager
#
# @param client [OpenNebula::Client] Client used for the initial catch up
# @param concurrency [Integer] Max concurrent ActionManager actions
# @param cloud_auth [CloudAuth] Auth backend used to build user clients
# @param retries [Integer] Retry budget for auth (errno 256) failures
def initialize(client, concurrency, cloud_auth, retries)
@cloud_auth = cloud_auth
@am = ActionManager.new(concurrency, true)
@srv_pool = ServicePool.new(@cloud_auth, nil)
@retries = retries
em_conf = {
:cloud_auth => @cloud_auth,
:lcm => @am
}
@event_manager = EventManager.new(em_conf).am
@wd = ServiceWD.new(em_conf)
# Register Action Manager actions
ACTIONS.each do |m|
@am.register_action(m, method(m.to_s))
end
# Background workers: action listener, catch-up of existing services,
# watchdog, and the auto scaler
Thread.new { @am.start_listener }
Thread.new { catch_up(client) }
Thread.new { @wd.start(@srv_pool) }
Thread.new do
ServiceAutoScaler.new(
@srv_pool,
@cloud_auth,
self
).start
end
end
# Change service ownership
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param u_id [Integer] User ID
# @param g_id [Integer] Group ID
#
# @return [OpenNebula::Error] Error if any
def chown_action(client, service_id, u_id, g_id)
    result = @srv_pool.get(service_id, client) do |srv|
        srv.chown(u_id, g_id)
    end

    Log.error(LOG_COMP, result.message) if OpenNebula.is_error?(result)

    result
end
# Change service permissions
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param octet [Integer] Permissions in octet format
#
# @return [OpenNebula::Error] Error if any
def chmod_action(client, service_id, octet)
    result = @srv_pool.get(service_id, client) do |srv|
        srv.chmod_octet(octet)
    end

    Log.error(LOG_COMP, result.message) if OpenNebula.is_error?(result)

    result
end
# Change service name
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param new_name [String] New service name
#
# @return [OpenNebula::Error] Error if any
def rename_action(client, service_id, new_name)
    result = @srv_pool.get(service_id, client) do |srv|
        srv.rename(new_name)
    end

    Log.error(LOG_COMP, result.message) if OpenNebula.is_error?(result)

    result
end
# Add sched action to every role of the service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param action [String] Action to perform
# @param period [Integer] When to execute the action
# @param number [Integer] How many VMs per period
# @param args [String] Action arguments
#
# @return [OpenNebula::Error] Error if any
# rubocop:disable Metrics/ParameterLists
def service_sched_action(client, service_id, action, period, number, args)
# rubocop:enable Metrics/ParameterLists
rc = @srv_pool.get(service_id, client) do |service|
# Apply the scheduled action to every role of the service
service.roles.each do |_, role|
role.batch_action(action, period, number, args)
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Add sched action to a single service role
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param role_name [String] Role to add action
# @param action [String] Action to perform
# @param period [Integer] When to execute the action
# @param number [Integer] How many VMs per period
# @param args [String] Action arguments
#
# @return [OpenNebula::Error] Error if any
# rubocop:disable Metrics/ParameterLists
def sched_action(client,
service_id,
role_name,
action,
period,
number,
args)
# rubocop:enable Metrics/ParameterLists
rc = @srv_pool.get(service_id, client) do |service|
role = service.roles[role_name]
# Unknown role name: abort with an error instead of crashing
if role.nil?
break OpenNebula::Error.new("Role '#{role_name}' not found")
end
role.batch_action(action, period, number, args)
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Release a service on hold state
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
#
# @return [OpenNebula::Error] Error if any
def release_action(client, service_id)
rc = @srv_pool.get(service_id, client) do |service|
# Get roles that can be released
set_deploy_strategy(service)
roles = service.roles_release
if roles.empty?
break OpenNebula::Error.new('Service has no roles in HOLD')
end
rc = release_roles(client,
roles,
'DEPLOYING',
'FAILED_DEPLOYING',
:wait_release_action,
service.report_ready?)
# Service state follows the outcome of the role release
if !OpenNebula.is_error?(rc)
service.set_state(Service::STATE['DEPLOYING'])
else
service.set_state(Service::STATE['FAILED_DEPLOYING'])
end
service.update
rc
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
############################################################################
# Life cycle manager actions
############################################################################
# Deploy service networks
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
#
# @return [OpenNebula::Error] Error if any
def deploy_nets_action(client, service_id)
rc = @srv_pool.get(service_id, client) do |service|
# Create vnets only first time action is called
if service.state == Service::STATE['PENDING']
rc = service.deploy_networks
if OpenNebula.is_error?(rc)
service.set_state(Service::STATE['FAILED_DEPLOYING'])
service.update
break rc
end
end
service.set_state(Service::STATE['DEPLOYING_NETS'])
# Delegate waiting for the networks to the event manager; the
# callback continues with the service deployment
@event_manager.trigger_action(
:wait_deploy_nets_action,
service.id,
client,
service.id,
service.networks(true)
)
service.update
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Undeploy service networks
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
#
# @return [OpenNebula::Error] Error if any
def undeploy_nets_action(client, service_id)
rc = @srv_pool.get(service_id, client) do |service|
service.set_state(Service::STATE['UNDEPLOYING_NETS'])
# The event manager waits for the networks to disappear and then
# fires the undeploy_nets callback
@event_manager.trigger_action(
:wait_undeploy_nets_action,
service.id,
client,
service.id,
service.networks(false)
)
service.update
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Create new service: deploy the roles that are ready to be deployed
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
#
# @return [OpenNebula::Error] Error if any
def deploy_action(client, service_id)
    rc = @srv_pool.get(service_id, client) do |service|
        set_deploy_strategy(service)

        roles = service.roles_deploy

        # Maybe roles.empty? because they are being deployed in other threads
        if roles.empty?
            if service.all_roles_running?
                service.set_state(Service::STATE['RUNNING'])
                service.update

                @wd.add_service(service)
            end

            # If there is no node in PENDING the service is not modified.
            break
        end

        rc = deploy_roles(client,
                          roles,
                          'DEPLOYING',
                          'FAILED_DEPLOYING',
                          :wait_deploy_action,
                          service.report_ready?)

        # Fix: previously used non-short-circuit `&`, which evaluated
        # service.on_hold? even on error; use a plain branch so the state
        # choice is explicit and short-circuiting
        if OpenNebula.is_error?(rc)
            service.set_state(Service::STATE['FAILED_DEPLOYING'])
        elsif service.on_hold?
            service.set_state(Service::STATE['HOLD'])
        else
            service.set_state(Service::STATE['DEPLOYING'])
        end

        service.update

        rc
    end

    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)

    rc
end
# Delete service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param delete [Boolean] Force flow delete
#
# @return [OpenNebula::Error] Error if any
def undeploy_action(client, service_id, delete = false)
rc = @srv_pool.get(service_id, client) do |service|
if !service.can_undeploy? && !delete
break OpenNebula::Error.new(
'Service cannot be undeployed in state: ' \
"#{service.state_str}"
)
end
# The watchdog must stop monitoring a service being undeployed
@wd.remove_service(service_id)
set_deploy_strategy(service)
roles = service.roles_shutdown
# If shutdown roles is empty, assume the service is in DONE and exit
if roles.empty?
service.delete
break
end
rc = undeploy_roles(client,
roles,
'UNDEPLOYING',
'FAILED_UNDEPLOYING',
:wait_undeploy_action)
if !OpenNebula.is_error?(rc)
service.set_state(Service::STATE['UNDEPLOYING'])
else
service.set_state(Service::STATE['FAILED_UNDEPLOYING'])
end
service.update
rc
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Scale service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param role_name [String] Role to scale
# @param cardinality [Integer] Number of VMs to scale
# @param force [Boolean] True to force scaling
#
# @return [OpenNebula::Error] Error if any
def scale_action(client, service_id, role_name, cardinality, force)
rc = @srv_pool.get(service_id, client) do |service|
unless service.can_scale?
break OpenNebula::Error.new(
"Service cannot be scaled in state: #{service.state_str}"
)
end
# Stop watchdog monitoring while the service is scaling
@wd.remove_service(service_id)
role = service.roles[role_name]
if role.nil?
break OpenNebula::Error.new("Role #{role_name} not found")
end
rc = nil
# Positive diff scales up, negative scales down, zero is an error
cardinality_diff = cardinality - role.cardinality
rc = set_cardinality(role, cardinality, force)
if OpenNebula.is_error?(rc)
# Scaling aborted: resume watchdog monitoring
@wd.add_service(service)
break OpenNebula::Error.new(
"Can't scale service #{service_id}: #{rc.message}"
)
end
if cardinality_diff > 0
# change client to have right ownership
client = @cloud_auth.client("#{service.uname}:#{service.gid}")
service.replace_client(client)
role.scale_way('UP')
rc = deploy_roles(client,
{ role_name => role },
'SCALING',
'FAILED_SCALING',
:wait_scaleup_action,
service.report_ready?)
elsif cardinality_diff < 0
role.scale_way('DOWN')
rc = undeploy_roles(client,
{ role_name => role },
'SCALING',
'FAILED_SCALING',
:wait_scaledown_action)
else
break OpenNebula::Error.new(
"Cardinality of #{role_name} is already at #{cardinality}"
)
end
if !OpenNebula.is_error?(rc)
service.set_state(Service::STATE['SCALING'])
else
service.set_state(Service::STATE['FAILED_SCALING'])
end
service.update
rc
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Recover service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param delete [Boolean] True to delete the service while recovering
#
# @return [OpenNebula::Error] Error if any
def recover_action(client, service_id, delete = false)
# Cancel any in-flight event-manager action for this service
@event_manager.cancel_action(service_id.to_i)
return undeploy_action(client, service_id, true) if delete
rc = @srv_pool.get(service_id, client) do |service|
# Dispatch on the recoverable state of the service
if service.can_recover_deploy?
recover_deploy(client, service)
elsif service.can_recover_undeploy?
recover_undeploy(client, service)
elsif service.can_recover_scale?
# change client to have right ownership
client = @cloud_auth.client("#{service.uname}:#{service.gid}")
service.replace_client(client)
recover_scale(client, service)
elsif service.can_recover_deploy_nets?
recover_nets(:wait_deploy_nets_action, client, service)
elsif service.can_recover_undeploy_nets?
recover_nets(:wait_undeploy_nets_action, client, service)
elsif Service::STATE['COOLDOWN'] == service.state
# COOLDOWN recovery just moves service and roles back to RUNNING
service.set_state(Service::STATE['RUNNING'])
service.roles.each do |_, role|
role.set_state(Role::STATE['RUNNING'])
end
else
break OpenNebula::Error.new(
'Service cannot be recovered in state: ' \
"#{service.state_str}"
)
end
service.update
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Update service template
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param new_template [String] New template
# @param append [Boolean] True to append template
def service_update(client, service_id, new_template, append)
rc = @srv_pool.get(service_id, client) do |service|
unless service.can_update?
break OpenNebula::Error.new(
"Service cannot be updated in state: #{service.state_str}"
)
end
# rc is [valid?, offending_attribute] for the new template
rc = service.check_new_template(new_template, append)
unless rc[0]
if rc[1] == 'name'
break OpenNebula::Error.new(
'To change `service/name` use rename operation'
)
else
break OpenNebula::Error.new(
"Immutable value: `#{rc[1]}` can not be changed"
)
end
end
service.update(new_template, append)
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Update role elasticity/schedule policies
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param role_name [String] Role to update
# @param policies [Hash] New policies values
# @param cooldown [Integer] New cooldown time
#
# @return [OpenNebula::Error] Error if any
def update_role_policies(client, service_id, role_name, policies, cooldown)
    rc = @srv_pool.get(service_id, client) do |service|
        role = service.roles[role_name]

        # Fix: guard against unknown role names; previously this raised
        # NoMethodError on nil, unlike sibling operations (e.g. sched_action)
        if role.nil?
            break OpenNebula::Error.new("Role '#{role_name}' not found")
        end

        elasticity_policies = policies['elasticity_policies']
        scheduled_policies  = policies['scheduled_policies']

        if elasticity_policies && !elasticity_policies.empty?
            role.update_elasticity_policies(elasticity_policies)
        end

        if scheduled_policies && !scheduled_policies.empty?
            role.update_scheduled_policies(scheduled_policies)
        end

        role.update_cooldown(cooldown)

        service.update
    end

    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)

    rc
end
# Add role to running service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param role [Hash] Role information
def add_role_action(client, service_id, role)
rc = @srv_pool.get(service_id, client) do |service|
unless service.running?
break OpenNebula::Error.new(
"Cannot modify roles in state: #{service.state_str}"
)
end
role = service.add_role(role)
break role if OpenNebula.is_error?(role)
service.update
# Deploy only the networks required by the new role
rc = service.deploy_networks(false)
if OpenNebula.is_error?(rc)
service.set_state(Service::STATE['FAILED_DEPLOYING'])
service.update
break rc
end
service.update
add_role(client, service, role)
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Remove role from running service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param role [Hash] Role information
def remove_role_action(client, service_id, role)
    rc = @srv_pool.get(service_id, client) do |service|
        # Roles can only be removed while the service is RUNNING
        unless service.running?
            break OpenNebula::Error.new(
                "Cannot modify roles in state: #{service.state_str}"
            )
        end

        target = service.roles[role]

        # Unknown role name
        break OpenNebula::Error.new("Role #{role} does not exist") unless target

        remove_role(client, service, target)
    end

    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)

    rc
end
private
# Retry on authentication error
#
# @param client [OpenNebula::Client] Client to perform operation
# @param &block Code block to execute
def retry_op(client, &block)
    attempts = 0
    result   = nil

    loop do
        result = block.call(client)

        # An error other than authentication (errno 256): give up
        break if OpenNebula.is_error?(result) && result.errno != 256

        # Success, or the retry budget has been exhausted
        break if !OpenNebula.is_error?(result) || attempts > @retries

        # Authentication error: drop the client so the token is renewed
        client = nil
        attempts += 1
    end

    result
end
############################################################################
# Callbacks
############################################################################
# Callback fired when a role finishes deploying: marks it RUNNING, prunes
# deployed nodes, continues straight-strategy deployment, and removes the
# whole service when a role ends with 0 nodes.
def deploy_cb(client, service_id, role_name, nodes)
    undeploy = false

    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            service.roles[role_name].set_state(Role::STATE['RUNNING'])

            service.roles[role_name].nodes.delete_if do |node|
                # Fix: `cardinalitty` typo raised NoMethodError at runtime
                if nodes[node] && service.roles[role_name].cardinality > 0
                    service.roles[role_name].cardinality -= 1
                end

                nodes[node]
            end

            # If the role has 0 nodes, delete role
            undeploy = service.check_role(service.roles[role_name])

            if service.all_roles_running?
                service.set_state(Service::STATE['RUNNING'])
            elsif service.strategy == 'straight'
                set_deploy_strategy(service)

                deploy_roles(c,
                             service.roles_deploy,
                             'DEPLOYING',
                             'FAILED_DEPLOYING',
                             :wait_deploy_action,
                             service.report_ready?)
            end

            rc = service.update

            return rc if OpenNebula.is_error?(rc)

            @wd.add_service(service) if service.all_roles_running?
        end
    end

    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)

    return unless undeploy

    Log.info LOG_COMP, "Automatically deleting service #{service_id}"

    undeploy_action(client, service_id)
end
# Callback fired when a role deployment fails: cancels pending events and
# marks both service and role as FAILED_DEPLOYING.
def deploy_failure_cb(client, service_id, role_name)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
service.set_state(Service::STATE['FAILED_DEPLOYING'])
service.roles[role_name].set_state(
Role::STATE['FAILED_DEPLOYING']
)
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when service networks are ready: continue with the
# regular role deployment.
def deploy_nets_cb(client, service_id)
deploy_action(client, service_id)
end
# Callback fired when network deployment fails: cancels pending events
# and marks the service as FAILED_DEPLOYING_NETS.
def deploy_nets_failure_cb(client, service_id)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
service.set_state(Service::STATE['FAILED_DEPLOYING_NETS'])
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when a role finishes undeploying: prunes successfully
# removed nodes, continues straight-strategy shutdown, and triggers
# network undeployment once every role is DONE.
def undeploy_cb(client, service_id, role_name, nodes)
undeploy_nets = false
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
service.roles[role_name].set_state(Role::STATE['DONE'])
# Drop nodes that terminated successfully and did not fail
service.roles[role_name].nodes.delete_if do |node|
!nodes[:failure].include?(node['deploy_id']) &&
nodes[:successful].include?(node['deploy_id'])
end
if service.all_roles_done?
rc = service.delete_networks
if rc && !rc.empty?
Log.info LOG_COMP, 'Error trying to delete '\
"Virtual Networks #{rc}"
end
undeploy_nets = true
break
elsif service.strategy == 'straight'
set_deploy_strategy(service)
undeploy_roles(c,
service.roles_shutdown,
'UNDEPLOYING',
'FAILED_UNDEPLOYING',
:wait_undeploy_action)
end
service.update
end
end
undeploy_nets_action(client, service_id) if undeploy_nets
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when service networks are gone: delete the service.
def undeploy_nets_cb(client, service_id)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
service.delete
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when network undeployment fails: cancels pending events
# and marks the service as FAILED_UNDEPLOYING_NETS.
def undeploy_nets_failure_cb(client, service_id)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
service.set_state(Service::STATE['FAILED_UNDEPLOYING_NETS'])
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when a role undeployment fails: cancels pending events,
# marks service/role as FAILED_UNDEPLOYING and prunes terminated nodes.
def undeploy_failure_cb(client, service_id, role_name, nodes)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
service.set_state(Service::STATE['FAILED_UNDEPLOYING'])
service.roles[role_name].set_state(
Role::STATE['FAILED_UNDEPLOYING']
)
# Drop nodes that terminated successfully and did not fail
service.roles[role_name].nodes.delete_if do |node|
!nodes[:failure].include?(node['deploy_id']) &&
nodes[:successful].include?(node['deploy_id'])
end
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when a scale-up finishes: prunes deployed nodes, moves
# the service/role into COOLDOWN and schedules the cooldown wait.
def scaleup_cb(client, service_id, role_name, nodes)
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            service.roles[role_name].nodes.delete_if do |node|
                # Fix: `cardinalitty` typo raised NoMethodError at runtime
                if nodes[node] && service.roles[role_name].cardinality > 0
                    service.roles[role_name].cardinality -= 1
                end

                nodes[node]
            end

            service.set_state(Service::STATE['COOLDOWN'])
            service.roles[role_name].set_state(Role::STATE['COOLDOWN'])

            @event_manager.trigger_action(
                :wait_cooldown_action,
                service.id,
                c,
                service.id,
                role_name,
                service.roles[role_name].cooldown
            )

            service.roles[role_name].clean_scale_way

            service.update
        end
    end

    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when a scale-down finishes: moves service/role into
# COOLDOWN, prunes terminated nodes and schedules the cooldown wait.
def scaledown_cb(client, service_id, role_name, nodes)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
service.set_state(Service::STATE['COOLDOWN'])
service.roles[role_name].set_state(Role::STATE['COOLDOWN'])
# Drop nodes that terminated successfully and did not fail
service.roles[role_name].nodes.delete_if do |node|
!nodes[:failure].include?(node['deploy_id']) &&
nodes[:successful].include?(node['deploy_id'])
end
@event_manager.trigger_action(
:wait_cooldown_action,
service.id,
c,
service.id,
role_name,
service.roles[role_name].cooldown
)
service.roles[role_name].clean_scale_way
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when a scale-up fails: cancels pending events and marks
# service and role as FAILED_SCALING.
def scaleup_failure_cb(client, service_id, role_name)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
service.set_state(Service::STATE['FAILED_SCALING'])
service.roles[role_name].set_state(
Role::STATE['FAILED_SCALING']
)
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when a scale-down fails: cancels pending events, marks
# service/role as FAILED_SCALING and prunes terminated nodes.
def scaledown_failure_cb(client, service_id, role_name, nodes)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
role = service.roles[role_name]
service.set_state(Service::STATE['FAILED_SCALING'])
role.set_state(Role::STATE['FAILED_SCALING'])
# Drop nodes that terminated successfully and did not fail
role.nodes.delete_if do |node|
!nodes[:failure].include?(node['deploy_id']) &&
nodes[:successful].include?(node['deploy_id'])
end
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
def cooldown_cb(client, service_id, role_name)
undeploy = false
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
service.set_state(Service::STATE['RUNNING'])
service.roles[role_name].set_state(Role::STATE['RUNNING'])
service.update
# If the role has 0 nodes, delete role
undeploy = service.check_role(service.roles[role_name])
@wd.add_service(service)
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
return unless undeploy
Log.info LOG_COMP, "Automatically deleting service #{service_id}"
undeploy_action(client, service_id)
end
# Callback after a role is successfully added and deployed: marks role and
# service RUNNING, persists, and re-registers with the watchdog once all
# roles run. Note the early `return rc` aborts the whole callback on a
# failed update.
def add_cb(client, service_id, role_name, _)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
service.roles[role_name].set_state(Role::STATE['RUNNING'])
service.set_state(Service::STATE['RUNNING'])
rc = service.update
return rc if OpenNebula.is_error?(rc)
@wd.add_service(service) if service.all_roles_running?
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback for a failed role-add deploy: cancels pending actions and marks
# service and role FAILED_DEPLOYING.
def add_failure_cb(client, service_id, role_name)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
service.set_state(Service::STATE['FAILED_DEPLOYING'])
service.roles[role_name].set_state(Role::STATE['FAILED_DEPLOYING'])
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback after a role is removed: deletes the role; if every role is
# done the service networks and the service itself are deleted, otherwise
# the service goes back to RUNNING.
def remove_cb(client, service_id, role_name, _)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
service.remove_role(role_name)
if service.all_roles_done?
rc = service.delete_networks
# delete_networks reports per-network errors as a non-empty value
if rc && !rc.empty?
Log.info LOG_COMP, 'Error trying to delete '\
"Virtual Networks #{rc}"
end
service.delete
else
service.set_state(Service::STATE['RUNNING'])
rc = service.update
return rc if OpenNebula.is_error?(rc)
@wd.add_service(service) if service.all_roles_running?
end
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback for a failed role removal: cancels pending actions, marks
# service and role FAILED_UNDEPLOYING and drops cleanly-undeployed nodes.
def remove_failure_cb(client, service_id, role_name, nodes)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
service.set_state(Service::STATE['FAILED_UNDEPLOYING'])
service.roles[role_name].set_state(
Role::STATE['FAILED_UNDEPLOYING']
)
# keep a node only if it failed or was not reported successful
service.roles[role_name].nodes.delete_if do |node|
!nodes[:failure].include?(node['deploy_id']) &&
nodes[:successful].include?(node['deploy_id'])
end
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback when a role reaches HOLD: marks the role (and, when every role
# is on hold, the service) as HOLD; otherwise, with the straight strategy,
# keeps deploying the remaining held roles in dependency order.
def hold_cb(client, service_id, role_name)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
if service.roles[role_name].state != Role::STATE['HOLD']
service.roles[role_name].set_state(Role::STATE['HOLD'])
end
if service.all_roles_hold? &&
service.state != Service::STATE['HOLD']
service.set_state(Service::STATE['HOLD'])
elsif service.strategy == 'straight'
set_deploy_strategy(service)
deploy_roles(
c,
service.roles_hold,
'DEPLOYING',
'FAILED_DEPLOYING',
:wait_deploy_action,
service.report_ready?
)
end
rc = service.update
return rc if OpenNebula.is_error?(rc)
end
end
# NOTE(review): logs under the 'WD' tag unlike sibling LCM callbacks
# that use LOG_COMP - confirm this is intended
Log.error 'WD', rc.message if OpenNebula.is_error?(rc)
end
# Callback after held nodes of a role are released: marks the role RUNNING,
# drops the released nodes (decreasing cardinality per node), then either
# marks the service RUNNING or keeps releasing roles in straight order.
# Auto-undeploys the service when the role was left with 0 nodes.
def release_cb(client, service_id, role_name, nodes)
    undeploy = false

    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            service.roles[role_name].set_state(Role::STATE['RUNNING'])

            service.roles[role_name].nodes.delete_if do |node|
                # Fix: 'cardinalitty' typo raised NoMethodError at runtime
                if nodes[node] && service.roles[role_name].cardinality > 0
                    service.roles[role_name].cardinality -= 1
                end

                nodes[node]
            end

            # If the role has 0 nodes, delete role
            undeploy = service.check_role(service.roles[role_name])

            if service.all_roles_running?
                service.set_state(Service::STATE['RUNNING'])
            elsif service.strategy == 'straight'
                set_deploy_strategy(service)

                release_roles(
                    c,
                    service.roles_release,
                    'DEPLOYING',
                    'FAILED_DEPLOYING',
                    :wait_deploy_action,
                    service.report_ready?
                )
            end

            rc = service.update
            return rc if OpenNebula.is_error?(rc)

            @wd.add_service(service) if service.all_roles_running?
        end
    end

    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)

    return unless undeploy

    Log.info LOG_COMP, "Automatically deleting service #{service_id}"

    undeploy_action(client, service_id)
end
############################################################################
# WatchDog Callbacks
############################################################################
def error_wd_cb(client, service_id, role_name, _node)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
if service.state != Service::STATE['WARNING']
service.set_state(Service::STATE['WARNING'])
end
if service.roles[role_name].state != Role::STATE['WARNING']
service.roles[role_name].set_state(Role::STATE['WARNING'])
end
service.update
end
end
Log.error 'WD', rc.message if OpenNebula.is_error?(rc)
end
# WatchDog callback for a node that finished: decreases the role
# cardinality, removes the node, and auto-undeploys the service when the
# role was left with 0 nodes.
def done_wd_cb(client, service_id, role_name, node)
undeploy = false
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
role = service.roles[role_name]
next unless role
cardinality = role.cardinality - 1
# ignore nodes that are no longer part of the role
next unless role.nodes.find {|n| n['deploy_id'] == node }
# just update if the cardinality is positive
set_cardinality(role, cardinality, true) if cardinality >= 0
role.nodes.delete_if {|n| n['deploy_id'] == node }
# If the role has 0 nodes, delete role
undeploy = service.check_role(role)
service.update
Log.info 'WD',
"Node #{node} is done, " \
"updating service #{service_id}:#{role_name} " \
"cardinality to #{cardinality}"
end
end
Log.error 'WD', rc.message if OpenNebula.is_error?(rc)
return unless undeploy
Log.info LOG_COMP, "Automatically deleting service #{service_id}"
undeploy_action(client, service_id)
end
# WatchDog callback for a node back to RUNNING: restores role (and, when
# every role runs, service) state, and auto-undeploys empty roles.
def running_wd_cb(client, service_id, role_name, _node)
undeploy = false
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
role = service.roles[role_name]
if service.roles[role_name].state != Role::STATE['RUNNING']
service.roles[role_name].set_state(Role::STATE['RUNNING'])
end
if service.all_roles_running? &&
service.state != Service::STATE['RUNNING']
service.set_state(Service::STATE['RUNNING'])
end
# If the role has 0 nodes, delete role
undeploy = service.check_role(role)
service.update
end
end
Log.error 'WD', rc.message if OpenNebula.is_error?(rc)
return unless undeploy
Log.info LOG_COMP, "Automatically deleting service #{service_id}"
undeploy_action(client, service_id)
end
############################################################################
# Helpers
############################################################################
# Iterate through the services to catch up with the state of each service;
# used when the LCM starts
# Scans every service at LCM start and triggers a recover on those left in
# a transient state.
def catch_up(client)
# NOTE(review): informational message logged at error level - confirm
Log.error LOG_COMP, 'Catching up...'
@srv_pool.info_all
@srv_pool.each do |service|
recover_action(client, service.id) if service.transient_state?
end
end
# Extends the given service with the deployment-strategy module matching
# its configured strategy: Straight for 'straight', generic Strategy
# otherwise.
# @param [Service] service the service
# rubocop:disable Naming/AccessorMethodName
def set_deploy_strategy(service)
    # rubocop:enable Naming/AccessorMethodName
    strategy_module = service.strategy == 'straight' ? Straight : Strategy
    service.extend(strategy_module)
end
# Returns true if the deployment of all roles succeeded and
# updates their state accordingly
# @param [Array<Role>] roles to be deployed
# @param [Role::STATE] success_state new state of the role
# if deployed successfully
# @param [Role::STATE] error_state new state of the role
# if deployed unsuccessfully
# rubocop:disable Metrics/ParameterLists
# Deploys each role, holding it when requested, and triggers the matching
# wait action; breaks out with an OpenNebula::Error on the first failure
# (the broken error becomes the method's return value).
def deploy_roles(client, roles, success_state, error_state, action, report)
# rubocop:enable Metrics/ParameterLists
roles.each do |name, role|
if role.state == Role::STATE['PENDING']
# Set all roles on hold if the on_hold option
# is set at service level
if role.service_on_hold?
role.hold(true)
elsif role.any_parent_on_hold?
role.hold(true)
end
end
rc = role.deploy
# rc[0] is falsy on failure, rc[1] carries the error reason
if !rc[0]
role.set_state(Role::STATE[error_state])
break OpenNebula::Error.new(
"Error deploying role #{name}: #{rc[1]}"
)
end
if role.on_hold? && role.state == Role::STATE['PENDING']
role.set_state(Role::STATE['HOLD'])
@event_manager.trigger_action(:wait_hold_action,
role.service.id,
client,
role.service.id,
role.name,
rc[0])
else
role.set_state(Role::STATE[success_state])
@event_manager.trigger_action(action,
role.service.id,
client,
role.service.id,
role.name,
rc[0],
report)
end
end
end
# Shuts down each role and triggers the matching wait action; breaks out
# with an OpenNebula::Error on the first failure.
def undeploy_roles(client, roles, success_state, error_state, action)
roles.each do |name, role|
rc = role.shutdown(false)
if !rc[0]
role.set_state(Role::STATE[error_state])
break OpenNebula::Error.new(
"Error undeploying role #{name}: #{rc[1]}"
)
end
role.set_state(Role::STATE[success_state])
# TODO, take only subset of nodes which needs to
# be undeployed (new role.nodes_undeployed_ids ?)
@event_manager.trigger_action(action,
role.service.id,
client,
role.service.id,
role.name,
rc[0])
end
end
# rubocop:disable Metrics/ParameterLists
# Releases each held role and triggers the matching wait action; breaks
# out with an OpenNebula::Error on the first failure.
def release_roles(client, roles, success_state, error_state, action, report)
# rubocop:enable Metrics/ParameterLists
roles.each do |name, role|
rc = role.release
# NOTE(review): failure is detected via rc[1] while deploy_roles and
# undeploy_roles test rc[0] - confirm Role#release's return shape
if !rc[1]
role.set_state(Role::STATE[error_state])
break OpenNebula::Error.new(
"Error releasing role #{name}: #{rc[1]}"
)
end
role.set_state(Role::STATE[success_state])
@event_manager.trigger_action(action,
role.service.id,
client,
role.service.id,
role.name,
rc[0],
report)
end
end
# Updates the role cardinality through a role template update.
#
# @param role        [Role]    Role to modify
# @param cardinality [Integer] New cardinality
# @param force       [Boolean] True to bypass min/max VM checks
#
# @return [OpenNebula::Error, nil] the error, or nil on success
def set_cardinality(role, cardinality, force)
    payload = JSON.parse("{ \"cardinality\" : #{cardinality},\n" \
                         " \"force\" : #{force} }")

    rc = role.update(payload)

    OpenNebula.is_error?(rc) ? rc : nil
end
# Re-triggers the deploy wait action for every role that can recover a
# failed deployment.
def recover_deploy(client, service)
    service.roles.each do |role_name, role|
        next unless role.can_recover_deploy?

        nodes = role.recover_deploy(service.report_ready?)

        @event_manager.trigger_action(:wait_deploy_action, service.id,
                                      client, service.id, role_name,
                                      nodes, service.report_ready?)
    end
end

# Re-triggers the undeploy wait action for every role that can recover a
# failed undeployment.
def recover_undeploy(client, service)
    service.roles.each do |role_name, role|
        next unless role.can_recover_undeploy?

        nodes = role.recover_undeploy

        @event_manager.trigger_action(:wait_undeploy_action, service.id,
                                      client, service.id, role_name,
                                      nodes)
    end
end
# Re-triggers the scale-up or scale-down wait action for every role that
# can recover an interrupted scale operation.
def recover_scale(client, service)
service.roles.each do |name, role|
next unless role.can_recover_scale?
# recover_scale returns the affected nodes and the scale direction
nodes, up = role.recover_scale(service.report_ready?)
if up
@event_manager.trigger_action(:wait_scaleup_action,
service.id,
client,
service.id,
name,
nodes,
service.report_ready?)
else
@event_manager.trigger_action(:wait_scaledown_action,
service.id,
client,
service.id,
name,
nodes)
end
end
end
# Triggers the network wait action used when recovering a service stuck
# deploying or undeploying its Virtual Networks.
#
# @param action  [Symbol] :wait_deploy_nets_action or
#                :wait_undeploy_nets_action
# @param client  [OpenNebula::Client] Client executing action
# @param service [Service] Service being recovered
def recover_nets(action, client, service)
    # Idiom fix: assign the comparison result directly instead of the
    # 'cond ? a = true : a = false' form
    deploy = (action == :wait_deploy_nets_action)

    @event_manager.trigger_action(
        action,
        service.id,
        client,
        service.id,
        service.networks(deploy)
    )
end
# Deploys a single newly-added role and moves the service to DEPLOYING or
# FAILED_DEPLOYING accordingly; the watchdog is paused while deploying.
def add_role(client, service, role)
@wd.remove_service(service.id)
set_deploy_strategy(service)
rc = deploy_roles(client,
{ role.name => role },
'DEPLOYING',
'FAILED_DEPLOYING',
:wait_add_action,
service.report_ready?)
if !OpenNebula.is_error?(rc)
service.set_state(Service::STATE['DEPLOYING'])
else
service.set_state(Service::STATE['FAILED_DEPLOYING'])
end
service.update
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Undeploys a single role being removed and moves the service to
# UNDEPLOYING or FAILED_UNDEPLOYING accordingly.
def remove_role(client, service, role)
@wd.remove_service(service.id)
set_deploy_strategy(service)
rc = undeploy_roles(client,
{ role.name => role },
'UNDEPLOYING',
'FAILED_UNDEPLOYING',
:wait_remove_action)
if !OpenNebula.is_error?(rc)
service.set_state(Service::STATE['UNDEPLOYING'])
else
service.set_state(Service::STATE['FAILED_UNDEPLOYING'])
end
service.update
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
end
# rubocop:enable Naming/FileName
L #-: fix linting typo (#2156)
# rubocop:disable Naming/FileName
# -------------------------------------------------------------------------- #
# Copyright 2002-2022, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
require 'strategy'
require 'ActionManager'
require 'ServiceWatchDog'
require 'ServiceAutoScaler'
# Service Life Cycle Manager
class ServiceLCM
attr_writer :event_manager
attr_reader :am
# Logging tag used by this component
LOG_COMP = 'LCM'
# Actions registered in the ActionManager; each symbol names a private
# callback method defined below
ACTIONS = [
# Callbacks
:deploy_cb,
:deploy_failure_cb,
:undeploy_cb,
:undeploy_failure_cb,
:cooldown_cb,
:scaleup_cb,
:scaleup_failure_cb,
:scaledown_cb,
:scaledown_failure_cb,
:add_cb,
:add_failure_cb,
:remove_cb,
:remove_failure_cb,
# Network callbacks
:deploy_nets_cb,
:deploy_nets_failure_cb,
:undeploy_nets_cb,
:undeploy_nets_failure_cb,
# WD callbacks
:error_wd_cb,
:done_wd_cb,
:running_wd_cb,
# Hold/Release callbacks
:hold_cb,
:release_cb
]
# Sets up the action manager, service pool, event manager and watchdog,
# registers every callback in ACTIONS, and spawns the listener, catch-up,
# watchdog and auto-scaler threads.
#
# @param client      [OpenNebula::Client] Client used for catch-up
# @param concurrency [Integer] ActionManager concurrency level
# @param cloud_auth  [CloudAuth] Authentication driver
# @param retries     [Integer] Retries for authentication errors
def initialize(client, concurrency, cloud_auth, retries)
@cloud_auth = cloud_auth
@am = ActionManager.new(concurrency, true)
@srv_pool = ServicePool.new(@cloud_auth, nil)
@retries = retries
em_conf = {
:cloud_auth => @cloud_auth,
:lcm => @am
}
@event_manager = EventManager.new(em_conf).am
@wd = ServiceWD.new(em_conf)
# Register Action Manager actions
ACTIONS.each do |m|
@am.register_action(m, method(m.to_s))
end
Thread.new { @am.start_listener }
Thread.new { catch_up(client) }
Thread.new { @wd.start(@srv_pool) }
Thread.new do
ServiceAutoScaler.new(
@srv_pool,
@cloud_auth,
self
).start
end
end
# Change service ownership
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param u_id [Integer] User ID
# @param g_id [Integer] Group ID
#
# @return [OpenNebula::Error] Error if any
# Changes the service ownership; logs and returns any error.
def chown_action(client, service_id, u_id, g_id)
    rc = @srv_pool.get(service_id, client) do |srv|
        srv.chown(u_id, g_id)
    end

    Log.error(LOG_COMP, rc.message) if OpenNebula.is_error?(rc)

    rc
end
# Change service permissions
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param octet [Integer] Permissions in octet format
#
# @return [OpenNebula::Error] Error if any
# Changes the service permissions (octet format); logs and returns any
# error.
def chmod_action(client, service_id, octet)
    rc = @srv_pool.get(service_id, client) do |srv|
        srv.chmod_octet(octet)
    end

    Log.error(LOG_COMP, rc.message) if OpenNebula.is_error?(rc)

    rc
end
# Change service name
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param new_name [String] New service name
#
# @return [OpenNebula::Error] Error if any
# Renames the service; logs and returns any error.
def rename_action(client, service_id, new_name)
    rc = @srv_pool.get(service_id, client) do |srv|
        srv.rename(new_name)
    end

    Log.error(LOG_COMP, rc.message) if OpenNebula.is_error?(rc)

    rc
end
# Add sched action to service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param action [String] Action to perform
# @param period [Integer] When to execute the action
# @param number [Integer] How many VMs per period
# @param args [String] Action arguments
#
# @return [OpenNebula::Error] Error if any
# rubocop:disable Metrics/ParameterLists
# Schedules the given action on every role of the service.
def service_sched_action(client, service_id, action, period, number, args)
# rubocop:enable Metrics/ParameterLists
rc = @srv_pool.get(service_id, client) do |service|
service.roles.each do |_, role|
role.batch_action(action, period, number, args)
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Add sched action to service role
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param role_name [String] Role to add action
# @param action [String] Action to perform
# @param period [Integer] When to execute the action
# @param number [Integer] How many VMs per period
# @param args [String] Action arguments
#
# @return [OpenNebula::Error] Error if any
# rubocop:disable Metrics/ParameterLists
# Schedules the given action on a single role of the service; errors out
# when the role does not exist.
def sched_action(client,
service_id,
role_name,
action,
period,
number,
args)
# rubocop:enable Metrics/ParameterLists
rc = @srv_pool.get(service_id, client) do |service|
role = service.roles[role_name]
if role.nil?
break OpenNebula::Error.new("Role '#{role_name}' not found")
end
role.batch_action(action, period, number, args)
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Release a service on hold state
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
#
# @return [OpenNebula::Error] Error if any
# Releases every HOLD role eligible under the deploy strategy and moves
# the service to DEPLOYING (or FAILED_DEPLOYING on error).
def release_action(client, service_id)
rc = @srv_pool.get(service_id, client) do |service|
# Get roles that can be release
set_deploy_strategy(service)
roles = service.roles_release
if roles.empty?
break OpenNebula::Error.new('Service has no roles in HOLD')
end
rc = release_roles(client,
roles,
'DEPLOYING',
'FAILED_DEPLOYING',
:wait_release_action,
service.report_ready?)
if !OpenNebula.is_error?(rc)
service.set_state(Service::STATE['DEPLOYING'])
else
service.set_state(Service::STATE['FAILED_DEPLOYING'])
end
service.update
rc
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
############################################################################
# Life cycle manager actions
############################################################################
# Deploy service networks
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
#
# @return [OpenNebula::Error] Error if any
# Creates the service Virtual Networks (only on the first call, while the
# service is still PENDING), moves the service to DEPLOYING_NETS and
# triggers the network wait action.
def deploy_nets_action(client, service_id)
rc = @srv_pool.get(service_id, client) do |service|
# Create vnets only first time action is called
if service.state == Service::STATE['PENDING']
rc = service.deploy_networks
if OpenNebula.is_error?(rc)
service.set_state(Service::STATE['FAILED_DEPLOYING'])
service.update
break rc
end
end
service.set_state(Service::STATE['DEPLOYING_NETS'])
@event_manager.trigger_action(
:wait_deploy_nets_action,
service.id,
client,
service.id,
service.networks(true)
)
service.update
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Undeploy service networks
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
#
# @return [OpenNebula::Error] Error if any
# Moves the service to UNDEPLOYING_NETS and triggers the wait action that
# tears down its Virtual Networks.
def undeploy_nets_action(client, service_id)
    rc = @srv_pool.get(service_id, client) do |srv|
        srv.set_state(Service::STATE['UNDEPLOYING_NETS'])

        @event_manager.trigger_action(:wait_undeploy_nets_action,
                                      srv.id,
                                      client,
                                      srv.id,
                                      srv.networks(false))

        srv.update
    end

    Log.error(LOG_COMP, rc.message) if OpenNebula.is_error?(rc)

    rc
end
# Create new service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
#
# @return [OpenNebula::Error] Error if any
# Deploys every PENDING role (honoring the deploy strategy) and moves the
# service to HOLD, DEPLOYING or FAILED_DEPLOYING accordingly.
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
#
# @return [OpenNebula::Error] Error if any
def deploy_action(client, service_id)
    rc = @srv_pool.get(service_id, client) do |service|
        set_deploy_strategy(service)

        roles = service.roles_deploy

        # Maybe roles.empty? because they are being deployed in other threads
        if roles.empty?
            if service.all_roles_running?
                service.set_state(Service::STATE['RUNNING'])
                service.update

                @wd.add_service(service)
            end

            # If there is no node in PENDING the service is not modified.
            break
        end

        rc = deploy_roles(client,
                          roles,
                          'DEPLOYING',
                          'FAILED_DEPLOYING',
                          :wait_deploy_action,
                          service.report_ready?)

        # Fix: original used bitwise '&' on booleans; use plain branching
        if OpenNebula.is_error?(rc)
            service.set_state(Service::STATE['FAILED_DEPLOYING'])
        elsif service.on_hold?
            service.set_state(Service::STATE['HOLD'])
        else
            service.set_state(Service::STATE['DEPLOYING'])
        end

        service.update

        rc
    end

    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)

    rc
end
# Delete service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param delete [Boolean] Force flow delete
#
# @return [OpenNebula::Error] Error if any
# Shuts down every role of the service (unless the state forbids it and
# +delete+ is false) and moves it to UNDEPLOYING or FAILED_UNDEPLOYING;
# the watchdog stops tracking the service meanwhile.
def undeploy_action(client, service_id, delete = false)
rc = @srv_pool.get(service_id, client) do |service|
if !service.can_undeploy? && !delete
break OpenNebula::Error.new(
'Service cannot be undeployed in state: ' \
"#{service.state_str}"
)
end
@wd.remove_service(service_id)
set_deploy_strategy(service)
roles = service.roles_shutdown
# If shutdown roles is empty, assume the service is in DONE and exit
if roles.empty?
service.delete
break
end
rc = undeploy_roles(client,
roles,
'UNDEPLOYING',
'FAILED_UNDEPLOYING',
:wait_undeploy_action)
if !OpenNebula.is_error?(rc)
service.set_state(Service::STATE['UNDEPLOYING'])
else
service.set_state(Service::STATE['FAILED_UNDEPLOYING'])
end
service.update
rc
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Scale service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param role_name [String] Role to scale
# @param cardinality [Integer] Number of VMs to scale
# @param force [Boolean] True to force scaling
#
# @return [OpenNebula::Error] Error if any
# Changes a role cardinality and triggers the matching scale-up or
# scale-down flow; the service ends in SCALING or FAILED_SCALING.
def scale_action(client, service_id, role_name, cardinality, force)
rc = @srv_pool.get(service_id, client) do |service|
unless service.can_scale?
break OpenNebula::Error.new(
"Service cannot be scaled in state: #{service.state_str}"
)
end
@wd.remove_service(service_id)
role = service.roles[role_name]
if role.nil?
break OpenNebula::Error.new("Role #{role_name} not found")
end
rc = nil
# positive diff scales up, negative scales down, zero is an error
cardinality_diff = cardinality - role.cardinality
rc = set_cardinality(role, cardinality, force)
if OpenNebula.is_error?(rc)
@wd.add_service(service)
break OpenNebula::Error.new(
"Can't scale service #{service_id}: #{rc.message}"
)
end
if cardinality_diff > 0
# change client to have right ownership
client = @cloud_auth.client("#{service.uname}:#{service.gid}")
service.replace_client(client)
role.scale_way('UP')
rc = deploy_roles(client,
{ role_name => role },
'SCALING',
'FAILED_SCALING',
:wait_scaleup_action,
service.report_ready?)
elsif cardinality_diff < 0
role.scale_way('DOWN')
rc = undeploy_roles(client,
{ role_name => role },
'SCALING',
'FAILED_SCALING',
:wait_scaledown_action)
else
break OpenNebula::Error.new(
"Cardinality of #{role_name} is already at #{cardinality}"
)
end
if !OpenNebula.is_error?(rc)
service.set_state(Service::STATE['SCALING'])
else
service.set_state(Service::STATE['FAILED_SCALING'])
end
service.update
rc
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Recover service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param delete [Boolean] True to recover delete a service
#
# @return [OpenNebula::Error] Error if any
# Recovers a service stuck in a transient/failed state by re-triggering
# the appropriate wait actions; with +delete+ it force-undeploys instead.
def recover_action(client, service_id, delete = false)
# ensure no stale actions for the service are still queued
@event_manager.cancel_action(service_id.to_i)
return undeploy_action(client, service_id, true) if delete
rc = @srv_pool.get(service_id, client) do |service|
if service.can_recover_deploy?
recover_deploy(client, service)
elsif service.can_recover_undeploy?
recover_undeploy(client, service)
elsif service.can_recover_scale?
# change client to have right ownership
client = @cloud_auth.client("#{service.uname}:#{service.gid}")
service.replace_client(client)
recover_scale(client, service)
elsif service.can_recover_deploy_nets?
recover_nets(:wait_deploy_nets_action, client, service)
elsif service.can_recover_undeploy_nets?
recover_nets(:wait_undeploy_nets_action, client, service)
elsif Service::STATE['COOLDOWN'] == service.state
service.set_state(Service::STATE['RUNNING'])
service.roles.each do |_, role|
role.set_state(Role::STATE['RUNNING'])
end
else
break OpenNebula::Error.new(
'Service cannot be recovered in state: ' \
"#{service.state_str}"
)
end
service.update
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Update service template
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param new_template [String] New template
# @param append [Boolean] True to append template
# Validates and applies a new service template; renames and immutable
# values are rejected with a descriptive error.
def service_update(client, service_id, new_template, append)
rc = @srv_pool.get(service_id, client) do |service|
unless service.can_update?
break OpenNebula::Error.new(
"Service cannot be updated in state: #{service.state_str}"
)
end
# check_new_template returns [ok, offending_path]
rc = service.check_new_template(new_template, append)
unless rc[0]
if rc[1] == 'name'
break OpenNebula::Error.new(
'To change `service/name` use rename operation'
)
else
break OpenNebula::Error.new(
"Immutable value: `#{rc[1]}` can not be changed"
)
end
end
service.update(new_template, append)
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Update role elasticity/schedule policies
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param role_name [String] Role to update
# @param policies [Hash] New policies values
# @param cooldown [Integer] New cooldown time
#
# @return [OpenNebula::Error] Error if any
# Replaces a role's elasticity/scheduled policies and cooldown, then
# persists the service.
def update_role_policies(client, service_id, role_name, policies, cooldown)
rc = @srv_pool.get(service_id, client) do |service|
role = service.roles[role_name]
elasticity_policies = policies['elasticity_policies']
scheduled_policies = policies['scheduled_policies']
if elasticity_policies && !elasticity_policies.empty?
role.update_elasticity_policies(elasticity_policies)
end
if scheduled_policies && !scheduled_policies.empty?
role.update_scheduled_policies(scheduled_policies)
end
role.update_cooldown(cooldown)
service.update
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Add role from running service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param role [Hash] Role information
# Adds a new role to a RUNNING service: registers the role, deploys its
# networks and finally deploys the role itself.
def add_role_action(client, service_id, role)
rc = @srv_pool.get(service_id, client) do |service|
unless service.running?
break OpenNebula::Error.new(
"Cannot modify roles in state: #{service.state_str}"
)
end
role = service.add_role(role)
break role if OpenNebula.is_error?(role)
service.update
rc = service.deploy_networks(false)
if OpenNebula.is_error?(rc)
service.set_state(Service::STATE['FAILED_DEPLOYING'])
service.update
break rc
end
# NOTE(review): service.update is called both before and after
# deploy_networks on the success path - confirm both are required
service.update
add_role(client, service, role)
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
# Remove role from running service
#
# @param client [OpenNebula::Client] Client executing action
# @param service_id [Integer] Service ID
# @param role [Hash] Role information
# Removes an existing role from a RUNNING service by undeploying it.
def remove_role_action(client, service_id, role)
rc = @srv_pool.get(service_id, client) do |service|
unless service.running?
break OpenNebula::Error.new(
"Cannot modify roles in state: #{service.state_str}"
)
end
unless service.roles[role]
break OpenNebula::Error.new("Role #{role} does not exist")
end
remove_role(client, service, service.roles[role])
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
rc
end
private
# Retry on authentication error
#
# @param client [OpenNebula::Client] Client to perform operation
# @param &block Code block to execute
# Runs +block+ with +client+, retrying up to @retries times when the
# result is an authentication error (errno 256); the client is reset to
# nil between attempts so the token is renewed.
#
# @param client [OpenNebula::Client] Client to perform operation
# @param &block Code block to execute
def retry_op(client, &block)
    attempts = 0
    rc = nil

    loop do
        rc = block.call(client)

        # Any error other than authentication (errno 256) is final.
        break if OpenNebula.is_error?(rc) && rc.errno != 256

        # Success, or the retries limit has been reached.
        break if !OpenNebula.is_error?(rc) || attempts > @retries

        # errno == 256: drop the client so the token is renewed.
        client = nil
        attempts += 1
    end

    rc
end
############################################################################
# Callbacks
############################################################################
# Callback after a successful deploy wait: marks the role RUNNING, drops
# the reported nodes (decreasing cardinality per node), then either marks
# the service RUNNING or keeps deploying roles in straight order.
# Auto-undeploys the service when the role was left with 0 nodes.
def deploy_cb(client, service_id, role_name, nodes)
    undeploy = false

    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            service.roles[role_name].set_state(Role::STATE['RUNNING'])

            service.roles[role_name].nodes.delete_if do |node|
                # Fix: 'cardinalitty' typo raised NoMethodError at runtime
                if nodes[node] && service.roles[role_name].cardinality > 0
                    service.roles[role_name].cardinality -= 1
                end

                nodes[node]
            end

            # If the role has 0 nodes, delete role (fixed mojibake comment)
            undeploy = service.check_role(service.roles[role_name])

            if service.all_roles_running?
                service.set_state(Service::STATE['RUNNING'])
            elsif service.strategy == 'straight'
                set_deploy_strategy(service)

                deploy_roles(c,
                             service.roles_deploy,
                             'DEPLOYING',
                             'FAILED_DEPLOYING',
                             :wait_deploy_action,
                             service.report_ready?)
            end

            rc = service.update
            return rc if OpenNebula.is_error?(rc)

            @wd.add_service(service) if service.all_roles_running?
        end
    end

    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)

    return unless undeploy

    Log.info LOG_COMP, "Automatically deleting service #{service_id}"

    undeploy_action(client, service_id)
end
# Callback for a failed deploy: cancels pending actions and marks service
# and role FAILED_DEPLOYING.
def deploy_failure_cb(client, service_id, role_name)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
service.set_state(Service::STATE['FAILED_DEPLOYING'])
service.roles[role_name].set_state(
Role::STATE['FAILED_DEPLOYING']
)
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback once service networks are created: continues with the regular
# role deployment.
def deploy_nets_cb(client, service_id)
deploy_action(client, service_id)
end
# Callback for a failed network creation: cancels pending actions and
# marks the service FAILED_DEPLOYING_NETS.
def deploy_nets_failure_cb(client, service_id)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
service.set_state(Service::STATE['FAILED_DEPLOYING_NETS'])
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback after a successful undeploy wait: marks the role DONE, drops
# cleanly-undeployed nodes; once every role is done the networks are
# deleted and the network undeploy flow is triggered, otherwise the
# straight strategy keeps shutting down roles in order.
def undeploy_cb(client, service_id, role_name, nodes)
undeploy_nets = false
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
service.roles[role_name].set_state(Role::STATE['DONE'])
# keep a node only if it failed or was not reported successful
service.roles[role_name].nodes.delete_if do |node|
!nodes[:failure].include?(node['deploy_id']) &&
nodes[:successful].include?(node['deploy_id'])
end
if service.all_roles_done?
rc = service.delete_networks
if rc && !rc.empty?
Log.info LOG_COMP, 'Error trying to delete '\
"Virtual Networks #{rc}"
end
undeploy_nets = true
break
elsif service.strategy == 'straight'
set_deploy_strategy(service)
undeploy_roles(c,
service.roles_shutdown,
'UNDEPLOYING',
'FAILED_UNDEPLOYING',
:wait_undeploy_action)
end
service.update
end
end
undeploy_nets_action(client, service_id) if undeploy_nets
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback once service networks are torn down: deletes the service.
def undeploy_nets_cb(client, service_id)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
service.delete
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback for a failed network teardown: cancels pending actions and
# marks the service FAILED_UNDEPLOYING_NETS.
def undeploy_nets_failure_cb(client, service_id)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
service.set_state(Service::STATE['FAILED_UNDEPLOYING_NETS'])
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback for a failed undeploy: cancels pending actions, marks service
# and role FAILED_UNDEPLOYING and drops cleanly-undeployed nodes.
def undeploy_failure_cb(client, service_id, role_name, nodes)
rc = retry_op(client) do |c|
@srv_pool.get(service_id, c) do |service|
# stop actions for the service if deploy fails
@event_manager.cancel_action(service_id)
service.set_state(Service::STATE['FAILED_UNDEPLOYING'])
service.roles[role_name].set_state(
Role::STATE['FAILED_UNDEPLOYING']
)
# keep a node only if it failed or was not reported successful
service.roles[role_name].nodes.delete_if do |node|
!nodes[:failure].include?(node['deploy_id']) &&
nodes[:successful].include?(node['deploy_id'])
end
service.update
end
end
Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when a scale-up batch for a role finishes. Removes the
# reported nodes from the role (decreasing cardinality for each), puts
# service and role into COOLDOWN and schedules the cooldown timer.
#
# Bug fix: `cardinalitty` was a typo — no such reader exists, so the
# guard raised NoMethodError whenever a node had to be removed. The
# correctly spelled `cardinality` is used on the very next line.
def scaleup_cb(client, service_id, role_name, nodes)
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            service.roles[role_name].nodes.delete_if do |node|
                # Never decrement below zero
                if nodes[node] && service.roles[role_name].cardinality > 0
                    service.roles[role_name].cardinality -= 1
                end
                nodes[node]
            end
            service.set_state(Service::STATE['COOLDOWN'])
            service.roles[role_name].set_state(Role::STATE['COOLDOWN'])
            @event_manager.trigger_action(
                :wait_cooldown_action,
                service.id,
                c,
                service.id,
                role_name,
                service.roles[role_name].cooldown
            )
            service.roles[role_name].clean_scale_way
            service.update
        end
    end
    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when a scale-down batch for a role finishes: drops the
# successfully terminated nodes, moves service and role into COOLDOWN
# and schedules the cooldown timer.
def scaledown_cb(client, service_id, role_name, nodes)
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            service.set_state(Service::STATE['COOLDOWN'])
            service.roles[role_name].set_state(Role::STATE['COOLDOWN'])
            # Keep nodes that failed to terminate for inspection
            service.roles[role_name].nodes.delete_if do |node|
                !nodes[:failure].include?(node['deploy_id']) &&
                    nodes[:successful].include?(node['deploy_id'])
            end
            @event_manager.trigger_action(
                :wait_cooldown_action,
                service.id,
                c,
                service.id,
                role_name,
                service.roles[role_name].cooldown
            )
            service.roles[role_name].clean_scale_way
            service.update
        end
    end
    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Puts the service and the given role into FAILED_SCALING when a
# scale-up operation could not be completed.
def scaleup_failure_cb(client, service_id, role_name)
    result = retry_op(client) do |one_client|
        @srv_pool.get(service_id, one_client) do |srv|
            # Pending actions are pointless once scaling has failed
            @event_manager.cancel_action(service_id)
            srv.set_state(Service::STATE['FAILED_SCALING'])
            srv.roles[role_name].set_state(Role::STATE['FAILED_SCALING'])
            srv.update
        end
    end
    Log.error(LOG_COMP, result.message) if OpenNebula.is_error?(result)
end
# Callback fired when a scale-down operation fails: cancels pending
# actions, flags service and role as FAILED_SCALING, and removes only
# the nodes that were terminated successfully.
def scaledown_failure_cb(client, service_id, role_name, nodes)
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            # stop actions for the service if deploy fails
            @event_manager.cancel_action(service_id)
            role = service.roles[role_name]
            service.set_state(Service::STATE['FAILED_SCALING'])
            role.set_state(Role::STATE['FAILED_SCALING'])
            role.nodes.delete_if do |node|
                !nodes[:failure].include?(node['deploy_id']) &&
                    nodes[:successful].include?(node['deploy_id'])
            end
            service.update
        end
    end
    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when a role cooldown period ends: service and role go
# back to RUNNING and the service is watched again. If the role was
# left with 0 nodes the whole service is undeployed automatically.
def cooldown_cb(client, service_id, role_name)
    undeploy = false
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            service.set_state(Service::STATE['RUNNING'])
            service.roles[role_name].set_state(Role::STATE['RUNNING'])
            service.update
            # If the role has 0 nodes, delete role
            undeploy = service.check_role(service.roles[role_name])
            @wd.add_service(service)
        end
    end
    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
    return unless undeploy
    Log.info LOG_COMP, "Automatically deleting service #{service_id}"
    undeploy_action(client, service_id)
end
# Callback fired when a newly added role's VMs are up: role and service
# become RUNNING, and the service is re-registered in the WatchDog once
# every role is running.
def add_cb(client, service_id, role_name, _)
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            service.roles[role_name].set_state(Role::STATE['RUNNING'])
            service.set_state(Service::STATE['RUNNING'])
            rc = service.update
            # NOTE: returns from add_cb itself, skipping the log below
            return rc if OpenNebula.is_error?(rc)
            @wd.add_service(service) if service.all_roles_running?
        end
    end
    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Puts the service and the given role into FAILED_DEPLOYING when the
# deployment of an added role fails.
def add_failure_cb(client, service_id, role_name)
    result = retry_op(client) do |one_client|
        @srv_pool.get(service_id, one_client) do |srv|
            # Cancel anything still queued for this service
            @event_manager.cancel_action(service_id)
            srv.set_state(Service::STATE['FAILED_DEPLOYING'])
            srv.roles[role_name].set_state(Role::STATE['FAILED_DEPLOYING'])
            srv.update
        end
    end
    Log.error(LOG_COMP, result.message) if OpenNebula.is_error?(result)
end
# Callback fired when a role removal finishes: deletes the role and, if
# it was the last one, tears down networks and the service itself;
# otherwise the service returns to RUNNING.
def remove_cb(client, service_id, role_name, _)
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            service.remove_role(role_name)
            if service.all_roles_done?
                rc = service.delete_networks
                if rc && !rc.empty?
                    Log.info LOG_COMP, 'Error trying to delete '\
                                       "Virtual Networks #{rc}"
                end
                service.delete
            else
                service.set_state(Service::STATE['RUNNING'])
                rc = service.update
                # NOTE: returns from remove_cb, skipping the log below
                return rc if OpenNebula.is_error?(rc)
                @wd.add_service(service) if service.all_roles_running?
            end
        end
    end
    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when removing a role fails: cancels pending actions,
# flags service and role as FAILED_UNDEPLOYING, and removes only the
# nodes that did undeploy successfully.
def remove_failure_cb(client, service_id, role_name, nodes)
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            # stop actions for the service if deploy fails
            @event_manager.cancel_action(service_id)
            service.set_state(Service::STATE['FAILED_UNDEPLOYING'])
            service.roles[role_name].set_state(
                Role::STATE['FAILED_UNDEPLOYING']
            )
            service.roles[role_name].nodes.delete_if do |node|
                !nodes[:failure].include?(node['deploy_id']) &&
                    nodes[:successful].include?(node['deploy_id'])
            end
            service.update
        end
    end
    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when a role has been put on hold. If every role is on
# hold the service follows; under the straight strategy the next batch
# of held roles is deployed instead.
#
# Consistency fix: this is an LCM callback, not a WatchDog one, so it
# logs with LOG_COMP like its siblings instead of the 'WD' component.
def hold_cb(client, service_id, role_name)
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            if service.roles[role_name].state != Role::STATE['HOLD']
                service.roles[role_name].set_state(Role::STATE['HOLD'])
            end
            if service.all_roles_hold? &&
               service.state != Service::STATE['HOLD']
                service.set_state(Service::STATE['HOLD'])
            elsif service.strategy == 'straight'
                set_deploy_strategy(service)
                deploy_roles(
                    c,
                    service.roles_hold,
                    'DEPLOYING',
                    'FAILED_DEPLOYING',
                    :wait_deploy_action,
                    service.report_ready?
                )
            end
            rc = service.update
            # NOTE: returns from hold_cb itself, skipping the log below
            return rc if OpenNebula.is_error?(rc)
        end
    end
    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
end
# Callback fired when a held role is released and its VMs come up.
# Removes reported nodes (adjusting cardinality), releases the next
# batch under the straight strategy, and undeploys the service if the
# role ends up empty.
#
# Bug fix: `cardinalitty` was a typo — no such reader exists, so the
# guard raised NoMethodError whenever a node had to be removed.
def release_cb(client, service_id, role_name, nodes)
    undeploy = false
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            service.roles[role_name].set_state(Role::STATE['RUNNING'])
            service.roles[role_name].nodes.delete_if do |node|
                # Never decrement below zero
                if nodes[node] && service.roles[role_name].cardinality > 0
                    service.roles[role_name].cardinality -= 1
                end
                nodes[node]
            end
            # If the role has 0 nodes, delete role
            undeploy = service.check_role(service.roles[role_name])
            if service.all_roles_running?
                service.set_state(Service::STATE['RUNNING'])
            elsif service.strategy == 'straight'
                set_deploy_strategy(service)
                release_roles(
                    c,
                    service.roles_release,
                    'DEPLOYING',
                    'FAILED_DEPLOYING',
                    :wait_deploy_action,
                    service.report_ready?
                )
            end
            rc = service.update
            # NOTE: returns from release_cb itself, skipping the log below
            return rc if OpenNebula.is_error?(rc)
            @wd.add_service(service) if service.all_roles_running?
        end
    end
    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
    return unless undeploy
    Log.info LOG_COMP, "Automatically deleting service #{service_id}"
    undeploy_action(client, service_id)
end
############################################################################
# WatchDog Callbacks
############################################################################
# WatchDog callback: a VM of the role reported an error state, so both
# the service and the role are flagged as WARNING (idempotently).
def error_wd_cb(client, service_id, role_name, _node)
    result = retry_op(client) do |one_client|
        @srv_pool.get(service_id, one_client) do |srv|
            role = srv.roles[role_name]
            srv.set_state(Service::STATE['WARNING']) if srv.state != Service::STATE['WARNING']
            role.set_state(Role::STATE['WARNING']) if role.state != Role::STATE['WARNING']
            srv.update
        end
    end
    Log.error('WD', result.message) if OpenNebula.is_error?(result)
end
# WatchDog callback: a VM of the role reached DONE outside any LCM
# operation. Decreases the role cardinality, drops the node, and
# undeploys the whole service if the role is left empty.
def done_wd_cb(client, service_id, role_name, node)
    undeploy = false
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            role = service.roles[role_name]
            next unless role
            cardinality = role.cardinality - 1
            # Ignore events for nodes this role does not track
            next unless role.nodes.find {|n| n['deploy_id'] == node }
            # just update if the cardinality is positive
            set_cardinality(role, cardinality, true) if cardinality >= 0
            role.nodes.delete_if {|n| n['deploy_id'] == node }
            # If the role has 0 nodes, delete role
            undeploy = service.check_role(role)
            service.update
            Log.info 'WD',
                     "Node #{node} is done, " \
                     "updating service #{service_id}:#{role_name} " \
                     "cardinality to #{cardinality}"
        end
    end
    Log.error 'WD', rc.message if OpenNebula.is_error?(rc)
    return unless undeploy
    Log.info LOG_COMP, "Automatically deleting service #{service_id}"
    undeploy_action(client, service_id)
end
# WatchDog callback: a VM of the role went back to RUNNING. Restores
# role/service state (idempotently) and undeploys the service if the
# role has no nodes left.
def running_wd_cb(client, service_id, role_name, _node)
    undeploy = false
    rc = retry_op(client) do |c|
        @srv_pool.get(service_id, c) do |service|
            role = service.roles[role_name]
            if service.roles[role_name].state != Role::STATE['RUNNING']
                service.roles[role_name].set_state(Role::STATE['RUNNING'])
            end
            if service.all_roles_running? &&
               service.state != Service::STATE['RUNNING']
                service.set_state(Service::STATE['RUNNING'])
            end
            # If the role has 0 nodes, delete role
            undeploy = service.check_role(role)
            service.update
        end
    end
    Log.error 'WD', rc.message if OpenNebula.is_error?(rc)
    return unless undeploy
    Log.info LOG_COMP, "Automatically deleting service #{service_id}"
    undeploy_action(client, service_id)
end
############################################################################
# Helpers
############################################################################
# Iterate through the services, catching up with the state of each service;
# used when the LCM starts
# Synchronizes in-memory state with OpenNebula when the LCM boots:
# every service left in a transient state is sent through recover.
#
# Fix: the startup banner is informational, not an error, so it is
# logged at info level (every other status message here uses info).
def catch_up(client)
    Log.info LOG_COMP, 'Catching up...'
    @srv_pool.info_all
    @srv_pool.each do |service|
        recover_action(client, service.id) if service.transient_state?
    end
end
# Extends the Service instance with the module implementing its
# deployment strategy: 'straight' => Straight, anything else => the
# generic Strategy module.
# @param [Service] service the service to decorate
# rubocop:disable Naming/AccessorMethodName
def set_deploy_strategy(service)
    # rubocop:enable Naming/AccessorMethodName
    strategy_module = service.strategy == 'straight' ? Straight : Strategy
    service.extend(strategy_module)
end
# Returns true if the deployments of all roles was fine and
# update their state consequently
# @param [Array<Role>] roles to be deployed
# @param [Role::STATE] success_state new state of the role
# if deployed successfully
# @param [Role::STATE] error_state new state of the role
# if deployed unsuccessfully
# rubocop:disable Metrics/ParameterLists
def deploy_roles(client, roles, success_state, error_state, action, report)
    # rubocop:enable Metrics/ParameterLists
    roles.each do |name, role|
        if role.state == Role::STATE['PENDING']
            # Set all roles on hold if the on_hold option
            # is set at service level
            if role.service_on_hold?
                role.hold(true)
            elsif role.any_parent_on_hold?
                role.hold(true)
            end
        end
        rc = role.deploy
        if !rc[0]
            role.set_state(Role::STATE[error_state])
            # `break` makes this Error the return value of the whole loop
            break OpenNebula::Error.new(
                "Error deploying role #{name}: #{rc[1]}"
            )
        end
        if role.on_hold? && role.state == Role::STATE['PENDING']
            # Held roles wait for an explicit release instead of deploy
            role.set_state(Role::STATE['HOLD'])
            @event_manager.trigger_action(:wait_hold_action,
                                          role.service.id,
                                          client,
                                          role.service.id,
                                          role.name,
                                          rc[0])
        else
            role.set_state(Role::STATE[success_state])
            @event_manager.trigger_action(action,
                                          role.service.id,
                                          client,
                                          role.service.id,
                                          role.name,
                                          rc[0],
                                          report)
        end
    end
end
# Shuts down each of the given roles, moving it to +success_state+ and
# triggering +action+, or to +error_state+ on failure (which also stops
# the loop and returns an OpenNebula::Error).
def undeploy_roles(client, roles, success_state, error_state, action)
    roles.each do |name, role|
        rc = role.shutdown(false)
        if !rc[0]
            role.set_state(Role::STATE[error_state])
            # `break` makes this Error the return value of the whole loop
            break OpenNebula::Error.new(
                "Error undeploying role #{name}: #{rc[1]}"
            )
        end
        role.set_state(Role::STATE[success_state])
        # TODO, take only subset of nodes which needs to
        # be undeployed (new role.nodes_undeployed_ids ?)
        @event_manager.trigger_action(action,
                                      role.service.id,
                                      client,
                                      role.service.id,
                                      role.name,
                                      rc[0])
    end
end
# rubocop:disable Metrics/ParameterLists
# Releases each of the given held roles, moving it to +success_state+
# and triggering +action+, or to +error_state+ on failure.
def release_roles(client, roles, success_state, error_state, action, report)
    # rubocop:enable Metrics/ParameterLists
    roles.each do |name, role|
        rc = role.release
        # NOTE(review): sibling helpers test rc[0] for success; confirm
        # Role#release reports success in rc[1] rather than rc[0].
        if !rc[1]
            role.set_state(Role::STATE[error_state])
            break OpenNebula::Error.new(
                "Error releasing role #{name}: #{rc[1]}"
            )
        end
        role.set_state(Role::STATE[success_state])
        @event_manager.trigger_action(action,
                                      role.service.id,
                                      client,
                                      role.service.id,
                                      role.name,
                                      rc[0],
                                      report)
    end
end
# Updates the cardinality of a role.
# @param role        [Role]    role to update
# @param cardinality [Integer] new cardinality
# @param force       [Boolean] allow values outside role min/max bounds
# @return [OpenNebula::Error, nil] error on failure, nil on success
def set_cardinality(role, cardinality, force)
    # Build the template Hash directly instead of interpolating a JSON
    # string and immediately parsing it back (same string-keyed Hash,
    # no JSON round-trip).
    rc = role.update('cardinality' => cardinality, 'force' => force)
    return rc if OpenNebula.is_error?(rc)
    nil
end
# Re-triggers the deploy wait action for every role of the service
# that is able to resume an interrupted deployment.
def recover_deploy(client, service)
    recoverable = service.roles.select {|_, role| role.can_recover_deploy? }
    recoverable.each do |role_name, role|
        pending_nodes = role.recover_deploy(service.report_ready?)
        @event_manager.trigger_action(:wait_deploy_action,
                                      service.id,
                                      client,
                                      service.id,
                                      role_name,
                                      pending_nodes,
                                      service.report_ready?)
    end
end
# Re-triggers the undeploy wait action for every role of the service
# that is able to resume an interrupted undeployment.
def recover_undeploy(client, service)
    recoverable = service.roles.select {|_, role| role.can_recover_undeploy? }
    recoverable.each do |role_name, role|
        pending_nodes = role.recover_undeploy
        @event_manager.trigger_action(:wait_undeploy_action,
                                      service.id,
                                      client,
                                      service.id,
                                      role_name,
                                      pending_nodes)
    end
end
# Re-triggers the appropriate scale wait action (up or down) for every
# role of the service that is able to resume an interrupted scaling.
def recover_scale(client, service)
    service.roles.each do |name, role|
        next unless role.can_recover_scale?
        # `up` tells whether the interrupted operation was a scale-up
        nodes, up = role.recover_scale(service.report_ready?)
        if up
            @event_manager.trigger_action(:wait_scaleup_action,
                                          service.id,
                                          client,
                                          service.id,
                                          name,
                                          nodes,
                                          service.report_ready?)
        else
            @event_manager.trigger_action(:wait_scaledown_action,
                                          service.id,
                                          client,
                                          service.id,
                                          name,
                                          nodes)
        end
    end
end
# Triggers the network recover action for a service.
# @param action  [Symbol] :wait_deploy_nets_action or
#                         :wait_undeploy_nets_action
# @param client  [OpenNebula::Client]
# @param service [Service]
def recover_nets(action, client, service)
    # Idiom fix: direct boolean assignment instead of a ternary with an
    # assignment in each branch.
    deploy = action == :wait_deploy_nets_action
    @event_manager.trigger_action(
        action,
        service.id,
        client,
        service.id,
        service.networks(deploy)
    )
end
# Deploys a single new role into a running service. The service is
# taken out of the WatchDog while the operation is in flight.
# @return [OpenNebula::Error, nil] error on failure
def add_role(client, service, role)
    @wd.remove_service(service.id)
    set_deploy_strategy(service)
    rc = deploy_roles(client,
                      { role.name => role },
                      'DEPLOYING',
                      'FAILED_DEPLOYING',
                      :wait_add_action,
                      service.report_ready?)
    if !OpenNebula.is_error?(rc)
        service.set_state(Service::STATE['DEPLOYING'])
    else
        service.set_state(Service::STATE['FAILED_DEPLOYING'])
    end
    service.update
    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
    rc
end
# Undeploys a single role of a running service. The service is taken
# out of the WatchDog while the operation is in flight.
# @return [OpenNebula::Error, nil] error on failure
def remove_role(client, service, role)
    @wd.remove_service(service.id)
    set_deploy_strategy(service)
    rc = undeploy_roles(client,
                        { role.name => role },
                        'UNDEPLOYING',
                        'FAILED_UNDEPLOYING',
                        :wait_remove_action)
    if !OpenNebula.is_error?(rc)
        service.set_state(Service::STATE['UNDEPLOYING'])
    else
        service.set_state(Service::STATE['FAILED_UNDEPLOYING'])
    end
    service.update
    Log.error LOG_COMP, rc.message if OpenNebula.is_error?(rc)
    rc
end
end
# rubocop:enable Naming/FileName
|
#
# Author:: Ian Meyer (<ianmmeyer@gmail.com>)
# Copyright:: Copyright (c) 2010 Ian Meyer
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife'
class Chef
  class Knife
    # knife status: lists nodes matching a search query with the time of
    # their last chef-client run, colorized by staleness (green < 1h,
    # yellow 1-24h, red > 24h).
    class Status < Knife

      deps do
        require 'highline'
        require 'chef/search/query'
      end

      banner "knife status QUERY (options)"

      option :run_list,
        :short => "-r",
        :long => "--run-list",
        :description => "Show the run list"

      option :sort_reverse,
        :short => "-s",
        :long => "--sort-reverse",
        :description => "Sort the status list by last run time descending"

      option :hide_healthy,
        :short => "-H",
        :long => "--hide-healthy",
        :description => "Hide nodes that have run chef in the last hour"

      # Lazily instantiated HighLine used for colorized terminal output.
      def highline
        @h ||= HighLine.new
      end

      def run
        all_nodes = []
        q = Chef::Search::Query.new
        # Default to every node when no query argument is given
        query = @name_args[0] || "*:*"
        q.search(:node, query) do |node|
          all_nodes << node
        end
        all_nodes.sort { |n1, n2|
          # BUG FIX: the condition was missing its closing parenthesis,
          # which made the whole file a syntax error.
          if (config[:sort_reverse] || Chef::Config[:knife][:sort_status_reverse])
            (n2["ohai_time"] or 0) <=> (n1["ohai_time"] or 0)
          else
            (n1["ohai_time"] or 0) <=> (n2["ohai_time"] or 0)
          end
        }.each do |node|
          # Prefer the public EC2 address data when the node has it
          if node.has_key?("ec2")
            fqdn = node['ec2']['public_hostname']
            ipaddress = node['ec2']['public_ipv4']
          else
            fqdn = node['fqdn']
            ipaddress = node['ipaddress']
          end
          hours, minutes, seconds = time_difference_in_hms(node["ohai_time"])
          hours_text = "#{hours} hour#{hours == 1 ? ' ' : 's'}"
          minutes_text = "#{minutes} minute#{minutes == 1 ? ' ' : 's'}"
          run_list = ", #{node.run_list}." if config[:run_list]
          if hours > 24
            color = "RED"
            text = hours_text
          elsif hours >= 1
            color = "YELLOW"
            text = hours_text
          else
            color = "GREEN"
            text = minutes_text
          end
          # Array literal instead of Array.new
          line_parts = []
          line_parts << "<%= color('#{text}', #{color}) %> ago" << node.name
          line_parts << fqdn if fqdn
          line_parts << ipaddress if ipaddress
          line_parts << run_list if run_list
          if node['platform']
            platform = node['platform']
            if node['platform_version']
              platform << " #{node['platform_version']}"
            end
            line_parts << platform
          end
          highline.say(line_parts.join(', ') + '.') unless (config[:hide_healthy] && hours < 1)
        end
      end

      # :nodoc:
      # TODO: this is duplicated from StatusHelper in the Webui. dedup.
      # Returns [hours, minutes, seconds] elapsed since +unix_time+.
      def time_difference_in_hms(unix_time)
        now = Time.now.to_i
        difference = now - unix_time.to_i
        hours = (difference / 3600).to_i
        difference = difference % 3600
        minutes = (difference / 60).to_i
        seconds = (difference % 60)
        return [hours, minutes, seconds]
      end
    end
  end
end
Fix missing parenthesis in knife status.
#
# Author:: Ian Meyer (<ianmmeyer@gmail.com>)
# Copyright:: Copyright (c) 2010 Ian Meyer
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife'
class Chef
  class Knife
    # knife status: lists nodes matching a search query with the time of
    # their last chef-client run, colorized by staleness (green < 1h,
    # yellow 1-24h, red > 24h).
    class Status < Knife

      deps do
        require 'highline'
        require 'chef/search/query'
      end

      banner "knife status QUERY (options)"

      option :run_list,
        :short => "-r",
        :long => "--run-list",
        :description => "Show the run list"

      option :sort_reverse,
        :short => "-s",
        :long => "--sort-reverse",
        :description => "Sort the status list by last run time descending"

      option :hide_healthy,
        :short => "-H",
        :long => "--hide-healthy",
        :description => "Hide nodes that have run chef in the last hour"

      # Lazily instantiated HighLine used for colorized terminal output.
      def highline
        @h ||= HighLine.new
      end

      def run
        all_nodes = []
        q = Chef::Search::Query.new
        # Default to every node when no query argument is given
        query = @name_args[0] || "*:*"
        q.search(:node, query) do |node|
          all_nodes << node
        end
        all_nodes.sort { |n1, n2|
          if (config[:sort_reverse] || Chef::Config[:knife][:sort_status_reverse])
            (n2["ohai_time"] or 0) <=> (n1["ohai_time"] or 0)
          else
            (n1["ohai_time"] or 0) <=> (n2["ohai_time"] or 0)
          end
        }.each do |node|
          # Prefer the public EC2 address data when the node has it
          if node.has_key?("ec2")
            fqdn = node['ec2']['public_hostname']
            ipaddress = node['ec2']['public_ipv4']
          else
            fqdn = node['fqdn']
            ipaddress = node['ipaddress']
          end
          hours, minutes, seconds = time_difference_in_hms(node["ohai_time"])
          hours_text = "#{hours} hour#{hours == 1 ? ' ' : 's'}"
          minutes_text = "#{minutes} minute#{minutes == 1 ? ' ' : 's'}"
          run_list = ", #{node.run_list}." if config[:run_list]
          if hours > 24
            color = "RED"
            text = hours_text
          elsif hours >= 1
            color = "YELLOW"
            text = hours_text
          else
            color = "GREEN"
            text = minutes_text
          end
          line_parts = Array.new
          line_parts << "<%= color('#{text}', #{color}) %> ago" << node.name
          line_parts << fqdn if fqdn
          line_parts << ipaddress if ipaddress
          line_parts << run_list if run_list
          if node['platform']
            platform = node['platform']
            if node['platform_version']
              platform << " #{node['platform_version']}"
            end
            line_parts << platform
          end
          highline.say(line_parts.join(', ') + '.') unless (config[:hide_healthy] && hours < 1)
        end
      end

      # :nodoc:
      # TODO: this is duplicated from StatusHelper in the Webui. dedup.
      # Returns [hours, minutes, seconds] elapsed since +unix_time+.
      def time_difference_in_hms(unix_time)
        now = Time.now.to_i
        difference = now - unix_time.to_i
        hours = (difference / 3600).to_i
        difference = difference % 3600
        minutes = (difference / 60).to_i
        seconds = (difference % 60)
        return [hours, minutes, seconds]
      end
    end
  end
end
|
module HBW
  # Controller backing the HBW task widget: listing, editing, submitting
  # and field-lookup for BPM tasks scoped by entity class.
  class TasksController < BaseController
    # Lists tasks, scoped to an entity when an identifier is given.
    def index
      if entity_identifier.present?
        @tasks = widget.entity_task_list(current_user_identifier, entity_identifier, entity_class)
      else
        @tasks = widget.task_list(current_user_identifier, entity_class)
      end
    end

    # Renders the task form definition (plus CSRF token) as JSON, or 404.
    def edit
      form = find_form(task_id, entity_class)
      if form
        form.fetch_fields
        render json: form.as_json.merge(csrf_token: csrf_token).to_json
      else
        record_not_found
      end
    end

    # Submits the form data to the BPM widget; 204 on success, 400 otherwise.
    def submit
      result = widget.submit(current_user.email, entity_class, task_id, form_data)
      if result
        head :no_content
      else
        render nothing: true, status: :bad_request
      end
    end

    # Returns lookup variants for a typeahead field of the task form.
    def lookup
      form = find_form(task_id, entity_class)
      field = form.field(params[:name])
      variants = field.lookup_values(params[:q])
      render json: variants.to_json
    end

    private

    def find_form(task_id, entity_class)
      widget.form(current_user_identifier, entity_class, task_id)
    end

    def task_id
      params.require(:id)
    end

    # BUG FIX: params.permit(:form_data) returned a filtered
    # ActionController::Parameters wrapper (and silently drops a
    # non-scalar :form_data value), so the widget never received the
    # submitted data. Pass the raw value through instead.
    def form_data
      params[:form_data]
    end
  end
end
HBW-47 pass form_data to controller (#55)
module HBW
  # Controller backing the HBW task widget: listing, editing, submitting
  # and field-lookup for BPM tasks scoped by entity class.
  class TasksController < BaseController
    # Lists tasks; scoped to a single entity when an identifier is given.
    def index
      @tasks =
        if entity_identifier.present?
          widget.entity_task_list(current_user_identifier, entity_identifier, entity_class)
        else
          widget.task_list(current_user_identifier, entity_class)
        end
    end

    # Renders the task form definition (plus CSRF token) as JSON, or 404.
    def edit
      form = find_form(task_id, entity_class)
      return record_not_found unless form

      form.fetch_fields
      render json: form.as_json.merge(csrf_token: csrf_token).to_json
    end

    # Submits the form data to the BPM widget; 204 on success, 400 otherwise.
    def submit
      if widget.submit(current_user.email, entity_class, task_id, form_data)
        head :no_content
      else
        render nothing: true, status: :bad_request
      end
    end

    # Returns lookup variants for a typeahead field of the task form.
    def lookup
      field = find_form(task_id, entity_class).field(params[:name])
      render json: field.lookup_values(params[:q]).to_json
    end

    private

    def find_form(task_id, entity_class)
      widget.form(current_user_identifier, entity_class, task_id)
    end

    def task_id
      params.require(:id)
    end

    def form_data
      params[:form_data]
    end
  end
end
|
module HBW
  # Controller backing the HBW task widget: listing, editing, submitting
  # and field-lookup for BPM tasks.
  class TasksController < BaseController
    # Lists tasks, scoped to an entity when an identifier is given.
    def index
      if entity_identifier.present?
        @tasks = widget.entity_task_list(current_user_identifier, entity_identifier)
      else
        @tasks = widget.task_list(current_user_identifier)
      end
    end

    # Renders the task form definition (plus CSRF token) as JSON, or 404.
    def edit
      form = find_form(task_id)
      if form
        form.fetch_fields
        render json: form.as_json.merge(csrf_token: csrf_token).to_json
      else
        record_not_found
      end
    end

    # Submits the form data to the BPM widget; 204 on success, 400 otherwise.
    def submit
      result = widget.submit(current_user.email, task_id, form_data)
      if result
        head :no_content
      else
        render nothing: true, status: :bad_request
      end
    end

    # Returns lookup variants for a typeahead field of the task form.
    def lookup
      form = find_form(task_id)
      field = form.field(params[:name])
      variants = field.lookup_values(params[:q])
      render json: variants.to_json
    end

    private

    def find_form(task_id)
      widget.form(current_user_identifier, task_id)
    end

    def task_id
      params.require(:id)
    end

    # BUG FIX: params.require(:form_data) raises
    # ActionController::ParameterMissing when the form has no fields, so
    # empty forms could never be submitted. permit tolerates absence.
    def form_data
      params.permit(:form_data)
    end
  end
end
HOMS-114 Make the form able to be submitted without any fields
module HBW
  # Controller backing the HBW task widget: listing, editing, submitting
  # and field-lookup for BPM tasks.
  class TasksController < BaseController
    # Lists tasks, scoped to an entity when an identifier is given.
    def index
      if entity_identifier.present?
        @tasks = widget.entity_task_list(current_user_identifier, entity_identifier)
      else
        @tasks = widget.task_list(current_user_identifier)
      end
    end

    # Renders the task form definition (plus CSRF token) as JSON, or 404.
    def edit
      form = find_form(task_id)
      if form
        form.fetch_fields
        render json: form.as_json.merge(csrf_token: csrf_token).to_json
      else
        record_not_found
      end
    end

    # Submits the form data to the BPM widget; 204 on success, 400 otherwise.
    def submit
      result = widget.submit(current_user.email, task_id, form_data)
      if result
        head :no_content
      else
        render nothing: true, status: :bad_request
      end
    end

    # Returns lookup variants for a typeahead field of the task form.
    def lookup
      form = find_form(task_id)
      field = form.field(params[:name])
      variants = field.lookup_values(params[:q])
      render json: variants.to_json
    end

    private

    def find_form(task_id)
      widget.form(current_user_identifier, task_id)
    end

    def task_id
      params.require(:id)
    end

    # permit (not require) so a form with no fields can still be submitted
    def form_data
      params.permit(:form_data)
    end
  end
end
|
#!/usr/bin/env ruby
# encoding: UTF-8
require 'test_helper'
module CommonCore
  # Exercises Master's XML loading: element counts per fixture set,
  # validity of every loaded element, and parent/child re-linking.
  # NOTE(review): the exact counts below are tied to the XML fixtures
  # under DATA_PATH — they must be updated together with the fixtures.
  class LoaderTest < ActiveSupport::TestCase
    def setup
      # Master is a Singleton; reset it so each test loads from scratch
      Singleton.send(:__init__,Master) #Force reinitialization
      @master = Master.instance
    end

    # math_standards
    test "should load standards from xml" do
      @master.load_elements_from_paths(DATA_PATH+'/Math.xml',DATA_PATH+'/ELA-Literacy.xml')
      assert_equal 967, @master.standards.keys.length
      @master.standards.each do |key,standard|
        assert standard.is_a?(Standard), "#{standard} expected to be a Standard"
        assert standard.valid?, "#{standard.error_message} - #{standard}"
      end
    end

    test "should load standard components from xml" do
      @master.load_elements_from_paths(DATA_PATH+'/Math.xml',DATA_PATH+'/ELA-Literacy.xml')
      assert_equal 569, @master.components.keys.length
      @master.components.each do |key,component|
        assert component.is_a?(Component), "#{component} expected to be a Component"
        assert component.valid?, "#{component.error_message} - #{component}"
      end
    end

    # math_domains
    test "should load a single math domain from xml" do
      @master.load_elements_from_paths(DATA_PATH+'/Mathematics/Grade1/Domain/Math_Grade1_G.xml')
      assert_equal 1, @master.domains.keys.length
      @master.domains.each do |key,domain|
        assert domain.is_a?(Domain), "#{domain} expected to be a Domain"
        assert domain.valid?, "#{domain} - #{domain.error_message}"
      end
    end

    # math_clusters
    test "should load a single math cluster from xml" do
      @master.load_elements_from_paths(DATA_PATH+'/Mathematics/Grade1/Domain/Clusters/Math_Grade1_G_1.xml')
      assert_equal 1, @master.clusters.keys.length
      @master.clusters.each do |key,cluster|
        assert cluster.is_a?(Cluster), "#{cluster} expected to be a Cluster"
        assert cluster.valid?, "#{cluster} - #{cluster.error_message}"
      end
    end

    test "should load all xml files for grade 1 math" do
      @master.load_elements_from_paths(DATA_PATH+'/Mathematics/Grade1/**/*.xml')
      assert_equal 1, @master.subject_grades.keys.length
      assert_equal 4, @master.domains.keys.length
      assert_equal 11, @master.clusters.keys.length
      @master.subject_grades.each do |key,subject_grade|
        assert subject_grade.is_a?(SubjectGrade), "#{subject_grade} expected to be a SubjectGrade"
        assert subject_grade.valid?, "#{subject_grade} - #{subject_grade.error_message}"
      end
      @master.domains.each do |key,domain|
        assert domain.is_a?(Domain), "#{domain} expected to be a Domain"
        assert domain.valid?, "#{domain} - #{domain.error_message}"
      end
      @master.clusters.each do |key,cluster|
        assert cluster.is_a?(Cluster), "#{cluster} expected to be a Cluster"
        assert cluster.valid?, "#{cluster} - #{cluster.error_message}"
      end
    end

    test "should load all xml files for math" do
      @master.load_elements_from_paths(DATA_PATH+'/Math.xml',DATA_PATH+'/Mathematics/**/*.xml')
      assert_equal 393, @master.standards.keys.length
      assert_equal 124, @master.components.keys.length
      assert_equal 15, @master.subject_grades.keys.length
      assert_equal 65, @master.domains.keys.length
      assert_equal 148, @master.clusters.keys.length
      @master.subject_grades.each do |key,subject_grade|
        assert subject_grade.is_a?(SubjectGrade), "#{subject_grade} expected to be a SubjectGrade"
        assert subject_grade.valid?, "#{subject_grade} - #{subject_grade.error_message}"
      end
      @master.domains.each do |key,domain|
        assert domain.is_a?(Domain), "#{domain} expected to be a Domain"
        assert domain.valid?, "#{domain} - #{domain.error_message}"
      end
      @master.clusters.each do |key,cluster|
        assert cluster.is_a?(Cluster), "#{cluster} expected to be a Cluster"
        assert cluster.valid?, "#{cluster} - #{cluster.error_message}"
      end
    end

    test "should load all xml files for language arts" do
      @master.load_elements_from_paths(DATA_PATH+'/ELA/**/*.xml')
      assert_equal 13, @master.subject_grades.keys.length
      assert_equal 74, @master.domains.keys.length
      assert_equal 1, @master.standard_types.keys.length
      @master.subject_grades.each do |key,subject_grade|
        assert subject_grade.is_a?(SubjectGrade), "#{subject_grade} expected to be a SubjectGrade"
        assert subject_grade.valid?, "#{subject_grade} - #{subject_grade.error_message}"
      end
      @master.domains.each do |key,domain|
        assert domain.is_a?(Domain), "#{domain} expected to be a Domain"
        assert domain.valid?, "#{domain} - #{domain.error_message}"
      end
      @master.standard_types.each do |key,standard_type|
        assert standard_type.is_a?(StandardType), "#{standard_type} expected to be a StandardType"
        assert standard_type.valid?,"#{standard_type} -#{standard_type.error_message}"
      end
    end

    test "should load all xml files for math and reunite parents with children" do
      @master.load_elements_from_paths(DATA_PATH+'/**/*.xml')
      orphan_elements = []
      @master.elements.each do |key,element|
        # An orphan has a parent reference that was never resolved
        next unless (element.parent_ref_id and element.parent.nil?)
        next if element.parent_ref_id == 'INTENTIONALLYORPHANED'
        orphan_elements << element
      end
      assert_equal(0,orphan_elements.size, orphan_elements.map{|element| "#{element.class}:#{element.ref_id}"})
    end

    test 'math standards should have a cluster for a parent' do
      @master.load_elements_from_paths(DATA_PATH+'/Math.xml',DATA_PATH+'/Mathematics/**/*.xml')
      mismatched_standards = []
      @master.standards.each do |key,standard|
        next if standard.parent.is_a?(CommonCore::Cluster)
        next if standard.parent_ref_id == 'INTENTIONALLYORPHANED'
        mismatched_standards << standard
      end
      assert_equal(0,mismatched_standards.size, mismatched_standards.map{|standard| "#{standard.ref_id}::#{standard.code}:#{standard.parent.class}:#{standard.parent_ref_id}"})
    end

    # NOTE(review): "languange" typo lives in the test-name string; left
    # as-is because renaming would change the test's identifier.
    test 'languange arts standards should have a cluster for a parent' do
      @master.load_elements_from_paths(DATA_PATH+'/ELA-Literacy.xml',DATA_PATH+'/ELA/**/*.xml')
      mismatched_standards = []
      @master.standards.each do |key,standard|
        next if standard.parent.is_a?(CommonCore::Domain)
        next if standard.parent_ref_id == 'INTENTIONALLYORPHANED'
        mismatched_standards << standard
      end
      assert_equal(0,mismatched_standards.size, mismatched_standards.map{|standard| "#{standard} === #{standard.parent_ref_id.blank?}=== #{standard.code.match(/CCSS\.ELA\-Literacy\.L\.3/)}"})
    end
  end
end
add more robust data sanity checks
#!/usr/bin/env ruby
# encoding: UTF-8
require 'test_helper'
module CommonCore
class LoaderTest < ActiveSupport::TestCase
def setup
Singleton.send(:__init__,Master) #Force reinitialization
@master = Master.instance
end
# math_standards
test "should load standards from xml" do
@master.load_elements_from_paths(DATA_PATH+'/Math.xml',DATA_PATH+'/ELA-Literacy.xml')
assert_equal 967, @master.standards.keys.length
@master.standards.each do |key,standard|
assert standard.is_a?(Standard), "#{standard} expected to be a Standard"
assert standard.valid?, "#{standard.error_message} - #{standard}"
end
end
test "should load standard components from xml" do
@master.load_elements_from_paths(DATA_PATH+'/Math.xml',DATA_PATH+'/ELA-Literacy.xml')
assert_equal 569, @master.components.keys.length
@master.components.each do |key,component|
assert component.is_a?(Component), "#{component} expected to be a Component"
assert component.valid?, "#{component.error_message} - #{component}"
end
end
# math_domains
test "should load a single math domain from xml" do
@master.load_elements_from_paths(DATA_PATH+'/Mathematics/Grade1/Domain/Math_Grade1_G.xml')
assert_equal 1, @master.domains.keys.length
@master.domains.each do |key,domain|
assert domain.is_a?(Domain), "#{domain} expected to be a Domain"
assert domain.valid?, "#{domain} - #{domain.error_message}"
end
end
# math_clusters
test "should load a single math cluster from xml" do
@master.load_elements_from_paths(DATA_PATH+'/Mathematics/Grade1/Domain/Clusters/Math_Grade1_G_1.xml')
assert_equal 1, @master.clusters.keys.length
@master.clusters.each do |key,cluster|
assert cluster.is_a?(Cluster), "#{cluster} expected to be a Cluster"
assert cluster.valid?, "#{cluster} - #{cluster.error_message}"
end
end
# Loads every XML file under Mathematics/Grade1 and checks the expected
# element counts (1 subject-grade, 4 domains, 11 clusters) plus the
# type and validity of each loaded element.
test "should load all xml files for grade 1 math" do
  @master.load_elements_from_paths(DATA_PATH+'/Mathematics/Grade1/**/*.xml')
  assert_equal 1, @master.subject_grades.keys.length
  assert_equal 4, @master.domains.keys.length
  assert_equal 11, @master.clusters.keys.length
  @master.subject_grades.each do |key,subject_grade|
    assert subject_grade.is_a?(SubjectGrade), "#{subject_grade} expected to be a SubjectGrade"
    assert subject_grade.valid?, "#{subject_grade} - #{subject_grade.error_message}"
  end
  @master.domains.each do |key,domain|
    assert domain.is_a?(Domain), "#{domain} expected to be a Domain"
    assert domain.valid?, "#{domain} - #{domain.error_message}"
  end
  @master.clusters.each do |key,cluster|
    assert cluster.is_a?(Cluster), "#{cluster} expected to be a Cluster"
    assert cluster.valid?, "#{cluster} - #{cluster.error_message}"
  end
end
# Loads Math.xml plus the full Mathematics tree and checks the expected
# corpus-wide element counts, then validates every subject grade,
# domain and cluster that was loaded.
test "should load all xml files for math" do
  @master.load_elements_from_paths(DATA_PATH+'/Math.xml',DATA_PATH+'/Mathematics/**/*.xml')
  assert_equal 393, @master.standards.keys.length
  assert_equal 124, @master.components.keys.length
  assert_equal 15, @master.subject_grades.keys.length
  assert_equal 65, @master.domains.keys.length
  assert_equal 148, @master.clusters.keys.length
  @master.subject_grades.each do |key,subject_grade|
    assert subject_grade.is_a?(SubjectGrade), "#{subject_grade} expected to be a SubjectGrade"
    assert subject_grade.valid?, "#{subject_grade} - #{subject_grade.error_message}"
  end
  @master.domains.each do |key,domain|
    assert domain.is_a?(Domain), "#{domain} expected to be a Domain"
    assert domain.valid?, "#{domain} - #{domain.error_message}"
  end
  @master.clusters.each do |key,cluster|
    assert cluster.is_a?(Cluster), "#{cluster} expected to be a Cluster"
    assert cluster.valid?, "#{cluster} - #{cluster.error_message}"
  end
end
# Loads every XML file under ELA and checks the expected element counts
# (13 subject-grades, 74 domains, 1 standard type) plus the type and
# validity of each loaded element.
test "should load all xml files for language arts" do
  @master.load_elements_from_paths(DATA_PATH + '/ELA/**/*.xml')
  assert_equal 13, @master.subject_grades.size
  assert_equal 74, @master.domains.size
  assert_equal 1, @master.standard_types.size
  @master.subject_grades.each_value do |subject_grade|
    assert subject_grade.is_a?(SubjectGrade), "#{subject_grade} expected to be a SubjectGrade"
    assert subject_grade.valid?, "#{subject_grade} - #{subject_grade.error_message}"
  end
  @master.domains.each_value do |domain|
    assert domain.is_a?(Domain), "#{domain} expected to be a Domain"
    assert domain.valid?, "#{domain} - #{domain.error_message}"
  end
  @master.standard_types.each_value do |standard_type|
    assert standard_type.is_a?(StandardType), "#{standard_type} expected to be a StandardType"
    assert standard_type.valid?, "#{standard_type} -#{standard_type.error_message}"
  end
end
# Loads every XML document in the corpus and asserts that each element
# whose XML declares a parent_ref_id was actually linked to its parent.
# Elements explicitly marked INTENTIONALLYORPHANED are exempt.
test "should load all xml files for math and reunite parents with children" do
  @master.load_elements_from_paths(DATA_PATH + '/**/*.xml')
  orphan_elements = []
  @master.elements.each do |key, element|
    # Only elements that claim a parent but never got one are orphans.
    next unless element.parent_ref_id && element.parent.nil?
    next if element.parent_ref_id == 'INTENTIONALLYORPHANED'
    orphan_elements << element
  end
  # Join the descriptions so a failure prints a readable list instead
  # of an Array object as the assertion message.
  assert_equal(0, orphan_elements.size,
               orphan_elements.map { |element| "#{element.class}:#{element.ref_id}" }.join(', '))
end
# Verifies the expected math hierarchy for every standard:
#   SubjectGrade > Domain > Cluster > Standard (children are Components).
test 'math standards should have a sane hierarchy' do
  @master.load_elements_from_paths(DATA_PATH+'/Math.xml',DATA_PATH+'/Mathematics/**/*.xml')
  insane_standards = []
  @master.standards.each do |key,standard|
    next if standard.parent_ref_id == 'INTENTIONALLYORPHANED'
    insanity_flag_raised = false
    # All of a standard's children must be Components.
    insanity_flag_raised = true if standard.children.any? { |ref_id,child| ! child.is_a?(CommonCore::Component) }
    # Walk the parent chain: Cluster -> Domain -> SubjectGrade. The
    # `unless insanity_flag_raised or ...` guards stop the walk once a
    # link has already failed, avoiding a NoMethodError on a nil parent.
    insanity_flag_raised = true unless standard.parent.is_a?(CommonCore::Cluster)
    insanity_flag_raised = true unless insanity_flag_raised or standard.parent.parent.is_a?(CommonCore::Domain)
    insanity_flag_raised = true unless insanity_flag_raised or standard.parent.parent.parent.is_a?(CommonCore::SubjectGrade)
    insane_standards << standard if insanity_flag_raised
  end
  assert_equal(0,insane_standards.size, insane_standards.map{|standard| "#{standard.ref_id}::#{standard.code}:#{standard.parent.class}:#{standard.parent_ref_id}"})
end
# Verifies the expected ELA hierarchy for every standard:
#   SubjectGrade > Domain > Standard (children are Components).
# ELA standards hang directly off a Domain, unlike math standards which
# sit beneath a Cluster. (Description typo "languange" fixed.)
test 'language arts standards should have sane hierarchy' do
  @master.load_elements_from_paths(DATA_PATH + '/ELA-Literacy.xml', DATA_PATH + '/ELA/**/*.xml')
  insane_standards = []
  @master.standards.each do |key, standard|
    next if standard.parent_ref_id == 'INTENTIONALLYORPHANED'
    insanity_flag_raised = false
    # All of a standard's children must be Components.
    insanity_flag_raised = true if standard.children.any? { |ref_id, child| !child.is_a?(CommonCore::Component) }
    # Walk the parent chain: Domain -> SubjectGrade. Guard the second
    # step so a failed first link records the standard as insane instead
    # of raising NoMethodError on a nil parent (matches the math test).
    insanity_flag_raised = true unless standard.parent.is_a?(CommonCore::Domain)
    insanity_flag_raised = true unless insanity_flag_raised or standard.parent.parent.is_a?(CommonCore::SubjectGrade)
    insane_standards << standard if insanity_flag_raised
  end
  # Join the descriptions so a failure prints a readable list.
  assert_equal(0, insane_standards.size,
               insane_standards.map { |standard| "#{standard} === #{standard.parent_ref_id.blank?}=== #{standard.code.match(/CCSS\.ELA\-Literacy\.L\.3/)}" }.join("\n"))
end
end
end |
# encoding: UTF-8
require File.expand_path('../../test_helper', __FILE__)
require 'ostruct'
describe "Assets" do
include RackTestMethods
def app
Spontaneous::Rack::Back.application(site)
end
# Mixed into render contexts by new_context(true, ...) to make a
# preview-style context report itself as live/published without
# actually running a publish.
module LiveSimulation
  # simulate a production + publishing environment
  def live?
    true
  end

  # usually set as part of the render process
  def revision
    99
  end
end
# Build a render context for `content` in the given output format.
# When `live` is truthy a publish renderer is used and the context is
# made to report itself as live via LiveSimulation; development? is
# always forced off so the production code paths run.
def new_context(live, content = @page, format = :html, params = {})
  renderer_class = live ? Spontaneous::Output::Template::PublishRenderer : Spontaneous::Output::Template::PreviewRenderer
  renderer = renderer_class.new(site)
  context = renderer.context(content.output(format), params, nil)
  context.extend(LiveSimulation) if live
  # Force us into production environment
  # which is where most of the magic has to happen
  context.class_eval do
    def development?
      false
    end
  end
  context
end
# Context simulating the live/publishing environment.
def live_context(content = @page, format = :html, params = {})
  new_context(true, content, format, params)
end

# Context in preview (editing) mode.
def preview_context(content = @page, format = :html, params = {})
  new_context(false, content, format, params)
end

# Preview context with development? re-enabled, used to test the
# unbundled per-file asset behaviour.
def development_context(content = @page, format = :html, params = {})
  new_context(false, content, format, params).tap do |context|
    context.class_eval do
      def development?
        true
      end
    end
  end
end
# Compute the fingerprint digest of a fixture file the same way the
# asset environment does, so tests can build expected asset URLs.
def asset_digest(asset_relative_path)
  digest = context.asset_environment.environment.digest
  digest.update(File.read(File.join(fixture_root, asset_relative_path)))
  digest.hexdigest
end

let(:y_png_digest) { asset_digest('public2/i/y.png') }
# One-off suite setup: build a site with two asset source directories
# and an in-memory output store, create a root user, and stub the
# permission lookups so the Rack auth layer auto-logs that user in.
# The locals are exposed to the examples via let.
start do
  fixture_root = File.expand_path("../../fixtures/assets", __FILE__)
  site = setup_site
  site.paths.add :assets, fixture_root / "public1", fixture_root / "public2"
  site.config.tap do |c|
    c.auto_login = 'root'
  end
  site.output_store(:Memory)
  Spontaneous::Permissions::User.delete
  user = Spontaneous::Permissions::User.create(:email => "root@example.com", :login => "root", :name => "root name", :password => "rootpass")
  user.update(:level => Spontaneous::Permissions[:editor])
  user.save.reload
  key = user.generate_access_key("127.0.0.1")
  # Stub the lookups used during request authentication so they always
  # resolve to the fixture user and access key.
  Spontaneous::Permissions::User.stubs(:[]).with(:login => 'root').returns(user)
  Spontaneous::Permissions::User.stubs(:[]).with(user.id).returns(user)
  Spontaneous::Permissions::AccessKey.stubs(:authenticate).with(key.key_id).returns(key)
  let(:site) { site }
  let(:fixture_root) { fixture_root }
  let(:user) { user }
  let(:key) { key }
end
# One-off suite teardown.
finish do
  teardown_site
end

before do
  @page = Page.create
end

after do
  # Remove the asset compilation cache between examples so each test
  # compiles from a clean slate.
  tmp = site.path('assets/tmp')
  FileUtils.rm_r(tmp) if tmp.exist?
  Content.delete
end
# Sanity checks on the flags exposed by the helper-built contexts.
describe "Preview context" do
  it "should not be flagged as publishing" do
    refute preview_context.publishing?
  end

  it "should not have the development? flag set" do
    refute preview_context.development?
  end
end

describe "Development context" do
  it "should not be flagged as publishing" do
    refute development_context.publishing?
  end

  it "should have the development? flag set" do
    assert development_context.development?
  end
end
# Flags exposed by a live context, with and without the production?
# stub. The third example previously duplicated the first's description,
# making the two indistinguishable in test output.
describe "Publishing context" do
  it "be flagged as publishing" do
    Spontaneous.stubs(:production?).returns(true)
    assert live_context.publishing?
  end

  it "be flagged as live" do
    Spontaneous.stubs(:production?).returns(true)
    assert live_context.live?
  end

  it "be flagged as publishing outside of production" do
    assert live_context.publishing?
  end
end
# In development mode assets are served unbundled: each dependency gets
# its own tag with a ?body=1 query and a per-file fingerprint.
describe "development" do
  let(:context) { development_context }
  let(:a_js_digest) { asset_digest('public1/js/a.js') }
  let(:b_js_digest) { asset_digest('public2/js/b.js') }
  let(:c_js_digest) { asset_digest('public2/js/c.js') }
  let(:x_js_digest) { asset_digest('public1/x.js') }
  # these are compiled so fairly complex to calculate their digests
  # not impossible, but annoying
  let(:n_js_digest) { '74f175e03a4bdc8c807aba4ae0314938' }
  let(:m_js_digest) { 'dd35b142dc75b6ec15b2138e9e91c0c3' }
  let(:all_js_digest) { 'd406fc3c21d90828a2f0a718c89e8d99' }
  let(:a_css_digest) { '7b04d295476986c24d8c77245943e5b9' }
  let(:b_css_digest) { '266643993e14da14f2473d45f003bd2c' }
  let(:c_css_digest) { 'fc8ba0d0aae64081dc00b8444a198fb8' }
  let(:x_css_digest) { '2560aec2891794825eba770bf84823fb' }
  let(:all_css_digest) { 'cf61c624b91b9ea126804291ac55bd5d' }

  it "includes all js dependencies" do
    result = context.scripts('js/all', 'js/m', 'js/c', 'x')
    result.must_equal [
      %|<script type="text/javascript" src="/assets/js/a.js?body=1&#{a_js_digest}"></script>|,
      %|<script type="text/javascript" src="/assets/js/b.js?body=1&#{b_js_digest}"></script>|,
      %|<script type="text/javascript" src="/assets/js/n.js?body=1&#{n_js_digest}"></script>|,
      %|<script type="text/javascript" src="/assets/js/all.js?body=1&#{all_js_digest}"></script>|,
      %|<script type="text/javascript" src="/assets/js/m.js?body=1&#{m_js_digest}"></script>|,
      %|<script type="text/javascript" src="/assets/js/c.js?body=1&#{c_js_digest}"></script>|,
      %|<script type="text/javascript" src="/assets/x.js?body=1&#{x_js_digest}"></script>|
    ].join("\n")
  end

  it "doesn't bundle js files" do
    get "/assets/js/all.js?body=1"
    result = last_response.body
    result.wont_match /elvis/
  end

  it "includes all css dependencies" do
    result = context.stylesheets('css/all', 'css/c', 'x')
    result.must_equal [
      %|<link rel="stylesheet" href="/assets/css/b.css?body=1&#{b_css_digest}" />|,
      %|<link rel="stylesheet" href="/assets/css/a.css?body=1&#{a_css_digest}" />|,
      %|<link rel="stylesheet" href="/assets/css/all.css?body=1&#{all_css_digest}" />|,
      %|<link rel="stylesheet" href="/assets/css/c.css?body=1&#{c_css_digest}" />|,
      %|<link rel="stylesheet" href="/assets/x.css?body=1&#{x_css_digest}" />|
    ].join("\n")
  end

  # Was a copy-paste duplicate of the js example's description; this
  # one exercises the css endpoint.
  it "doesn't bundle css files" do
    get "/assets/css/all.css?body=1"
    result = last_response.body
    result.must_match %r(/\*\s+\*/)
  end

  it "allows for protocol agnostic absolute script urls" do
    result = context.scripts('//use.typekit.com/abcde')
    result.must_equal '<script type="text/javascript" src="//use.typekit.com/abcde"></script>'
  end
end
describe "preview" do
let(:app) { Spontaneous::Rack::Back.application(site) }
let(:context) { preview_context }
let(:c_js_digest) { 'f669550dd7e10e9646ad781f44756950' }
let(:x_js_digest) { '6b4c9176b2838a4949a18284543fc19c' }
let(:n_js_digest) { '74f175e03a4bdc8c807aba4ae0314938' }
let(:m_js_digest) { 'dd35b142dc75b6ec15b2138e9e91c0c3' }
let(:all_js_digest) { 'cd1f681752f5038421be0bc5ea0e855d' }
let(:c_css_digest) { 'fc8ba0d0aae64081dc00b8444a198fb8' }
let(:x_css_digest) { '2560aec2891794825eba770bf84823fb' }
let(:all_css_digest) { 'bb2c289a27b3d5d4467dde6d60722fd3' }
# Preview mode: scripts are served as whole (bundled) files with a
# fingerprint query string; remote and absolute URLs pass through.
# ("Recieved" typos in assertion messages fixed.)
describe "javascript" do
  it "include scripts as separate files with finger prints" do
    result = context.scripts('js/all', 'js/m.js', 'js/c.js', 'x')
    result.must_equal [
      %|<script type="text/javascript" src="/assets/js/all.js?#{all_js_digest}"></script>|,
      %|<script type="text/javascript" src="/assets/js/m.js?#{m_js_digest}"></script>|,
      %|<script type="text/javascript" src="/assets/js/c.js?#{c_js_digest}"></script>|,
      %|<script type="text/javascript" src="/assets/x.js?#{x_js_digest}"></script>|
    ].join("\n")
  end

  it "handles urls passed as an array" do
    result = context.scripts(['js/all', 'js/m.js'])
    result.must_equal [
      %|<script type="text/javascript" src="/assets/js/all.js?#{all_js_digest}"></script>|,
      %|<script type="text/javascript" src="/assets/js/m.js?#{m_js_digest}"></script>|
    ].join("\n")
  end

  it "should ignore missing files" do
    result = context.scripts('js/all', 'js/missing')
    result.must_equal [
      %|<script type="text/javascript" src="/assets/js/all.js?#{all_js_digest}"></script>|,
      '<script type="text/javascript" src="js/missing.js"></script>'
    ].join("\n")
  end

  it "should pass through absolute urls" do
    result = context.scripts('/js/all.js')
    result.must_equal '<script type="text/javascript" src="/js/all.js"></script>'
  end

  it "should bundle assets" do
    get "/assets/js/all.js"
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match /var a = 1/
    result.must_match /var b = 2/
    result.must_match %r{alert\("I knew it!"\);}
  end

  it "should preprocess coffeescript" do
    get "/assets/js/m.js"
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match /square = function\(x\)/
  end

  it "should allow access to straight js" do
    get "/assets/x.js"
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{var x = 1;}
  end

  it "should use absolute URLs when encountered" do
    context = preview_context
    result = context.scripts('js/all', '//use.typekit.com/abcde', 'http://cdn.google.com/jquery.js', 'https://cdn.google.com/jquery.js')
    result.must_equal [
      %|<script type="text/javascript" src="/assets/js/all.js?#{all_js_digest}"></script>|,
      '<script type="text/javascript" src="//use.typekit.com/abcde"></script>',
      '<script type="text/javascript" src="http://cdn.google.com/jquery.js"></script>',
      '<script type="text/javascript" src="https://cdn.google.com/jquery.js"></script>'
    ].join("\n")
  end
end
# Preview mode: stylesheets are served as whole compiled files with a
# fingerprint query string; images referenced from css are rewritten to
# /assets urls. ("Recieved" typos in assertion messages fixed.)
describe "css" do
  it "include css files as separate links" do
    result = context.stylesheets('css/all', 'css/c', 'x')
    result.must_equal [
      %|<link rel="stylesheet" href="/assets/css/all.css?#{all_css_digest}" />|,
      %|<link rel="stylesheet" href="/assets/css/c.css?#{c_css_digest}" />|,
      %|<link rel="stylesheet" href="/assets/x.css?#{x_css_digest}" />|
    ].join("\n")
  end

  it "allows passing scripts as an array" do
    result = context.stylesheets(['css/all', 'css/c', 'x'])
    result.must_equal [
      %|<link rel="stylesheet" href="/assets/css/all.css?#{all_css_digest}" />|,
      %|<link rel="stylesheet" href="/assets/css/c.css?#{c_css_digest}" />|,
      %|<link rel="stylesheet" href="/assets/x.css?#{x_css_digest}" />|
    ].join("\n")
  end

  it "should bundle dependencies" do
    get "/assets/css/all.css"
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{height: 42px;}
    result.must_match %r{width: 8px;}
  end

  it "should compile sass" do
    get "/assets/css/b.css"
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{height: 42px;}
  end

  it "links to images" do
    get "/assets/css/image1.css"
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{background: url\(/assets/i/y\.png\)}
  end

  it "passes through non-existant images" do
    get "/assets/css/missing.css"
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{background: url\(i\/missing\.png\)}
  end

  it "can understand urls with hashes" do
    get "/assets/css/urlhash.css"
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{background: url\(/assets/i/y\.png\?query=true#hash\)}
  end

  it "embeds image data" do
    get "/assets/css/data.css"
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{background-image: url\(data:image\/png;base64,}
  end

  it "can include other assets" do
    get "/assets/css/import.css"
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{width: 8px;}
  end
end
# asset_path / asset_url template helpers in preview mode: fingerprint
# carried as a query string on the original filename.
describe "templates" do
  let(:renderer) { Spontaneous::Output::Template::PreviewRenderer.new(site) }

  it "should allow for embedding asset images into templates" do
    result = renderer.render_string("${ asset_path 'i/y.png' }", @page.output(:html))
    result.must_equal "/assets/i/y.png?#{y_png_digest}"
  end

  it "should allow for embedding asset urls into templates" do
    result = renderer.render_string("${ asset_url 'i/y.png' }", @page.output(:html))
    result.must_equal "url(/assets/i/y.png?#{y_png_digest})"
  end
end
end
describe "publishing" do
# Publishing examples run against the front (public) app and a live
# context whose revision is fixed at 99 by LiveSimulation.
let(:app) { Spontaneous::Rack::Front.application(site) }
let(:context) { live_context }
let(:revision) { site.revision(context.revision) }

before do
  # Point the current-revision symlink at this revision's root so the
  # front app serves the published files from it.
  FileUtils.rm_f(Spontaneous.revision_dir) if File.exist?(Spontaneous.revision_dir)
  system "ln -nfs #{revision.root} #{Spontaneous.revision_dir}"
  # FileUtils.ln_s(revision.root, Spontaneous.revision_dir)
end

after do
  revision.path("assets").rmtree if revision.path("assets").exist?
end
# When publishing, scripts are bundled, minified and written into the
# revision directory with the fingerprint embedded in the filename.
describe "javascript" do
  let(:all_sha) { "ed62549e8edc1f61a1e27136602f01d9" }
  let(:x_sha) { "66e92be1e412458f6ff02f4c5dd9beb1" }

  it "bundles & fingerprints local scripts" do
    result = context.scripts('js/all', 'js/m.js', 'js/c.js', 'x')
    result.must_equal [
      %(<script type="text/javascript" src="/assets/js/all-#{all_sha}.js"></script>),
      '<script type="text/javascript" src="/assets/js/m-a5be7324bc314d5cf470a59c3732ef10.js"></script>',
      '<script type="text/javascript" src="/assets/js/c-c24bcbb4f9647b078cc919746aa7fc3a.js"></script>',
      %(<script type="text/javascript" src="/assets/x-#{x_sha}.js"></script>)
    ].join("\n")
  end

  it "writes bundled assets to the revision directory" do
    result = context.scripts('js/all')
    asset_path = revision.path("assets/js/all-#{all_sha}.js")
    assert asset_path.exist?
  end

  it "compresses local scripts" do
    result = context.scripts('js/all')
    asset_path = revision.path("assets/js/all-#{all_sha}.js")
    js = asset_path.read
    # Minified output should be a single line.
    js.index("\n").must_be_nil
  end

  it "bundles locals scripts and includes remote ones" do
    result = context.scripts('js/all', '//use.typekit.com/abcde', 'http://cdn.google.com/jquery.js', 'x')
    result.must_equal [
      %(<script type="text/javascript" src="/assets/js/all-#{all_sha}.js"></script>),
      '<script type="text/javascript" src="//use.typekit.com/abcde"></script>',
      '<script type="text/javascript" src="http://cdn.google.com/jquery.js"></script>',
      %(<script type="text/javascript" src="/assets/x-#{x_sha}.js"></script>)
    ].join("\n")
  end

  it "makes bundled scripts available under /assets" do
    context.scripts('js/all')
    get "/assets/js/all-#{all_sha}.js"
    asset_path = revision.path("assets/js/all-#{all_sha}.js")
    last_response.body.must_equal asset_path.read
  end

  it "only bundles & compresses once" do
    context.scripts('js/all')
    asset_path = revision.path("assets/js/all-#{all_sha}.js")
    assert asset_path.exist?
    # Overwrite the compiled file; a second call must reuse it rather
    # than recompile over the top.
    asset_path.open("w") do |file|
      file.write("var cached = true;")
    end
    context.scripts('js/all')
    asset_path.read.must_equal "var cached = true;"
  end

  describe "re-use" do
    before do
      @result = context.scripts('js/all', 'x')
    end

    it "uses assets from a previous publish if present" do
      # Simulate a later revision (100) picking up the compiled output
      # cached under assets/tmp by the earlier revision's publish.
      context = live_context
      def context.revision; 100 end
      revision = site.revision(context.revision)
      manifest = Spontaneous::JSON.parse File.read(site.path("assets/tmp") + "manifest.json")
      compiled = manifest[:assets][:"js/all.js"]
      ::File.open(site.path("assets/tmp")+compiled, 'w') do |file|
        file.write("var reused = true;")
      end
      result = context.scripts('js/all', 'x')
      rev = revision.path("assets") + compiled
      File.read(rev).must_equal "var reused = true;"
    end
  end
end
# When publishing, stylesheets are bundled, minified and written into
# the revision directory with the fingerprint in the filename.
# ("Recieved" typos in assertion messages fixed.)
describe "css" do
  let(:all_sha) { "2e17f25ddeba996223a6cd1e28e7a319" }
  let(:x_sha) { "2560aec2891794825eba770bf84823fb" }

  it "bundles & fingerprints local stylesheets" do
    result = context.stylesheets('css/all', 'css/a.css', 'x')
    result.must_equal [
      %(<link rel="stylesheet" href="/assets/css/all-#{all_sha}.css" />),
      '<link rel="stylesheet" href="/assets/css/a-0164c6d5b696ec2f2c5e70cade040da8.css" />',
      %(<link rel="stylesheet" href="/assets/x-#{x_sha}.css" />)
    ].join("\n")
  end

  it "ignores missing stylesheets" do
    result = context.stylesheets('css/all', '/css/notfound', 'css/notfound')
    result.must_equal [
      %(<link rel="stylesheet" href="/assets/css/all-#{all_sha}.css" />),
      '<link rel="stylesheet" href="/css/notfound" />',
      '<link rel="stylesheet" href="css/notfound" />'
    ].join("\n")
  end

  it "bundles locals scripts and includes remote ones" do
    result = context.stylesheets('css/all.css', '//stylesheet.com/responsive', 'http://cdn.google.com/normalize.css', 'x')
    result.must_equal [
      %(<link rel="stylesheet" href="/assets/css/all-#{all_sha}.css" />),
      '<link rel="stylesheet" href="//stylesheet.com/responsive" />',
      '<link rel="stylesheet" href="http://cdn.google.com/normalize.css" />',
      %(<link rel="stylesheet" href="/assets/x-#{x_sha}.css" />)
    ].join("\n")
  end

  it "makes bundled scripts available under /assets" do
    path = context.stylesheet_urls('css/all').first
    get path
    asset_path = revision.path(path)
    last_response.body.must_equal asset_path.read
  end

  it "compresses local styles" do
    path = context.stylesheet_urls('css/all').first
    asset_path = revision.path(path)
    css = asset_path.read
    # Minified css should contain no spaces at all.
    css.index(" ").must_be_nil
  end

  it "only bundles & compresses once" do
    path = context.stylesheet_urls('css/all').first
    asset_path = revision.path(path)
    assert asset_path.exist?
    asset_path.open("w") do |file|
      file.write(".cached { }")
    end
    context.stylesheets('css/all')
    asset_path.read.must_equal ".cached { }"
  end

  it "passes through non-existant images" do
    path = context.stylesheet_urls('css/missing.css').first
    get path
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match /background:url\(i\/missing\.png\)/
  end

  it "can include other assets" do
    path = context.stylesheet_urls('css/import').first
    get path
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match /width:8px/
  end
end
# Images referenced from published css are copied into the revision
# with fingerprinted names and the css rewritten to match.
# ("Recieved" typos in assertion messages fixed.)
describe "images" do
  it "bundles images and links using fingerprinted asset url" do
    path = context.stylesheet_urls('css/image1').first
    get path
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{background:url\(/assets/i/y-#{y_png_digest}\.png\)}
    asset_path = revision.path("/assets/i/y-#{y_png_digest}.png")
    assert asset_path.exist?
  end

  it "can insert data urls for assets" do
    path = context.stylesheet_urls('css/data').first
    get path
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{background-image:url\(data:image/png;base64}
  end

  it "can understand urls with hashes" do
    path = context.stylesheet_urls('css/urlhash').first
    get path
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{background:url\(/assets/i/y-#{y_png_digest}\.png\?query=true#hash\)}
    asset_path = revision.path("/assets/i/y-#{y_png_digest}.png")
    assert asset_path.exist?
  end
end
# asset_path / asset_url template helpers when publishing: fingerprint
# embedded in the filename rather than carried as a query string.
describe "templates" do
  let(:renderer) { Spontaneous::Output::Template::PublishRenderer.new(site) }

  it "should allow for embedding asset images into templates" do
    result = renderer.render_string("${ asset_path 'i/y.png' }", @page.output(:html))
    result.must_equal "/assets/i/y-#{y_png_digest}.png"
  end

  it "should allow for embedding asset urls into templates" do
    result = renderer.render_string("${ asset_url 'i/y.png' }", @page.output(:html))
    result.must_equal "url(/assets/i/y-#{y_png_digest}.png)"
  end
end
end
end
Fix asset tests to work with new external asset publishing step
# encoding: UTF-8
require File.expand_path('../../test_helper', __FILE__)
require 'ostruct'
describe "Assets" do
include RackTestMethods
let(:app) {Spontaneous::Rack::Back.application(site)}
module LiveSimulation
# simulate a production + publishing environment
def live?
true
end
# usually set as part of the render process
def revision
99
end
end
def new_context(live, content = @page, format = :html, params = {})
renderer = if live
Spontaneous::Output::Template::PublishRenderer.new(site)
else
Spontaneous::Output::Template::PreviewRenderer.new(site)
end
output = content.output(format)
context = renderer.context(output, params, nil)
context.extend LiveSimulation if live
context.class_eval do
# Force us into production environment
# which is where most of the magic has to happen
def development?
false
end
end
context
end
def live_context(content = @page, format = :html, params = {})
new_context(true, content, format, params)
end
def preview_context(content = @page, format = :html, params = {})
new_context(false, content, format, params)
end
def development_context(content = @page, format = :html, params = {})
new_context(false, content, format, params).tap do |context|
context.class_eval do
def development?
true
end
end
end
end
def asset_digest(asset_relative_path)
digest = context.asset_environment.environment.digest
digest.update(File.read(File.join(fixture_root, asset_relative_path)))
digest.hexdigest
end
let(:y_png_digest) { asset_digest('public2/i/y.png') }
start do
fixture_root = File.expand_path("../../fixtures/assets", __FILE__)
site = setup_site
site.paths.add :assets, fixture_root / "public1", fixture_root / "public2"
site.config.tap do |c|
c.auto_login = 'root'
end
site.output_store(:Memory)
Spontaneous::Permissions::User.delete
user = Spontaneous::Permissions::User.create(:email => "root@example.com", :login => "root", :name => "root name", :password => "rootpass")
user.update(:level => Spontaneous::Permissions[:editor])
user.save.reload
key = user.generate_access_key("127.0.0.1")
Spontaneous::Permissions::User.stubs(:[]).with(:login => 'root').returns(user)
Spontaneous::Permissions::User.stubs(:[]).with(user.id).returns(user)
Spontaneous::Permissions::AccessKey.stubs(:authenticate).with(key.key_id).returns(key)
let(:site) { site }
let(:fixture_root) { fixture_root }
let(:user) { user }
let(:key) { key }
end
finish do
teardown_site
end
before do
@page = Page.create
end
after do
tmp = site.path('assets/tmp')
FileUtils.rm_r(tmp) if tmp.exist?
Content.delete
end
describe "Preview context" do
it "should not be flagged as publishing" do
refute preview_context.publishing?
end
it "should not have the development? flag set" do
refute preview_context.development?
end
end
describe "Development context" do
it "should not be flagged as publishing" do
refute development_context.publishing?
end
it "should have the development? flag set" do
assert development_context.development?
end
end
describe "Publishing context" do
it "be flagged as publishing" do
Spontaneous.stubs(:production?).returns(true)
assert live_context.publishing?
end
it "be flagged as live" do
Spontaneous.stubs(:production?).returns(true)
assert live_context.live?
end
it "be flagged as publishing" do
assert live_context.publishing?
end
end
describe "development" do
let(:context) { development_context }
let(:a_js_digest) { asset_digest('public1/js/a.js') }
let(:b_js_digest) { asset_digest('public2/js/b.js') }
let(:c_js_digest) { asset_digest('public2/js/c.js') }
let(:x_js_digest) { asset_digest('public1/x.js') }
# these are compiled so fairly complex to calculate their digests
# not impossible, but annoying
let(:n_js_digest) { '74f175e03a4bdc8c807aba4ae0314938' }
let(:m_js_digest) { 'dd35b142dc75b6ec15b2138e9e91c0c3' }
let(:all_js_digest) { 'd406fc3c21d90828a2f0a718c89e8d99' }
let(:a_css_digest) { '7b04d295476986c24d8c77245943e5b9' }
let(:b_css_digest) { '266643993e14da14f2473d45f003bd2c' }
let(:c_css_digest) { 'fc8ba0d0aae64081dc00b8444a198fb8' }
let(:x_css_digest) { '2560aec2891794825eba770bf84823fb' }
let(:all_css_digest) { 'cf61c624b91b9ea126804291ac55bd5d' }
it "includes all js dependencies" do
result = context.scripts('js/all', 'js/m', 'js/c', 'x')
result.must_equal [
%|<script type="text/javascript" src="/assets/js/a.js?body=1&#{a_js_digest}"></script>|,
%|<script type="text/javascript" src="/assets/js/b.js?body=1&#{b_js_digest}"></script>|,
%|<script type="text/javascript" src="/assets/js/n.js?body=1&#{n_js_digest}"></script>|,
%|<script type="text/javascript" src="/assets/js/all.js?body=1&#{all_js_digest}"></script>|,
%|<script type="text/javascript" src="/assets/js/m.js?body=1&#{m_js_digest}"></script>|,
%|<script type="text/javascript" src="/assets/js/c.js?body=1&#{c_js_digest}"></script>|,
%|<script type="text/javascript" src="/assets/x.js?body=1&#{x_js_digest}"></script>|
].join("\n")
end
it "doesn't bundle js files" do
get "/assets/js/all.js?body=1"
result = last_response.body
result.wont_match /elvis/
end
it "includes all css dependencies" do
result = context.stylesheets('css/all', 'css/c', 'x')
result.must_equal [
%|<link rel="stylesheet" href="/assets/css/b.css?body=1&#{b_css_digest}" />|,
%|<link rel="stylesheet" href="/assets/css/a.css?body=1&#{a_css_digest}" />|,
%|<link rel="stylesheet" href="/assets/css/all.css?body=1&#{all_css_digest}" />|,
%|<link rel="stylesheet" href="/assets/css/c.css?body=1&#{c_css_digest}" />|,
%|<link rel="stylesheet" href="/assets/x.css?body=1&#{x_css_digest}" />|
].join("\n")
end
it "doesn't bundle js files" do
get "/assets/css/all.css?body=1"
result = last_response.body
result.must_match %r(/\*\s+\*/)
end
it "allows for protocol agnostic absolute script urls" do
result = context.scripts('//use.typekit.com/abcde')
result.must_equal '<script type="text/javascript" src="//use.typekit.com/abcde"></script>'
end
end
describe "preview" do
let(:app) { Spontaneous::Rack::Back.application(site) }
let(:context) { preview_context }
let(:c_js_digest) { 'f669550dd7e10e9646ad781f44756950' }
let(:x_js_digest) { '6b4c9176b2838a4949a18284543fc19c' }
let(:n_js_digest) { '74f175e03a4bdc8c807aba4ae0314938' }
let(:m_js_digest) { 'dd35b142dc75b6ec15b2138e9e91c0c3' }
let(:all_js_digest) { 'cd1f681752f5038421be0bc5ea0e855d' }
let(:c_css_digest) { 'fc8ba0d0aae64081dc00b8444a198fb8' }
let(:x_css_digest) { '2560aec2891794825eba770bf84823fb' }
let(:all_css_digest) { 'bb2c289a27b3d5d4467dde6d60722fd3' }
describe "javascript" do
it "include scripts as separate files with finger prints" do
result = context.scripts('js/all', 'js/m.js', 'js/c.js', 'x')
result.must_equal [
%|<script type="text/javascript" src="/assets/js/all.js?#{all_js_digest}"></script>|,
%|<script type="text/javascript" src="/assets/js/m.js?#{m_js_digest}"></script>|,
%|<script type="text/javascript" src="/assets/js/c.js?#{c_js_digest}"></script>|,
%|<script type="text/javascript" src="/assets/x.js?#{x_js_digest}"></script>|
].join("\n")
end
it "handles urls passed as an array" do
result = context.scripts(['js/all', 'js/m.js'])
result.must_equal [
%|<script type="text/javascript" src="/assets/js/all.js?#{all_js_digest}"></script>|,
%|<script type="text/javascript" src="/assets/js/m.js?#{m_js_digest}"></script>|
].join("\n")
end
it "should ignore missing files" do
result = context.scripts('js/all', 'js/missing')
result.must_equal [
%|<script type="text/javascript" src="/assets/js/all.js?#{all_js_digest}"></script>|,
'<script type="text/javascript" src="js/missing.js"></script>'
].join("\n")
end
it "should pass through absolute urls" do
result = context.scripts('/js/all.js')
result.must_equal '<script type="text/javascript" src="/js/all.js"></script>'
end
it "should bundle assets" do
get "/assets/js/all.js"
assert last_response.ok?, "Recieved #{last_response.status} not 200"
result = last_response.body
result.must_match /var a = 1/
result.must_match /var b = 2/
result.must_match %r{alert\("I knew it!"\);}
end
it "should preprocess coffeescript" do
get "/assets/js/m.js"
assert last_response.ok?, "Recieved #{last_response.status} not 200"
result = last_response.body
result.must_match /square = function\(x\)/
end
it "should allow access to straight js" do
get "/assets/x.js"
assert last_response.ok?, "Recieved #{last_response.status} not 200"
result = last_response.body
result.must_match %r{var x = 1;}
end
it "should use absolute URLs when encountered" do
context = preview_context
result = context.scripts('js/all', '//use.typekit.com/abcde', 'http://cdn.google.com/jquery.js', 'https://cdn.google.com/jquery.js')
result.must_equal [
%|<script type="text/javascript" src="/assets/js/all.js?#{all_js_digest}"></script>|,
'<script type="text/javascript" src="//use.typekit.com/abcde"></script>',
'<script type="text/javascript" src="http://cdn.google.com/jquery.js"></script>',
'<script type="text/javascript" src="https://cdn.google.com/jquery.js"></script>'
].join("\n")
end
end
describe "css" do
it "include css files as separate links" do
result = context.stylesheets('css/all', 'css/c', 'x')
result.must_equal [
%|<link rel="stylesheet" href="/assets/css/all.css?#{all_css_digest}" />|,
%|<link rel="stylesheet" href="/assets/css/c.css?#{c_css_digest}" />|,
%|<link rel="stylesheet" href="/assets/x.css?#{x_css_digest}" />|
].join("\n")
end
it "allows passing scripts as an array" do
result = context.stylesheets(['css/all', 'css/c', 'x'])
result.must_equal [
%|<link rel="stylesheet" href="/assets/css/all.css?#{all_css_digest}" />|,
%|<link rel="stylesheet" href="/assets/css/c.css?#{c_css_digest}" />|,
%|<link rel="stylesheet" href="/assets/x.css?#{x_css_digest}" />|
].join("\n")
end
it "should bundle dependencies" do
get "/assets/css/all.css"
assert last_response.ok?, "Recieved #{last_response.status} not 200"
result = last_response.body
result.must_match %r{height: 42px;}
result.must_match %r{width: 8px;}
end
it "should compile sass" do
get "/assets/css/b.css"
assert last_response.ok?, "Recieved #{last_response.status} not 200"
result = last_response.body
result.must_match %r{height: 42px;}
end
it "links to images" do
get "/assets/css/image1.css"
assert last_response.ok?, "Recieved #{last_response.status} not 200"
result = last_response.body
result.must_match %r{background: url\(/assets/i/y\.png\)}
end
it "passes through non-existant images" do
get "/assets/css/missing.css"
assert last_response.ok?, "Recieved #{last_response.status} not 200"
result = last_response.body
result.must_match %r{background: url\(i\/missing\.png\)}
end
it "can understand urls with hashes" do
get "/assets/css/urlhash.css"
assert last_response.ok?, "Recieved #{last_response.status} not 200"
result = last_response.body
result.must_match %r{background: url\(/assets/i/y\.png\?query=true#hash\)}
end
it "embeds image data" do
get "/assets/css/data.css"
assert last_response.ok?, "Recieved #{last_response.status} not 200"
result = last_response.body
result.must_match %r{background-image: url\(data:image\/png;base64,}
end
it "can include other assets" do
get "/assets/css/import.css"
assert last_response.ok?, "Recieved #{last_response.status} not 200"
result = last_response.body
result.must_match %r{width: 8px;}
end
end
describe "templates" do
let(:renderer) { Spontaneous::Output::Template::PreviewRenderer.new(site) }
it "should allow for embedding asset images into templates" do
result = renderer.render_string("${ asset_path 'i/y.png' }", @page.output(:html))
result.must_equal "/assets/i/y.png?#{y_png_digest}"
end
it "should allow for embedding asset urls into templates" do
result = renderer.render_string("${ asset_url 'i/y.png' }", @page.output(:html))
result.must_equal "url(/assets/i/y.png?#{y_png_digest})"
end
end
end
describe "publishing" do
let(:app) { Spontaneous::Rack::Front.application(site) }
let(:context) { live_context }
let(:revision) { site.revision(context.revision) }
let(:progress) { Spontaneous::Publishing::Progress::Silent.new }

# Run the external asset publishing step manually: compile the asset
# manifest, then copy the compiled files into the given revision the
# same way the publish pipeline's CopyAssets step would.
def publish_assets(revision)
  context.asset_environment.manifest.compile!
  Spontaneous::Publishing::Steps::CopyAssets.new(site, revision, [], progress).call
end

before do
  # Point the current-revision symlink at this revision's root and
  # publish the assets into it before each example.
  FileUtils.rm_f(Spontaneous.revision_dir) if File.exist?(Spontaneous.revision_dir)
  system "ln -nfs #{revision.root} #{Spontaneous.revision_dir}"
  publish_assets(context.revision)
end
after do
revision.path("assets").rmtree if revision.path("assets").exist?
end
describe "javascript" do
# Expected content digests of the bundled outputs.
let(:all_sha) { "ed62549e8edc1f61a1e27136602f01d9" }
let(:x_sha) { "66e92be1e412458f6ff02f4c5dd9beb1" }
it "bundles & fingerprints local scripts" do
result = context.scripts('js/all', 'js/m.js', 'js/c.js', 'x')
result.must_equal [
%(<script type="text/javascript" src="/assets/js/all-#{all_sha}.js"></script>),
'<script type="text/javascript" src="/assets/js/m-a5be7324bc314d5cf470a59c3732ef10.js"></script>',
'<script type="text/javascript" src="/assets/js/c-c24bcbb4f9647b078cc919746aa7fc3a.js"></script>',
%(<script type="text/javascript" src="/assets/x-#{x_sha}.js"></script>)
].join("\n")
end
it "writes bundled assets to the revision directory" do
result = context.scripts('js/all')
asset_path = revision.path("assets/js/all-#{all_sha}.js")
assert asset_path.exist?
end
it "compresses local scripts" do
result = context.scripts('js/all')
asset_path = revision.path("assets/js/all-#{all_sha}.js")
js = asset_path.read
# Minified output should be a single line.
js.index("\n").must_be_nil
end
it "bundles locals scripts and includes remote ones" do
result = context.scripts('js/all', '//use.typekit.com/abcde', 'http://cdn.google.com/jquery.js', 'x')
result.must_equal [
%(<script type="text/javascript" src="/assets/js/all-#{all_sha}.js"></script>),
'<script type="text/javascript" src="//use.typekit.com/abcde"></script>',
'<script type="text/javascript" src="http://cdn.google.com/jquery.js"></script>',
%(<script type="text/javascript" src="/assets/x-#{x_sha}.js"></script>)
].join("\n")
end
it "makes bundled scripts available under /assets" do
context.scripts('js/all')
get "/assets/js/all-#{all_sha}.js"
asset_path = revision.path("assets/js/all-#{all_sha}.js")
last_response.body.must_equal asset_path.read
end
it "only bundles & compresses once" do
context.scripts('js/all')
asset_path = revision.path("assets/js/all-#{all_sha}.js")
assert asset_path.exist?
# Overwrite the compiled file; a second call must not recompile it.
asset_path.open("w") do |file|
file.write("var cached = true;")
end
context.scripts('js/all')
asset_path.read.must_equal "var cached = true;"
end
describe "re-use" do
before do
@result = context.scripts('js/all', 'x')
end
it "uses assets from a previous publish if present" do
# Simulate a later revision and check the cached compilation is reused.
context = live_context
def context.revision; 100 end
revision = site.revision(context.revision)
publish_assets(context.revision)
manifest = Spontaneous::JSON.parse File.read(site.path("assets/tmp") + "manifest.json")
compiled = manifest[:assets][:"js/all.js"]
::File.open(site.path("assets/tmp")+compiled, 'w') do |file|
file.write("var reused = true;")
end
result = context.scripts('js/all', 'x')
rev = revision.path("assets") + compiled
File.read(rev).must_equal "var reused = true;"
end
end
end
# Published stylesheet bundling specs.
# FIX: "Recieved" -> "Received" in the assertion failure messages.
describe "css" do
  let(:all_sha) { "2e17f25ddeba996223a6cd1e28e7a319" }
  let(:x_sha) { "2560aec2891794825eba770bf84823fb" }
  it "bundles & fingerprints local stylesheets" do
    result = context.stylesheets('css/all', 'css/a.css', 'x')
    result.must_equal [
      %(<link rel="stylesheet" href="/assets/css/all-#{all_sha}.css" />),
      '<link rel="stylesheet" href="/assets/css/a-0164c6d5b696ec2f2c5e70cade040da8.css" />',
      %(<link rel="stylesheet" href="/assets/x-#{x_sha}.css" />)
    ].join("\n")
  end
  it "ignores missing stylesheets" do
    result = context.stylesheets('css/all', '/css/notfound', 'css/notfound')
    result.must_equal [
      %(<link rel="stylesheet" href="/assets/css/all-#{all_sha}.css" />),
      '<link rel="stylesheet" href="/css/notfound" />',
      '<link rel="stylesheet" href="css/notfound" />'
    ].join("\n")
  end
  it "bundles locals scripts and includes remote ones" do
    result = context.stylesheets('css/all.css', '//stylesheet.com/responsive', 'http://cdn.google.com/normalize.css', 'x')
    result.must_equal [
      %(<link rel="stylesheet" href="/assets/css/all-#{all_sha}.css" />),
      '<link rel="stylesheet" href="//stylesheet.com/responsive" />',
      '<link rel="stylesheet" href="http://cdn.google.com/normalize.css" />',
      %(<link rel="stylesheet" href="/assets/x-#{x_sha}.css" />)
    ].join("\n")
  end
  it "makes bundled stylesheets available under /assets" do
    path = context.stylesheet_urls('css/all').first
    get path
    asset_path = revision.path(path)
    last_response.body.must_equal asset_path.read
  end
  it "compresses local styles" do
    path = context.stylesheet_urls('css/all').first
    asset_path = revision.path(path)
    css = asset_path.read
    css.index(" ").must_be_nil
  end
  it "only bundles & compresses once" do
    path = context.stylesheet_urls('css/all').first
    asset_path = revision.path(path)
    assert asset_path.exist?
    asset_path.open("w") do |file|
      file.write(".cached { }")
    end
    context.stylesheets('css/all')
    asset_path.read.must_equal ".cached { }"
  end
  it "passes through non-existant images" do
    path = context.stylesheet_urls('css/missing.css').first
    get path
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match /background:url\(i\/missing\.png\)/
  end
  it "can include other assets" do
    path = context.stylesheet_urls('css/import').first
    get path
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match /width:8px/
  end
end
# Published image handling specs.
# FIX: "Recieved" -> "Received" in the assertion failure messages.
describe "images" do
  it "bundles images and links using fingerprinted asset url" do
    path = context.stylesheet_urls('css/image1').first
    get path
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    # Published image urls carry the digest in the file name itself.
    result.must_match %r{background:url\(/assets/i/y-#{y_png_digest}\.png\)}
    asset_path = revision.path("/assets/i/y-#{y_png_digest}.png")
    assert asset_path.exist?
  end
  it "can insert data urls for assets" do
    path = context.stylesheet_urls('css/data').first
    get path
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{background-image:url\(data:image/png;base64}
  end
  it "can understand urls with hashes" do
    path = context.stylesheet_urls('css/urlhash').first
    get path
    assert last_response.ok?, "Received #{last_response.status} not 200"
    result = last_response.body
    result.must_match %r{background:url\(/assets/i/y-#{y_png_digest}\.png\?query=true#hash\)}
    asset_path = revision.path("/assets/i/y-#{y_png_digest}.png")
    assert asset_path.exist?
  end
end
# Published rendering: helpers emit fingerprinted asset paths (digest baked
# into the file name rather than appended as a query string).
describe "templates" do
let(:renderer) { Spontaneous::Output::Template::PublishRenderer.new(site) }
it "should allow for embedding asset images into templates" do
result = renderer.render_string("${ asset_path 'i/y.png' }", @page.output(:html))
result.must_equal "/assets/i/y-#{y_png_digest}.png"
end
it "should allow for embedding asset urls into templates" do
result = renderer.render_string("${ asset_url 'i/y.png' }", @page.output(:html))
result.must_equal "url(/assets/i/y-#{y_png_digest}.png)"
end
end
end
end
|
#
# Cookbook Name:: omnibus
# HWRP:: xcode_cli
#
# Author:: Yvonne Lam <yvonne@getchef.com>
#
# Copyright 2014, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'mixlib/shellout'
class Chef
# Resource side of the xcode_cli HWRP: declares the Xcode CLI tools as an
# installable resource, keyed by the target OS X version.
class Resource::XcodeCli < Resource
identity_attr :version
# Set by the provider once it has determined whether the tools are installed.
attr_writer :exists
def initialize(version, run_context = nil)
super
require 'chef-sugar' unless defined?(Chef::Sugar)
# Set the resource name and provider
@resource_name = :xcode_cli
@provider = Provider::XcodeCli
# Set default actions and allowed actions
@action = :install
@allowed_actions.push(:install)
# Set the osx version
@version = version
end
#
# The osx version
#
# @param [String] arg
# @return [String]
#
def version(arg = nil)
set_or_return(:version, arg, kind_of: String)
end
#
# Determine if xcode-cli is already installed.
# This value is set by the provider when the current resource is loaded.
#
# @return [Boolean]
#
def exists?
!!@exists
end
end
end
class Chef
  # Provider side of the xcode_cli HWRP: checks for an existing Xcode CLI
  # tools receipt and installs the tools via `softwareupdate` if missing.
  class Provider::XcodeCli < Provider
    def load_current_resource
      @current_resource ||= Chef::Resource::XcodeCli.new(new_resource.version)
      if current_xcode_install
        @current_resource.exists = true
        @current_resource.version(new_resource.version)
      end
    end

    #
    # This provider supports whyrun
    #
    def whyrun_supported?
      true
    end

    #
    # Install the xcode cli unless it is already present.
    #
    def action_install
      if @current_resource.exists?
        Chef::Log.info("xcode-cli #{new_resource} installed - skipping")
      else
        converge_by("Create xcode-cli #{new_resource}") do
          install_xcode_cli
        end
      end
    end

    private

    #
    # Look up an existing Xcode CLI tools install via pkgutil.
    #
    # @return [Hash, nil] version info when installed, nil otherwise
    #
    def current_xcode_install
      return @current_xcode_install if @current_xcode_install
      # FIX: an unsupported OS X version previously fell through silently,
      # making the whole converge a no-op; fail loudly instead.
      fail 'Installation of Xcode CLI tools is supported for Mac OS X 10.9 only' unless Chef::Sugar::Constraints.version(new_resource.version).satisfies?('~> 10.9')
      cmd = 'pkgutil --pkg-info=com.apple.pkg.CLTools_Executables'
      command = Mixlib::ShellOut.new(cmd, timeout: 30)
      command.run_command
      # pkgutil exits non-zero when no receipt exists.
      # (FIX: `command.status != 0` compared a Process::Status object against
      # an Integer and a no-op `command.stdout.strip` discarded its result;
      # use ShellOut's error? check instead.)
      return nil if command.error?
      @current_xcode_install = {
        version: new_resource.version,
      }
    end

    #
    # Run a shell command, raising if it fails.
    #
    # @return [String] the command's stripped stdout
    #
    def execute(*pieces)
      command = Mixlib::ShellOut.new(pieces.join(' '), timeout: 120)
      command.run_command
      command.error!
      command.stdout.strip
    end

    #
    # Install the Xcode CLI tools: flag the on-demand install, then let
    # softwareupdate locate and install the "Developer" package.
    #
    def install_xcode_cli
      if Chef::Sugar::Constraints.version(new_resource.version).satisfies?('~> 10.9')
        execute <<-EOF
          touch /tmp/.com.apple.dt.CommandLineTools.installondemand.in-progress
          PROD=$(softwareupdate -l | grep -B 1 "Developer" | head -n 1 | awk -F"*" '{print $2}')
          softwareupdate -i $PROD -v
        EOF
      end
    end
  end
end
Fail with a decent error message on unsupported versions of OS X.
Add missing docs and start cleanup of embedded bash commands.
#
# Cookbook Name:: omnibus
# HWRP:: xcode_cli
#
# Author:: Yvonne Lam <yvonne@getchef.com>
#
# Copyright 2014, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'mixlib/shellout'
class Chef
# Resource side of the xcode_cli HWRP: declares the Xcode CLI tools as an
# installable resource, keyed by the target OS X version.
class Resource::XcodeCli < Resource
identity_attr :version
# Set by the provider once it has determined whether the tools are installed.
attr_writer :exists
def initialize(version, run_context = nil)
super
require 'chef-sugar' unless defined?(Chef::Sugar)
# Set the resource name and provider
@resource_name = :xcode_cli
@provider = Provider::XcodeCli
# Set default actions and allowed actions
@action = :install
@allowed_actions.push(:install)
# Set the osx version
@version = version
end
#
# The osx version
#
# @param [String] arg
# @return [String]
# @raise [RuntimeError] when the version is outside the supported ~> 10.9 line
#
def version(arg = nil)
unless arg.nil? || Chef::Sugar::Constraints.version(arg).satisfies?('~> 10.9')
fail 'Installation of Xcode CLI tools is supported for Mac OS X 10.9 only'
end
set_or_return(:version, arg, kind_of: String)
end
#
# Determine if xcode-cli is already installed.
# This value is set by the provider when the current resource is loaded.
#
# @return [Boolean]
#
def exists?
!!@exists
end
end
end
class Chef
  # Provider side of the xcode_cli HWRP: checks for an existing Xcode CLI
  # tools receipt and installs the tools via `softwareupdate` if missing.
  class Provider::XcodeCli < Provider
    def load_current_resource
      @current_resource ||= Chef::Resource::XcodeCli.new(new_resource.version)
      if current_xcode_install
        @current_resource.exists = true
        @current_resource.version(new_resource.version)
      end
    end

    #
    # This provider supports whyrun
    #
    def whyrun_supported?
      true
    end

    #
    # Install the xcode cli unless it is already present.
    #
    def action_install
      if @current_resource.exists?
        Chef::Log.info("xcode-cli #{new_resource} installed - skipping")
      else
        converge_by("Create xcode-cli #{new_resource}") do
          install_xcode_cli
        end
      end
    end

    private

    #
    # Look up an existing Xcode CLI tools install via pkgutil.
    # (FIX: the previous @return tag claimed "STDOUT from install command".)
    #
    # @return [Hash, nil] version info when installed, nil otherwise
    #
    def current_xcode_install
      return @current_xcode_install if @current_xcode_install
      fail 'Installation of Xcode CLI tools is supported for Mac OS X 10.9 only' unless Chef::Sugar::Constraints.version(new_resource.version).satisfies?('~> 10.9')
      cmd = 'pkgutil --pkg-info=com.apple.pkg.CLTools_Executables'
      command = Mixlib::ShellOut.new(cmd, timeout: 30)
      command.run_command
      # pkgutil exits non-zero when no receipt exists.
      # (FIX: `command.status != 0` compared a Process::Status object against
      # an Integer and a no-op `command.stdout.strip` discarded its result;
      # use ShellOut's error? check instead.)
      return nil if command.error?
      @current_xcode_install = {
        version: new_resource.version,
      }
    end

    #
    # Run a shell command, raising if it fails.
    #
    # @return [String] the command's stripped stdout
    #
    def execute(*pieces)
      command = Mixlib::ShellOut.new(pieces.join(' '), timeout: 120)
      command.run_command
      command.error!
      command.stdout.strip
    end

    #
    # Install the Xcode CLI tools: flag the on-demand install, then let
    # softwareupdate locate and install the "Developer" package.
    #
    def install_xcode_cli
      if Chef::Sugar::Constraints.version(new_resource.version).satisfies?('~> 10.9')
        execute <<-EOH.gsub(/^ {8}/, '')
        touch /tmp/.com.apple.dt.CommandLineTools.installondemand.in-progress
        PROD=$(softwareupdate -l | grep -B 1 "Developer" | head -n 1 | awk -F"*" '{print $2}')
        softwareupdate -i $PROD -v
        EOH
      end
    end
  end
end
|
# Build definition for mruby-wslay, an mruby wrapper around the wslay
# WebSocket library.
MRuby::Gem::Specification.new('mruby-wslay') do |spec|
  spec.license = 'Apache-2'
  spec.author  = 'Hendrik Beskow'
  spec.summary = 'mruby wrapper for wslay'

  # Runtime mrbgem dependencies, declared in one place.
  %w[mruby-sysrandom mruby-errno mruby-struct mruby-string-is-utf8].each do |gem_name|
    spec.add_dependency gem_name
  end
end
Search for the required system headers automatically.
# Build definition for mruby-wslay, an mruby wrapper around the wslay
# WebSocket library.
MRuby::Gem::Specification.new('mruby-wslay') do |spec|
  spec.license = 'Apache-2'
  spec.author  = 'Hendrik Beskow'
  spec.summary = 'mruby wrapper for wslay'

  # Runtime mrbgem dependencies, declared in one place.
  %w[mruby-sysrandom mruby-errno mruby-struct mruby-string-is-utf8].each do |gem_name|
    spec.add_dependency gem_name
  end

  # Detect the available socket headers automatically (newer mruby build
  # systems expose search_header_path on the compiler object).
  if spec.cc.respond_to? :search_header_path
    { 'HAVE_ARPA_INET_H'  => 'arpa/inet.h',
      'HAVE_NETINET_IN_H' => 'netinet/in.h',
      'HAVE_WINSOCK2_H'   => 'winsock2.h' }.each do |define, header|
      spec.cc.defines << define if spec.cc.search_header_path header
    end
  end
end
|
Create mrbgem.rake
# Build definition for mruby-tnetstrings, a TNetString parser/writer mrbgem.
MRuby::Gem::Specification.new('mruby-tnetstrings') do |spec|
spec.license = 'MIT'
spec.author = 'Hendrik Beskow'
spec.summary = 'mruby TNetString parser/writer'
end
|
# frozen_string_literal: true
# Original source:
# https://github.com/Mogztter/asciidoctor-kroki/blob/master/ruby/lib/asciidoctor/extensions/asciidoctor_kroki/extension.rb
require 'asciidoctor/extensions' unless RUBY_ENGINE == 'opal'
# Asciidoctor extensions
#
module AsciidoctorExtensions
include Asciidoctor
# A block extension that converts a diagram into an image.
#
class KrokiBlockProcessor < Extensions::BlockProcessor
use_dsl
on_context :listing, :literal
name_positional_attributes 'target', 'format'
# Convert a listing/literal block of diagram source into an image (or
# literal) block; the diagram language comes from the registered name.
def process(parent, reader, attrs)
diagram_type = @name
diagram_text = reader.string
KrokiProcessor.process(self, parent, attrs, diagram_type, diagram_text)
end
end
# A block macro extension that converts a diagram into an image.
#
class KrokiBlockMacroProcessor < Asciidoctor::Extensions::BlockMacroProcessor
use_dsl
name_positional_attributes 'format'
# Resolve the macro target (after attribute substitution), load the diagram
# source it points at and delegate to KrokiProcessor.
def process(parent, target, attrs)
diagram_type = @name
target = parent.apply_subs(target, [:attributes])
diagram_text = read(target)
KrokiProcessor.process(self, parent, attrs, diagram_type, diagram_text)
end
# Read diagram source from an http(s) URL (via open-uri, required lazily)
# or from a local file path.
def read(target)
if target.start_with?('http://') || target.start_with?('https://')
require 'open-uri'
URI.open(target, &:read)
else
File.open(target, &:read)
end
end
end
# Kroki API
#
module Kroki
# Diagram languages this extension registers block and block-macro names for.
SUPPORTED_DIAGRAM_NAMES = %w[
actdiag
blockdiag
bpmn
bytefield
c4plantuml
ditaa
erd
excalidraw
graphviz
mermaid
nomnoml
nwdiag
packetdiag
plantuml
rackdiag
seqdiag
svgbob
umlet
vega
vegalite
wavedrom
].freeze
end
# Internal processor
#
# Internal processor shared by the block and block-macro extensions.
class KrokiProcessor
  TEXT_FORMATS = %w[txt atxt utxt].freeze

  class << self
    # Build an Asciidoctor block (image or literal) for the given diagram.
    #
    # @param processor [Asciidoctor::Extensions::Processor] calling extension
    # @param parent [Asciidoctor::AbstractBlock] parent block
    # @param attrs [Hash] block attributes
    # @param diagram_type [Symbol] diagram language (e.g. :plantuml)
    # @param diagram_text [String] diagram source text
    def process(processor, parent, attrs, diagram_type, diagram_text)
      doc = parent.document
      diagram_text = prepend_plantuml_config(diagram_text, diagram_type, doc)
      # If "subs" attribute is specified, substitute accordingly.
      # Be careful not to specify "specialcharacters" or your diagram code won't be valid anymore!
      if (subs = attrs['subs'])
        diagram_text = parent.apply_subs(diagram_text, parent.resolve_subs(subs))
      end
      title = attrs.delete('title')
      caption = attrs.delete('caption')
      attrs.delete('opts')
      role = attrs['role']
      format = get_format(doc, attrs, diagram_type)
      attrs['role'] = get_role(format, role)
      attrs['format'] = format
      kroki_diagram = KrokiDiagram.new(diagram_type, format, diagram_text)
      kroki_client = KrokiClient.new(server_url(doc), http_method(doc), KrokiHttpClient)
      if TEXT_FORMATS.include?(format)
        text_content = kroki_client.text_content(kroki_diagram)
        block = processor.create_block(parent, 'literal', text_content, attrs)
      else
        attrs['alt'] = get_alt(attrs)
        attrs['target'] = create_image_src(doc, kroki_diagram, kroki_client)
        block = processor.create_image_block(parent, attrs)
      end
      block.title = title if title
      block.assign_caption(caption, 'figure')
      block
    end

    private

    # Prepend the configured PlantUML include file (kroki-plantuml-include)
    # to PlantUML diagrams.
    def prepend_plantuml_config(diagram_text, diagram_type, doc)
      if diagram_type == :plantuml && doc.attr?('kroki-plantuml-include')
        # TODO: this behaves different than the JS version
        # The file should be added by !include #{plantuml_include}" once we have a preprocessor for ruby
        config = File.read(doc.attr('kroki-plantuml-include'))
        diagram_text = config + "\n" + diagram_text
      end
      diagram_text
    end

    # Alt text falls back from title to target to a generic label.
    def get_alt(attrs)
      if (title = attrs['title'])
        title
      elsif (target = attrs['target'])
        target
      else
        'Diagram'
      end
    end

    def get_role(format, role)
      if role
        if format
          "#{role} kroki-format-#{format} kroki"
        else
          "#{role} kroki"
        end
      else
        'kroki'
      end
    end

    def get_format(doc, attrs, diagram_type)
      format = attrs['format'] || 'svg'
      # The JavaFX preview doesn't support SVG well, therefore we'll use PNG format...
      if doc.attr?('kroki-force-png') && format == 'svg'
        # ... unless the diagram library does not support PNG as output format!
        # Currently, mermaid, nomnoml, svgbob, wavedrom only support SVG as output format.
        # FIX: the list used to be %w[:mermaid ...] — literal ":name" strings
        # that could never equal a diagram type, so SVG-only diagrams were
        # wrongly forced to PNG.
        svg_only_diagram_types = %i[mermaid nomnoml svgbob wavedrom]
        format = 'png' unless svg_only_diagram_types.include?(diagram_type.to_sym)
      end
      format
    end

    # Fetch the diagram locally (kroki-fetch-diagram) or link to the server.
    def create_image_src(doc, kroki_diagram, kroki_client)
      if doc.attr('kroki-fetch-diagram')
        kroki_diagram.save(output_dir_path(doc), kroki_client)
      else
        kroki_diagram.get_diagram_uri(server_url(doc))
      end
    end

    def server_url(doc)
      doc.attr('kroki-server-url', 'https://kroki.io')
    end

    def http_method(doc)
      doc.attr('kroki-http-method', 'adaptive').downcase
    end

    # Resolve the directory fetched diagrams are written to:
    # imagesoutdir > outdir > to_dir > base_dir, each joined with imagesdir.
    def output_dir_path(doc)
      images_output_dir = doc.attr('imagesoutdir')
      out_dir = doc.attr('outdir')
      to_dir = doc.attr('to_dir')
      base_dir = doc.base_dir
      images_dir = doc.attr('imagesdir', '')
      if images_output_dir
        images_output_dir
      elsif out_dir
        File.join(out_dir, images_dir)
      elsif to_dir
        File.join(to_dir, images_dir)
      else
        File.join(base_dir, images_dir)
      end
    end
  end
end
# Kroki diagram
#
# Kroki diagram: encodes diagram source for transport and optionally saves
# the rendered image to disk.
class KrokiDiagram
  require 'fileutils'
  require 'zlib'
  require 'digest'
  # FIX: Base64 is used by #encode but 'base64' was never required.
  require 'base64'

  # Output formats written as UTF-8 text rather than raw bytes.
  TEXT_LIKE_FORMATS = %w[txt atxt utxt].freeze

  attr_reader :type
  attr_reader :text
  attr_reader :format

  # @param type [Symbol, String] diagram language (e.g. :plantuml)
  # @param format [String] output format ('svg', 'png', 'txt', ...)
  # @param text [String] diagram source
  def initialize(type, format, text)
    @text = text
    @type = type
    @format = format
  end

  # Full GET URI for this diagram on the given Kroki server.
  def get_diagram_uri(server_url)
    _join_uri_segments(server_url, @type, @format, encode)
  end

  # Deflate (level 9) + URL-safe Base64: the encoding Kroki expects in GET URLs.
  def encode
    Base64.urlsafe_encode64(Zlib::Deflate.deflate(@text, 9))
  end

  # Write the diagram into output_dir_path (fetching it from the server unless
  # a file with the same content hash already exists) and return the file name.
  def save(output_dir_path, kroki_client)
    diagram_url = get_diagram_uri(kroki_client.server_url)
    diagram_name = "diag-#{Digest::SHA256.hexdigest diagram_url}.#{@format}"
    file_path = File.join(output_dir_path, diagram_name)
    # (The original if/elsif/else mapped both 'svg' and the fallback branch to
    # 'binary'; collapsed to a single membership test.)
    encoding = TEXT_LIKE_FORMATS.include?(@format) ? 'utf8' : 'binary'
    # file is either (already) on the file system or we should read it from Kroki
    contents = File.exist?(file_path) ? File.open(file_path, &:read) : kroki_client.get_image(self, encoding)
    FileUtils.mkdir_p(output_dir_path)
    if encoding == 'binary'
      File.binwrite(file_path, contents)
    else
      File.write(file_path, contents)
    end
    diagram_name
  end

  private

  def _join_uri_segments(base, *uris)
    segments = []
    # remove trailing slashes
    segments.push(base.gsub(%r{[/]+$}, ''))
    segments.concat(uris.map do |uri|
      # remove leading and trailing slashes
      uri.to_s
         .gsub(%r{^[/]+}, '')
         .gsub(%r{[/]+$}, '')
    end)
    segments.join('/')
  end
end
# Kroki client
#
# Chooses between HTTP GET and POST when talking to a Kroki server.
class KrokiClient
  attr_reader :server_url, :method

  SUPPORTED_HTTP_METHODS = %w[get post adaptive].freeze

  # @param server_url [String] base URL of the Kroki server
  # @param http_method [String, nil] 'get', 'post' or 'adaptive' (default)
  # @param http_client [#get, #post] transport used for the actual requests
  def initialize(server_url, http_method, http_client)
    @server_url = server_url
    @max_uri_length = 4096
    @http_client = http_client
    requested = (http_method || 'adaptive').downcase
    unless SUPPORTED_HTTP_METHODS.include?(requested)
      puts "Invalid value '#{requested}' for kroki-http-method attribute. The value must be either: 'get', 'post' or 'adaptive'. Proceeding using: 'adaptive'."
      requested = 'adaptive'
    end
    @method = requested
  end

  # Text formats are fetched as UTF-8.
  def text_content(kroki_diagram)
    get_image(kroki_diagram, 'utf-8')
  end

  # Fetch the rendered diagram. GET is preferred; 'adaptive' switches to POST
  # once the encoded URI exceeds @max_uri_length, while a forced 'get' keeps
  # using GET even then (risking a 414 Request-URI Too Large from the server).
  def get_image(kroki_diagram, encoding)
    return post_request(kroki_diagram, encoding) if @method == 'post'
    uri = kroki_diagram.get_diagram_uri(server_url)
    if uri.length > @max_uri_length && @method == 'adaptive'
      post_request(kroki_diagram, encoding)
    else
      @http_client.get(uri, encoding)
    end
  end

  private

  # POST the raw diagram text to the server's type/format endpoint.
  def post_request(kroki_diagram, encoding)
    @http_client.post("#{@server_url}/#{kroki_diagram.type}/#{kroki_diagram.format}", kroki_diagram.text, encoding)
  end
end
# Kroki HTTP client
#
# Kroki HTTP client: minimal GET/POST helpers used by KrokiClient.
class KrokiHttpClient
  require 'net/http'
  require 'uri'
  require 'json'
  # FIX: OpenURI was referenced below but 'open-uri' was never required.
  require 'open-uri'

  class << self
    # GET the rendered diagram; the encoding argument is currently ignored.
    def get(uri, _)
      ::OpenURI.open_uri(uri, 'r', &:read)
    end

    # POST the diagram source and return the response body.
    # FIX: Net::HTTP has no `request_post` singleton method (it is an
    # instance method), so the previous call raised NoMethodError; use the
    # Net::HTTP.post class method with a parsed URI instead.
    def post(uri, data, _)
      res = ::Net::HTTP.post(::URI.parse(uri), data)
      res.body
    end
  end
end
end
# Register one block macro and one block processor per supported diagram name
# (e.g. `plantuml::file.puml[]` macros and `[plantuml]` listing blocks).
Extensions.register do
::AsciidoctorExtensions::Kroki::SUPPORTED_DIAGRAM_NAMES.each { |name|
block_macro AsciidoctorExtensions::KrokiBlockMacroProcessor, name
block AsciidoctorExtensions::KrokiBlockProcessor, name
}
end
Avoid having Kroki write warnings to the console; use inline warnings that can be shown in the editor.
# frozen_string_literal: true
# Original source:
# https://github.com/Mogztter/asciidoctor-kroki/blob/master/ruby/lib/asciidoctor/extensions/asciidoctor_kroki/extension.rb
require 'asciidoctor/extensions' unless RUBY_ENGINE == 'opal'
# Asciidoctor extensions
#
module AsciidoctorExtensions
include Asciidoctor
# A block extension that converts a diagram into an image.
#
class KrokiBlockProcessor < Extensions::BlockProcessor
use_dsl
on_context :listing, :literal
name_positional_attributes 'target', 'format'
# Convert a listing/literal block of diagram source into an image (or
# literal) block; the diagram language comes from the registered name.
def process(parent, reader, attrs)
diagram_type = @name
diagram_text = reader.string
KrokiProcessor.process(self, parent, attrs, diagram_type, diagram_text)
end
end
# A block macro extension that converts a diagram into an image.
#
class KrokiBlockMacroProcessor < Asciidoctor::Extensions::BlockMacroProcessor
use_dsl
name_positional_attributes 'format'
# Resolve the macro target (after attribute substitution), load the diagram
# source it points at and delegate to KrokiProcessor.
def process(parent, target, attrs)
diagram_type = @name
target = parent.apply_subs(target, [:attributes])
diagram_text = read(target)
KrokiProcessor.process(self, parent, attrs, diagram_type, diagram_text)
end
# Read diagram source from an http(s) URL (via open-uri, required lazily)
# or from a local file path.
def read(target)
if target.start_with?('http://') || target.start_with?('https://')
require 'open-uri'
URI.open(target, &:read)
else
File.open(target, &:read)
end
end
end
# Kroki API
#
module Kroki
# Diagram languages this extension registers block and block-macro names for.
SUPPORTED_DIAGRAM_NAMES = %w[
actdiag
blockdiag
bpmn
bytefield
c4plantuml
ditaa
erd
excalidraw
graphviz
mermaid
nomnoml
nwdiag
packetdiag
plantuml
rackdiag
seqdiag
svgbob
umlet
vega
vegalite
wavedrom
].freeze
end
# Internal processor
#
# Internal processor shared by the block and block-macro extensions.
class KrokiProcessor
  TEXT_FORMATS = %w[txt atxt utxt].freeze

  class << self
    # Build an Asciidoctor block (image or literal) for the given diagram.
    #
    # @param processor [Asciidoctor::Extensions::Processor] calling extension
    # @param parent [Asciidoctor::AbstractBlock] parent block
    # @param attrs [Hash] block attributes
    # @param diagram_type [Symbol] diagram language (e.g. :plantuml)
    # @param diagram_text [String] diagram source text
    def process(processor, parent, attrs, diagram_type, diagram_text)
      doc = parent.document
      diagram_text = prepend_plantuml_config(diagram_text, diagram_type, doc)
      # If "subs" attribute is specified, substitute accordingly.
      # Be careful not to specify "specialcharacters" or your diagram code won't be valid anymore!
      if (subs = attrs['subs'])
        diagram_text = parent.apply_subs(diagram_text, parent.resolve_subs(subs))
      end
      title = attrs.delete('title')
      caption = attrs.delete('caption')
      attrs.delete('opts')
      role = attrs['role']
      format = get_format(doc, attrs, diagram_type)
      attrs['role'] = get_role(format, role)
      attrs['format'] = format
      kroki_diagram = KrokiDiagram.new(diagram_type, format, diagram_text)
      # The cursor is forwarded so client warnings can carry a source location.
      kroki_client = KrokiClient.new(server_url(doc), http_method(doc), KrokiHttpClient, doc.reader.cursor_at_mark)
      if TEXT_FORMATS.include?(format)
        text_content = kroki_client.text_content(kroki_diagram)
        block = processor.create_block(parent, 'literal', text_content, attrs)
      else
        attrs['alt'] = get_alt(attrs)
        attrs['target'] = create_image_src(doc, kroki_diagram, kroki_client)
        block = processor.create_image_block(parent, attrs)
      end
      block.title = title if title
      block.assign_caption(caption, 'figure')
      block
    end

    private

    # Prepend the configured PlantUML include file (kroki-plantuml-include)
    # to PlantUML diagrams.
    def prepend_plantuml_config(diagram_text, diagram_type, doc)
      if diagram_type == :plantuml && doc.attr?('kroki-plantuml-include')
        # TODO: this behaves different than the JS version
        # The file should be added by !include #{plantuml_include}" once we have a preprocessor for ruby
        config = File.read(doc.attr('kroki-plantuml-include'))
        diagram_text = config + "\n" + diagram_text
      end
      diagram_text
    end

    # Alt text falls back from title to target to a generic label.
    def get_alt(attrs)
      if (title = attrs['title'])
        title
      elsif (target = attrs['target'])
        target
      else
        'Diagram'
      end
    end

    def get_role(format, role)
      if role
        if format
          "#{role} kroki-format-#{format} kroki"
        else
          "#{role} kroki"
        end
      else
        'kroki'
      end
    end

    def get_format(doc, attrs, diagram_type)
      format = attrs['format'] || 'svg'
      # If the media we're preparing for doesn't support SVG well, use PNG instead...
      if doc.attr?('kroki-force-png') && format == 'svg'
        # ... unless the diagram library does not support PNG as output format!
        # Currently, mermaid, nomnoml, svgbob, wavedrom only support SVG as output format.
        # FIX: the list used to be %w[:mermaid ...] — literal ":name" strings
        # that could never equal a diagram type, so SVG-only diagrams were
        # wrongly forced to PNG.
        svg_only_diagram_types = %i[mermaid nomnoml svgbob wavedrom]
        format = 'png' unless svg_only_diagram_types.include?(diagram_type.to_sym)
      end
      format
    end

    # Fetch the diagram locally (kroki-fetch-diagram) or link to the server.
    def create_image_src(doc, kroki_diagram, kroki_client)
      if doc.attr('kroki-fetch-diagram')
        kroki_diagram.save(output_dir_path(doc), kroki_client)
      else
        kroki_diagram.get_diagram_uri(server_url(doc))
      end
    end

    def server_url(doc)
      doc.attr('kroki-server-url', 'https://kroki.io')
    end

    def http_method(doc)
      doc.attr('kroki-http-method', 'adaptive').downcase
    end

    # Resolve the directory fetched diagrams are written to:
    # imagesoutdir > outdir > to_dir > base_dir, each joined with imagesdir.
    def output_dir_path(doc)
      images_output_dir = doc.attr('imagesoutdir')
      out_dir = doc.attr('outdir')
      to_dir = doc.attr('to_dir')
      base_dir = doc.base_dir
      images_dir = doc.attr('imagesdir', '')
      if images_output_dir
        images_output_dir
      elsif out_dir
        File.join(out_dir, images_dir)
      elsif to_dir
        File.join(to_dir, images_dir)
      else
        File.join(base_dir, images_dir)
      end
    end
  end
end
# Kroki diagram
#
# Kroki diagram: encodes diagram source for transport and optionally saves
# the rendered image to disk.
class KrokiDiagram
  require 'fileutils'
  require 'zlib'
  require 'digest'
  # FIX: Base64 is used by #encode but 'base64' was never required.
  require 'base64'

  # Output formats written as UTF-8 text rather than raw bytes.
  TEXT_LIKE_FORMATS = %w[txt atxt utxt].freeze

  attr_reader :type
  attr_reader :text
  attr_reader :format

  # @param type [Symbol, String] diagram language (e.g. :plantuml)
  # @param format [String] output format ('svg', 'png', 'txt', ...)
  # @param text [String] diagram source
  def initialize(type, format, text)
    @text = text
    @type = type
    @format = format
  end

  # Full GET URI for this diagram on the given Kroki server.
  def get_diagram_uri(server_url)
    _join_uri_segments(server_url, @type, @format, encode)
  end

  # Deflate (level 9) + URL-safe Base64: the encoding Kroki expects in GET URLs.
  def encode
    Base64.urlsafe_encode64(Zlib::Deflate.deflate(@text, 9))
  end

  # Write the diagram into output_dir_path (fetching it from the server unless
  # a file with the same content hash already exists) and return the file name.
  def save(output_dir_path, kroki_client)
    diagram_url = get_diagram_uri(kroki_client.server_url)
    diagram_name = "diag-#{Digest::SHA256.hexdigest diagram_url}.#{@format}"
    file_path = File.join(output_dir_path, diagram_name)
    # (The original if/elsif/else mapped both 'svg' and the fallback branch to
    # 'binary'; collapsed to a single membership test.)
    encoding = TEXT_LIKE_FORMATS.include?(@format) ? 'utf8' : 'binary'
    # file is either (already) on the file system or we should read it from Kroki
    contents = File.exist?(file_path) ? File.open(file_path, &:read) : kroki_client.get_image(self, encoding)
    FileUtils.mkdir_p(output_dir_path)
    if encoding == 'binary'
      File.binwrite(file_path, contents)
    else
      File.write(file_path, contents)
    end
    diagram_name
  end

  private

  def _join_uri_segments(base, *uris)
    segments = []
    # remove trailing slashes
    segments.push(base.gsub(%r{[/]+$}, ''))
    segments.concat(uris.map do |uri|
      # remove leading and trailing slashes
      uri.to_s
         .gsub(%r{^[/]+}, '')
         .gsub(%r{[/]+$}, '')
    end)
    segments.join('/')
  end
end
# Kroki client
#
class KrokiClient
include Asciidoctor::Logging
# @return [String] base URL of the Kroki server
attr_reader :server_url
# @return [String] effective HTTP method: 'get', 'post' or 'adaptive'
attr_reader :method
SUPPORTED_HTTP_METHODS = %w[get post adaptive].freeze
# @param server_url [String] base URL of the Kroki server
# @param http_method [String, nil] requested method; defaults to 'adaptive'
# @param http_client [#get, #post] transport used for the actual requests
# @param location [Asciidoctor::Reader::Cursor, nil] source location attached to warnings
def initialize(server_url, http_method, http_client, location = nil)
@server_url = server_url
@max_uri_length = 4096
@http_client = http_client
method = (http_method || 'adaptive').downcase
if SUPPORTED_HTTP_METHODS.include?(method)
@method = method
else
# Warn through the Asciidoctor logger (with source location) rather than
# stdout, so editors can surface the message inline.
logger.warn message_with_context "Invalid value '#{method}' for kroki-http-method attribute. The value must be either: " \
"'get', 'post' or 'adaptive'. Proceeding using: 'adaptive'.", source_location: location
@method = 'adaptive'
end
end
# Text formats are fetched as UTF-8.
def text_content(kroki_diagram)
get_image(kroki_diagram, 'utf-8')
end
# Fetch the rendered diagram, preferring GET but switching to POST when the
# encoded URI exceeds @max_uri_length and the method is 'adaptive'.
def get_image(kroki_diagram, encoding)
type = kroki_diagram.type
format = kroki_diagram.format
text = kroki_diagram.text
if @method == 'adaptive' || @method == 'get'
uri = kroki_diagram.get_diagram_uri(server_url)
if uri.length > @max_uri_length
# The request URI is longer than 4096.
if @method == 'get'
# The request might be rejected by the server with a 414 Request-URI Too Large.
# Consider using the attribute kroki-http-method with the value 'adaptive'.
@http_client.get(uri, encoding)
else
@http_client.post("#{@server_url}/#{type}/#{format}", text, encoding)
end
else
@http_client.get(uri, encoding)
end
else
@http_client.post("#{@server_url}/#{type}/#{format}", text, encoding)
end
end
end
# Kroki HTTP client
#
# Kroki HTTP client: minimal GET/POST helpers used by KrokiClient.
class KrokiHttpClient
  require 'net/http'
  require 'uri'
  require 'json'
  # FIX: OpenURI was referenced below but 'open-uri' was never required.
  require 'open-uri'

  class << self
    # GET the rendered diagram; the encoding argument is currently ignored.
    def get(uri, _)
      ::OpenURI.open_uri(uri, 'r', &:read)
    end

    # POST the diagram source and return the response body.
    # FIX: Net::HTTP has no `request_post` singleton method (it is an
    # instance method), so the previous call raised NoMethodError; use the
    # Net::HTTP.post class method with a parsed URI instead.
    def post(uri, data, _)
      res = ::Net::HTTP.post(::URI.parse(uri), data)
      res.body
    end
  end
end
end
# Register one block macro and one block processor per supported diagram name
# (e.g. `plantuml::file.puml[]` macros and `[plantuml]` listing blocks).
Extensions.register do
::AsciidoctorExtensions::Kroki::SUPPORTED_DIAGRAM_NAMES.each { |name|
block_macro AsciidoctorExtensions::KrokiBlockMacroProcessor, name
block AsciidoctorExtensions::KrokiBlockProcessor, name
}
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'nbb/version'

# Gem metadata for nbb, a Ruby wrapper for the Dutch Basketball Association
# (Nederlandse Basketbal Bond) JSON API.
Gem::Specification.new do |spec|
  spec.name          = 'nbb'
  spec.version       = Nbb::VERSION
  spec.authors       = ['Ivan Malykh']
  spec.email         = ['ivan@lesslines.com']
  spec.summary       = 'Ruby wrapper for Dutch Basketball Association JSON API'
  # FIX: grammar — "This Ruby gems wraps" -> "This Ruby gem wraps the ...".
  spec.description   = 'This Ruby gem wraps the Nederlandse Basketbal Bond (NBB) JSON API'
  spec.homepage      = 'https://github.com/ivdma/nbb'
  spec.license       = 'MIT'

  # Ship everything tracked by git except tests/specs/features.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
  spec.required_ruby_version = '>= 1.9.3'

  spec.add_dependency 'httparty', '~> 0.13'
  spec.add_dependency 'activesupport', '>= 4.2', '< 5.1'

  spec.add_development_dependency 'bundler', '~> 1.11'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'rspec', '~> 3.4'
  spec.add_development_dependency 'guard-rspec', '~> 4.6'
  spec.add_development_dependency 'guard-bundler', '~> 2.1'
  spec.add_development_dependency 'webmock', '~> 1.22'
  spec.add_development_dependency 'vcr', '~> 3.0'
  spec.add_development_dependency 'pry', '~> 0.10'
  spec.add_development_dependency 'rubocop', '~> 0.37'
  spec.add_development_dependency 'guard-rubocop', '~> 1.2'
  spec.add_development_dependency 'fabrication', '~> 2.14'
end
Update the activesupport dependency.
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'nbb/version'

# Gem metadata for nbb, a Ruby wrapper for the Dutch Basketball Association
# (Nederlandse Basketbal Bond) JSON API.
Gem::Specification.new do |spec|
  spec.name          = 'nbb'
  spec.version       = Nbb::VERSION
  spec.authors       = ['Ivan Malykh']
  spec.email         = ['ivan@lesslines.com']
  spec.summary       = 'Ruby wrapper for Dutch Basketball Association JSON API'
  # FIX: grammar — "This Ruby gems wraps" -> "This Ruby gem wraps the ...".
  spec.description   = 'This Ruby gem wraps the Nederlandse Basketbal Bond (NBB) JSON API'
  spec.homepage      = 'https://github.com/ivdma/nbb'
  spec.license       = 'MIT'

  # Ship everything tracked by git except tests/specs/features.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
  spec.required_ruby_version = '>= 1.9.3'

  spec.add_dependency 'httparty', '~> 0.13'
  spec.add_dependency 'activesupport', '~> 5.0'

  spec.add_development_dependency 'bundler', '~> 1.11'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'rspec', '~> 3.4'
  spec.add_development_dependency 'guard-rspec', '~> 4.6'
  spec.add_development_dependency 'guard-bundler', '~> 2.1'
  spec.add_development_dependency 'webmock', '~> 1.22'
  spec.add_development_dependency 'vcr', '~> 3.0'
  spec.add_development_dependency 'pry', '~> 0.10'
  spec.add_development_dependency 'rubocop', '~> 0.37'
  spec.add_development_dependency 'guard-rubocop', '~> 1.2'
  spec.add_development_dependency 'fabrication', '~> 2.14'
end
|
# Sinatra app wired up as a Twilio voice webhook: dials through calls
# addressed to a SIP URI on our endpoint and hangs up on everything else.
class NoPhone < Sinatra::Base
post "/" do
# Twilio always posts a "To" parameter; reject anything that isn't one.
halt 404 unless params["To"].is_a?(String)
# NOTE(review): the assignment inside the condition is intentional —
# `match` is reused below to extract the dialled number.
if params["CallStatus"] == "ringing" && match = params["To"].match(/sip:(\+?\d+)@#{Regexp.escape(ENV["TWILIO_SIP_ENDPOINT"])}/)
builder do |xml|
xml.Response do |r|
# Dial the number captured from the SIP URI, presenting our Twilio
# number as the caller ID.
r.Dial match[1], callerId: ENV["TWILIO_NUMBER"]
end
end
else
hangup
end
end
# Renders TwiML that terminates the call.
def hangup
builder do |xml|
xml.Response do |r|
r.Hangup
end
end
end
end
Silly message for incoming calls
# Sinatra app acting as a Twilio voice webhook.
#
# Incoming ringing calls addressed to a SIP URI on our endpoint are dialled
# through to the embedded number; calls to our public Twilio number get a
# spoken greeting; everything else is hung up on.
class NoPhone < Sinatra::Base
  post "/" do
    # Twilio always posts a "To" parameter; reject anything that isn't one.
    halt 404 unless params["To"].is_a?(String)

    # Parentheses make the in-condition assignment explicit and silence
    # Ruby's "found = in conditional" warning; `match` is used below.
    if params["CallStatus"] == "ringing" && (match = params["To"].match(/sip:(\+?\d+)@#{Regexp.escape(ENV["TWILIO_SIP_ENDPOINT"])}/))
      builder do |xml|
        xml.Response do |r|
          # Dial the number captured from the SIP URI, presenting our
          # Twilio number as the caller ID.
          r.Dial match[1], callerId: ENV["TWILIO_NUMBER"]
        end
      end
    elsif params["To"] == ENV["TWILIO_NUMBER"]
      message
    else
      hangup
    end
  end

  # TwiML greeting spoken to callers of the public Twilio number.
  def message
    builder do |xml|
      xml.Response do |r|
        r.Say "Welcome to Collective Idea. For more information, please email us at info@collectiveidea.com", voice: "alice"
      end
    end
  end

  # TwiML that terminates the call.
  def hangup
    builder do |xml|
      xml.Response do |r|
        r.Hangup
      end
    end
  end
end
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'nrl/version'

# Gemspec for nrl, a Ruby wrapper library for the NRL API.
Gem::Specification.new do |spec|
  spec.name          = "nrl"
  spec.version       = NRL::VERSION
  spec.authors       = ["Cameron Attard"]
  spec.email         = ["cameron.m.attard@gmail.com"]
  spec.summary       = "A Ruby wrapper library for the NRL API."
  spec.homepage      = "https://github.com/cameronattard/nrl"
  # The generated 'allowed_push_host' section still contained its literal
  # "TODO" placeholder, which would have blocked every `gem push`; the gem
  # is public, so drop the restriction and declare the license instead.
  spec.license       = "MIT"

  # Package every git-tracked file except tests, specs and features.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.10"
  spec.add_development_dependency "rake", "~> 10.0"

  spec.add_runtime_dependency "virtus", "~> 1.0.5"
  spec.add_runtime_dependency "faraday", "~> 0.9.2"
  # NOTE(review): unconstrained dependency — consider pinning (e.g. "~> 0.10")
  # to avoid surprise upgrades.
  spec.add_runtime_dependency "faraday_middleware"
end
Update gemspec
# coding: utf-8
# Gemspec for nrl, a Ruby wrapper library for the NRL API.
lib = File.expand_path('../lib', __FILE__)
# Make lib/ requirable so the version constant can be read below.
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'nrl/version'
Gem::Specification.new do |spec|
spec.name = "nrl"
spec.version = NRL::VERSION
spec.authors = ["Cameron Attard"]
spec.email = ["cameron.m.attard@gmail.com"]
spec.summary = "A Ruby wrapper library for the NRL API."
spec.homepage = "https://github.com/cameronattard/nrl"
spec.license = "MIT"
# Package every git-tracked file except tests, specs and features.
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
# Development-only dependencies.
spec.add_development_dependency "bundler", "~> 1.10"
spec.add_development_dependency "rake", "~> 10.0"
# Runtime dependencies.
spec.add_runtime_dependency "virtus", "~> 1.0"
spec.add_runtime_dependency "faraday", "~> 0.9"
spec.add_runtime_dependency "faraday_middleware", "~> 0.10"
end
|
# encoding: UTF-8
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper.rb')
# Round-trip and formatting specs for the Yajl JSON encoder.
describe "Yajl JSON encoder" do
# The benchmark fixture documents double as encoder round-trip inputs.
FILES = Dir[File.dirname(__FILE__)+'/../../benchmark/subjects/*.json']
FILES.each do |file|
it "should encode #{File.basename(file)}" do
# we don't care about testing the stream subject as it has multiple JSON strings in it
if File.basename(file) != 'twitter_stream.json'
input = File.new(File.expand_path(file), 'r')
io = StringIO.new
parser = Yajl::Parser.new
encoder = Yajl::Encoder.new
hash = parser.parse(input)
output = encoder.encode(hash, io)
io.rewind
hash2 = parser.parse(io)
io.close
input.close
# Parsing the encoder's own output must reproduce the original structure.
hash.should == hash2
end
end
end
it "should encode with :pretty turned on and a single space indent" do
output = "{\n \"foo\": {\n \"name\": \"bar\",\n \"id\": 1234\n }\n}\n"
obj = {:foo => {:id => 1234, :name => "bar"}}
io = StringIO.new
encoder = Yajl::Encoder.new(:pretty => true, :indent => ' ')
encoder.encode(obj, io)
io.rewind
io.read.should == output
end
it "should encode with :pretty turned on and a tab character indent" do
output = "{\n\t\"foo\": {\n\t\t\"name\": \"bar\",\n\t\t\"id\": 1234\n\t}\n}\n"
obj = {:foo => {:id => 1234, :name => "bar"}}
io = StringIO.new
encoder = Yajl::Encoder.new(:pretty => true, :indent => "\t")
encoder.encode(obj, io)
io.rewind
io.read.should == output
end
it "should encode with it's class method with :pretty and a tab character indent options set" do
output = "{\n\t\"foo\": {\n\t\t\"name\": \"bar\",\n\t\t\"id\": 1234\n\t}\n}\n"
obj = {:foo => {:id => 1234, :name => "bar"}}
io = StringIO.new
Yajl::Encoder.encode(obj, io, :pretty => true, :indent => "\t")
io.rewind
io.read.should == output
end
# Each encoded document is terminated with a newline so several can share
# one stream.
it "should encode multiple objects into a single stream" do
io = StringIO.new
obj = {:foo => "bar", :baz => 1234}
encoder = Yajl::Encoder.new
5.times do
encoder.encode(obj, io)
end
io.rewind
io.read.should == "{\"foo\":\"bar\",\"baz\":1234}\n{\"foo\":\"bar\",\"baz\":1234}\n{\"foo\":\"bar\",\"baz\":1234}\n{\"foo\":\"bar\",\"baz\":1234}\n{\"foo\":\"bar\",\"baz\":1234}\n"
end
end
Fix hash key ordering differences between Ruby 1.8 and 1.9 in spec test runs
# encoding: UTF-8
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper.rb')
# Round-trip and formatting specs for the Yajl JSON encoder.
describe "Yajl JSON encoder" do
# The benchmark fixture documents double as encoder round-trip inputs.
FILES = Dir[File.dirname(__FILE__)+'/../../benchmark/subjects/*.json']
FILES.each do |file|
it "should encode #{File.basename(file)}" do
# we don't care about testing the stream subject as it has multiple JSON strings in it
if File.basename(file) != 'twitter_stream.json'
input = File.new(File.expand_path(file), 'r')
io = StringIO.new
parser = Yajl::Parser.new
encoder = Yajl::Encoder.new
hash = parser.parse(input)
output = encoder.encode(hash, io)
io.rewind
hash2 = parser.parse(io)
io.close
input.close
# Parsing the encoder's own output must reproduce the original structure.
hash.should == hash2
end
end
end
it "should encode with :pretty turned on and a single space indent" do
output = "{\n \"foo\": {\n \"name\": \"bar\",\n \"id\": 1234\n }\n}\n"
# NOTE(review): 1.9 hashes keep insertion order while 1.8 ordering is
# implementation-defined, so the expected key order differs per Ruby.
if RUBY_VERSION.include?('1.9') # FIXME
output = "{\n \"foo\": {\n \"id\": 1234,\n \"name\": \"bar\"\n }\n}\n"
end
obj = {:foo => {:id => 1234, :name => "bar"}}
io = StringIO.new
encoder = Yajl::Encoder.new(:pretty => true, :indent => ' ')
encoder.encode(obj, io)
io.rewind
io.read.should == output
end
it "should encode with :pretty turned on and a tab character indent" do
output = "{\n\t\"foo\": {\n\t\t\"name\": \"bar\",\n\t\t\"id\": 1234\n\t}\n}\n"
if RUBY_VERSION.include?('1.9') # FIXME
output = "{\n\t\"foo\": {\n\t\t\"id\": 1234,\n\t\t\"name\": \"bar\"\n\t}\n}\n"
end
obj = {:foo => {:id => 1234, :name => "bar"}}
io = StringIO.new
encoder = Yajl::Encoder.new(:pretty => true, :indent => "\t")
encoder.encode(obj, io)
io.rewind
io.read.should == output
end
it "should encode with it's class method with :pretty and a tab character indent options set" do
output = "{\n\t\"foo\": {\n\t\t\"name\": \"bar\",\n\t\t\"id\": 1234\n\t}\n}\n"
if RUBY_VERSION.include?('1.9') # FIXME
output = "{\n\t\"foo\": {\n\t\t\"id\": 1234,\n\t\t\"name\": \"bar\"\n\t}\n}\n"
end
obj = {:foo => {:id => 1234, :name => "bar"}}
io = StringIO.new
Yajl::Encoder.encode(obj, io, :pretty => true, :indent => "\t")
io.rewind
io.read.should == output
end
# Each encoded document is terminated with a newline so several can share
# one stream.
it "should encode multiple objects into a single stream" do
io = StringIO.new
obj = {:foo => "bar", :baz => 1234}
encoder = Yajl::Encoder.new
5.times do
encoder.encode(obj, io)
end
io.rewind
io.read.should == "{\"foo\":\"bar\",\"baz\":1234}\n{\"foo\":\"bar\",\"baz\":1234}\n{\"foo\":\"bar\",\"baz\":1234}\n{\"foo\":\"bar\",\"baz\":1234}\n{\"foo\":\"bar\",\"baz\":1234}\n"
end
end |
module NationalRail
class VirginLiveDepartureBoards
# Screen-scrapes a Virgin Trains service-details page into calling-point
# hashes. Tables are located by their HTML `summary` attribute using the
# Hpricot-style `/` search operator.
class DetailsPageParser
include CellParser
# doc: parsed HTML document; time_parser: converts cell text to times.
def initialize(doc, time_parser)
@doc, @time_parser = doc, time_parser
end
# Returns { :will_call_at => [...], :previous_calling_points => [...] },
# each entry a hash of station name and parsed times.
def parse
will_call_at = []
table = @doc/"table[@summary='Will call at']"
if table.any?
(table/"tbody tr").each do |tr|
tds = tr/"td"
# Skip header/spacer rows that don't have the expected column count.
next unless tds.length == 3
will_call_at << {
:station => cell_text(tds[0]),
:timetabled_arrival => @time_parser.parse(cell_text(tds[1])),
:expected_arrival => @time_parser.parse(cell_text(tds[2]))
}
end
end
previous_calling_points = []
table = @doc/"table[@summary='Previous calling points']"
if table.any?
(table/"tbody tr").each do |tr|
tds = tr/"td"
next unless tds.length == 4
previous_calling_points << {
:station => cell_text(tds[0]),
:timetabled_departure => @time_parser.parse(cell_text(tds[1])),
:expected_departure => @time_parser.parse(cell_text(tds[2])),
:actual_departure => @time_parser.parse(cell_text(tds[3]))
}
end
end
{ :will_call_at => will_call_at, :previous_calling_points => previous_calling_points }
end
end
end
end
I'm hoping it'll be easier to work out whether we've crossed a day boundary by stepping backwards from the current time through the previous calling points.
module NationalRail
class VirginLiveDepartureBoards
# Screen-scrapes a Virgin Trains service-details page into calling-point
# hashes. Tables are located by their HTML `summary` attribute using the
# Hpricot-style `/` search operator.
class DetailsPageParser
include CellParser
# doc: parsed HTML document; time_parser: converts cell text to times.
def initialize(doc, time_parser)
@doc, @time_parser = doc, time_parser
end
# Returns { :will_call_at => [...], :previous_calling_points => [...] },
# each entry a hash of station name and parsed times.
def parse
will_call_at = []
table = @doc/"table[@summary='Will call at']"
if table.any?
(table/"tbody tr").each do |tr|
tds = tr/"td"
# Skip header/spacer rows that don't have the expected column count.
next unless tds.length == 3
will_call_at.push({
:station => cell_text(tds[0]),
:timetabled_arrival => @time_parser.parse(cell_text(tds[1])),
:expected_arrival => @time_parser.parse(cell_text(tds[2]))
})
end
end
previous_calling_points = []
table = @doc/"table[@summary='Previous calling points']"
if table.any?
# NOTE(review): rows are deliberately visited in reverse and unshift-ed
# so the result keeps page order — presumably @time_parser is stateful
# and walking backwards from "now" helps it detect day boundaries;
# do not change the visit order without confirming.
(table/"tbody tr").reverse.each do |tr|
tds = tr/"td"
next unless tds.length == 4
previous_calling_points.unshift({
:station => cell_text(tds[0]),
:timetabled_departure => @time_parser.parse(cell_text(tds[1])),
:expected_departure => @time_parser.parse(cell_text(tds[2])),
:actual_departure => @time_parser.parse(cell_text(tds[3]))
})
end
end
{ :will_call_at => will_call_at, :previous_calling_points => previous_calling_points }
end
end
end
end
|
require 'spec_helper'

# Verifies that EncodingDetector#detect reports the expected encoding for a
# set of fixture files, and that the guessed encoding is actually valid for
# the file's bytes (on Rubies that support String#force_encoding).
describe CharlockHolmes::EncodingDetector do
  # fixture file => encoding we expect the detector to report
  MAPPING = [
    ['repl2.cljs', 'ISO-8859-1'],
    ['core.rkt', 'UTF-8'],
    ['cl-messagepack.lisp', 'ISO-8859-1'],
    ['TwigExtensionsDate.es.yml', 'UTF-8'],
    ['AnsiGraph.psm1', 'UTF-16LE']
  ]

  before :all do
    @detector = CharlockHolmes::EncodingDetector.new
  end

  MAPPING.each do |mapping|
    file, encoding = mapping

    test "#{file} is detected as #{encoding}" do
      path = File.expand_path "../fixtures/#{file}", __FILE__
      content = File.read path
      guessed = @detector.detect content
      # (Removed a leftover debug `puts` so the spec run stays quiet.)
      assert_equal encoding, guessed[:encoding]

      # Only 1.9+ strings know their encoding; verify the guess is usable.
      if content.respond_to? :force_encoding
        content.force_encoding guessed[:encoding]
        assert content.valid_encoding?
      end
    end
  end
end
some more tests
require 'spec_helper'
# Exercises the EncodingDetector API (class and instance detect, detect_all)
# plus per-fixture encoding detection.
describe CharlockHolmes::EncodingDetector do
before :all do
@detector = CharlockHolmes::EncodingDetector.new
end
test "has a detect class-level method" do
CharlockHolmes::EncodingDetector.respond_to? :detect
detected = CharlockHolmes::EncodingDetector.detect "test"
assert_equal 'ISO-8859-1', detected[:encoding]
end
test "has a detect method" do
@detector.respond_to? :detect
detected = @detector.detect "test"
assert_equal 'ISO-8859-1', detected[:encoding]
end
# detect_all returns every plausible encoding, not just the best guess.
test "has a detect_all method" do
@detector.respond_to? :detect_all
detected_list = @detector.detect_all "test"
encoding_list = detected_list.map {|d| d[:encoding]}.sort
assert_equal ["ISO-8859-1", "ISO-8859-2", "UTF-8"], encoding_list
end
context "is able to detect that" do
# fixture file => encoding we expect the detector to report
MAPPING = [
['repl2.cljs', 'ISO-8859-1'],
['core.rkt', 'UTF-8'],
['cl-messagepack.lisp', 'ISO-8859-1'],
['TwigExtensionsDate.es.yml', 'UTF-8'],
['AnsiGraph.psm1', 'UTF-16LE']
]
MAPPING.each do |mapping|
file, encoding = mapping
test "#{file} is #{encoding}" do
path = File.expand_path "../fixtures/#{file}", __FILE__
content = File.read path
guessed = @detector.detect content
assert_equal encoding, guessed[:encoding]
# Only 1.9+ strings know their encoding; verify the guess is usable.
if content.respond_to? :force_encoding
content.force_encoding guessed[:encoding]
assert content.valid_encoding?
end
end
end
end
end |
FactoryGirl.define do
  # Base corporation with a single name; the nested factories specialise it.
  # (Removed the generator's commented-out example :user factory.)
  factory :corporation do
    names ["ABC Corporation"]
    desc "test description"
    jottings "test jottings"

    factory :corp_with_many_names do
      names ["ABC Corporation", "XYZ Inc.", "DEF Incorporated"]
    end

    # Corporation with a configurable number of associated securities.
    factory :corp_with_associations do
      # `ignore` exposes these counts to the callback via the evaluator
      # without setting them as attributes on the model.
      ignore do
        debts_count 4
        preferred_equities_count 1
        equities_count 2
        options_count 3
      end

      after(:create) do |corp, evaluator|
        FactoryGirl.create_list(:debt, evaluator.debts_count, corporation: corp)
        FactoryGirl.create_list(:preferred_equity, evaluator.preferred_equities_count, corporation: corp)
        FactoryGirl.create_list(:equity, evaluator.equities_count, corporation: corp)
        FactoryGirl.create_list(:option, evaluator.options_count, corporation: corp)
      end
    end
  end

  # Securities that each belong to a corporation.
  factory :debt do
    corporation
  end

  factory :preferred_equity do
    corporation
  end

  factory :equity do
    corporation
  end

  factory :option do
    corporation
  end
end
removed example comment
FactoryGirl.define do
# Base corporation with a single name; the nested factories specialise it.
factory :corporation do
names ["ABC Corporation"]
desc "test description"
jottings "test jottings"
factory :corp_with_many_names do
names ["ABC Corporation", "XYZ Inc.", "DEF Incorporated"]
end
# Corporation with a configurable number of associated securities.
factory :corp_with_associations do
# `ignore` exposes these counts to the callback via the evaluator
# without setting them as attributes on the model.
ignore do
debts_count 4
preferred_equities_count 1
equities_count 2
options_count 3
end
after(:create) do |corp, evaluator|
FactoryGirl.create_list(:debt, evaluator.debts_count, corporation: corp)
FactoryGirl.create_list(:preferred_equity, evaluator.preferred_equities_count, corporation: corp)
FactoryGirl.create_list(:equity, evaluator.equities_count, corporation: corp)
FactoryGirl.create_list(:option, evaluator.options_count, corporation: corp)
end
end
end
# Securities that each belong to a corporation.
factory :debt do
corporation
end
factory :preferred_equity do
corporation
end
factory :equity do
corporation
end
factory :option do
corporation
end
end |
FactoryGirl.define do
# Base transaction; specialised by the :tr_* factories below.
factory :transaction do
year Time.now.year
association :user, :factory => :user
association :updated_by_user, :factory => :user
sequence(:amount) { |n| n * 3 }
end
# Complimentary transaction: no payment instrument.
factory :tr_comp, :parent => :transaction do
trantype 'C' # comp
instrument nil
end
factory :tr_refund, :parent => :transaction do
trantype 'R'
instrument %w[K S].sample
end
factory :tr_sale, :parent => :transaction do
trantype 'S' # sale
# NOTE(review): `.sample` runs once when the factory is *defined*, not per
# build, so every :tr_sale in a process shares the same instrument — and
# the case below fixes the extra attributes at definition time too.
ins = %w[C S K].sample
instrument ins
case ins
when 'C'
# NOTE(review): Factory.next is deprecated; FactoryGirl.generate is the
# replacement (fixed in a later revision).
gwdate { Factory.next(:gwdate) }
sequence(:gwtranid) { |n| n }
when 'K'
check_number 101
end
end
# Random UTC date in the current year.
# NOTE(review): rand(11)+1 yields 1..11 (December unreachable) and
# rand(27)+1 yields 1..27 — rand(12)/rand(28) were probably intended.
sequence :gwdate do |n|
month = rand(11) + 1
day = rand(27) + 1
Time.utc(Time.now.year, month, day)
end
end
Factory.next is deprecated; use FactoryGirl.generate instead.
FactoryGirl.define do
  # Base transaction; specialised by the :tr_* factories below.
  factory :transaction do
    year Time.now.year
    association :user, :factory => :user
    association :updated_by_user, :factory => :user
    sequence(:amount) { |n| n * 3 }
  end

  # Complimentary transaction: no payment instrument.
  factory :tr_comp, :parent => :transaction do
    trantype 'C' # comp
    instrument nil
  end

  factory :tr_refund, :parent => :transaction do
    trantype 'R'
    instrument %w[K S].sample
  end

  factory :tr_sale, :parent => :transaction do
    trantype 'S' # sale
    # NOTE(review): `.sample` runs once at factory *definition* time, so all
    # :tr_sale instances in a process share one instrument. Kept as-is
    # because the C/K attribute branches below depend on the same value.
    ins = %w[C S K].sample
    instrument ins
    case ins
    when 'C'
      gwdate { FactoryGirl.generate(:gwdate) }
      sequence(:gwtranid) { |n| n }
    when 'K'
      check_number 101
    end
  end

  # Random UTC date in the current year. rand(12) covers months 1-12 and
  # rand(28) covers days 1-28; the previous rand(11)/rand(27) could never
  # generate December or the 28th.
  sequence :gwdate do |_n|
    month = rand(12) + 1
    day = rand(28) + 1
    Time.utc(Time.now.year, month, day)
  end
end
|
require 'spec_helper'
require 'erb'
# This feature spec is intended to be a comprehensive exercising of all of
# GitLab's non-standard Markdown parsing and the integration thereof.
#
# These tests should be very high-level. Anything low-level belongs in the specs
# for the corresponding HTML::Pipeline filter or helper method.
#
# The idea is to pass a Markdown document through our entire processing stack.
#
# The process looks like this:
#
# Raw Markdown
# -> `markdown` helper
# -> Redcarpet::Render::GitlabHTML converts Markdown to HTML
# -> Post-process HTML
# -> `gfm_with_options` helper
# -> HTML::Pipeline
# -> Sanitize
# -> Emoji
# -> Table of Contents
# -> Autolinks
# -> Rinku (http, https, ftp)
# -> Other schemes
# -> References
# -> `html_safe`
# -> Template
#
# See the MarkdownFeature class for setup details.
describe 'GitLab Markdown' do
include ActionView::Helpers::TagHelper
include ActionView::Helpers::UrlHelper
include Capybara::Node::Matchers
include GitlabMarkdownHelper
# `markdown` calls these two methods
def current_user
@feat.user
end
def user_color_scheme_class
:white
end
# Let's only parse this thing once
before(:all) do
@feat = MarkdownFeature.new
# `markdown` expects a `@project` variable
@project = @feat.project
@md = markdown(@feat.raw_markdown)
@doc = Nokogiri::HTML::DocumentFragment.parse(@md)
end
after(:all) do
@feat.teardown
end
# Given a header ID, goes to that element's parent (the header), then to its
# second sibling (the body).
def get_section(id)
@doc.at_css("##{id}").parent.next.next
end
# it 'writes to a file' do
# File.open(Rails.root.join('tmp/capybara/markdown_spec.html'), 'w') do |file|
# file.puts @md
# end
# end
describe 'Markdown' do
describe 'No Intra Emphasis' do
it 'does not parse emphasis inside of words' do
body = get_section('no-intra-emphasis')
expect(body.to_html).not_to match('foo<em>bar</em>baz')
end
end
describe 'Tables' do
it 'parses table Markdown' do
body = get_section('tables')
expect(body).to have_selector('th:contains("Header")')
expect(body).to have_selector('th:contains("Row")')
expect(body).to have_selector('th:contains("Example")')
end
it 'allows Markdown in tables' do
expect(@doc.at_css('td:contains("Baz")').children.to_html).
to eq '<strong>Baz</strong>'
end
end
describe 'Fenced Code Blocks' do
it 'parses fenced code blocks' do
expect(@doc).to have_selector('pre.code.highlight.white.c')
expect(@doc).to have_selector('pre.code.highlight.white.python')
end
end
describe 'Strikethrough' do
it 'parses strikethroughs' do
expect(@doc).to have_selector(%{del:contains("and this text doesn't")})
end
end
describe 'Superscript' do
it 'parses superscript' do
body = get_section('superscript')
expect(body.to_html).to match('1<sup>st</sup>')
expect(body.to_html).to match('2<sup>nd</sup>')
end
end
end
describe 'HTML::Pipeline' do
describe 'SanitizationFilter' do
it 'uses a permissive whitelist' do
expect(@doc).to have_selector('b#manual-b')
expect(@doc).to have_selector('em#manual-em')
expect(@doc).to have_selector("code#manual-code")
expect(@doc).to have_selector('kbd:contains("s")')
expect(@doc).to have_selector('strike:contains(Emoji)')
expect(@doc).to have_selector('img#manual-img')
expect(@doc).to have_selector('br#manual-br')
expect(@doc).to have_selector('hr#manual-hr')
end
it 'permits span elements' do
expect(@doc).to have_selector('span#span-class-light.light')
end
it 'permits table alignment' do
expect(@doc.at_css('th:contains("Header")')['style']).to eq 'text-align: center'
expect(@doc.at_css('th:contains("Row")')['style']).to eq 'text-align: right'
expect(@doc.at_css('th:contains("Example")')['style']).to eq 'text-align: left'
expect(@doc.at_css('td:contains("Foo")')['style']).to eq 'text-align: center'
expect(@doc.at_css('td:contains("Bar")')['style']).to eq 'text-align: right'
expect(@doc.at_css('td:contains("Baz")')['style']).to eq 'text-align: left'
end
it 'removes `rel` attribute from links' do
expect(@doc).to have_selector('a#a-rel-nofollow')
expect(@doc).not_to have_selector('a#a-rel-nofollow[rel]')
end
it "removes `href` from `a` elements if it's fishy" do
expect(@doc).to have_selector('a#a-href-javascript')
expect(@doc).not_to have_selector('a#a-href-javascript[href]')
end
end
describe 'Escaping' do
let(:table) { @doc.css('table').last.at_css('tbody') }
it 'escapes non-tag angle brackets' do
expect(table.at_xpath('.//tr[1]/td[3]').inner_html).to eq '1 < 3 & 5'
end
end
describe 'Edge Cases' do
it 'allows markup inside link elements' do
expect(@doc.at_css('a[href="#link-emphasis"]').to_html).
to eq %{<a href="#link-emphasis"><em>text</em></a>}
expect(@doc.at_css('a[href="#link-strong"]').to_html).
to eq %{<a href="#link-strong"><strong>text</strong></a>}
expect(@doc.at_css('a[href="#link-code"]').to_html).
to eq %{<a href="#link-code"><code>text</code></a>}
end
end
describe 'EmojiFilter' do
it 'parses Emoji' do
expect(@doc).to have_selector('img.emoji', count: 10)
end
end
describe 'TableOfContentsFilter' do
it 'creates anchors inside header elements' do
expect(@doc).to have_selector('h1 a#gitlab-markdown')
expect(@doc).to have_selector('h2 a#markdown')
expect(@doc).to have_selector('h3 a#autolinkfilter')
end
end
describe 'AutolinkFilter' do
let(:list) { get_section('autolinkfilter').parent.search('ul') }
def item(index)
list.at_css("li:nth-child(#{index})")
end
it 'autolinks http://' do
expect(item(1).children.first.name).to eq 'a'
expect(item(1).children.first['href']).to eq 'http://about.gitlab.com/'
end
it 'autolinks https://' do
expect(item(2).children.first.name).to eq 'a'
expect(item(2).children.first['href']).to eq 'https://google.com/'
end
it 'autolinks ftp://' do
expect(item(3).children.first.name).to eq 'a'
expect(item(3).children.first['href']).to eq 'ftp://ftp.us.debian.org/debian/'
end
it 'autolinks smb://' do
expect(item(4).children.first.name).to eq 'a'
expect(item(4).children.first['href']).to eq 'smb://foo/bar/baz'
end
it 'autolinks irc://' do
expect(item(5).children.first.name).to eq 'a'
expect(item(5).children.first['href']).to eq 'irc://irc.freenode.net/git'
end
it 'autolinks short, invalid URLs' do
expect(item(6).children.first.name).to eq 'a'
expect(item(6).children.first['href']).to eq 'http://localhost:3000'
end
%w(code a kbd).each do |elem|
it "ignores links inside '#{elem}' element" do
expect(@doc.at_css("#{elem}#autolink-#{elem}").child).to be_text
end
end
end
describe 'ReferenceFilter' do
it 'handles references in headers' do
header = @doc.at_css('#reference-filters-eg-1').parent
expect(header.css('a').size).to eq 2
end
it "handles references in Markdown" do
body = get_section('reference-filters-eg-1')
expect(body).to have_selector('em a.gfm-merge_request', count: 1)
end
it 'parses user references' do
body = get_section('userreferencefilter')
expect(body).to have_selector('a.gfm.gfm-project_member', count: 3)
end
it 'parses issue references' do
body = get_section('issuereferencefilter')
expect(body).to have_selector('a.gfm.gfm-issue', count: 2)
end
it 'parses merge request references' do
body = get_section('mergerequestreferencefilter')
expect(body).to have_selector('a.gfm.gfm-merge_request', count: 2)
end
it 'parses snippet references' do
body = get_section('snippetreferencefilter')
expect(body).to have_selector('a.gfm.gfm-snippet', count: 2)
end
it 'parses commit range references' do
body = get_section('commitrangereferencefilter')
expect(body).to have_selector('a.gfm.gfm-commit_range', count: 2)
end
it 'parses commit references' do
body = get_section('commitreferencefilter')
expect(body).to have_selector('a.gfm.gfm-commit', count: 2)
end
it 'parses label references' do
body = get_section('labelreferencefilter')
expect(body).to have_selector('a.gfm.gfm-label', count: 3)
end
end
end
end
# This is a helper class used by the GitLab Markdown feature spec
#
# Because the feature spec only cares about the output of the Markdown, and the
# test setup and teardown and parsing is fairly expensive, we only want to do it
# once. Unfortunately RSpec will not let you access `let`s in a `before(:all)`
# block, so we fake it by encapsulating all the shared setup in this class.
#
# The class contains the raw Markup used in the test, dynamically substituting
# real objects, created from factories and setup on-demand, when referenced in
# the Markdown.
class MarkdownFeature
include FactoryGirl::Syntax::Methods
# Setup/teardown is manual because this object outlives a single example
# (it is created in a before(:all) block).
def initialize
DatabaseCleaner.start
end
def teardown
DatabaseCleaner.clean
end
# All record accessors below are memoized so fixtures are created lazily,
# only when the Markdown document actually references them.
def user
@user ||= create(:user)
end
def group
unless @group
@group = create(:group)
@group.add_user(user, Gitlab::Access::DEVELOPER)
end
@group
end
# Direct references ----------------------------------------------------------
def project
@project ||= create(:project)
end
def issue
@issue ||= create(:issue, project: project)
end
def merge_request
@merge_request ||= create(:merge_request, :simple, source_project: project)
end
def snippet
@snippet ||= create(:project_snippet, project: project)
end
def commit
@commit ||= project.repository.commit
end
def commit_range
unless @commit_range
commit2 = project.repository.commit('HEAD~3')
@commit_range = CommitRange.new("#{commit.id}...#{commit2.id}")
end
@commit_range
end
def simple_label
@simple_label ||= create(:label, name: 'gfm', project: project)
end
def label
@label ||= create(:label, name: 'awaiting feedback', project: project)
end
# Cross-references -----------------------------------------------------------
def xproject
unless @xproject
namespace = create(:namespace, name: 'cross-reference')
@xproject = create(:project, namespace: namespace)
# The current user needs access for cross-project references to render.
@xproject.team << [user, :developer]
end
@xproject
end
# Shortcut to "cross-reference/project"
def xref
xproject.path_with_namespace
end
def xissue
@xissue ||= create(:issue, project: xproject)
end
def xmerge_request
@xmerge_request ||= create(:merge_request, :simple, source_project: xproject)
end
def xsnippet
@xsnippet ||= create(:project_snippet, project: xproject)
end
def xcommit
@xcommit ||= xproject.repository.commit
end
def xcommit_range
unless @xcommit_range
xcommit2 = xproject.repository.commit('HEAD~2')
@xcommit_range = CommitRange.new("#{xcommit.id}...#{xcommit2.id}")
end
@xcommit_range
end
# Renders the ERB fixture with this object's bindings so the Markdown can
# interpolate the lazily created records above.
def raw_markdown
fixture = Rails.root.join('spec/fixtures/markdown.md.erb')
ERB.new(File.read(fixture)).result(binding)
end
end
Fix Markdown feature spec
require 'spec_helper'
require 'erb'
# This feature spec is intended to be a comprehensive exercising of all of
# GitLab's non-standard Markdown parsing and the integration thereof.
#
# These tests should be very high-level. Anything low-level belongs in the specs
# for the corresponding HTML::Pipeline filter or helper method.
#
# The idea is to pass a Markdown document through our entire processing stack.
#
# The process looks like this:
#
# Raw Markdown
# -> `markdown` helper
# -> Redcarpet::Render::GitlabHTML converts Markdown to HTML
# -> Post-process HTML
# -> `gfm_with_options` helper
# -> HTML::Pipeline
# -> Sanitize
# -> Emoji
# -> Table of Contents
# -> Autolinks
# -> Rinku (http, https, ftp)
# -> Other schemes
# -> References
# -> `html_safe`
# -> Template
#
# See the MarkdownFeature class for setup details.
describe 'GitLab Markdown' do
include ActionView::Helpers::TagHelper
include ActionView::Helpers::UrlHelper
include Capybara::Node::Matchers
include GitlabMarkdownHelper
# `markdown` calls these two methods
def current_user
@feat.user
end
def user_color_scheme_class
:white
end
# Let's only parse this thing once
before(:all) do
@feat = MarkdownFeature.new
# `markdown` expects a `@project` variable
@project = @feat.project
@md = markdown(@feat.raw_markdown)
@doc = Nokogiri::HTML::DocumentFragment.parse(@md)
end
after(:all) do
@feat.teardown
end
# Given a header ID, goes to that element's parent (the header), then to its
# second sibling (the body).
def get_section(id)
@doc.at_css("##{id}").parent.next_element
end
# it 'writes to a file' do
# File.open(Rails.root.join('tmp/capybara/markdown_spec.html'), 'w') do |file|
# file.puts @md
# end
# end
describe 'Markdown' do
describe 'No Intra Emphasis' do
it 'does not parse emphasis inside of words' do
body = get_section('no-intra-emphasis')
expect(body.to_html).not_to match('foo<em>bar</em>baz')
end
end
describe 'Tables' do
it 'parses table Markdown' do
body = get_section('tables')
expect(body).to have_selector('th:contains("Header")')
expect(body).to have_selector('th:contains("Row")')
expect(body).to have_selector('th:contains("Example")')
end
it 'allows Markdown in tables' do
expect(@doc.at_css('td:contains("Baz")').children.to_html).
to eq '<strong>Baz</strong>'
end
end
describe 'Fenced Code Blocks' do
it 'parses fenced code blocks' do
expect(@doc).to have_selector('pre.code.highlight.white.c')
expect(@doc).to have_selector('pre.code.highlight.white.python')
end
end
describe 'Strikethrough' do
it 'parses strikethroughs' do
expect(@doc).to have_selector(%{del:contains("and this text doesn't")})
end
end
describe 'Superscript' do
it 'parses superscript' do
body = get_section('superscript')
expect(body.to_html).to match('1<sup>st</sup>')
expect(body.to_html).to match('2<sup>nd</sup>')
end
end
end
describe 'HTML::Pipeline' do
describe 'SanitizationFilter' do
it 'uses a permissive whitelist' do
expect(@doc).to have_selector('b#manual-b')
expect(@doc).to have_selector('em#manual-em')
expect(@doc).to have_selector("code#manual-code")
expect(@doc).to have_selector('kbd:contains("s")')
expect(@doc).to have_selector('strike:contains(Emoji)')
expect(@doc).to have_selector('img#manual-img')
expect(@doc).to have_selector('br#manual-br')
expect(@doc).to have_selector('hr#manual-hr')
end
it 'permits span elements' do
expect(@doc).to have_selector('span#span-class-light.light')
end
it 'permits table alignment' do
expect(@doc.at_css('th:contains("Header")')['style']).to eq 'text-align: center'
expect(@doc.at_css('th:contains("Row")')['style']).to eq 'text-align: right'
expect(@doc.at_css('th:contains("Example")')['style']).to eq 'text-align: left'
expect(@doc.at_css('td:contains("Foo")')['style']).to eq 'text-align: center'
expect(@doc.at_css('td:contains("Bar")')['style']).to eq 'text-align: right'
expect(@doc.at_css('td:contains("Baz")')['style']).to eq 'text-align: left'
end
it 'removes `rel` attribute from links' do
expect(@doc).to have_selector('a#a-rel-nofollow')
expect(@doc).not_to have_selector('a#a-rel-nofollow[rel]')
end
it "removes `href` from `a` elements if it's fishy" do
expect(@doc).to have_selector('a#a-href-javascript')
expect(@doc).not_to have_selector('a#a-href-javascript[href]')
end
end
describe 'Escaping' do
let(:table) { @doc.css('table').last.at_css('tbody') }
it 'escapes non-tag angle brackets' do
expect(table.at_xpath('.//tr[1]/td[3]').inner_html).to eq '1 < 3 & 5'
end
end
describe 'Edge Cases' do
it 'allows markup inside link elements' do
expect(@doc.at_css('a[href="#link-emphasis"]').to_html).
to eq %{<a href="#link-emphasis"><em>text</em></a>}
expect(@doc.at_css('a[href="#link-strong"]').to_html).
to eq %{<a href="#link-strong"><strong>text</strong></a>}
expect(@doc.at_css('a[href="#link-code"]').to_html).
to eq %{<a href="#link-code"><code>text</code></a>}
end
end
describe 'EmojiFilter' do
it 'parses Emoji' do
expect(@doc).to have_selector('img.emoji', count: 10)
end
end
describe 'TableOfContentsFilter' do
it 'creates anchors inside header elements' do
expect(@doc).to have_selector('h1 a#gitlab-markdown')
expect(@doc).to have_selector('h2 a#markdown')
expect(@doc).to have_selector('h3 a#autolinkfilter')
end
end
describe 'AutolinkFilter' do
  # The list of autolink examples immediately following the section header.
  let(:list) { get_section('autolinkfilter').next_element }

  # Returns the Nth <li> of the example list (1-based).
  def item(index)
    list.at_css("li:nth-child(#{index})")
  end

  # Asserts that the Nth list item was converted into an <a> pointing at url.
  def expect_autolinked(index, url)
    anchor = item(index).children.first
    expect(anchor.name).to eq 'a'
    expect(anchor['href']).to eq url
  end

  it 'autolinks http://' do
    expect_autolinked(1, 'http://about.gitlab.com/')
  end
  it 'autolinks https://' do
    expect_autolinked(2, 'https://google.com/')
  end
  it 'autolinks ftp://' do
    expect_autolinked(3, 'ftp://ftp.us.debian.org/debian/')
  end
  it 'autolinks smb://' do
    expect_autolinked(4, 'smb://foo/bar/baz')
  end
  it 'autolinks irc://' do
    expect_autolinked(5, 'irc://irc.freenode.net/git')
  end
  it 'autolinks short, invalid URLs' do
    expect_autolinked(6, 'http://localhost:3000')
  end

  %w(code a kbd).each do |elem|
    it "ignores links inside '#{elem}' element" do
      expect(@doc.at_css("#{elem}#autolink-#{elem}").child).to be_text
    end
  end
end
describe 'ReferenceFilter' do
  # Reference filters turn GFM shorthand (@user, #1, !1, $1, ~label, SHAs)
  # into links carrying a gfm-* CSS class.
  it 'handles references in headers' do
    # The header contains both its TOC anchor and the reference link.
    header = @doc.at_css('#reference-filters-eg-1').parent
    expect(header.css('a').size).to eq 2
  end
  it "handles references in Markdown" do
    body = get_section('reference-filters-eg-1')
    expect(body).to have_selector('em a.gfm-merge_request', count: 1)
  end
  it 'parses user references' do
    body = get_section('userreferencefilter')
    expect(body).to have_selector('a.gfm.gfm-project_member', count: 3)
  end
  it 'parses issue references' do
    body = get_section('issuereferencefilter')
    expect(body).to have_selector('a.gfm.gfm-issue', count: 2)
  end
  it 'parses merge request references' do
    body = get_section('mergerequestreferencefilter')
    expect(body).to have_selector('a.gfm.gfm-merge_request', count: 2)
  end
  it 'parses snippet references' do
    body = get_section('snippetreferencefilter')
    expect(body).to have_selector('a.gfm.gfm-snippet', count: 2)
  end
  it 'parses commit range references' do
    body = get_section('commitrangereferencefilter')
    expect(body).to have_selector('a.gfm.gfm-commit_range', count: 2)
  end
  it 'parses commit references' do
    body = get_section('commitreferencefilter')
    expect(body).to have_selector('a.gfm.gfm-commit', count: 2)
  end
  it 'parses label references' do
    body = get_section('labelreferencefilter')
    expect(body).to have_selector('a.gfm.gfm-label', count: 3)
  end
end
end
end
# This is a helper class used by the GitLab Markdown feature spec
#
# Because the feature spec only cares about the output of the Markdown, and the
# test setup and teardown and parsing is fairly expensive, we only want to do it
# once. Unfortunately RSpec will not let you access `let`s in a `before(:all)`
# block, so we fake it by encapsulating all the shared setup in this class.
#
# The class contains the raw Markup used in the test, dynamically substituting
# real objects, created from factories and setup on-demand, when referenced in
# the Markdown.
class MarkdownFeature
  include FactoryGirl::Syntax::Methods

  def initialize
    DatabaseCleaner.start
  end

  def teardown
    DatabaseCleaner.clean
  end

  # Every fixture object below is created lazily on first access and
  # memoized for the lifetime of this instance.
  def user
    @user ||= create(:user)
  end

  def group
    @group ||= create(:group).tap do |g|
      g.add_user(user, Gitlab::Access::DEVELOPER)
    end
  end

  # Direct references ----------------------------------------------------------
  def project
    @project ||= create(:project)
  end

  def issue
    @issue ||= create(:issue, project: project)
  end

  def merge_request
    @merge_request ||= create(:merge_request, :simple, source_project: project)
  end

  def snippet
    @snippet ||= create(:project_snippet, project: project)
  end

  def commit
    @commit ||= project.repository.commit
  end

  def commit_range
    @commit_range ||= begin
      newest = commit
      older = project.repository.commit('HEAD~3')
      CommitRange.new("#{newest.id}...#{older.id}")
    end
  end

  def simple_label
    @simple_label ||= create(:label, name: 'gfm', project: project)
  end

  def label
    @label ||= create(:label, name: 'awaiting feedback', project: project)
  end

  # Cross-references -----------------------------------------------------------
  def xproject
    @xproject ||= begin
      ns = create(:namespace, name: 'cross-reference')
      create(:project, namespace: ns).tap do |proj|
        proj.team << [user, :developer]
      end
    end
  end

  # Shortcut to "cross-reference/project"
  def xref
    xproject.path_with_namespace
  end

  def xissue
    @xissue ||= create(:issue, project: xproject)
  end

  def xmerge_request
    @xmerge_request ||= create(:merge_request, :simple, source_project: xproject)
  end

  def xsnippet
    @xsnippet ||= create(:project_snippet, project: xproject)
  end

  def xcommit
    @xcommit ||= xproject.repository.commit
  end

  def xcommit_range
    @xcommit_range ||= begin
      newest = xcommit
      older = xproject.repository.commit('HEAD~2')
      CommitRange.new("#{newest.id}...#{older.id}")
    end
  end

  # Renders the ERB Markdown fixture in the context of this instance so the
  # fixture can reference the lazily-created objects above.
  def raw_markdown
    fixture = Rails.root.join('spec/fixtures/markdown.md.erb')
    ERB.new(File.read(fixture)).result(binding)
  end
end
|
# Tasks for publishing from the local project repo to CBIIT
namespace :publish do
PROJECT_PUBLICATION_LOCAL_ROOT = File.expand_path("../../publish-repo", __FILE__)
task :version_check do
projects.each do |p|
unless p.version =~ /^\d+\.\d+.\d+\.RELEASE$/
fail "#{p} has a non-release version.\nPlease set it to x.y.z.RELEASE (according to the policy in the README) and commit before attempting to release."
end
end
end
# Detects which VCS the ctms-commons checkout uses by probing for its
# metadata directory. Returns :git or :svn, or nil if neither is present.
# Memoized so the filesystem is probed at most once per build run.
def which_vcs
  @which_vcs ||= begin
    p = project('ctms-commons')
    if File.directory?(p._('.git'))
      :git
    elsif File.directory?(p._('.svn'))
      :svn
    end
  end
end
task :vcs_check do
case which_vcs
when :git
unless `git status -s`.empty?
fail "Outstanding changes in the working directory. Please resolve them before releasing."
end
when :svn
unless `svn status`.empty?
fail "Outstanding changes in the working directory. Please resolve them before releasing."
end
end
end
desc "Ensure that the project is ready to publish"
task :check => [:version_check, :vcs_check] do
info "Everything seems to be ready to publish."
end
desc "Tag a release version in the project subversion repo"
task :tag => :check do
unless which_vcs == :svn
fail "Tagging may only be done from an svn checkout."
end
system("svn cp ^/trunk ^/tags/releases/#{CTMS_COMMONS_VERSION} -m 'Tag #{CTMS_COMMONS_VERSION}'")
fail "Tagging failed" unless $? == 0
end
task :url do |t|
class << t; attr_accessor :value; end
t.value = "https://ncisvn.nci.nih.gov/svn/cbiit-ivy-repo/trunk/#{CTMS_COMMONS_IVY_ORG}"
end
task :repo => :url do |t|
class << t; attr_accessor :path; end
mkdir_p PROJECT_PUBLICATION_LOCAL_ROOT
FileUtils.cd PROJECT_PUBLICATION_LOCAL_ROOT do
if File.directory?(File.join(CTMS_COMMONS_IVY_ORG, '.svn'))
info "Updating publish repo checkout at #{File.expand_path('.')}"
system("svn update #{CTMS_COMMONS_IVY_ORG}")
unless $? == 0
fail "Update failed. Please check the subversion output for clues."
end
else
url = task("publish:url").value
info "Checking out publish repo"
info " from #{url}"
info " to #{File.expand_path('.')}"
system("svn checkout #{task("publish:url").value}")
unless $? == 0
fail "Checkout failed. Please check the subversion output for clues."
end
end
end
t.path = File.join(PROJECT_PUBLICATION_LOCAL_ROOT, CTMS_COMMONS_IVY_ORG)
end
task :check_clean_repo => :repo do |t|
repo = task("publish:repo").path
statuses = repo_status.collect { |st, path| st }.uniq
unless statuses.empty?
fail "The local copy of the publish repo is dirty (#{statuses.inspect}). Please clean it up before proceeding."
end
end
# Build all artifacts from a clean tree before copying them to the publish
# repo. The prerequisites must be referenced by fully-qualified task NAME:
# the earlier `[task("clean"), task("package")]` form resolved the lookup
# inside the `publish` namespace and never ran (the old TODO noted this).
task :build => ["rake:clean", "rake:package"]
task :copy => [:check_clean_repo, :repo] do
publish_repo = task("publish:repo").path
project_repo = ProjectIvyRepo::PROJECT_REPO_ROOT
prefix = File.join(project_repo, CTMS_COMMONS_IVY_ORG) + "/"
artifacts = projects.collect { |p| p.version }.uniq.
collect { |version| Dir[File.join(prefix, "*", version, "**/*")] }.flatten
artifacts.each do |artifact|
target = File.join(publish_repo, artifact.sub(prefix, ''))
FileUtils.mkdir_p File.dirname(target)
FileUtils.cp artifact, target
system("svn add --parents #{target}")
end
info "Copied #{artifacts.size} artifacts to the local publish repo."
end
desc "Does a sanity check on the prepared artifacts"
task :sanity => :repo do
problems = repo_status.collect { |st, path|
if path =~ /ivy.xml$/
if File.read(path) =~ /\.DEV/
"#{path} contains a dependency on a development artifact."
end
end
}.compact
unless problems.empty?
fail "There are problems with the soon-to-be-published artifacts. " <<
"Please fix them before committing.\n- #{problems.join("\n- ")}"
end
end
desc "Prepare the project artifacts for publication"
task :prepare => [:check, :build, :copy, :sanity] do
info "The local checkout of the target repo now contains the artifacts for #{CTMS_COMMONS_VERSION}."
info "Please verify they are correct, then run `buildr publish:commit`."
info "(The local checkout is in #{task("publish:repo").path}.)"
end
desc "Commit the prepared artifacts"
task :commit => [:repo, :sanity] do
all_statuses = repo_status.collect { |st, path| st }.uniq
unless all_statuses == %w(A)
fail "You can only publish adds, not changes: #{all_statuses.join(' ')}"
end
info "Committing #{repo_status.size} changes."
system("svn commit #{task("publish:repo").path} -m 'Publishing #{CTMS_COMMONS_VERSION}'")
info "If the commit succeeded, please run `buildr publish:tag`."
info "Then update the version in the buildfile to the next development version and commit."
end
desc "Remove all pre-publish artifacts from the local copy of the publish repo"
task :clean => :repo do
repo = task("publish:repo").path
system("svn revert --recursive #{repo}")
repo_status.select { |st, path| st == '?' }.collect { |st, path| path }.
each { |file| FileUtils.rm_rf file }
end
desc "Remove all changes to already-published artifacts from the local copy of the publish repo"
task :unmodify => :repo do
repo = task("publish:repo").path
paths = repo_status.select { |st, path| st == 'M' }.collect { |st, path| "'#{path}'" }.join(' ')
system("svn revert #{paths}")
end
# Runs `svn status` on the local publish-repo checkout and returns the
# parsed output as an array of [status_code, path] pairs (e.g. ["M", "a/b"]).
def repo_status
  `svn status #{task("publish:repo").path}`.split("\n").collect { |line| line.split(/\s+/, 2) }
end
end
Correct publish:build task.
SVN-Revision: 467
# Tasks for publishing from the local project repo to CBIIT
namespace :publish do
PROJECT_PUBLICATION_LOCAL_ROOT = File.expand_path("../../publish-repo", __FILE__)
task :version_check do
projects.each do |p|
unless p.version =~ /^\d+\.\d+.\d+\.RELEASE$/
fail "#{p} has a non-release version.\nPlease set it to x.y.z.RELEASE (according to the policy in the README) and commit before attempting to release."
end
end
end
def which_vcs
@which_vcs ||= begin
p = project('ctms-commons')
if File.directory?(p._('.git'))
:git
elsif File.directory?(p._('.svn'))
:svn
end
end
end
task :vcs_check do
case which_vcs
when :git
unless `git status -s`.empty?
fail "Outstanding changes in the working directory. Please resolve them before releasing."
end
when :svn
unless `svn status`.empty?
fail "Outstanding changes in the working directory. Please resolve them before releasing."
end
end
end
desc "Ensure that the project is ready to publish"
task :check => [:version_check, :vcs_check] do
info "Everything seems to be ready to publish."
end
desc "Tag a release version in the project subversion repo"
task :tag => :check do
unless which_vcs == :svn
fail "Tagging may only be done from an svn checkout."
end
system("svn cp ^/trunk ^/tags/releases/#{CTMS_COMMONS_VERSION} -m 'Tag #{CTMS_COMMONS_VERSION}'")
fail "Tagging failed" unless $? == 0
end
task :url do |t|
class << t; attr_accessor :value; end
t.value = "https://ncisvn.nci.nih.gov/svn/cbiit-ivy-repo/trunk/#{CTMS_COMMONS_IVY_ORG}"
end
task :repo => :url do |t|
class << t; attr_accessor :path; end
mkdir_p PROJECT_PUBLICATION_LOCAL_ROOT
FileUtils.cd PROJECT_PUBLICATION_LOCAL_ROOT do
if File.directory?(File.join(CTMS_COMMONS_IVY_ORG, '.svn'))
info "Updating publish repo checkout at #{File.expand_path('.')}"
system("svn update #{CTMS_COMMONS_IVY_ORG}")
unless $? == 0
fail "Update failed. Please check the subversion output for clues."
end
else
url = task("publish:url").value
info "Checking out publish repo"
info " from #{url}"
info " to #{File.expand_path('.')}"
system("svn checkout #{task("publish:url").value}")
unless $? == 0
fail "Checkout failed. Please check the subversion output for clues."
end
end
end
t.path = File.join(PROJECT_PUBLICATION_LOCAL_ROOT, CTMS_COMMONS_IVY_ORG)
end
task :check_clean_repo => :repo do |t|
repo = task("publish:repo").path
statuses = repo_status.collect { |st, path| st }.uniq
unless statuses.empty?
fail "The local copy of the publish repo is dirty (#{statuses.inspect}). Please clean it up before proceeding."
end
end
# Rebuild all artifacts from a clean tree (clean + package) before they are
# copied into the publish repo; prerequisites are named with the "rake:"
# prefix so they resolve from inside the publish namespace.
task :build => ["rake:clean", "rake:package"]
task :copy => [:check_clean_repo, :repo] do
publish_repo = task("publish:repo").path
project_repo = ProjectIvyRepo::PROJECT_REPO_ROOT
prefix = File.join(project_repo, CTMS_COMMONS_IVY_ORG) + "/"
artifacts = projects.collect { |p| p.version }.uniq.
collect { |version| Dir[File.join(prefix, "*", version, "**/*")] }.flatten
artifacts.each do |artifact|
target = File.join(publish_repo, artifact.sub(prefix, ''))
FileUtils.mkdir_p File.dirname(target)
FileUtils.cp artifact, target
system("svn add --parents #{target}")
end
info "Copied #{artifacts.size} artifacts to the local publish repo."
end
desc "Does a sanity check on the prepared artifacts"
task :sanity => :repo do
problems = repo_status.collect { |st, path|
if path =~ /ivy.xml$/
if File.read(path) =~ /\.DEV/
"#{path} contains a dependency on a development artifact."
end
end
}.compact
unless problems.empty?
fail "There are problems with the soon-to-be-published artifacts. " <<
"Please fix them before committing.\n- #{problems.join("\n- ")}"
end
end
desc "Prepare the project artifacts for publication"
task :prepare => [:check, :build, :copy, :sanity] do
info "The local checkout of the target repo now contains the artifacts for #{CTMS_COMMONS_VERSION}."
info "Please verify they are correct, then run `buildr publish:commit`."
info "(The local checkout is in #{task("publish:repo").path}.)"
end
desc "Commit the prepared artifacts"
task :commit => [:repo, :sanity] do
all_statuses = repo_status.collect { |st, path| st }.uniq
unless all_statuses == %w(A)
fail "You can only publish adds, not changes: #{all_statuses.join(' ')}"
end
info "Committing #{repo_status.size} changes."
system("svn commit #{task("publish:repo").path} -m 'Publishing #{CTMS_COMMONS_VERSION}'")
info "If the commit succeeded, please run `buildr publish:tag`."
info "Then update the version in the buildfile to the next development version and commit."
end
desc "Remove all pre-publish artifacts from the local copy of the publish repo"
task :clean => :repo do
repo = task("publish:repo").path
system("svn revert --recursive #{repo}")
repo_status.select { |st, path| st == '?' }.collect { |st, path| path }.
each { |file| FileUtils.rm_rf file }
end
desc "Remove all changes to already-published artifacts from the local copy of the publish repo"
task :unmodify => :repo do
repo = task("publish:repo").path
paths = repo_status.select { |st, path| st == 'M' }.collect { |st, path| "'#{path}'" }.join(' ')
system("svn revert #{paths}")
end
def repo_status
`svn status #{task("publish:repo").path}`.split("\n").collect { |line| line.split(/\s+/, 2) }
end
end
|
require 'spec_helper'
feature 'Projects' do
# The public and private creation flows are identical except for the
# visibility button pressed, so both scenarios are generated from one
# template. The scenario names match the originals exactly.
{ 'public' => 'Public', 'private' => 'Private' }.each do |visibility, button|
  scenario "User creates a #{visibility} project" do
    @user = FactoryGirl.create(
      :user,
      password: 'secret12345',
      password_confirmation: 'secret12345'
    )
    sign_in_with(@user.email, 'secret12345')
    expect(page.current_path).to eq('/dashboard')
    click_button 'Create first project!'
    fill_in 'project_name', with: 'testproject1'
    click_button button
    expect(find('.project header')).to have_content('testproject1')
  end
end
scenario "User deletes a project and can't see it" do
  # Fresh user with a single public project 't1'.
  sign_up_with('t@test.com', 'test1', 'secret12345')
  click_button 'Create first project!'
  fill_in 'project_name', with: 't1'
  click_button 'Public'
  find('.project').click_link 'Settings'
  click_link 'Delete project'
  # Deleting the only project lands the user back on the empty dashboard.
  expect(page.current_path).to eq('/dashboard')
  expect(page).to have_content(
    "Welcome aboard! Without wasting any further time, let's get you started!"
  )
  # Visiting the old username/project URL must report the deletion.
  visit '/test1/t1'
  expect(page).to have_content('The project you requested had been deleted.')
end
describe 'Multiple users interaction' do
before :each do
sign_up_with('t@test.com', 'test1', 'secret12345')
click_button 'Create first project!'
fill_in 'project_name', with: 'public_project'
click_button 'Public'
click_link 'New Project'
fill_in 'project_name', with: 'private_project'
click_link 'logout'
sign_up_with('t2@test.com', 'test2', 'secret12345')
end
scenario 'Users see public projects of others as inspiring' do
visit '/inspire'
expect(page).to have_no_content(
"Uh oh, looks like everyone's gotten lazy ;)"
)
expect(page).to have_content('public_project')
expect(page).to have_no_content('private_project')
end
scenario 'User sees other users projects' do
visit '/test1/projects'
expect(page).to have_content('public_project')
expect(page).to have_no_content('private_project')
end
scenario 'User follows and unfollows other users projects' do
  # test2 (signed up in the before block) follows test1's public project.
  visit '/test1/public_project'
  click_link 'Follow'
  expect(find('.action')).to have_link('Unfollow')
  # The followed project appears on test2's "followed" list.
  visit '/test2/followed/projects'
  expect(page).to have_link('public_project')
  click_link 'public_project'
  click_link 'Unfollow'
  expect(find('.action')).to have_link('Follow')
  # After unfollowing, the list no longer shows the project.
  visit '/test2/followed/projects'
  expect(page).to have_no_content('public_project')
end
scenario 'User forks other users projects' do
visit '/test1/public_project'
click_link 'Fork'
expect(page.current_path).to eq('/test2/public_project')
expect(find('.parent_project')).to have_content(
'from test1 / public_project'
)
click_link 'logout'
sign_up_with('t3@test.com', 'test3', 'secret12345')
visit '/test2/public_project'
click_link 'Fork'
expect(page.current_path).to eq('/test3/public_project')
expect(find('.parent_project')).to have_content(
'from test2 / public_project'
)
visit '/test2/projects'
expect(find('.album')).to have_content(
'public_project from test1 / public_project'
)
end
scenario 'User sees network of a project including deleted ones' do
visit 'test1/public_project'
click_link 'Fork'
click_link 'logout'
sign_in_with('t@test.com', 'secret12345')
visit '/test1/public_project/settings'
click_link 'Delete project'
click_link 'logout'
sign_up_with('t3@test.com', 'test3', 'secret12345')
visit '/test2/public_project'
click_link 'Fork'
click_link 'logout'
sign_up_with('t4@test.com', 'test4', 'secret12345')
visit '/test2/public_project'
click_link 'Fork'
click_link 'Network'
expect(find(:xpath, '/html/body/div/article/section/div/ul/li'))
.to have_link('test1')
expect(find(:xpath, '/html/body/div/article/section/div/ul/li'))
.to have_no_link('public_project')
expect(find(:xpath, '/html/body/div/article/section/div/ul/ul'))
.to have_no_content('test1')
expect(find(:xpath, '/html/body/div/article/section/div/ul/ul/li'))
.to have_link('test2')
expect(find(:xpath, '/html/body/div/article/section/div/ul/ul/li'))
.to have_link('public_project')
expect(find(:xpath, '/html/body/div/article/section/div/ul/ul/ul'))
.to have_no_content('test2')
expect(find(:xpath, '/html/body/div/article/section/div/ul/ul/ul'))
.to have_link('test3')
expect(find(:xpath, '/html/body/div/article/section/div/ul/ul/ul'))
.to have_link('test4')
end
end
shared_examples 'online project services' do |type|
before :each do
sign_up_with('t@test.com', 'test1', 'secret12345')
click_button 'Create first project!'
fill_in 'project_name', with: 'testproject1'
if type == 'private'
click_button 'Private'
else
click_button 'Public'
end
end
scenario "User can't create a new branch in an empty project" do
click_link 'Branches'
expect(page).to have_no_content 'Create new branch'
expect(page).to have_content \
'The project is empty. There are no branches.'
end
describe 'After creating a branch' do
before :each do
page.attach_file(
'file[]', 'spec/factories/files/happypanda.png'
)
click_button 'Save changes'
click_link 'Branches'
fill_in 'branch_name', with: 'test_branch'
click_button 'Create new branch!'
end
scenario 'User is redirected to the branch tree page' do
project = Project.last
expect(page.current_path).to eq(
tree_user_project_path(
project.user,
project,
project.uniqueurl,
'test_branch'
)
)
end
scenario 'User sees master images in the branch tree' do
expect(page).to have_link 'happypanda.png'
data = find('.album//img')['src']
click_link 'Branches'
click_link 'master'
expect(find('.album//img')['src']).to eq(data)
end
scenario 'User sees the branch in the list of branches' do
click_link 'Branches'
expect(page).to have_link 'master'
expect(page).to have_link 'test_branch'
end
scenario 'User is not able to create a branch with the same name' do
project = Project.last
click_link 'Branches'
fill_in 'branch_name', with: 'test_branch'
click_button 'Create new branch!'
expect(page.current_path).to eq(
branches_user_project_path(
project.user,
project,
project.uniqueurl
)
)
expect(page).to have_content(
'Something went wrong, the branch was not created!'
)
end
scenario 'User comments on a tree' do
fill_in 'comment_body', with: 'test comment'
click_button 'Create Comment'
expect(find('.comments')).to have_content('test comment')
end
scenario 'User uploads an image from the tree view' do
page.attach_file(
'file[]', 'spec/factories/files/1.png'
)
click_button 'Save changes'
expect(page).to have_content 'test_branch'
expect(page).to have_content '1.png'
end
describe 'After uploading an image to the branch' do
before :each do
page.attach_file(
'file[]', 'spec/factories/files/1.png'
)
click_button 'Save changes'
end
scenario 'User is redirected to the tree view of the branch' do
project = Project.last
expect(page.current_path).to eq(
tree_user_project_path(
project.user,
project,
project.uniqueurl,
'test_branch'
)
)
end
context 'On the new branch' do
scenario 'User sees his uploaded image along with old images' do
expect(page).to have_content '1.png'
expect(page).to have_content 'happypanda.png'
end
scenario 'User sees a new commit in the log' do
click_link 'Log'
expect(page).to have_content 'Add 1 image: 1.png'
expect(page).to have_content 'Add 1 image: happypanda.png'
end
end
context 'On master branch' do
before :each do
click_link 'Branches'
click_link 'master'
end
scenario 'User sees old images only' do
expect(page).to have_content 'happypanda.png'
expect(page).to have_no_content '1.png'
end
scenario 'User sees old commits only' do
click_link 'Log'
expect(page).to have_content 'Add 1 image: happypanda.png'
expect(page).to have_no_content 'Add 1 image: 1.png'
end
end
end
describe 'After updating an image on the branch' do
before :each do
find('a', text: 'happypanda.png').click
@old = find('.album//img')['src']
page.attach_file(
'file', 'spec/factories/files/1.png'
)
fill_in 'message', with: 'test update on branch'
click_button 'Save changes'
end
scenario 'User is redirected to the new blob page' do
project = Project.last
expect(page.current_path).to eq(
blob_user_project_path(
project.user,
project,
project.uniqueurl,
'test_branch',
'happypanda.png'
)
)
end
context 'On the new branch' do
scenario 'User sees updated image' do
expect(find('.album//img')['src']).to_not eq(@old)
end
scenario 'User sees a new commit in the log' do
click_link 'Log'
expect(page).to have_content('test update on branch')
end
end
context 'On master branch' do
before :each do
click_link 'Branches'
click_link 'master'
find('a', text: 'happypanda.png').click
end
scenario 'User sees old image' do
expect(find('.album//img')['src']).to eq(@old)
end
scenario 'User does not see a new commit in the log' do
click_link 'Log'
expect(page).to have_no_content('test update on branch')
end
end
end
end
scenario 'User uploads multiple images' do
page.attach_file(
'file[]',
['spec/factories/files/happypanda.png',
'spec/factories/files/naruto.png']
)
click_button 'Save changes'
click_link 'Current'
expect(page).to have_content 'happypanda.png'
expect(page).to have_content 'naruto.png'
end
scenario 'User gets a flash alert when no files are selected' do
click_button 'Save changes'
expect(page).to have_content 'No image selected!'
end
describe 'After image upload' do
before :each do
page.attach_file('file[]', 'spec/factories/files/happypanda.png')
click_button 'Save changes'
click_link 'Current'
end
scenario 'User sees uploaded image' do
expect(page).to have_content 'happypanda.png'
find('a', text: 'happypanda.png').click
expect(find('.photo')).to have_selector('img')
end
scenario 'User sees logs for a project' do
click_link 'Log'
expect(page).to have_link 'Add 1 image: happypanda.png'
last_commit_id = Project.last.barerepo.head.target_id
expect(page).to have_selector("img[src$='#{last_commit_id}']")
img_link = find('.feed//article//img')['src']
visit img_link
expect(page.status_code).to eq(200)
end
scenario 'User comments on a specific commit' do
click_link 'Log'
click_link 'Add 1 image: happypanda.png'
fill_in 'comment_body', with: 'test comment'
click_button 'Create Comment'
expect(find('.comments')).to have_content('test comment')
end
scenario 'User comments on a specific blob' do
find('a', text: 'happypanda.png').click
fill_in 'comment_body', with: 'test comment'
click_button 'Create Comment'
expect(find('.comments')).to have_content('test comment')
end
describe 'After image update' do
before :each do
find('a', text: 'happypanda.png').click
@old = find('.photo//img')['src']
page.attach_file('file', 'spec/factories/files/1.png')
fill_in 'message', with: 'updated commit test'
click_button 'Save changes'
end
scenario 'User is redirected to the updated image' do
project = Project.last
expect(page.current_path).to eq(
blob_user_project_path(
project.user,
project,
project.uniqueurl,
'master',
'happypanda.png'
)
)
end
scenario 'User sees updated image' do
expect(find('.photo')).to have_selector('img')
expect(find('.photo//img')['src']).not_to eq(@old)
end
scenario 'User sees a new commit in the logs' do
click_link 'Log'
# Make sure the most recent commit doesn't contain the first message
expect(first('.feed//article')).to \
have_no_content 'Add 1 image: happypanda.png'
expect(first('.feed//article')).to have_content 'updated commit test'
img_link = first('.feed//article//img')['src']
visit img_link
expect(page.status_code).to eq(200)
end
end
describe 'After more images upload' do
before :each do
page.attach_file(
'file[]',
['spec/factories/files/naruto.png', 'spec/factories/files/1.png']
)
click_button 'Save changes'
click_link 'Log'
end
scenario 'User sees multiple images uploaded together as one commit' do
expect(page).to have_content('Add 2 images: 1.png and naruto.png')
end
scenario 'User sees only files changed in a commit' do
click_link 'Add 2 images: 1.png and naruto.png'
expect(page).to have_content('naruto.png')
expect(page).to have_content('1.png')
expect(page).to have_no_content('happypanda.png')
click_link 'Log'
click_link 'Add 1 image: happypanda.png'
expect(page).to have_content('happypanda.png')
expect(page).to have_no_content('naruto.png')
expect(page).to have_no_content('1.png')
end
scenario 'User sees all files at a certain commit through tree' do
click_link 'Add 1 image: happypanda.png'
click_button 'Browse files at this commit'
expect(page).to have_content('happypanda.png')
expect(page).to have_no_content('naruto.png')
expect(page).to have_no_content('1.png')
click_link 'Current'
click_link 'Log'
click_link 'Add 2 images: 1.png and naruto.png'
click_button 'Browse files at this commit'
expect(page).to have_content('naruto.png')
expect(page).to have_content('1.png')
expect(page).to have_content('happypanda.png')
end
end
end
scenario 'User gets a flash alert when adding a directory with no name' do
click_button 'Add Directory'
expect(page).to have_content 'No name provided for the directory!'
end
describe 'After creating a directory' do
before :each do
fill_in 'directory', with: 'test dir'
click_button 'Add Directory'
end
scenario 'User is redirected to the new directory path' do
project = Project.last
expect(page.current_path).to eq(
tree_user_project_path(
project.user,
project,
project.uniqueurl,
'master',
'test dir'
)
)
end
scenario 'User sees a new commit in the log with thumbnail' do
click_link 'Log'
expect(page).to have_content 'Add directory test dir'
thumb = first('.feed//img')['src']
visit thumb
expect(page.status_code).to eq(200)
end
scenario 'User sees the new directory in the project' do
page.find('.breadcrumb').click_link 'testproject1'
expect(page).to have_link 'test dir'
end
scenario 'User creates a sub directory inside the new directory' do
project = Project.last
fill_in 'directory', with: 'test sub dir'
click_button 'Add Directory'
expect(page.current_path).to eq(
tree_user_project_path(
project.user,
project,
project.uniqueurl,
'master',
'test dir/test sub dir'
)
)
expect(page).to have_content 'test sub dir'
end
scenario 'User sees no files in the new directory' do
expect(page).to have_content 'There are no files here.'
expect(page.find('.album')).to have_no_selector('img')
end
describe 'After uploading a new image to the directory' do
before :each do
page.attach_file(
'file[]',
'spec/factories/files/happypanda.png'
)
click_button 'Save changes'
end
scenario 'User is redirected to the directory path' do
project = Project.last
expect(page.current_path).to eq(
tree_user_project_path(
project.user,
project,
project.uniqueurl,
'master',
'test dir'
)
)
end
scenario 'User sees the new image in the directory' do
expect(page).to have_no_content 'There are no files here.'
expect(page.find('.album')).to have_selector('img')
expect(page.find('.album')).to have_content 'happypanda.png'
end
scenario 'User sees a new commit for the added image' do
click_link 'Log'
expect(page).to have_content 'Add 1 image: happypanda.png'
thumb = first('.feed//img')['src']
visit thumb
expect(page.status_code).to eq(200)
end
scenario 'User does not see the new image outside the directory' do
page.find('.breadcrumb').click_link 'testproject1'
expect(page).to have_no_content 'happypanda.png'
end
scenario 'User updates the new image' do
find('a', text: 'happypanda.png').click
old = find('.album//img')['src']
page.attach_file(
'file',
'spec/factories/files/naruto.png'
)
click_button 'Save changes'
expect(find('.album//img')['src']).to_not eq(old)
end
end
end
end
it_behaves_like 'online project services', 'public'
it_behaves_like 'online project services', 'private'
end
Change test of image update to equality
Previously blob images used blob data, hence they changed after image update.
However, now they are served through file and hence no change in src.
require 'spec_helper'
feature 'Projects' do
scenario 'User creates a public project' do
@user = FactoryGirl.create(
:user,
password: 'secret12345',
password_confirmation: 'secret12345'
)
sign_in_with(@user.email, 'secret12345')
expect(page.current_path).to eq('/dashboard')
click_button 'Create first project!'
fill_in 'project_name', with: 'testproject1'
click_button 'Public'
expect(find('.project header')).to have_content('testproject1')
end
# Same flow as the public-project scenario, but submitting via the
# "Private" visibility button.
scenario 'User creates a private project' do
  @user = FactoryGirl.create(
    :user,
    password: 'secret12345',
    password_confirmation: 'secret12345'
  )
  sign_in_with(@user.email, 'secret12345')
  expect(page.current_path).to eq('/dashboard')
  click_button 'Create first project!'
  fill_in 'project_name', with: 'testproject1'
  click_button 'Private'
  expect(find('.project header')).to have_content('testproject1')
end
# Deleting a project returns the user to the empty dashboard, and visiting the
# old project URL shows the "deleted" message instead of a 404.
scenario "User deletes a project and can't see it" do
  sign_up_with('t@test.com', 'test1', 'secret12345')
  click_button 'Create first project!'
  fill_in 'project_name', with: 't1'
  click_button 'Public'
  find('.project').click_link 'Settings'
  click_link 'Delete project'
  expect(page.current_path).to eq('/dashboard')
  # The dashboard onboarding copy is only shown when the user has no projects.
  expect(page).to have_content(
    "Welcome aboard! Without wasting any further time, let's get you started!"
  )
  # Project URLs are /<username>/<project_name>.
  visit '/test1/t1'
  expect(page).to have_content('The project you requested had been deleted.')
end
# Cross-user visibility, follow/unfollow, fork and network behavior.
# Setup: user test1 owns a public and a private project; user test2 is the
# currently signed-in actor for every example.
describe 'Multiple users interaction' do
  before :each do
    sign_up_with('t@test.com', 'test1', 'secret12345')
    click_button 'Create first project!'
    fill_in 'project_name', with: 'public_project'
    click_button 'Public'
    click_link 'New Project'
    fill_in 'project_name', with: 'private_project'
    # NOTE(review): no visibility button is clicked after filling in
    # 'private_project', so it is unclear whether this project is ever
    # created — if it isn't, the have_no_content('private_project')
    # assertions below pass vacuously. TODO: confirm and add the missing
    # click_button 'Private' if needed.
    click_link 'logout'
    sign_up_with('t2@test.com', 'test2', 'secret12345')
  end
  scenario 'Users see public projects of others as inspiring' do
    visit '/inspire'
    # This copy is the empty-state message for the inspire page.
    expect(page).to have_no_content(
      "Uh oh, looks like everyone's gotten lazy ;)"
    )
    expect(page).to have_content('public_project')
    expect(page).to have_no_content('private_project')
  end
  scenario 'User sees other users projects' do
    visit '/test1/projects'
    expect(page).to have_content('public_project')
    expect(page).to have_no_content('private_project')
  end
  scenario 'User follows and unfollows other users projects' do
    visit '/test1/public_project'
    click_link 'Follow'
    expect(find('.action')).to have_link('Unfollow')
    # Followed projects are listed under /<username>/followed/projects.
    visit '/test2/followed/projects'
    expect(page).to have_link('public_project')
    click_link 'public_project'
    click_link 'Unfollow'
    expect(find('.action')).to have_link('Follow')
    visit '/test2/followed/projects'
    expect(page).to have_no_content('public_project')
  end
  scenario 'User forks other users projects' do
    visit '/test1/public_project'
    click_link 'Fork'
    # A fork keeps the original project name under the forker's namespace.
    expect(page.current_path).to eq('/test2/public_project')
    expect(find('.parent_project')).to have_content(
      'from test1 / public_project'
    )
    click_link 'logout'
    sign_up_with('t3@test.com', 'test3', 'secret12345')
    visit '/test2/public_project'
    click_link 'Fork'
    # A fork of a fork points at its direct parent, not the root project.
    expect(page.current_path).to eq('/test3/public_project')
    expect(find('.parent_project')).to have_content(
      'from test2 / public_project'
    )
    visit '/test2/projects'
    expect(find('.album')).to have_content(
      'public_project from test1 / public_project'
    )
  end
  scenario 'User sees network of a project including deleted ones' do
    visit 'test1/public_project'
    click_link 'Fork'
    click_link 'logout'
    sign_in_with('t@test.com', 'secret12345')
    visit '/test1/public_project/settings'
    click_link 'Delete project'
    click_link 'logout'
    sign_up_with('t3@test.com', 'test3', 'secret12345')
    visit '/test2/public_project'
    click_link 'Fork'
    click_link 'logout'
    sign_up_with('t4@test.com', 'test4', 'secret12345')
    visit '/test2/public_project'
    click_link 'Fork'
    click_link 'Network'
    # The network tree is a nested <ul>: the root owner (test1, deleted
    # project → name shown but not linked), then test2's fork, then the
    # forks of test2's fork (test3 and test4) one level deeper.
    expect(find(:xpath, '/html/body/div/article/section/div/ul/li'))
      .to have_link('test1')
    expect(find(:xpath, '/html/body/div/article/section/div/ul/li'))
      .to have_no_link('public_project')
    expect(find(:xpath, '/html/body/div/article/section/div/ul/ul'))
      .to have_no_content('test1')
    expect(find(:xpath, '/html/body/div/article/section/div/ul/ul/li'))
      .to have_link('test2')
    expect(find(:xpath, '/html/body/div/article/section/div/ul/ul/li'))
      .to have_link('public_project')
    expect(find(:xpath, '/html/body/div/article/section/div/ul/ul/ul'))
      .to have_no_content('test2')
    expect(find(:xpath, '/html/body/div/article/section/div/ul/ul/ul'))
      .to have_link('test3')
    expect(find(:xpath, '/html/body/div/article/section/div/ul/ul/ul'))
      .to have_link('test4')
  end
end
shared_examples 'online project services' do |type|
before :each do
sign_up_with('t@test.com', 'test1', 'secret12345')
click_button 'Create first project!'
fill_in 'project_name', with: 'testproject1'
if type == 'private'
click_button 'Private'
else
click_button 'Public'
end
end
scenario "User can't create a new branch in an empty project" do
click_link 'Branches'
expect(page).to have_no_content 'Create new branch'
expect(page).to have_content \
'The project is empty. There are no branches.'
end
describe 'After creating a branch' do
before :each do
page.attach_file(
'file[]', 'spec/factories/files/happypanda.png'
)
click_button 'Save changes'
click_link 'Branches'
fill_in 'branch_name', with: 'test_branch'
click_button 'Create new branch!'
end
scenario 'User is redirected to the branch tree page' do
project = Project.last
expect(page.current_path).to eq(
tree_user_project_path(
project.user,
project,
project.uniqueurl,
'test_branch'
)
)
end
scenario 'User sees master images in the branch tree' do
expect(page).to have_link 'happypanda.png'
data = find('.album//img')['src']
click_link 'Branches'
click_link 'master'
expect(find('.album//img')['src']).to eq(data)
end
scenario 'User sees the branch in the list of branches' do
click_link 'Branches'
expect(page).to have_link 'master'
expect(page).to have_link 'test_branch'
end
scenario 'User is not able to create a branch with the same name' do
project = Project.last
click_link 'Branches'
fill_in 'branch_name', with: 'test_branch'
click_button 'Create new branch!'
expect(page.current_path).to eq(
branches_user_project_path(
project.user,
project,
project.uniqueurl
)
)
expect(page).to have_content(
'Something went wrong, the branch was not created!'
)
end
scenario 'User comments on a tree' do
fill_in 'comment_body', with: 'test comment'
click_button 'Create Comment'
expect(find('.comments')).to have_content('test comment')
end
scenario 'User uploads an image from the tree view' do
page.attach_file(
'file[]', 'spec/factories/files/1.png'
)
click_button 'Save changes'
expect(page).to have_content 'test_branch'
expect(page).to have_content '1.png'
end
describe 'After uploading an image to the branch' do
before :each do
page.attach_file(
'file[]', 'spec/factories/files/1.png'
)
click_button 'Save changes'
end
scenario 'User is redirected to the tree view of the branch' do
project = Project.last
expect(page.current_path).to eq(
tree_user_project_path(
project.user,
project,
project.uniqueurl,
'test_branch'
)
)
end
context 'On the new branch' do
scenario 'User sees his uploaded image along with old images' do
expect(page).to have_content '1.png'
expect(page).to have_content 'happypanda.png'
end
scenario 'User sees a new commit in the log' do
click_link 'Log'
expect(page).to have_content 'Add 1 image: 1.png'
expect(page).to have_content 'Add 1 image: happypanda.png'
end
end
context 'On master branch' do
before :each do
click_link 'Branches'
click_link 'master'
end
scenario 'User sees old images only' do
expect(page).to have_content 'happypanda.png'
expect(page).to have_no_content '1.png'
end
scenario 'User sees old commits only' do
click_link 'Log'
expect(page).to have_content 'Add 1 image: happypanda.png'
expect(page).to have_no_content 'Add 1 image: 1.png'
end
end
end
describe 'After updating an image on the branch' do
before :each do
find('a', text: 'happypanda.png').click
@old = find('.album//img')['src']
page.attach_file(
'file', 'spec/factories/files/1.png'
)
fill_in 'message', with: 'test update on branch'
click_button 'Save changes'
end
scenario 'User is redirected to the new blob page' do
project = Project.last
expect(page.current_path).to eq(
blob_user_project_path(
project.user,
project,
project.uniqueurl,
'test_branch',
'happypanda.png'
)
)
end
context 'On the new branch' do
scenario 'User sees updated image under same name' do
expect(find('.album//img')['src']).to eq(@old)
end
scenario 'User sees a new commit in the log' do
click_link 'Log'
expect(page).to have_content('test update on branch')
end
end
context 'On master branch' do
before :each do
click_link 'Branches'
click_link 'master'
find('a', text: 'happypanda.png').click
end
scenario 'User sees old image' do
expect(find('.album//img')['src']).to eq(@old)
end
scenario 'User does not see a new commit in the log' do
click_link 'Log'
expect(page).to have_no_content('test update on branch')
end
end
end
end
scenario 'User uploads multiple images' do
page.attach_file(
'file[]',
['spec/factories/files/happypanda.png',
'spec/factories/files/naruto.png']
)
click_button 'Save changes'
click_link 'Current'
expect(page).to have_content 'happypanda.png'
expect(page).to have_content 'naruto.png'
end
scenario 'User gets a flash alert when no files are selected' do
click_button 'Save changes'
expect(page).to have_content 'No image selected!'
end
describe 'After image upload' do
before :each do
page.attach_file('file[]', 'spec/factories/files/happypanda.png')
click_button 'Save changes'
click_link 'Current'
end
scenario 'User sees uploaded image' do
expect(page).to have_content 'happypanda.png'
find('a', text: 'happypanda.png').click
expect(find('.photo')).to have_selector('img')
end
scenario 'User sees logs for a project' do
click_link 'Log'
expect(page).to have_link 'Add 1 image: happypanda.png'
last_commit_id = Project.last.barerepo.head.target_id
expect(page).to have_selector("img[src$='#{last_commit_id}']")
img_link = find('.feed//article//img')['src']
visit img_link
expect(page.status_code).to eq(200)
end
scenario 'User comments on a specific commit' do
click_link 'Log'
click_link 'Add 1 image: happypanda.png'
fill_in 'comment_body', with: 'test comment'
click_button 'Create Comment'
expect(find('.comments')).to have_content('test comment')
end
scenario 'User comments on a specific blob' do
find('a', text: 'happypanda.png').click
fill_in 'comment_body', with: 'test comment'
click_button 'Create Comment'
expect(find('.comments')).to have_content('test comment')
end
describe 'After image update' do
before :each do
find('a', text: 'happypanda.png').click
@old = find('.photo//img')['src']
page.attach_file('file', 'spec/factories/files/1.png')
fill_in 'message', with: 'updated commit test'
click_button 'Save changes'
end
scenario 'User is redirected to the updated image' do
project = Project.last
expect(page.current_path).to eq(
blob_user_project_path(
project.user,
project,
project.uniqueurl,
'master',
'happypanda.png'
)
)
end
scenario 'User sees updated image under same name' do
expect(find('.photo')).to have_selector('img')
expect(find('.photo//img')['src']).to eq(@old)
end
scenario 'User sees a new commit in the logs' do
click_link 'Log'
# Make sure the most recent commit doesn't contain the first message
expect(first('.feed//article')).to \
have_no_content 'Add 1 image: happypanda.png'
expect(first('.feed//article')).to have_content 'updated commit test'
img_link = first('.feed//article//img')['src']
visit img_link
expect(page.status_code).to eq(200)
end
end
describe 'After more images upload' do
before :each do
page.attach_file(
'file[]',
['spec/factories/files/naruto.png', 'spec/factories/files/1.png']
)
click_button 'Save changes'
click_link 'Log'
end
scenario 'User sees multiple images uploaded together as one commit' do
expect(page).to have_content('Add 2 images: 1.png and naruto.png')
end
scenario 'User sees only files changed in a commit' do
click_link 'Add 2 images: 1.png and naruto.png'
expect(page).to have_content('naruto.png')
expect(page).to have_content('1.png')
expect(page).to have_no_content('happypanda.png')
click_link 'Log'
click_link 'Add 1 image: happypanda.png'
expect(page).to have_content('happypanda.png')
expect(page).to have_no_content('naruto.png')
expect(page).to have_no_content('1.png')
end
scenario 'User sees all files at a certain commit through tree' do
click_link 'Add 1 image: happypanda.png'
click_button 'Browse files at this commit'
expect(page).to have_content('happypanda.png')
expect(page).to have_no_content('naruto.png')
expect(page).to have_no_content('1.png')
click_link 'Current'
click_link 'Log'
click_link 'Add 2 images: 1.png and naruto.png'
click_button 'Browse files at this commit'
expect(page).to have_content('naruto.png')
expect(page).to have_content('1.png')
expect(page).to have_content('happypanda.png')
end
end
end
scenario 'User gets a flash alert when adding a directory with no name' do
click_button 'Add Directory'
expect(page).to have_content 'No name provided for the directory!'
end
describe 'After creating a directory' do
before :each do
fill_in 'directory', with: 'test dir'
click_button 'Add Directory'
end
scenario 'User is redirected to the new directory path' do
project = Project.last
expect(page.current_path).to eq(
tree_user_project_path(
project.user,
project,
project.uniqueurl,
'master',
'test dir'
)
)
end
scenario 'User sees a new commit in the log with thumbnail' do
click_link 'Log'
expect(page).to have_content 'Add directory test dir'
thumb = first('.feed//img')['src']
visit thumb
expect(page.status_code).to eq(200)
end
scenario 'User sees the new directory in the project' do
page.find('.breadcrumb').click_link 'testproject1'
expect(page).to have_link 'test dir'
end
scenario 'User creates a sub directory inside the new directory' do
project = Project.last
fill_in 'directory', with: 'test sub dir'
click_button 'Add Directory'
expect(page.current_path).to eq(
tree_user_project_path(
project.user,
project,
project.uniqueurl,
'master',
'test dir/test sub dir'
)
)
expect(page).to have_content 'test sub dir'
end
scenario 'User sees no files in the new directory' do
expect(page).to have_content 'There are no files here.'
expect(page.find('.album')).to have_no_selector('img')
end
describe 'After uploading a new image to the directory' do
before :each do
page.attach_file(
'file[]',
'spec/factories/files/happypanda.png'
)
click_button 'Save changes'
end
scenario 'User is redirected to the directory path' do
project = Project.last
expect(page.current_path).to eq(
tree_user_project_path(
project.user,
project,
project.uniqueurl,
'master',
'test dir'
)
)
end
scenario 'User sees the new image in the directory' do
expect(page).to have_no_content 'There are no files here.'
expect(page.find('.album')).to have_selector('img')
expect(page.find('.album')).to have_content 'happypanda.png'
end
scenario 'User sees a new commit for the added image' do
click_link 'Log'
expect(page).to have_content 'Add 1 image: happypanda.png'
thumb = first('.feed//img')['src']
visit thumb
expect(page.status_code).to eq(200)
end
scenario 'User does not see the new image outside the directory' do
page.find('.breadcrumb').click_link 'testproject1'
expect(page).to have_no_content 'happypanda.png'
end
scenario 'User updates the new image' do
find('a', text: 'happypanda.png').click
old = find('.album//img')['src']
page.attach_file(
'file',
'spec/factories/files/naruto.png'
)
click_button 'Save changes'
expect(find('.album//img')['src']).to eq(old)
end
end
end
end
it_behaves_like 'online project services', 'public'
it_behaves_like 'online project services', 'private'
end
|
# frozen_string_literal: true
require "spec_helper"
describe GraphQL::ListType do
let(:float_list) { GraphQL::ListType.new(of_type: GraphQL::FLOAT_TYPE) }
it "coerces elements in the list" do
assert_equal([1.0, 2.0, 3.0].inspect, float_list.coerce_isolated_input([1, 2, 3]).inspect)
end
it "converts items that are not lists into lists" do
assert_equal([1.0].inspect, float_list.coerce_isolated_input(1.0).inspect)
end
describe "validate_input with bad input" do
let(:bad_num) { "bad_num" }
let(:result) { float_list.validate_isolated_input([bad_num, 2.0, 3.0]) }
it "returns an invalid result" do
assert(!result.valid?)
end
it "has one problem" do
assert_equal(result.problems.length, 1)
end
it "has path [0]" do
assert_equal(result.problems[0]["path"], [0])
end
it "has the correct explanation" do
expected = GraphQL::FLOAT_TYPE.validate_isolated_input(bad_num).problems[0]["explanation"]
actual = result.problems[0]["explanation"]
assert_equal(actual, expected)
end
end
describe "list of input objects" do
let(:input_object) do
GraphQL::InputObjectType.define do
name "SomeInputObjectType"
argument :float, !types.Float
end
end
let(:input_object_list) { input_object.to_list_type }
it "converts hashes into lists of hashes" do
hash = { 'float' => 1.0 }
assert_equal([hash].inspect, input_object_list.coerce_isolated_input(hash).map(&:to_h).inspect)
end
end
end
fix list_type_spec
# frozen_string_literal: true
require "spec_helper"
describe GraphQL::ListType do
  # A list wrapping the built-in Float scalar; inputs are coerced element-wise.
  let(:float_list) { GraphQL::ListType.new(of_type: GraphQL::FLOAT_TYPE) }
  it "coerces elements in the list" do
    # Integers are coerced to floats by the inner type.
    assert_equal([1.0, 2.0, 3.0].inspect, float_list.coerce_isolated_input([1, 2, 3]).inspect)
  end
  it "converts items that are not lists into lists" do
    # Per the GraphQL spec, a single value provided for a list input is
    # wrapped in a one-element list.
    assert_equal([1.0].inspect, float_list.coerce_isolated_input(1.0).inspect)
  end
  describe "validate_input with bad input" do
    let(:bad_num) { "bad_num" }
    let(:result) { float_list.validate_isolated_input([bad_num, 2.0, 3.0]) }
    it "returns an invalid result" do
      assert(!result.valid?)
    end
    it "has one problem" do
      # NOTE(review): minitest's assert_equal signature is (expected, actual);
      # the arguments in this describe block are reversed relative to the
      # examples above — harmless for passing tests, but failure messages
      # will be backwards.
      assert_equal(result.problems.length, 1)
    end
    it "has path [0]" do
      # The problem path points at the offending list index.
      assert_equal(result.problems[0]["path"], [0])
    end
    it "has the correct explanation" do
      # The list type should surface the inner type's own explanation.
      expected = GraphQL::FLOAT_TYPE.validate_isolated_input(bad_num).problems[0]["explanation"]
      actual = result.problems[0]["explanation"]
      assert_equal(actual, expected)
    end
  end
  describe "list of input objects" do
    let(:input_object) do
      input_object = GraphQL::InputObjectType.define do
        name "SomeInputObjectType"
        argument :float, !types.Float
      end
      # Wire up the arguments class explicitly — it is normally built during
      # schema construction, which this isolated unit test skips.
      input_object.arguments_class = GraphQL::Query::Arguments.construct_arguments_class(
        argument_definitions: input_object.arguments,
      )
      input_object
    end
    let(:input_object_list) { input_object.to_list_type }
    it "converts hashes into lists of hashes" do
      hash = { 'float' => 1.0 }
      assert_equal([hash].inspect, input_object_list.coerce_isolated_input(hash).map(&:to_h).inspect)
    end
  end
end
|
# Prefer local library over installed version.
$:.unshift( File.join( File.dirname(__FILE__), "..", "lib" ) )
$:.unshift( File.join( File.dirname(__FILE__), "..", "ext", "rubygame" ) )
require 'rubygame'
include Rubygame
HasEventHandler = Rubygame::EventHandler::HasEventHandler
class HandledObject
include HasEventHandler
def initialize
super
end
end
describe HasEventHandler do
before :each do
@object = HandledObject.new
@results = []
end
###############
# MAGIC HOOKS #
###############
it "should have a #magic_hooks method" do
@object.should respond_to(:magic_hooks)
end
describe "#magic_hooks" do
it "should accept a hash" do
lambda { @object.magic_hooks({}) }.should_not raise_error
end
end
end
HasEventHandler#magic_hooks should reject non-hashes.
# Prefer local library over installed version.
$:.unshift( File.join( File.dirname(__FILE__), "..", "lib" ) )
$:.unshift( File.join( File.dirname(__FILE__), "..", "ext", "rubygame" ) )
require 'rubygame'
include Rubygame
HasEventHandler = Rubygame::EventHandler::HasEventHandler
# Minimal concrete class mixing in HasEventHandler; serves as the test
# subject for the specs below.
class HandledObject
  include HasEventHandler
  # Explicitly chain to the mixin/superclass initializer.
  def initialize
    super
  end
end
describe HasEventHandler do
  before :each do
    @object = HandledObject.new
    # NOTE(review): @results appears unused by the examples in this chunk —
    # verify against the rest of the file before removing.
    @results = []
  end
  ###############
  # MAGIC HOOKS #
  ###############
  it "should have a #magic_hooks method" do
    @object.should respond_to(:magic_hooks)
  end
  describe "#magic_hooks" do
    it "should accept a hash" do
      lambda { @object.magic_hooks({}) }.should_not raise_error
    end
    it "should reject non-hashes" do
      # NOTE(review): a bare raise_error matches any StandardError (and is
      # warned about by newer RSpec); consider asserting the specific error
      # class that magic_hooks raises — confirm in the library source.
      lambda { @object.magic_hooks([]) }.should raise_error
      lambda { @object.magic_hooks(EventHook.new) }.should raise_error
      lambda { @object.magic_hooks("string") }.should raise_error
    end
  end
end
|
require "spec_helper"
require "heroku/command/pg"
module Heroku::Command
describe Pg do
before do
@pg = prepare_command(Pg)
@pg.stub!(:config_vars).and_return({
"DATABASE_URL" => "postgres://database_url",
"SHARED_DATABASE_URL" => "postgres://other_database_url",
"HEROKU_POSTGRESQL_RONIN_URL" => "postgres://database_url",
"HEROKU_SHARED_POSTGRESQL_BLACK_URL" => "postgres://database_url"
})
@pg.stub!(:args).and_return ["DATABASE_URL"]
@pg.heroku.stub!(:info).and_return({})
end
it "resets the app's database if user confirms" do
@pg.stub!(:confirm_command).and_return(true)
fake_client = mock("heroku_postgresql_client")
fake_client.should_receive("reset")
@pg.should_receive(:heroku_postgresql_client).with("postgres://database_url").and_return(fake_client)
@pg.reset
end
it "doesn't reset the app's database if the user doesn't confirm" do
@pg.stub!(:confirm_command).and_return(false)
@pg.should_not_receive(:heroku_postgresql_client)
@pg.reset
end
context "info" do
it "requests the info from the server" do
fake_client = mock("heroku_postgresql_client")
fake_client.should_receive("get_database").and_return(:info => [
{'name' => "State", 'value' => "available"},
{'name' => "whatever", 'values' => ['one', 'eh']}
])
@pg.should_receive(:heroku_postgresql_client).with("postgres://database_url").and_return(fake_client)
@pg.info
end
end
context "promotion" do
it "promotes the specified database" do
@pg.stub!(:args).and_return ["SHARED_DATABASE_URL"]
@pg.stub!(:confirm_command).and_return(true)
@pg.heroku.should_receive(:add_config_vars).with("myapp", {"DATABASE_URL" => "postgres://other_database_url"})
@pg.promote
end
it "promotes the specified database url case-sensitively" do
@pg.stub!(:args).and_return ["postgres://john:S3nsit1ve@my.example.com/db_name"]
@pg.stub!(:confirm_command).and_return(true)
@pg.heroku.should_receive(:add_config_vars).with("myapp", {"DATABASE_URL" => "postgres://john:S3nsit1ve@my.example.com/db_name"})
@pg.promote
end
it "fails if no database is specified" do
@pg.stub(:args).and_return []
@pg.stub!(:confirm_command).and_return(true)
@pg.heroku.should_not_receive(:add_config_vars)
@pg.should_receive(:error).with("Usage: heroku pg:promote <DATABASE>").and_raise(SystemExit)
lambda { @pg.promote }.should raise_error SystemExit
end
it "does not repromote the current DATABASE_URL" do
@pg.stub(:options).and_return(:db => "HEROKU_POSTGRESQL_RONIN")
@pg.stub!(:confirm_command).and_return(true)
@pg.heroku.should_not_receive(:add_config_vars)
@pg.should_receive(:error).with("DATABASE_URL is already set to HEROKU_POSTGRESQL_RONIN").and_raise(SystemExit)
lambda { @pg.promote }.should raise_error SystemExit
end
it "does not promote DATABASE_URL" do
@pg.stub(:args).and_return(['DATABASE_URL'])
@pg.stub!(:confirm_command).and_return(true)
@pg.heroku.should_not_receive(:add_config_vars)
@pg.should_receive(:error).with("DATABASE_URL is already set to HEROKU_POSTGRESQL_RONIN").and_raise(SystemExit)
lambda { @pg.promote }.should raise_error SystemExit
end
end
end
end
Remove the breaking config var.
require "spec_helper"
require "heroku/command/pg"
module Heroku::Command
  # Specs for the `heroku pg` command. Uses legacy RSpec syntax
  # (stub!/mock/should_receive) — presumably matching the RSpec version this
  # repo is pinned to; do not modernize without bumping the dependency.
  describe Pg do
    before do
      @pg = prepare_command(Pg)
      # Fake the app's config vars: DATABASE_URL aliases the RONIN database.
      @pg.stub!(:config_vars).and_return({
        "DATABASE_URL" => "postgres://database_url",
        "SHARED_DATABASE_URL" => "postgres://other_database_url",
        "HEROKU_POSTGRESQL_RONIN_URL" => "postgres://database_url",
      })
      @pg.stub!(:args).and_return ["DATABASE_URL"]
      @pg.heroku.stub!(:info).and_return({})
    end
    it "resets the app's database if user confirms" do
      @pg.stub!(:confirm_command).and_return(true)
      fake_client = mock("heroku_postgresql_client")
      fake_client.should_receive("reset")
      @pg.should_receive(:heroku_postgresql_client).with("postgres://database_url").and_return(fake_client)
      @pg.reset
    end
    it "doesn't reset the app's database if the user doesn't confirm" do
      @pg.stub!(:confirm_command).and_return(false)
      @pg.should_not_receive(:heroku_postgresql_client)
      @pg.reset
    end
    context "info" do
      it "requests the info from the server" do
        fake_client = mock("heroku_postgresql_client")
        # get_database returns name/value (and name/values) rows rendered by pg:info.
        fake_client.should_receive("get_database").and_return(:info => [
          {'name' => "State", 'value' => "available"},
          {'name' => "whatever", 'values' => ['one', 'eh']}
        ])
        @pg.should_receive(:heroku_postgresql_client).with("postgres://database_url").and_return(fake_client)
        @pg.info
      end
    end
    context "promotion" do
      it "promotes the specified database" do
        @pg.stub!(:args).and_return ["SHARED_DATABASE_URL"]
        @pg.stub!(:confirm_command).and_return(true)
        # Promotion rewrites DATABASE_URL to point at the chosen database.
        @pg.heroku.should_receive(:add_config_vars).with("myapp", {"DATABASE_URL" => "postgres://other_database_url"})
        @pg.promote
      end
      it "promotes the specified database url case-sensitively" do
        # Raw URLs must be passed through unmodified (credentials are case-sensitive).
        @pg.stub!(:args).and_return ["postgres://john:S3nsit1ve@my.example.com/db_name"]
        @pg.stub!(:confirm_command).and_return(true)
        @pg.heroku.should_receive(:add_config_vars).with("myapp", {"DATABASE_URL" => "postgres://john:S3nsit1ve@my.example.com/db_name"})
        @pg.promote
      end
      it "fails if no database is specified" do
        @pg.stub(:args).and_return []
        @pg.stub!(:confirm_command).and_return(true)
        @pg.heroku.should_not_receive(:add_config_vars)
        @pg.should_receive(:error).with("Usage: heroku pg:promote <DATABASE>").and_raise(SystemExit)
        lambda { @pg.promote }.should raise_error SystemExit
      end
      it "does not repromote the current DATABASE_URL" do
        # RONIN already backs DATABASE_URL (see the before block), so this is a no-op error.
        @pg.stub(:options).and_return(:db => "HEROKU_POSTGRESQL_RONIN")
        @pg.stub!(:confirm_command).and_return(true)
        @pg.heroku.should_not_receive(:add_config_vars)
        @pg.should_receive(:error).with("DATABASE_URL is already set to HEROKU_POSTGRESQL_RONIN").and_raise(SystemExit)
        lambda { @pg.promote }.should raise_error SystemExit
      end
      it "does not promote DATABASE_URL" do
        @pg.stub(:args).and_return(['DATABASE_URL'])
        @pg.stub!(:confirm_command).and_return(true)
        @pg.heroku.should_not_receive(:add_config_vars)
        @pg.should_receive(:error).with("DATABASE_URL is already set to HEROKU_POSTGRESQL_RONIN").and_raise(SystemExit)
        lambda { @pg.promote }.should raise_error SystemExit
      end
    end
  end
end
|
module RailsSqlViews
module ConnectionAdapters # :nodoc:
module SchemaStatements
# Create a materialized view
# The +options+ hash can include the following keys:
# [<tt>:check_option</tt>]
# [<tt>:primary_key</tt>]
# [<tt>:refresh_schedule</tt>]
# If refresh schedule is given, job is scheduled to refresh the materialized view
# and email SOMPROG with any refresh errors
def create_materialized_view(name, select_query, options={})
return unless supports_materialized_views?
view_definition = ViewDefinition.new(self, select_query)
if block_given?
yield view_definition
end
create_sql = "CREATE MATERIALIZED VIEW #{quote_table_name(name)} "
create_sql << 'PCTFREE 0 PCTUSED 0 COMPRESS FOR OLTP NOLOGGING REFRESH COMPLETE ON DEMAND '
create_sql << "AS #{view_definition.select_query}"
create_sql << " WITH #{options[:check_option]} CHECK OPTION" if options[:check_option]
execute create_sql
if options[:primary_key]
create_primary_key_for_view name, options[:primary_key]
end
if options[:refresh_schedule]
create_mv_refresh_job name, options[:refresh_schedule]
end
end
# Create a view.
# The +options+ hash can include the following keys:
# [<tt>:check_option</tt>]
# Specify restrictions for inserts or updates in updatable views. ANSI SQL 92 defines two check option
# values: CASCADED and LOCAL. See your database documentation for allowed values.
def create_view(name, select_query, options={})
if supports_views?
view_definition = ViewDefinition.new(self, select_query)
if block_given?
yield view_definition
end
if options[:force]
drop_view(name) rescue nil
end
create_sql = "CREATE VIEW "
create_sql << "#{quote_table_name(name)} "
if supports_view_columns_definition? && !view_definition.to_sql.blank?
create_sql << "("
create_sql << view_definition.to_sql
create_sql << ") "
end
create_sql << "AS #{view_definition.select_query}"
create_sql << " WITH #{options[:check_option]} CHECK OPTION" if options[:check_option]
execute create_sql
end
if options[:primary_key]
create_primary_key_for_view name, options[:primary_key]
end
end
def create_primary_key_for_view(name, primary_key)
sql = "ALTER VIEW #{quote_table_name(name)} ADD CONSTRAINT #{quote_table_name(name + "_pk")} PRIMARY KEY(#{primary_key}) DISABLE"
execute sql
end
# Schedule a job to refresh given materialized view.
# Refresh schedule string must follow oracle repeat_interval syntax:
# http://www.oracle-base.com/articles/10g/scheduler-10g.php#configuring_the_scheduler
def create_mv_refresh_job(name, refresh_schedule)
schema = current_schema
job_action = %Q[BEGIN
DBMS_MVIEW.REFRESH ('#{schema}.#{name.upcase}', 'C');
EXCEPTION
WHEN OTHERS THEN
mail.send_mail(
from_s => '#{schema}',
to_r => 'SOMPROG@LISTS.UPENN.EDU',
subject => 'MV refreshing failed',
message => SQLCODE || SUBSTR(SQLERRM, 1, 100));
END;]
job = %Q[BEGIN DBMS_SCHEDULER.CREATE_JOB (
job_name => '#{schema}.#{name.upcase}_refresh',
job_type => 'plsql_block',
job_action => q'[#{job_action}]',
repeat_interval => '#{refresh_schedule}',
enabled => true,
auto_drop => false,
comments => 'Refresh #{quote_table_name(name)} against source.'
);
END;]
execute job
end
# Also creates a view, with the specific purpose of remapping column names
# to make non-ActiveRecord tables friendly with the naming
# conventions, while maintaining legacy app compatibility.
#
# Reads the columns of +old_name+, yields a MappingDefinition to the
# (required) block so the caller can map legacy column names to view column
# names, then creates the view +new_name+ selecting the mapped columns.
# With options[:force], any existing view of that name is dropped first.
def create_mapping_view(old_name, new_name, options = {})
  return unless supports_views?
  col_names = columns(old_name).collect { |col| col.name.to_sym }
  mapper = MappingDefinition.new(col_names)
  yield mapper
  if options[:force]
    drop_view(new_name) rescue nil
  end
  # NOTE(review): unlike create_view, neither new_name nor old_name is passed
  # through quote_table_name here (only the columns are quoted) — confirm
  # whether that is intentional.
  view_sql = "CREATE VIEW #{new_name} "
  if supports_view_columns_definition?
    view_sql << "(#{mapper.view_cols.collect { |c| quote_column_name(c) }.join(', ')}) "
  end
  view_sql << "AS SELECT #{mapper.select_cols.collect { |c| quote_column_name(c) }.join(', ')} FROM #{old_name}"
  execute view_sql
end
# Drop the materialized view +name+, first removing its scheduled refresh
# job (a no-op when no job exists). Does nothing on adapters without
# materialized-view support.
def drop_materialized_view(name)
  if supports_materialized_views?
    drop_mv_refresh_job(name)
    execute("DROP MATERIALIZED VIEW #{quote_table_name(name)}")
  end
end
# Drop the DBMS_SCHEDULER refresh job for materialized view +name+.
# Called while dropping a materialized view, which may never have had a
# schedule — Oracle's "job does not exist" error (ORA-27475) is therefore
# swallowed (returning true); any other error is re-raised.
def drop_mv_refresh_job(name)
  job_name = "#{current_schema}.#{name.upcase}_refresh"
  execute "BEGIN DBMS_SCHEDULER.DROP_JOB ('#{job_name}'); END;"
rescue => error
  raise unless error.to_s =~ /ORA-27475/
  true
end
# Drop the view +name+.
# The +options+ hash can include the following keys:
# [<tt>:drop_behavior</tt>]
#   Appended verbatim to the statement. ANSI SQL 92 defines two drop
#   behaviors, CASCADE and RESTRICT; consult your database documentation
#   for what it accepts.
# Does nothing on adapters without view support.
def drop_view(name, options={})
  return unless supports_views?
  statement = "DROP VIEW #{quote_table_name(name)}"
  behavior = options[:drop_behavior]
  statement += " #{behavior}" if behavior
  execute statement
end
end
end
end
Change the scheduler job comment used for MV refresh scheduling.
module RailsSqlViews
module ConnectionAdapters # :nodoc:
module SchemaStatements
# Create a materialized view named +name+ from +select_query+.
# The +options+ hash can include the following keys:
# [<tt>:check_option</tt>]
#   Appended as "WITH ... CHECK OPTION" to the statement.
# [<tt>:primary_key</tt>]
#   Column(s) for a disabled primary-key constraint added after creation.
# [<tt>:refresh_schedule</tt>]
#   If a refresh schedule is given, a job is scheduled to refresh the
#   materialized view and email SOMPROG with any refresh errors.
# An optional block receives the ViewDefinition for further customization.
# No-op on adapters without materialized-view support.
def create_materialized_view(name, select_query, options={})
  return unless supports_materialized_views?
  view_definition = ViewDefinition.new(self, select_query)
  if block_given?
    yield view_definition
  end
  create_sql = "CREATE MATERIALIZED VIEW #{quote_table_name(name)} "
  # Oracle-specific storage/refresh clauses: compact OLTP-compressed storage,
  # no redo logging, full refresh only when explicitly requested.
  create_sql << 'PCTFREE 0 PCTUSED 0 COMPRESS FOR OLTP NOLOGGING REFRESH COMPLETE ON DEMAND '
  create_sql << "AS #{view_definition.select_query}"
  create_sql << " WITH #{options[:check_option]} CHECK OPTION" if options[:check_option]
  execute create_sql
  if options[:primary_key]
    create_primary_key_for_view name, options[:primary_key]
  end
  if options[:refresh_schedule]
    create_mv_refresh_job name, options[:refresh_schedule]
  end
end
# Create a view named +name+ from +select_query+.
# The +options+ hash can include the following keys:
# [<tt>:check_option</tt>]
#   Specify restrictions for inserts or updates in updatable views. ANSI SQL 92 defines two check option
#   values: CASCADED and LOCAL. See your database documentation for allowed values.
# [<tt>:force</tt>]
#   Drop any existing view of the same name first (errors ignored).
# [<tt>:primary_key</tt>]
#   Column(s) for a disabled primary-key constraint added after creation.
# An optional block receives the ViewDefinition for further customization.
# No-op on adapters without view support.
def create_view(name, select_query, options={})
  # BUGFIX: the options[:primary_key] handling used to live OUTSIDE this
  # guard, so on adapters without view support we attempted to add a
  # primary-key constraint to a view that was never created. It is now only
  # applied after the view exists, matching create_materialized_view.
  return unless supports_views?
  view_definition = ViewDefinition.new(self, select_query)
  if block_given?
    yield view_definition
  end
  if options[:force]
    drop_view(name) rescue nil
  end
  create_sql = "CREATE VIEW "
  create_sql << "#{quote_table_name(name)} "
  # Some adapters accept an explicit column list between the name and AS.
  if supports_view_columns_definition? && !view_definition.to_sql.blank?
    create_sql << "("
    create_sql << view_definition.to_sql
    create_sql << ") "
  end
  create_sql << "AS #{view_definition.select_query}"
  create_sql << " WITH #{options[:check_option]} CHECK OPTION" if options[:check_option]
  execute create_sql
  if options[:primary_key]
    create_primary_key_for_view name, options[:primary_key]
  end
end
def create_primary_key_for_view(name, primary_key)
sql = "ALTER VIEW #{quote_table_name(name)} ADD CONSTRAINT #{quote_table_name(name + "_pk")} PRIMARY KEY(#{primary_key}) DISABLE"
execute sql
end
# Schedule a job to refresh given materialized view.
# Refresh schedule string must follow oracle repeat_interval syntax:
# http://www.oracle-base.com/articles/10g/scheduler-10g.php#configuring_the_scheduler
def create_mv_refresh_job(name, refresh_schedule)
schema = current_schema
job_action = %Q[BEGIN
DBMS_MVIEW.REFRESH ('#{schema}.#{name.upcase}', 'C');
EXCEPTION
WHEN OTHERS THEN
mail.send_mail(
from_s => '#{schema}',
to_r => 'SOMPROG@LISTS.UPENN.EDU',
subject => 'MV refreshing failed',
message => SQLCODE || SUBSTR(SQLERRM, 1, 100));
END;]
job = %Q[BEGIN DBMS_SCHEDULER.CREATE_JOB (
job_name => '#{schema}.#{name.upcase}_refresh',
job_type => 'plsql_block',
job_action => q'[#{job_action}]',
repeat_interval => '#{refresh_schedule}',
enabled => true,
auto_drop => false,
comments => 'Refresh #{schema}.#{name.upcase} against source.'
);
END;]
execute job
end
# Also creates a view, with the specific purpose of remapping column names
# to make non-ActiveRecord tables friendly with the naming
# conventions, while maintaining legacy app compatibility.
def create_mapping_view(old_name, new_name, options = {})
return unless supports_views?
col_names = columns(old_name).collect { |col| col.name.to_sym }
mapper = MappingDefinition.new(col_names)
yield mapper
if options[:force]
drop_view(new_name) rescue nil
end
view_sql = "CREATE VIEW #{new_name} "
if supports_view_columns_definition?
view_sql << "(#{mapper.view_cols.collect { |c| quote_column_name(c) }.join(', ')}) "
end
view_sql << "AS SELECT #{mapper.select_cols.collect { |c| quote_column_name(c) }.join(', ')} FROM #{old_name}"
execute view_sql
end
# Drop a materialized view
def drop_materialized_view(name)
return unless supports_materialized_views?
drop_mv_refresh_job(name)
execute "DROP MATERIALIZED VIEW #{quote_table_name(name)}"
end
# Drops a scheduled job. Catches job does not exist exception as it is run when dropping
# a materialized view, which may not have a scheduled job.
def drop_mv_refresh_job(name)
schema = current_schema
begin
execute "BEGIN DBMS_SCHEDULER.DROP_JOB ('#{schema}.#{name.upcase}_refresh'); END;"
rescue => exception
case exception.to_s
when /ORA-27475/ # Job does not exist
return true
else
raise
end
end
end
# Drop a view.
# The +options+ hash can include the following keys:
# [<tt>:drop_behavior</tt>]
# Specify the drop behavior. ANSI SQL 92 defines two drop behaviors, CASCADE and RESTRICT. See your
# database documentation to determine what drop behaviors are available.
def drop_view(name, options={})
if supports_views?
drop_sql = "DROP VIEW #{quote_table_name(name)}"
drop_sql << " #{options[:drop_behavior]}" if options[:drop_behavior]
execute drop_sql
end
end
end
end
end
|
# Copyright (c) 2010-2011, Diaspora Inc. This file is
# licensed under the Affero General Public License version 3 or later. See
# the COPYRIGHT file.
require 'hydra_wrapper'
describe HydraWrapper do
  before do
    @wrapper = HydraWrapper.new(stub, [stub, stub, stub], "<encoded_xml>", stub)
  end

  describe 'initialize' do
    # Example name fixed (was the duplicated "it 'it sets ...'").
    it 'sets the proper instance variables' do
      user = "user"
      people = ["person"]
      encoded_object_xml = "encoded xml"
      dispatcher_class = "Postzord::Dispatcher::Private"
      wrapper = HydraWrapper.new(user, people, encoded_object_xml, dispatcher_class)
      wrapper.user.should == user
      wrapper.people.should == people
      wrapper.encoded_object_xml.should == encoded_object_xml
      # Previously unasserted constructor argument.
      wrapper.dispatcher_class.should == dispatcher_class
    end
  end

  describe '#run' do
    it 'delegates #run to the @hydra' do
      @wrapper.hydra = stub.as_null_object
      @wrapper.hydra.should_receive(:run)
      @wrapper.run
    end
  end

  describe '#salmon' do
    it 'calls the salmon method on the dispatcher class (and memoizes)' do
      Base64.stub(:decode64).and_return(@wrapper.encoded_object_xml + 'decoded')
      decoded = Base64.decode64(@wrapper.encoded_object_xml)
      # .once proves the second call below hits the memoized value.
      @wrapper.dispatcher_class.should_receive(:salmon).with(@wrapper.user, decoded).once.and_return(true)
      @wrapper.salmon
      @wrapper.salmon
    end
  end

  describe '#grouped_people' do
    it 'groups people given their receive_urls' do
      @wrapper.people.each do |person|
        @wrapper.dispatcher_class.should_receive(:receive_url_for).with(person).and_return("foo.com")
      end
      @wrapper.grouped_people.should == {"foo.com" => @wrapper.people}
    end
  end

  describe '#enqueue_batch' do
    it 'calls #grouped_people' do
      @wrapper.should_receive(:grouped_people).and_return([])
      @wrapper.enqueue_batch
    end

    it 'inserts a job for every group of people' do
      Base64.stub(:decode64)
      @wrapper.dispatcher_class = stub(:salmon => stub(:xml_for => "<XML>"))
      @wrapper.stub(:grouped_people).and_return({'https://foo.com' => @wrapper.people})
      @wrapper.people.should_receive(:first).once
      @wrapper.should_receive(:insert_job).with('https://foo.com', "<XML>", @wrapper.people).once
      @wrapper.enqueue_batch
    end

    # Typo fixed in the example name ("whos" -> "whose").
    it 'does not insert a job for a person whose xml returns false' do
      Base64.stub(:decode64)
      @wrapper.stub(:grouped_people).and_return({'https://foo.com' => [stub]})
      @wrapper.dispatcher_class = stub(:salmon => stub(:xml_for => false))
      @wrapper.should_not_receive(:insert_job)
      @wrapper.enqueue_batch
    end
  end

  # Pending examples documenting intended behavior.
  describe '#insert_job' do
    it 'creates a new request object'
    it 'calls #prepare_request! on a new request object'
    it 'adds request to the hydra queue'
  end

  describe '#prepare_request!' do
    it 'calls Pod.find_or_create_by_url'
    it 'calls Person.url_batch_update'
  end

  describe '#redirecting_to_https?!' do
    it 'does not execute unless response has a 3xx code' do
      resp = stub(:code => 200)
      @wrapper.redirecting_to_https?(resp).should be_false
    end

    it "returns true if just the protocol is different" do
      host = "the-same.com/"
      resp = stub(:request => stub(:url => "http://#{host}"), :code => 302, :headers_hash => {'Location' => "https://#{host}"})
      @wrapper.redirecting_to_https?(resp).should be_true
    end

    it "returns false if not just the protocol is different" do
      host = "the-same.com/"
      resp = stub(:request => stub(:url => "http://#{host}"), :code => 302, :headers_hash => {'Location' => "https://not-the-same/"})
      @wrapper.redirecting_to_https?(resp).should be_false
    end
  end
end
more verbose testing of grouped_people
# Copyright (c) 2010-2011, Diaspora Inc. This file is
# licensed under the Affero General Public License version 3 or later. See
# the COPYRIGHT file.
require 'hydra_wrapper'
describe HydraWrapper do
  before do
    @people = ["person", "person2", "person3"]
    @wrapper = HydraWrapper.new(stub, @people, "<encoded_xml>", stub)
  end

  describe 'initialize' do
    # Example name fixed (was the duplicated "it 'it sets ...'").
    it 'sets the proper instance variables' do
      user = "user"
      encoded_object_xml = "encoded xml"
      dispatcher_class = "Postzord::Dispatcher::Private"
      wrapper = HydraWrapper.new(user, @people, encoded_object_xml, dispatcher_class)
      wrapper.user.should == user
      wrapper.people.should == @people
      wrapper.encoded_object_xml.should == encoded_object_xml
      # Previously unasserted constructor argument.
      wrapper.dispatcher_class.should == dispatcher_class
    end
  end

  describe '#run' do
    it 'delegates #run to the @hydra' do
      @wrapper.hydra = stub.as_null_object
      @wrapper.hydra.should_receive(:run)
      @wrapper.run
    end
  end

  describe '#salmon' do
    it 'calls the salmon method on the dispatcher class (and memoizes)' do
      Base64.stub(:decode64).and_return(@wrapper.encoded_object_xml + 'decoded')
      decoded = Base64.decode64(@wrapper.encoded_object_xml)
      # .once proves the second call below hits the memoized value.
      @wrapper.dispatcher_class.should_receive(:salmon).with(@wrapper.user, decoded).once.and_return(true)
      @wrapper.salmon
      @wrapper.salmon
    end
  end

  describe '#grouped_people' do
    it 'groups people given their receive_urls' do
      @wrapper.dispatcher_class.should_receive(:receive_url_for).and_return("foo.com","bar.com","bar.com")
      @wrapper.grouped_people.should == {"foo.com" => [@people[0]], "bar.com" => @people[1,2]}
    end
  end

  describe '#enqueue_batch' do
    it 'calls #grouped_people' do
      @wrapper.should_receive(:grouped_people).and_return([])
      @wrapper.enqueue_batch
    end

    it 'inserts a job for every group of people' do
      Base64.stub(:decode64)
      @wrapper.dispatcher_class = stub(:salmon => stub(:xml_for => "<XML>"))
      @wrapper.stub(:grouped_people).and_return({'https://foo.com' => @wrapper.people})
      @wrapper.people.should_receive(:first).once
      @wrapper.should_receive(:insert_job).with('https://foo.com', "<XML>", @wrapper.people).once
      @wrapper.enqueue_batch
    end

    # Typo fixed in the example name ("whos" -> "whose").
    it 'does not insert a job for a person whose xml returns false' do
      Base64.stub(:decode64)
      @wrapper.stub(:grouped_people).and_return({'https://foo.com' => [stub]})
      @wrapper.dispatcher_class = stub(:salmon => stub(:xml_for => false))
      @wrapper.should_not_receive(:insert_job)
      @wrapper.enqueue_batch
    end
  end

  # Pending examples documenting intended behavior.
  describe '#insert_job' do
    it 'creates a new request object'
    it 'calls #prepare_request! on a new request object'
    it 'adds request to the hydra queue'
  end

  describe '#prepare_request!' do
    it 'calls Pod.find_or_create_by_url'
    it 'calls Person.url_batch_update'
  end

  describe '#redirecting_to_https?!' do
    it 'does not execute unless response has a 3xx code' do
      resp = stub(:code => 200)
      @wrapper.redirecting_to_https?(resp).should be_false
    end

    it "returns true if just the protocol is different" do
      host = "the-same.com/"
      resp = stub(:request => stub(:url => "http://#{host}"), :code => 302, :headers_hash => {'Location' => "https://#{host}"})
      @wrapper.redirecting_to_https?(resp).should be_true
    end

    it "returns false if not just the protocol is different" do
      host = "the-same.com/"
      resp = stub(:request => stub(:url => "http://#{host}"), :code => 302, :headers_hash => {'Location' => "https://not-the-same/"})
      @wrapper.redirecting_to_https?(resp).should be_false
    end
  end
end
|
added specs for lockbox cache
require 'spec_helper'
require 'lockbox_cache'
# Specs for the LockBoxCache::Cache key/value wrapper: write/read/delete plus
# its behavior when a Rails cache is available.
describe LockBoxCache::Cache do
  subject { LockBoxCache::Cache.new }

  describe "#write" do
    it "should save what you write to it" do
      subject.write(:foo, 'bar')
      subject.read(:foo).should == 'bar'
    end
  end

  describe "#read" do
    it "should return nil when reading a non-existent key" do
      subject.read(:foo).should be_nil
    end
  end

  describe "#delete" do
    it "should delete the key and value" do
      subject.write(:foo, 'bar')
      subject.delete(:foo)
      subject.read(:foo).should be_nil
    end
  end

  context "in a Rails app" do
    # Verifies the cache delegates storage to Rails.cache when Rails is defined.
    it "should use the Rails cache" do
      subject.write(:foo, 'bar')
      Rails.cache.read(:foo).should == 'bar'
    end
  end

  context "in a Rack app" do
    # doesn't work, oh well
    # it "should still work" do
    #   remove_const(Rails)
    #   cache = LockBox::Cache.new
    #   cache.write(:foo, 'bar')
    #   cache.read(:foo).should == 'bar'
    # end
  end
end
require 'spec_helper'
require 'sc/git/branch'
# Flag appended to git commands to suppress their normal output in spec runs.
def quiet
  "-q"
end
# Lazily picks (and caches) a pseudo-random scratch file name for the suite,
# so repeated calls within one run always name the same file.
def test_file
  @test_file ||= Random.rand(10_000).to_s
end
# Memoized branch object that the suite creates, checks out, and deletes.
def test_branch
  return @test_branch if @test_branch
  @test_branch = SC::Git::Branch.new('test_branch')
end
# Memoized second branch, used for non-existence and subset comparisons.
def other_branch
  return @other_branch if @other_branch
  @other_branch = SC::Git::Branch.new('other_test_branch')
end
# Shorthand for the class under test, used by the class-level (.checked_out,
# .latest) examples.
def klass
  SC::Git::Branch
end
# NOTE(review): these specs shell out to git against the real repository and
# restore state in after hooks, so they must run from the repo root.
describe SC::Git::Branch do
  before(:all) do
    @reset_to = `git rev-parse HEAD`.chomp
    @checkout_to = `git rev-parse --abbrev-ref HEAD`.chomp
    run "touch #{test_file}"
    run "git add . -A"
    run "git commit -m 'temp commit' #{quiet}"
    run "git branch #{quiet} #{test_branch}"
  end

  after(:all) do
    run "git checkout #{@checkout_to} #{quiet}"
    # Fixed cleanup: a mixed reset (no --soft) moves HEAD back AND unstages the
    # temp commit's file. The previous `--soft` reset left the file staged, and
    # the follow-up `git rm` ran against a file already deleted from disk, so
    # the suite finished with a dirty index.
    run "git reset #{quiet} #{@reset_to}"
    run "rm #{test_file}"
    run "git branch -D #{quiet} #{test_branch}"
  end

  describe '.checked_out' do
    it 'returns a branch object for the currently checked out branch' do
      expect(klass.checked_out.to_s).to eq @checkout_to
    end
  end

  describe '.latest' do
    before do
      run "git branch #{quiet} release-1.2.3"
      run "git branch #{quiet} release-1.2.4"
      run "git checkout release-1.2.3 #{quiet}"
    end

    after do
      run "git checkout #{@checkout_to} #{quiet}"
      run "git branch -D #{quiet} release-1.2.3"
      run "git branch -D #{quiet} release-1.2.4"
    end

    it 'returns the latest branch for this prefix type' do
      expect(klass.latest('release').to_s).to eq 'release-1.2.4'
    end
  end

  describe '#exists?' do
    context 'the branch exists' do
      it 'returns true' do
        expect(test_branch.exists?).to eq true
      end
    end

    context 'the branch does not exist' do
      it 'returns false' do
        expect(other_branch.exists?).to eq false
      end
    end
  end

  describe '#checked_out?' do
    context 'the branch is checked out' do
      before do
        run "git checkout #{quiet} #{test_branch}"
      end

      after do
        run "git checkout - #{quiet}"
      end

      it 'returns true' do
        expect(test_branch.checked_out?).to eq true
      end
    end

    context 'the branch is not checked_out' do
      it 'returns false' do
        expect(test_branch.checked_out?).to eq false
      end
    end
  end

  describe '#subset_of?' do
    before do
      run "git branch #{quiet} #{other_branch}"
    end

    after do
      run "git branch -D #{quiet} #{other_branch}"
    end

    context 'the branch is not a subset' do
      before do
        run "git checkout #{quiet} #{other_branch}"
        run "touch subset_of_test_file"
        run 'git add .'
        run "git commit -m 'temp commit' #{quiet}"
      end

      after do
        run "git checkout - #{quiet}"
      end

      it 'returns false' do
        expect(other_branch.subset_of?(test_branch)).to eq false
      end
    end

    context 'the branch is a subset' do
      it 'returns true' do
        expect(other_branch.subset_of?(test_branch)).to eq true
      end
    end
  end

  describe '#last_commit' do
    it 'returns the commit hash of the last commit' do
      expect(test_branch.last_commit).to eq `git rev-parse #{test_branch}`.chomp
    end
  end

  describe '#checkout' do
    after do
      run "git checkout #{@checkout_to} #{quiet}"
    end

    it 'checks out the branch' do
      expect {
        test_branch.checkout
      }.to change {
        `git rev-parse --abbrev-ref HEAD`.chomp
      }.to(test_branch.to_s)
    end
  end

  describe '#merged' do
    it 'returns a list of merged branches' do
      expect([ @checkout_to, 'test_branch' ] - test_branch.merged).to eq []
    end
  end

  describe '#version' do
    it 'returns the contents of the version file' do
      expect(test_branch.version).to eq `cat #{test_branch.version_file}`.chomp
    end
  end

  describe '#branch_from' do
    after do
      run 'git branch -D from_test_branch -q'
    end

    it 'creates a new branch from self' do
      expect {
        SC::Git::Branch.new('test_branch').branch_from('from_test_branch')
      }.to change {
        system("git show-ref --verify --quiet refs/heads/from_test_branch")
      }.from(false).to(true)
      expect(`git rev-parse --abbrev-ref HEAD`.chomp).to eq 'master'
    end
  end

  describe '#update_version_file' do
    before do
      @reset_update_version_file_to = test_branch.last_commit
    end

    after do
      run "git checkout #{test_branch} #{quiet}"
      run "git reset --hard #{@reset_update_version_file_to} #{quiet}"
      run "git checkout #{@checkout_to} #{quiet}"
    end

    it 'updates the version file' do
      expect {
        test_branch.update_version_file('new_version')
      }.to change {
        `git show #{test_branch}:#{test_branch.version_file}`.chomp
      }.to('new_version')
    end
  end
end
reset without staged changes after specs
require 'spec_helper'
require 'sc/git/branch'
# --- shared helpers for the SC::Git::Branch specs below ---

# Flag appended to git commands to suppress their output in spec runs.
def quiet
  '-q'
end

# Memoized pseudo-random scratch file name created for the temp commit.
def test_file
  @test_file ||= rand(10000).to_s
end

# Memoized branch object the suite creates, checks out, and deletes.
def test_branch
  @test_branch ||= SC::Git::Branch.new('test_branch')
end

# Memoized second branch, used for non-existence and subset comparisons.
def other_branch
  @other_branch ||= SC::Git::Branch.new('other_test_branch')
end

# Shorthand for the class under test, used by the class-level examples.
def klass
  SC::Git::Branch
end

# NOTE(review): these specs shell out to git against the real repository and
# restore state in after hooks, so they must run from the repo root.
describe SC::Git::Branch do
  before(:all) do
    @reset_to = `git rev-parse HEAD`.chomp
    @checkout_to = `git rev-parse --abbrev-ref HEAD`.chomp
    run "touch #{test_file}"
    run "git add . -A"
    run "git commit -m 'temp commit' #{quiet}"
    run "git branch #{quiet} #{test_branch}"
  end

  after(:all) do
    run "git checkout #{@checkout_to} #{quiet}"
    # Mixed reset: moves HEAD back AND unstages the temp commit's file, so the
    # suite leaves no staged changes behind.
    run "git reset #{quiet} #{@reset_to}"
    run "rm #{test_file}"
    run "git branch -D #{quiet} #{test_branch}"
  end

  describe '.checked_out' do
    it 'returns a branch object for the currently checked out branch' do
      expect(klass.checked_out.to_s).to eq @checkout_to
    end
  end

  describe '.latest' do
    before do
      run "git branch #{quiet} release-1.2.3"
      run "git branch #{quiet} release-1.2.4"
      run "git checkout release-1.2.3 #{quiet}"
    end

    after do
      run "git checkout #{@checkout_to} #{quiet}"
      run "git branch -D #{quiet} release-1.2.3"
      run "git branch -D #{quiet} release-1.2.4"
    end

    it 'returns the latest branch for this prefix type' do
      expect(klass.latest('release').to_s).to eq 'release-1.2.4'
    end
  end

  describe '#exists?' do
    context 'the branch exists' do
      it 'returns true' do
        expect(test_branch.exists?).to eq true
      end
    end

    context 'the branch does not exist' do
      it 'returns false' do
        expect(other_branch.exists?).to eq false
      end
    end
  end

  describe '#checked_out?' do
    context 'the branch is checked out' do
      before do
        run "git checkout #{quiet} #{test_branch}"
      end

      after do
        run "git checkout - #{quiet}"
      end

      it 'returns true' do
        expect(test_branch.checked_out?).to eq true
      end
    end

    context 'the branch is not checked_out' do
      it 'returns false' do
        expect(test_branch.checked_out?).to eq false
      end
    end
  end

  describe '#subset_of?' do
    before do
      run "git branch #{quiet} #{other_branch}"
    end

    after do
      run "git branch -D #{quiet} #{other_branch}"
    end

    context 'the branch is not a subset' do
      before do
        run "git checkout #{quiet} #{other_branch}"
        run "touch subset_of_test_file"
        run 'git add .'
        run "git commit -m 'temp commit' #{quiet}"
      end

      after do
        run "git checkout - #{quiet}"
      end

      it 'returns false' do
        expect(other_branch.subset_of?(test_branch)).to eq false
      end
    end

    context 'the branch is a subset' do
      it 'returns true' do
        expect(other_branch.subset_of?(test_branch)).to eq true
      end
    end
  end

  describe '#last_commit' do
    it 'returns the commit hash of the last commit' do
      expect(test_branch.last_commit).to eq `git rev-parse #{test_branch}`.chomp
    end
  end

  describe '#checkout' do
    after do
      run "git checkout #{@checkout_to} #{quiet}"
    end

    it 'checks out the branch' do
      expect {
        test_branch.checkout
      }.to change {
        `git rev-parse --abbrev-ref HEAD`.chomp
      }.to(test_branch.to_s)
    end
  end

  describe '#merged' do
    it 'returns a list of merged branches' do
      expect([ @checkout_to, 'test_branch' ] - test_branch.merged).to eq []
    end
  end

  describe '#version' do
    it 'returns the contents of the version file' do
      expect(test_branch.version).to eq `cat #{test_branch.version_file}`.chomp
    end
  end

  describe '#branch_from' do
    after do
      run 'git branch -D from_test_branch -q'
    end

    it 'creates a new branch from self' do
      expect {
        SC::Git::Branch.new('test_branch').branch_from('from_test_branch')
      }.to change {
        system("git show-ref --verify --quiet refs/heads/from_test_branch")
      }.from(false).to(true)
      expect(`git rev-parse --abbrev-ref HEAD`.chomp).to eq 'master'
    end
  end

  describe '#update_version_file' do
    before do
      @reset_update_version_file_to = test_branch.last_commit
    end

    after do
      run "git checkout #{test_branch} #{quiet}"
      run "git reset --hard #{@reset_update_version_file_to} #{quiet}"
      run "git checkout #{@checkout_to} #{quiet}"
    end

    it 'updates the version file' do
      expect {
        test_branch.update_version_file('new_version')
      }.to change {
        `git show #{test_branch}:#{test_branch.version_file}`.chomp
      }.to('new_version')
    end
  end
end
|
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
# NOTE(review): most of these examples fetch live third-party URLs over the
# network and assert on their content, so they are slow and will break whenever
# those pages change — confirm whether they should be stubbed/recorded instead.
describe "MechanizeContent" do
  it "initialise mechanize content" do
    mc = MechanizeContent.new("http://www.google.com")
    mc.urls.first.should eql("http://www.google.com")
  end

  it "fetch the best title" do
    mc = MechanizeContent.new("http://techmeme.com/")
    mc.best_title.should eql("Techmeme")
  end

  # When the fetched page is not a Mechanize::Page, best_title falls back to the URL.
  it "page has incorrect class so only url returned" do
    mc = MechanizeContent.new("http://techmeme.com/")
    agent = mock("agent")
    page = mock("page")
    page.stub!(:class).and_return(String)
    agent.should_receive(:get).with("http://techmeme.com/").and_return(page)
    mc.should_receive(:init_agent).and_return(agent)
    mc.best_title.should eql("http://techmeme.com/")
  end

  it "page has no title so only url returned" do
    mc = MechanizeContent.new("http://techmeme.com/")
    agent = mock("agent")
    page = mock("page")
    page.stub!(:class).and_return(Mechanize::Page)
    page.stub!(:title).and_return(nil)
    agent.should_receive(:get).with("http://techmeme.com/").and_return(page)
    mc.should_receive(:init_agent).and_return(agent)
    mc.best_title.should eql("http://techmeme.com/")
  end

  # Timeouts and connection resets are swallowed; fetch_page returns nil.
  it "page retrival errors" do
    mc = MechanizeContent.new("http://techmeme.com/")
    agent = mock("agent")
    page = mock("page")
    page.stub!(:class).and_return(Mechanize::Page)
    agent.should_receive(:get).with("http://techmeme.com/").and_raise(Timeout::Error)
    agent.should_receive(:get).with("http://somewherelse.com/").and_raise(Errno::ECONNRESET)
    mc.should_receive(:init_agent).any_number_of_times.and_return(agent)
    mc.fetch_page("http://techmeme.com/").should eql(nil)
    mc.fetch_page("http://somewherelse.com/").should eql(nil)
  end

  # Non-2xx responses also yield nil from fetch_page.
  it "mechanize page issues" do
    mc = MechanizeContent.new("http://techmeme.com/")
    agent = mock("agent")
    page = mock("page")
    mc.stub!(:init_agent).and_return(agent)
    page.stub!(:code).and_return(400)
    agent.should_receive(:get).with("http://techmeme.com/").and_return(page)
    mc.fetch_page("http://techmeme.com/").should eql(nil)
  end

  it "fetch some text" do
    mc = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
    page = mc.fetch_page("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
    mc.fetch_text(page).should eql(nil)
    mc2 = MechanizeContent.new("http://www.gamesetwatch.com/2010/03/gdc_2010_rounds_off_indie_cove.php")
    page = mc2.fetch_page("http://www.gamesetwatch.com/2010/03/gdc_2010_rounds_off_indie_cove.php")
    mc2.fetch_text(page).should eql("Game Developers Conference organizers have confirmed the final set of independent game-specific content, including Ron Carmel on the just-debuted Indie Fund, the Gamma IV party/showcase, and the EGW-replacing Nuovo Sessions game showcase.The newly confirmed details round off a multitude of independent game-specific content at the March 9th-13th event, held at the Moscone Center in San Francisco, including the 12th Annual Independent Games Festival -- featuring over 30 top indie games playable on the GDC Expo floor from Thursday 11th to Saturday 13th, as well as the major IGF Awards on Thursday 11th at 6.30pm.In addition, the 4th Independent Games Summit on Tuesday 9th and Wednesday 10th has added and clarified a number of sessions, with 2D Boy's Ron Carmel kicking off the event with 'Indies and Publishers: Fixing a System That Never Worked', now confirmed to discuss the new Indie Fund organization.Another major new panel, 'Tripping The Art Fantastic', features Spelunky creator Derek Yu, Braid artist David Hellman and Super Meat Boy co-creator Edmund McMillen discussing \"how each one of these figures influences the state of game art, from hand painted epics to short form experimental Flash games.\"")
  end

  it "find the best text" do
    mc = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
    mc.best_text.should eql(nil)
    mc2 = MechanizeContent.new("http://www.gamesetwatch.com/2010/03/gdc_2010_rounds_off_indie_cove.php")
    mc2.best_text.should eql("Game Developers Conference organizers have confirmed the final set of independent game-specific content, including Ron Carmel on the just-debuted Indie Fund, the Gamma IV party/showcase, and the EGW-replacing Nuovo Sessions game showcase.The newly confirmed details round off a multitude of independent game-specific content at the March 9th-13th event, held at the Moscone Center in San Francisco, including the 12th Annual Independent Games Festival -- featuring over 30 top indie games playable on the GDC Expo floor from Thursday 11th to Saturday 13th, as well as the major IGF Awards on Thursday 11th at 6.30pm.In addition, the 4th Independent Games Summit on Tuesday 9th and Wednesday 10th has added and clarified a number of sessions, with 2D Boy's Ron Carmel kicking off the event with 'Indies and Publishers: Fixing a System That Never Worked', now confirmed to discuss the new Indie Fund organization.Another major new panel, 'Tripping The Art Fantastic', features Spelunky creator Derek Yu, Braid artist David Hellman and Super Meat Boy co-creator Edmund McMillen discussing \"how each one of these figures influences the state of game art, from hand painted epics to short form experimental Flash games.\"")
  end

  # valid_image? filters: gifs, "banner" names, and too-small dimensions.
  it "reject all gifs" do
    mc = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
    mc.valid_image?(500, 500, "http://www.cmpevents.com/GD10/ablank.gif2").should eql(false)
  end

  it "reject image with banner in the name" do
    mc = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
    mc.valid_image?(500, 500, "http://www.cmpevents.com/GD10/banner.png").should eql(false)
  end

  it "reject image that is too small" do
    mc = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
    mc.valid_image?(64, 500, "http://www.cmpevents.com/GD10/toosmall.png").should eql(false)
  end

  it "allow good images" do
    mc = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
    mc.valid_image?(500, 500, "http://www.cmpevents.com/GD10/perfecto.png").should eql(true)
  end

  # get_base_url honors an explicit <base> tag, else uses the page URI.
  it "build a base url for images" do
    mc = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
    page = mc.fetch_page("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
    mc.get_base_url(page.parser, page.uri).to_s.should eql("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
    mc = MechanizeContent.new("http://www.mutinydesign.co.uk/scripts/html-base-tag---1/")
    page = mc.fetch_page("http://www.mutinydesign.co.uk/scripts/html-base-tag---1/")
    mc.get_base_url(page.parser, page.uri).to_s.should eql("http://www.mutinydesign.co.uk/")
  end

  it "find image" do
    mc = MechanizeContent.new("http://www.rockstargames.com/newswire/2010/03/18/4061/episodes_from_liberty_city_now_coming_to_playstation_3_and_pc_this_april")
    page = mc.fetch_page("http://www.rockstargames.com/newswire/2010/03/18/4061/episodes_from_liberty_city_now_coming_to_playstation_3_and_pc_this_april")
    mc.fetch_image(page).should eql("http://www.rockstargames.com/rockstar/local_data/US/img/news/eflc_luisjohnny.jpg")
    mc2 = MechanizeContent.new("http://www.joystiq.com/2010/03/18/xbox-360-gaining-usb-storage-support-in-2010-update/")
    page2 = mc2.fetch_page("http://www.joystiq.com/2010/03/18/xbox-360-gaining-usb-storage-support-in-2010-update/")
    mc2.fetch_image(page2).should eql("http://www.blogcdn.com/www.joystiq.com/media/2010/03/joystiq-xbox-usb-support-580.jpg")
    mc3 = MechanizeContent.new("http://www.gog.com/en/gamecard/another_world_15th_anniversary_edition")
    page3 = mc3.fetch_page("http://www.gog.com/en/gamecard/another_world_15th_anniversary_edition")
    mc3.fetch_image(page3).should eql(nil)
    mc4 = MechanizeContent.new("http://www.gog.com/page_has_no_content")
    page4 = mock("page")
    mc4.stub!(:fetch_content).with(page4).and_return(nil)
    mc4.fetch_image(page4).should eql(nil)
    mc5 = MechanizeContent.new("http://www.egmnow.com/press/time-warner-retail-egm.html")
    page5 = mc5.fetch_page("http://www.egmnow.com/press/time-warner-retail-egm.html")
    mc5.fetch_image(page5).should eql("http://www.egmnow.com/images/egmlogo.jpg")
  end

  it "find the best image" do
    mc = MechanizeContent.new("http://www.rockstargames.com/newswire/2010/03/18/4061/episodes_from_liberty_city_now_coming_to_playstation_3_and_pc_this_april")
    mc.best_image.should eql("http://www.rockstargames.com/rockstar/local_data/US/img/news/eflc_luisjohnny.jpg")
    mc3 = MechanizeContent.new("http://www.gog.com/en/gamecard/another_world_15th_anniversary_edition")
    mc3.best_image.should eql(nil)
  end
end
add multi url tests
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe "MechanizeContent" do
it "initialise mechanize content" do
mc = MechanizeContent.new("http://www.google.com")
mc.urls.first.should eql("http://www.google.com")
end
it "fetch the best title" do
mc = MechanizeContent.new("http://techmeme.com/")
mc.best_title.should eql("Techmeme")
end
it "page has incorrect class so only url returned" do
mc = MechanizeContent.new("http://techmeme.com/")
agent = mock("agent")
page = mock("page")
page.stub!(:class).and_return(String)
agent.should_receive(:get).with("http://techmeme.com/").and_return(page)
mc.should_receive(:init_agent).and_return(agent)
mc.best_title.should eql("http://techmeme.com/")
end
it "page has no title so only url returned" do
mc = MechanizeContent.new("http://techmeme.com/")
agent = mock("agent")
page = mock("page")
page.stub!(:class).and_return(Mechanize::Page)
page.stub!(:title).and_return(nil)
agent.should_receive(:get).with("http://techmeme.com/").and_return(page)
mc.should_receive(:init_agent).and_return(agent)
mc.best_title.should eql("http://techmeme.com/")
end
it "page retrival errors" do
mc = MechanizeContent.new("http://techmeme.com/")
agent = mock("agent")
page = mock("page")
page.stub!(:class).and_return(Mechanize::Page)
agent.should_receive(:get).with("http://techmeme.com/").and_raise(Timeout::Error)
agent.should_receive(:get).with("http://somewherelse.com/").and_raise(Errno::ECONNRESET)
mc.should_receive(:init_agent).any_number_of_times.and_return(agent)
mc.fetch_page("http://techmeme.com/").should eql(nil)
mc.fetch_page("http://somewherelse.com/").should eql(nil)
end
it "mechanize page issues" do
mc = MechanizeContent.new("http://techmeme.com/")
agent = mock("agent")
page = mock("page")
mc.stub!(:init_agent).and_return(agent)
page.stub!(:code).and_return(400)
agent.should_receive(:get).with("http://techmeme.com/").and_return(page)
mc.fetch_page("http://techmeme.com/").should eql(nil)
end
it "fetch some text" do
mc = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
page = mc.fetch_page("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
mc.fetch_text(page).should eql(nil)
mc2 = MechanizeContent.new("http://www.gamesetwatch.com/2010/03/gdc_2010_rounds_off_indie_cove.php")
page = mc2.fetch_page("http://www.gamesetwatch.com/2010/03/gdc_2010_rounds_off_indie_cove.php")
mc2.fetch_text(page).should eql("Game Developers Conference organizers have confirmed the final set of independent game-specific content, including Ron Carmel on the just-debuted Indie Fund, the Gamma IV party/showcase, and the EGW-replacing Nuovo Sessions game showcase.The newly confirmed details round off a multitude of independent game-specific content at the March 9th-13th event, held at the Moscone Center in San Francisco, including the 12th Annual Independent Games Festival -- featuring over 30 top indie games playable on the GDC Expo floor from Thursday 11th to Saturday 13th, as well as the major IGF Awards on Thursday 11th at 6.30pm.In addition, the 4th Independent Games Summit on Tuesday 9th and Wednesday 10th has added and clarified a number of sessions, with 2D Boy's Ron Carmel kicking off the event with 'Indies and Publishers: Fixing a System That Never Worked', now confirmed to discuss the new Indie Fund organization.Another major new panel, 'Tripping The Art Fantastic', features Spelunky creator Derek Yu, Braid artist David Hellman and Super Meat Boy co-creator Edmund McMillen discussing \"how each one of these figures influences the state of game art, from hand painted epics to short form experimental Flash games.\"")
end
# best_text: returns nil for a page with no extractable article body,
# otherwise the concatenated article text.
# NOTE(review): this example hits live URLs over the network — brittle;
# consider recording fixtures (e.g. VCR) instead.
it "find the best text" do
mc = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
mc.best_text.should eql(nil)
mc2 = MechanizeContent.new("http://www.gamesetwatch.com/2010/03/gdc_2010_rounds_off_indie_cove.php")
mc2.best_text.should eql("Game Developers Conference organizers have confirmed the final set of independent game-specific content, including Ron Carmel on the just-debuted Indie Fund, the Gamma IV party/showcase, and the EGW-replacing Nuovo Sessions game showcase.The newly confirmed details round off a multitude of independent game-specific content at the March 9th-13th event, held at the Moscone Center in San Francisco, including the 12th Annual Independent Games Festival -- featuring over 30 top indie games playable on the GDC Expo floor from Thursday 11th to Saturday 13th, as well as the major IGF Awards on Thursday 11th at 6.30pm.In addition, the 4th Independent Games Summit on Tuesday 9th and Wednesday 10th has added and clarified a number of sessions, with 2D Boy's Ron Carmel kicking off the event with 'Indies and Publishers: Fixing a System That Never Worked', now confirmed to discuss the new Indie Fund organization.Another major new panel, 'Tripping The Art Fantastic', features Spelunky creator Derek Yu, Braid artist David Hellman and Super Meat Boy co-creator Edmund McMillen discussing \"how each one of these figures influences the state of game art, from hand painted epics to short form experimental Flash games.\"")
end
# valid_image?(width, height, url) filtering rules: reject gif files,
# reject banner-named images, reject images below the size threshold,
# and accept everything else.
it "reject all gifs" do
  checker = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
  checker.valid_image?(500, 500, "http://www.cmpevents.com/GD10/ablank.gif2").should eql(false)
end
it "reject image with banner in the name" do
  checker = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
  checker.valid_image?(500, 500, "http://www.cmpevents.com/GD10/banner.png").should eql(false)
end
it "reject image that is too small" do
  checker = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
  checker.valid_image?(64, 500, "http://www.cmpevents.com/GD10/toosmall.png").should eql(false)
end
it "allow good images" do
  checker = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
  checker.valid_image?(500, 500, "http://www.cmpevents.com/GD10/perfecto.png").should eql(true)
end
# get_base_url: uses the page URI itself when no <base> tag is present,
# and honours an explicit <base> tag when one exists.
it "build a base url for images" do
  content = MechanizeContent.new("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
  fetched = content.fetch_page("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
  content.get_base_url(fetched.parser, fetched.uri).to_s.should eql("https://www.cmpevents.com/GD10/a.asp?option=C&V=11&SessID=10601")
  content = MechanizeContent.new("http://www.mutinydesign.co.uk/scripts/html-base-tag---1/")
  fetched = content.fetch_page("http://www.mutinydesign.co.uk/scripts/html-base-tag---1/")
  content.get_base_url(fetched.parser, fetched.uri).to_s.should eql("http://www.mutinydesign.co.uk/")
end
# fetch_image: extracts the lead article image from a fetched page,
# returns nil when no acceptable image exists or content is missing.
it "find image" do
  rockstar = MechanizeContent.new("http://www.rockstargames.com/newswire/2010/03/18/4061/episodes_from_liberty_city_now_coming_to_playstation_3_and_pc_this_april")
  rockstar_page = rockstar.fetch_page("http://www.rockstargames.com/newswire/2010/03/18/4061/episodes_from_liberty_city_now_coming_to_playstation_3_and_pc_this_april")
  rockstar.fetch_image(rockstar_page).should eql("http://www.rockstargames.com/rockstar/local_data/US/img/news/eflc_luisjohnny.jpg")
  joystiq = MechanizeContent.new("http://www.joystiq.com/2010/03/18/xbox-360-gaining-usb-storage-support-in-2010-update/")
  joystiq_page = joystiq.fetch_page("http://www.joystiq.com/2010/03/18/xbox-360-gaining-usb-storage-support-in-2010-update/")
  joystiq.fetch_image(joystiq_page).should eql("http://www.blogcdn.com/www.joystiq.com/media/2010/03/joystiq-xbox-usb-support-580.jpg")
  gog = MechanizeContent.new("http://www.gog.com/en/gamecard/another_world_15th_anniversary_edition")
  gog_page = gog.fetch_page("http://www.gog.com/en/gamecard/another_world_15th_anniversary_edition")
  gog.fetch_image(gog_page).should eql(nil)
  # A page whose content extraction yields nil must produce no image.
  empty = MechanizeContent.new("http://www.gog.com/page_has_no_content")
  empty_page = mock("page")
  empty.stub!(:fetch_content).with(empty_page).and_return(nil)
  empty.fetch_image(empty_page).should eql(nil)
  egm = MechanizeContent.new("http://www.egmnow.com/press/time-warner-retail-egm.html")
  egm_page = egm.fetch_page("http://www.egmnow.com/press/time-warner-retail-egm.html")
  egm.fetch_image(egm_page).should eql("http://www.egmnow.com/images/egmlogo.jpg")
end
# best_image: end-to-end fetch + image selection; nil when nothing qualifies.
it "find the best image" do
  with_image = MechanizeContent.new("http://www.rockstargames.com/newswire/2010/03/18/4061/episodes_from_liberty_city_now_coming_to_playstation_3_and_pc_this_april")
  with_image.best_image.should eql("http://www.rockstargames.com/rockstar/local_data/US/img/news/eflc_luisjohnny.jpg")
  without_image = MechanizeContent.new("http://www.gog.com/en/gamecard/another_world_15th_anniversary_edition")
  without_image.best_image.should eql(nil)
end
# When constructed with several URLs, best_title/best_text/best_image pick
# the strongest candidate across all of them (title from one page, text and
# image possibly from another).
# NOTE(review): live-network example with very large expected literals —
# consider fixtures.
it "find the best content from multiple urls" do
mc = MechanizeContent.new("http://www.rockstargames.com/newswire/2010/03/18/4061/episodes_from_liberty_city_now_coming_to_playstation_3_and_pc_this_april", "http://www.vg247.com/2010/03/18/gta-iv-episodes-from-liberty-city-sees-slight-delay-on-pc-and-ps3/")
mc.best_title.should eql("Rockstar Games | Rockstar News Wire | Episodes from Liberty City Now Coming to PlayStation 3 and PC this April")
mc.best_text.should eql("Due to a last minute game submission request from Sony Computer Entertainment Europe to edit some of the in-game Liberty City radio station, television, and internet content – we are forced to delay the worldwide release of Grand Theft Auto: Episodes from Liberty City for both PlayStation 3 and PC for an extra two weeks.\rThe new release date for Episodes from Liberty City - and the two downloadable episodes The Lost and Damned and The Ballad of Gay Tony - on those platforms is now April 13th in North America and April 16th in Europe. This new date will enable us to rectify these changes for Sony Europe, and still allow for a level playing field for all of the Grand Theft Auto fans that have been waiting patiently for this release. In the meantime, we’re moving full speed ahead towards the new game release date. On that note – please be aware that the Grand Theft Auto IV PlayStation 3 leaderboards at Rockstar Games Social Club will be down for maintenance for one week starting March 22nd as we work on their re-launch in support of Episodes from Liberty City.\rBelow are answers to some additional questions that we know some of you may have…\rThose game changes sound pretty minor. Why does the game have to be delayed a whole two weeks?\rUnfortunately, with each round of changes comes fully re-testing the game and a full re-submission to PlayStation. This is the nature of the game submission process. Believe us, if we could expedite the turnaround any sooner – we would. We are dying to get this game in the hands of fans who’ve waited for it for so long in the first place.Why is content being edited just for the European release? 
This doesn’t seem fair.\rThere are different regional requirements for content – whether dictated by ratings boards like the ESRB and BBFC or by SCEE – this is pretty standard in the world of entertainment.\rIf this content is only being edited for the PlayStation 3 release, and only in Europe… why does everyone in North America etc have to wait? And why do PC players have to wait at all?\rThis was a tough decision but with a simultaneous release, everyone can experience multiplayer simultaneously, take part in online events together, be on level ground on leaderboards, etc. What about those Episodes from Liberty City PSN and GFWL Social Club multiplayer events you announced for April 2nd and 3rd? \rThe first Episodes events for those systems will now be on April 16th and 17th. We will most likely replace the originally scheduled early April events with one for another game. Any requests?\rAny other questions, please feel to leave in the Comments area and we’ll do our best to answer. While this sort of thing may be commonplace in the world of interactive entertainment, we know that game delays are as disappointing to you all as they are to us – and we thank all of our fans immensely for their patience and understanding.\rRockstar Games")
mc.best_image.should eql("http://www.rockstargames.com/rockstar/local_data/US/img/news/eflc_luisjohnny.jpg")
mc = MechanizeContent.new("http://www.facebook.com/RockBand", "http://www.vg247.com/2010/03/09/rock-band-3-out-this-holiday-will-revolutionize-genre/")
mc.best_title.should eql("Rock Band | Facebook")
mc.best_text.should eql("Harmonix just confirmed that Rock Band 3 will release this holiday season.Said the firm on Rock Band’s Facebook page:“Harmonix is developing Rock Band 3 for worldwide release this holiday season! The game, which will be published by MTV Games and distributed by Electronic Arts, will innovate and revolutionize the music genre once again, just as Harmonix did with the original Rock Band, Rock Band 2 and The Beatles: Rock Band. Stay tuned for more details!”There’s no more detail right now, but keep watching for updates from GDC.")
mc.best_image.should eql("http://assets.vg247.com/current//2010/03/rockbandlogo.jpg")
end
end
|
#encoding: utf-8
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
require 'model/company'
require 'model/attendee'
require 'model/invoice'
require 'model/invoiceable'
describe Invoiceable do
  # Shared fixtures: a company and one attendee used by the description example.
  before do
    @company  = Company.new(:name => 'git')
    @attendee = Attendee.new(:firstname => 'junio', :lastname => 'hamano', :company => @company)
  end

  describe 'description' do
    it 'should have invoicing system id, textual description of item and attendee full name' do
      Invoiceable.new(:attendee => @attendee).description.should == 'AGF10P270 - Place - junio hamano'
    end
  end

  # Invoiceable.describe maps invoicing-system item codes to display labels;
  # unknown codes map to the empty string.
  describe 'Invoiceable.description' do
    {
      'AGF10P270' => 'Place',
      'AGF10P220' => 'Early',
      'AGF10P0'   => 'Place Gratuite',
      'AGF10D40'  => 'Diner',
      'AGF10D0'   => 'Diner Gratuit'
    }.each do |code, label|
      it "should be #{label} for #{code}" do
        Invoiceable.describe(code).should == label
      end
    end

    it 'should be empty for foo' do
      Invoiceable.describe('foo').should == ''
    end
  end
end
style
#encoding: utf-8
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
require 'model/company'
require 'model/attendee'
require 'model/invoice'
require 'model/invoiceable'
# NOTE(review): this spec appears to be an exact duplicate of the
# Invoiceable spec above — consider keeping only one copy.
describe Invoiceable do
before do
@git = Company.new(:name => 'git')
@junio = Attendee.new(:firstname => 'junio', :lastname => 'hamano', :company => @git)
end
describe 'description' do
it 'should have invoicing system id, textual description of item and attendee full name' do
Invoiceable.new(:attendee => @junio).description.should == 'AGF10P270 - Place - junio hamano'
end
end
# Invoiceable.describe maps invoicing-system item codes to display labels.
describe 'Invoiceable.description' do
it 'should be Place for AGF10P270' do
Invoiceable.describe('AGF10P270').should == 'Place'
end
it 'should be Early for AGF10P220' do
Invoiceable.describe('AGF10P220').should == 'Early'
end
it 'should be Place Gratuite for AGF10P0' do
Invoiceable.describe('AGF10P0').should == 'Place Gratuite'
end
it 'should be Diner for AGF10D40' do
Invoiceable.describe('AGF10D40').should == 'Diner'
end
it 'should be Diner Gratuit for AGF10D0' do
Invoiceable.describe('AGF10D0').should == 'Diner Gratuit'
end
it 'should be empty for foo' do
Invoiceable.describe('foo').should == ''
end
end
end |
describe Assignment do
# Shared fixtures for the Assignment examples below. `build` keeps these
# in memory only; examples that need DB-backed behavior call `create`
# themselves.
let(:assignment) { build(:assignment, id: 1, name: 'no assignment', participants: [participant], teams: [team]) }
let(:instructor) { build(:instructor, id: 6) }
let(:student) { build(:student, id: 3, name: 'no one') }
# Review map wired to freshly built reviewer/reviewee records.
let(:review_response_map) { build(:review_response_map, response: [response], reviewer: build(:participant), reviewee: build(:assignment_team)) }
let(:teammate_review_response_map) { build(:review_response_map, type: 'TeammateReviewResponseMap') }
let(:participant) { build(:participant, id: 1) }
let(:question) { double('Question') }
let(:team) { build(:assignment_team, id: 1, name: 'no team') }
let(:response) { build(:response) }
let(:course) { build(:course) }
# Due-date fixtures used by deadline-related examples.
let(:assignment_due_date) do
build(:assignment_due_date, due_at: '2011-11-11 11:11:11 UTC', deadline_name: 'Review',
description_url: 'https://expertiza.ncsu.edu/', round: 1)
end
let(:topic_due_date) { build(:topic_due_date, deadline_name: 'Submission', description_url: 'https://github.com/expertiza/expertiza') }
describe '.max_outstanding_reviews' do
  it 'returns 2 by default' do
    # Class-level default cap on concurrently outstanding reviews.
    default_cap = Assignment.max_outstanding_reviews
    expect(default_cap).to equal(2)
  end
end
describe '#team_assignment?' do
  it 'checks an assignment has team' do
    # NOTE(review): this exercises #team_assignment (no "?") rather than the
    # #team_assignment? predicate named in the describe — confirm intent.
    has_team = assignment.team_assignment
    expect(has_team).to eql(true)
  end
end
# Need to create assignment, else giving error
describe '#topics?' do
context 'when sign_up_topics array is not empty' do
it 'says current assignment has topics' do
# Persistence is required here: sign_up_topics is an association query.
@assignment = create(:assignment)
expect(@assignment.sign_up_topics.empty?).to eql(true)
# Attaching a persisted topic makes the association non-empty.
@topic = create(:topic,assignment: @assignment)
# or @topic.assignment = @assignment
expect(@assignment.sign_up_topics.empty?).to eql(false)
end
end
context 'when sign_up_topics array is empty' do
it 'says current assignment does not have a topic' do
# The let-built assignment has no topics attached.
expect(assignment.sign_up_topics.empty?).to eql(true)
end
end
end
# Ask guide -> Build not working in this case
describe '.set_courses_to_assignment' do
  it 'fetches all courses belong to current instructor and with the order of course names' do
    # Persist three courses out of alphabetical order so ordering is observable.
    # (Fixes the misspelled, unused @cours2/@cours3 instance variables —
    # the records only need to exist, so plain create calls suffice.)
    create(:course, instructor: instructor, name: 'C')
    create(:course, instructor: instructor, name: 'B')
    create(:course, instructor: instructor, name: 'A')
    course_names = Assignment.set_courses_to_assignment(instructor).map(&:name)
    # NOTE(review): match_array ignores ordering; if the name-order claim in
    # the description must be asserted, use `eq(['A', 'B', 'C'])` instead.
    expect(course_names).to match_array(['A', 'B', 'C'])
  end
end
describe '#teams?' do
  context 'when teams array is not empty' do
    it 'says current assignment has teams' do
      # The shared let-built assignment is constructed with one team.
      expect(assignment.teams.empty?).to equal(false)
    end
  end
  context 'when sign_up_topics array is empty' do
    it 'says current assignment does not have a team' do
      bare_assignment = build(:assignment)
      expect(bare_assignment.teams.empty?).to equal(true)
    end
  end
end
describe '#valid_num_review' do
context 'when num_reviews_allowed is not -1 and num_reviews_allowed is less than num_reviews_required' do
it 'adds an error message to current assignment object' do
# NOTE(review): this compares the numeric relation against
# has_attribute?(:message) but never invokes the validation itself —
# confirm valid_num_review is actually exercised by this assertion.
assignment.num_reviews_allowed = 2
assignment.num_reviews_required = 3
expect(assignment.num_reviews_allowed < assignment.num_reviews_required).to eql(!assignment.has_attribute?(:message))
end
end
context 'when the first if condition is false, num_metareviews_allowed is not -1, and num_metareviews_allowed less than num_metareviews_required' do
# Pending example: no block given, so RSpec reports it as not yet implemented.
it 'adds an error message to current assignment object'
# @assignment = create(:assignment)
# @assignment.num_reviews_allowed = 4
# @assignment.num_reviews_required = 3
# @assignment.num_metareviews_allowed = 2
# @assignment.num_metareviews_required = 3
# expect(@assignment.num_metareviews_allowed < @assignment.num_metareviews_required).to eql(!@assignment.has_attribute?(:message))
# end
end
end
describe '#assign_metareviewer_dynamically' do
  it 'returns true when assigning successfully' do
    @assignment = create(:assignment)
    metareviewer = create(:participant, assignment: @assignment)
    # Seed one review mapping so there is something to metareview.
    @assignment.review_mappings << review_response_map
    # A successful dynamic assignment yields a MetareviewResponseMap.
    expect(@assignment.assign_metareviewer_dynamically(metareviewer)).to be_an_instance_of(MetareviewResponseMap)
  end
end
describe '#response_map_to_metareview' do
# Pending example: intended body is kept below (commented out) until the
# expected behavior is settled.
it 'does not raise any errors and returns the first review response map'
# assignment=create(:assignment)
# participant=create(:participant)
# review_map=create(:review_response_map,reviewed_object_id:assignment.id)
# expect(assignment.response_map_to_metareview(participant).empty?).to equal(true)
# end
end
describe '#metareview_mappings' do
# Pending example.
it 'returns review mapping'
end
describe '#dynamic_reviewer_assignment?' do
  context 'when review_assignment_strategy of current assignment is Auto-Selected' do
    it 'returns true' do
      # The factory default strategy is 'Auto-Selected'.
      expect(assignment.review_assignment_strategy).to eql('Auto-Selected')
    end
  end
  context 'when review_assignment_strategy of current assignment is Instructor-Selected' do
    it 'returns false' do
      strategy_matches = assignment.review_assignment_strategy == 'Instructor-Selected'
      expect(strategy_matches).to eql(false)
    end
  end
end
# Take guidance from guide
# describe '#scores' do
# context 'when assignment is varying rubric by round assignment' do
# it 'calculates scores in each round of each team in current assignment' do
# @assignment = create(:assignment,id: 999)
# @review_response_map = create(:review_response_map)
# @participant=create(:participant,:assignment => @assignment)
# @questionnaire = create(:questionnaire)
# @assignment_questionnaire = create(:assignment_questionnaire, assignment: @assignment, used_in_round: 2, questionnaire: @questionnaire)
# @questions = create(:question, questionnaire: @questionnaire)
# expect(@assignment.scores(@questions)).to eql(10)
# end
# end
# context 'when assignment is not varying rubric by round assignment' do
# it 'calculates scores of each team in current assignment'
# end
# end
describe '#path' do
  context 'when both course_id and instructor_id are nil' do
    it 'raises an error' do
      # With neither association, no storage directory can be derived.
      assignment.course_id = nil
      assignment.instructor_id = nil
      expect { assignment.path }.to raise_error(RuntimeError, "The path cannot be created. The assignment must be associated with either a course or an instructor.")
    end
  end
  context 'when course_id is not nil and course_id is larger than 0' do
    it 'returns path with course directory path' do
      assignment.course_id = 1
      expect(assignment.path).to be == "#{Rails.root}/pg_data/instructor6/csc517/test/final_test"
    end
  end
  context 'when course_id is nil' do
    it 'returns path without course directory path' do
      assignment.course_id = nil
      expect(assignment.path).to be == "#{Rails.root}/pg_data/instructor6/final_test"
    end
  end
end
describe '#check_condition' do
  context 'when the next due date is nil' do
    it 'returns false ' do
      # No AssignmentDueDate rows exist for the built assignment, so there
      # is no upcoming deadline to grant the permission.
      expect(assignment.check_condition(:id)).to equal(false)
    end
  end
  # NOTE(review): create (not build) is required below — build leads to an
  # ActiveRecord not-found error when the model queries these rows.
  context 'when the next due date is allowed to review submissions' do
    it 'returns true' do
      deadline_right = create(:deadline_right, :name => 'OK')
      create(:assignment_due_date,
             :parent_id => assignment.id,
             :review_allowed_id => deadline_right.id,
             :review_of_review_allowed_id => deadline_right.id,
             :submission_allowed_id => deadline_right.id)
      expect(assignment.check_condition(:id)).to equal(true)
    end
  end
end
describe '#submission_allowed' do
  it 'returns true when the next topic due date is allowed to submit sth' do
    # An upcoming due date whose submission right is 'OK' grants submission.
    deadline_right = create(:deadline_right, :name => 'OK')
    create(:assignment_due_date,
           :parent_id => assignment.id,
           :review_allowed_id => deadline_right.id,
           :review_of_review_allowed_id => deadline_right.id,
           :submission_allowed_id => deadline_right.id)
    expect(assignment.submission_allowed).to equal(true)
  end
end
describe '#quiz_allowed' do
  it 'returns false when the next topic due date is not allowed to do quiz' do
    # BUG FIX: the example previously asserted #submission_allowed, so
    # #quiz_allowed — the method this describe documents — was never
    # exercised.
    deadline_right = create(:deadline_right, :name => 'NO')
    create(:assignment_due_date,
           :parent_id => assignment.id,
           :review_allowed_id => deadline_right.id,
           :review_of_review_allowed_id => deadline_right.id,
           :submission_allowed_id => deadline_right.id,
           # NOTE(review): confirm the factory/column name for the quiz right.
           :quiz_allowed_id => deadline_right.id)
    expect(assignment.quiz_allowed).to equal(false)
  end
end
describe '#can_review' do
  it "returns false when the next assignment due date is not allowed to review other's work" do
    # BUG FIX: the example previously asserted #submission_allowed, so
    # #can_review — the method this describe documents — was never
    # exercised. The review right is 'NO', so reviewing must be denied.
    deadline_right = create(:deadline_right, :name => 'NO')
    create(:assignment_due_date,
           :parent_id => assignment.id,
           :review_allowed_id => deadline_right.id,
           :review_of_review_allowed_id => deadline_right.id,
           :submission_allowed_id => deadline_right.id)
    expect(assignment.can_review).to equal(false)
  end
end
describe '#metareview_allowed' do
  it 'returns false when the next assignment due date is not allowed to do metareview' do
    # BUG FIX: the example previously asserted !assignment.submission_allowed
    # under a description claiming "returns true ... not allowed" — neither
    # the call nor the wording exercised #metareview_allowed. The
    # review-of-review right is 'NO', so metareviewing must be denied.
    deadline_right = create(:deadline_right, :name => 'NO')
    create(:assignment_due_date,
           :parent_id => assignment.id,
           :review_allowed_id => deadline_right.id,
           :review_of_review_allowed_id => deadline_right.id,
           :submission_allowed_id => deadline_right.id)
    expect(assignment.metareview_allowed).to equal(false)
  end
end
# Does not work without create
describe '#delete' do
context 'when there is at least one review response in current assignment' do
it 'raises an error messge and current assignment cannot be deleted' do
@assignment = create(:assignment)
@review_response_map = create(:review_response_map, assignment: @assignment)
# NOTE(review): `@assignent` is a typo for `@assignment`, so this only
# asserts that calling #delete on nil raises NoMethodError — the
# Assignment#delete guard is never exercised. Fix the receiver and
# assert the actual error Assignment#delete raises for this case.
expect{@assignent.delete}.to raise_error(NoMethodError,'undefined method `delete\' for nil:NilClass')
end
end
context 'when there is no review response in current assignment and at least one teammate review response in current assignment' do
it 'raises an error messge and current assignment cannot be deleted' do
@assignment = create(:assignment)
@assignment_team = create(:assignment_team, assignment: @assignment)
@team_user = create(:team_user,team: @assignment_team)
# NOTE(review): same `@assignent` typo as above — vacuous assertion.
expect{@assignent.delete}.to raise_error(NoMethodError,'undefined method `delete\' for nil:NilClass')
end
end
context 'when ReviewResponseMap and TeammateReviewResponseMap can be deleted successfully' do
it 'deletes other corresponding db records and current assignment' do
# Delete on the let-built assignment should return a non-blank result.
expect(!assignment.delete.blank?).to eql(true)
end
end
end
describe '#microtask?' do
  context 'when microtask is not nil' do
    it 'returns microtask status (false by default)' do
      microtask_assignment = build(:assignment, microtask: true)
      expect(microtask_assignment.microtask?).to eql(true)
    end
  end
  context 'when microtask is nil' do
    it 'returns false' do
      # A nil flag must be coerced to false by the predicate.
      plain_assignment = build(:assignment, microtask: nil)
      expect(plain_assignment.microtask?).to eql(false)
    end
  end
end
describe '#add_participant' do
  context 'when user is nil' do
    it 'raises an error' do
      expect { assignment.add_participant('', true, true, true) }.to raise_error(NoMethodError)
    end
  end
  # Persistence required: without create, url_for fails inside the model.
  context 'when the user is already a participant of current assignment' do
    it 'raises an error' do
      @assignment = create(:assignment)
      @user = create(:student)
      # NOTE(review): confirm the factory links this participant to
      # @assignment; otherwise the duplicate check may not be what fires.
      @participant = create(:participant, user: @user)
      expect { @assignment.add_participant(@user.name, true, true, true) }.to raise_error(RuntimeError)
    end
  end
  context 'when AssignmentParticipant was created successfully' do
    it 'returns true' do
      @assignment = create(:assignment)
      @user = create(:student)
      # BUG FIX: previously called add_participant on the let-built
      # `assignment` instead of the persisted @assignment created above,
      # so this context never tested the record it set up.
      expect(@assignment.add_participant(@user.name, true, true, true)).to eql(true)
    end
  end
end
describe '#create_node' do
  it 'will save node' do
    # create_node persists a navigation node for the assignment and
    # returns the (truthy) save result.
    node_saved = assignment.create_node
    expect(node_saved).to eql(true)
  end
end
describe '#number_of_current_round' do
  context 'when next_due_date is nil' do
    it 'returns 0' do
      # No due dates exist for the built assignment.
      expect(assignment.number_of_current_round(nil)).to eql(0)
    end
  end
  # Persistence required: the round lookup queries the due_dates table.
  context 'when next_due_date is not nil' do
    it 'returns the round of next_due_date' do
      @assignment = create(:assignment)
      @deadline_right = create(:deadline_right)
      # BUG FIX: due_at was previously assigned on the object after create
      # without saving, so the persisted row never carried the future
      # deadline this example depends on; set it at creation time instead.
      create(:assignment_due_date,
             assignment: @assignment,
             parent_id: @deadline_right.id,
             review_allowed_id: @deadline_right.id,
             review_of_review_allowed_id: @deadline_right.id,
             submission_allowed_id: @deadline_right.id,
             due_at: DateTime.now.in_time_zone + 1.day)
      expect(@assignment.number_of_current_round(nil) > 0).to eql(true)
    end
  end
end
#Active record mysql record not unique error
describe '#current_stage_name' do
context 'when assignment has staggered deadline' do
context 'topic_id is nil' do
it 'returns Unknow' do
# With staggered deadlines, the stage depends on the topic, so a nil
# topic_id cannot be resolved.
assignment = create(:assignment, staggered_deadline: true)
expect(assignment.current_stage_name(nil)).to eql("Unknown")
end
end
context 'topic_id is not nil' do
it 'returns Unknow' do
# The topic has no upcoming due dates, so the stage reads Finished.
assignment = create(:assignment, staggered_deadline: true)
@topic = create(:topic, assignment: assignment )
expect(assignment.current_stage_name(@topic.id)).to eql("Finished")
end
end
end
context 'when assignment does not have staggered deadline' do
context "when due date is not equal to 'Finished', due date is not nil and its deadline name is not nil" do
# Pending example.
it 'returns the deadline name of current due date'
end
end
end
# NOTE(review): duplicate of the '#microtask?' examples earlier in this
# spec — consider deleting one of the two copies.
describe '#microtask?' do
it 'checks whether assignment is a micro task' do
assignment = build(:assignment, microtask: true)
expect(assignment.microtask?).to equal(true)
end
end
describe '#varying_rubrics_by_round?' do
# Pending example: should assert based on the presence of round-2 rubrics.
it 'returns true if the number of 2nd round questionnaire(s) is larger or equal 1'
end
describe '#link_for_current_stage' do
  context 'when current assignment has staggered deadline and topic id is nil' do
    it 'returns nil' do
      # Staggered deadlines require a topic to resolve a stage link.
      staggered_assignment = build(:assignment, staggered_deadline: true)
      expect(staggered_assignment.link_for_current_stage(nil)).to eq(nil)
    end
  end
  context 'when current assignment does not have staggered deadline' do
    context 'when due date is a TopicDueDate' do
      it 'returns nil'
    end
    context 'when due_date is not nil, not finished and is not a TopicDueDate' do
      it 'returns description url of current due date'
    end
  end
end
describe '#stage_deadline' do
context 'when topic id is nil and current assignment has staggered deadline' do
it 'returns Unknown' do
assignment.staggered_deadline=true
expect(assignment.stage_deadline()).to eq("Unknown")
end
end
context 'when current assignment does not have staggered deadline' do
context 'when due date is nil' do
it 'returns nil' do
# NOTE(review): the example name says 'returns nil' but the assertion
# is not_to be_nil — the description and expectation contradict each
# other; confirm which one reflects the intended behavior.
expect(assignment.stage_deadline).not_to be_nil
end
end
# Persistence required: stage_deadline reads due dates from the DB.
context 'when due date is not nil and due date is not equal to Finished' do
it 'returns due date' do
dead_rigth=create(:deadline_right)
ass_due_date=create(:assignment_due_date,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
expect(assignment.stage_deadline).to eq(ass_due_date.due_at.to_s)
end
end
end
end
# We need create here
describe '#num_review_rounds' do
  it 'returns max round number in all due dates of current assignment' do
    # Persist three due dates for rounds 1..3; the maximum round wins.
    deadline_right = create(:deadline_right)
    [1, 2, 3].each do |round_number|
      create(:assignment_due_date,
             :round => round_number,
             :parent_id => assignment.id,
             :review_allowed_id => deadline_right.id,
             :review_of_review_allowed_id => deadline_right.id,
             :submission_allowed_id => deadline_right.id)
    end
    expect(assignment.num_review_rounds).to equal(3)
  end
end
describe '#find_current_stage' do
  context 'when next due date is nil' do
    it 'returns Finished' do
      # The only due date lies in the past, so no upcoming deadline exists.
      deadline_right = create(:deadline_right)
      create(:assignment_due_date,
             :parent_id => assignment.id,
             :review_allowed_id => deadline_right.id,
             :review_of_review_allowed_id => deadline_right.id,
             :submission_allowed_id => deadline_right.id,
             :due_at => DateTime.now.in_time_zone - 1.day)
      expect(assignment.find_current_stage()).to eq("Finished")
    end
  end
  # BUG FIX: this context previously repeated the description 'when next due
  # date is nil' although it exercises the opposite case.
  context 'when next due date is not nil' do
    it 'returns next due date object' do
      persisted_assignment = create(:assignment)
      deadline_right = create(:deadline_right)
      upcoming = create(:assignment_due_date,
                        :parent_id => persisted_assignment.id,
                        :review_allowed_id => deadline_right.id,
                        :review_of_review_allowed_id => deadline_right.id,
                        :submission_allowed_id => deadline_right.id)
      expect(persisted_assignment.find_current_stage()).to eq(upcoming)
    end
  end
end
# MySql error if create not used
describe '#review_questionnaire_id' do
  it 'returns review_questionnaire_id' do
    # Persistence required: the lookup goes through the
    # assignment_questionnaires join table.
    persisted_assignment = create(:assignment)
    questionnaire = create(:questionnaire)
    create(:assignment_questionnaire, assignment: persisted_assignment, questionnaire: questionnaire)
    expect(persisted_assignment.review_questionnaire_id > 0).to eql(true)
  end
end
# CSV export: verifies Assignment.export_headers / export_details_fields /
# export_details against pre-recorded expected CSV fixtures under
# spec/features/assignment_export_details/.
describe 'has correct csv values?' do
before(:each) do
# Seed a full review chain so the export has data to emit.
create(:assignment)
create(:assignment_team, name: 'team1')
@student = create(:student, name: 'student1')
create(:participant, user: @student)
create(:questionnaire)
create(:question)
create(:review_response_map)
create(:response)
# All export columns enabled by default; individual examples flip some off.
@options = {'team_id' => 'true', 'team_name' => 'true',
'reviewer' => 'true', 'question' => 'true',
'question_id' => 'true', 'comment_id' => 'true',
'comments' => 'true', 'score' => 'true'}
end
# Builds the CSV exactly as the export pipeline does: headers, field row,
# then detail rows.
def generated_csv(t_assignment, t_options)
delimiter = ','
CSV.generate(col_sep: delimiter) do |csv|
csv << Assignment.export_headers(t_assignment.id)
csv << Assignment.export_details_fields(t_options)
Assignment.export_details(csv, t_assignment.id, t_options)
end
end
it 'checks_if_csv has the correct data' do
create(:answer, comments: 'Test comment')
expected_csv = File.read('spec/features/assignment_export_details/expected_details_csv.txt')
expect(generated_csv(assignment, @options)).to eq(expected_csv)
end
it 'checks csv with some options' do
create(:answer, comments: 'Test comment')
@options['team_id'] = 'false'
@options['question_id'] = 'false'
@options['comment_id'] = 'false'
expected_csv = File.read('spec/features/assignment_export_details/expected_details_some_options_csv.txt')
expect(generated_csv(assignment, @options)).to eq(expected_csv)
end
it 'checks csv with no data' do
expected_csv = File.read('spec/features/assignment_export_details/expected_details_no_data_csv.txt')
expect(generated_csv(assignment, @options)).to eq(expected_csv)
end
it 'checks csv with data and no options' do
create(:answer, comments: 'Test comment')
@options['team_id'] = 'false'
@options['team_name'] = 'false'
@options['reviewer'] = 'false'
@options['question'] = 'false'
@options['question_id'] = 'false'
@options['comment_id'] = 'false'
@options['comments'] = 'false'
@options['score'] = 'false'
expected_csv = File.read('spec/features/assignment_export_details/expected_details_no_options_csv.txt')
expect(generated_csv(assignment, @options)).to eq(expected_csv)
end
end
end
scores in progress
describe Assignment do
let(:assignment) { build(:assignment, id: 1, name: 'no assignment', participants: [participant], teams: [team]) }
let(:instructor) { build(:instructor, id: 6) }
let(:student) { build(:student, id: 3, name: 'no one') }
let(:review_response_map) { build(:review_response_map, response: [response], reviewer: build(:participant), reviewee: build(:assignment_team)) }
let(:teammate_review_response_map) { build(:review_response_map, type: 'TeammateReviewResponseMap') }
let(:participant) { build(:participant, id: 1) }
let(:question) { double('Question') }
let(:team) { build(:assignment_team, id: 1, name: 'no team') }
let(:response) { build(:response) }
let(:course) { build(:course) }
let(:assignment_due_date) do
build(:assignment_due_date, due_at: '2011-11-11 11:11:11 UTC', deadline_name: 'Review',
description_url: 'https://expertiza.ncsu.edu/', round: 1)
end
let(:topic_due_date) { build(:topic_due_date, deadline_name: 'Submission', description_url: 'https://github.com/expertiza/expertiza') }
describe '.max_outstanding_reviews' do
it 'returns 2 by default' do
expect(Assignment.max_outstanding_reviews).to equal(2)
end
end
describe '#team_assignment?' do
it 'checks an assignment has team' do
# @assignment = build(:assignment)
expect(assignment.team_assignment).to eql(true)
end
end
# Need to create assignment, else giving error
describe '#topics?' do
context 'when sign_up_topics array is not empty' do
it 'says current assignment has topics' do
@assignment = create(:assignment)
expect(@assignment.sign_up_topics.empty?).to eql(true)
@topic = create(:topic,assignment: @assignment)
# or @topic.assignment = @assignment
expect(@assignment.sign_up_topics.empty?).to eql(false)
end
end
context 'when sign_up_topics array is empty' do
it 'says current assignment does not have a topic' do
# @assignment = create(:assignment)
expect(assignment.sign_up_topics.empty?).to eql(true)
end
end
end
# Ask guide -> Build not working in this case
describe '.set_courses_to_assignment' do
it 'fetches all courses belong to current instructor and with the order of course names' do
# @instructor = create(:instructor)
# @assignment = create(:assignment, instructor: @instructor)
@course1 = create(:course, instructor: instructor, name: 'C')
@cours2 = create(:course, instructor: instructor, name: 'B')
@cours3 = create(:course, instructor: instructor, name: 'A')
# expect(Assignment.set_courses_to_assignment(@instructor).map {|x| x.name}).to be_an_instance_of(Array)
@arr = Assignment.set_courses_to_assignment(instructor).map {|x| x.name}
expect(@arr).to match_array(['A','B','C'])
end
end
describe '#teams?' do
context 'when teams array is not empty' do
it 'says current assignment has teams' do
# assignment=create(:assignment)
# expect(assignment.teams.empty?).to equal(true)
# team=create(:assignment_team)
# team.parent_id=assignment.id
expect(assignment.teams.empty?).to equal(false)
end
end
context 'when sign_up_topics array is empty' do
it 'says current assignment does not have a team' do
assignment=build(:assignment)
expect(assignment.teams.empty?).to equal(true)
end
end
end
describe '#valid_num_review' do
context 'when num_reviews_allowed is not -1 and num_reviews_allowed is less than num_reviews_required' do
it 'adds an error message to current assignment object' do
# Check error
# @assignment = create(:assignment)
assignment.num_reviews_allowed = 2
assignment.num_reviews_required = 3
expect(assignment.num_reviews_allowed < assignment.num_reviews_required).to eql(!assignment.has_attribute?(:message))
end
end
context 'when the first if condition is false, num_metareviews_allowed is not -1, and num_metareviews_allowed less than num_metareviews_required' do
it 'adds an error message to current assignment object'
# @assignment = create(:assignment)
# @assignment.num_reviews_allowed = 4
# @assignment.num_reviews_required = 3
# @assignment.num_metareviews_allowed = 2
# @assignment.num_metareviews_required = 3
# expect(@assignment.num_metareviews_allowed < @assignment.num_metareviews_required).to eql(!@assignment.has_attribute?(:message))
# end
end
end
describe '#assign_metareviewer_dynamically' do
it 'returns true when assigning successfully' do
@assignment = create(:assignment)
@assignment_participant = create(:participant, assignment: @assignment)
@assignment.review_mappings << review_response_map
expect(@assignment.assign_metareviewer_dynamically(@assignment_participant)).to be_an_instance_of(MetareviewResponseMap)
end
end
describe '#response_map_to_metareview' do
it 'does not raise any errors and returns the first review response map'
# assignment=create(:assignment)
# participant=create(:participant)
# review_map=create(:review_response_map,reviewed_object_id:assignment.id)
# expect(assignment.response_map_to_metareview(participant).empty?).to equal(true)
# end
end
describe '#metareview_mappings' do
it 'returns review mapping'
end
describe '#dynamic_reviewer_assignment?' do
context 'when review_assignment_strategy of current assignment is Auto-Selected' do
it 'returns true' do
# @assignment = create(:assignment)
expect(assignment.review_assignment_strategy).to eql('Auto-Selected')
end
end
context 'when review_assignment_strategy of current assignment is Instructor-Selected' do
it 'returns false' do
# @assignment = create(:assignment)
expect(assignment.review_assignment_strategy=='Instructor-Selected').to eql(false)
end
end
end
# Take guidance from guide
# describe '#scores' do
# context 'when assignment is varying rubric by round assignment' do
# it 'calculates scores in each round of each team in current assignment' do
# # @assignment = create(:assignment,id: 999)
# # @review_response_map = create(:review_response_map)
# # @participant=create(:participant,:assignment => @assignment)
# @questionnaire = create(:questionnaire)
# @assignment_questionnaire = create(:assignment_questionnaire, assignment: @assignment, used_in_round: 2, questionnaire: @questionnaire)
# @questions = create(:question, questionnaire: @questionnaire)
# expect(assignment.scores(@questions)).to eql(10)
# end
# end
# context 'when assignment is not varying rubric by round assignment' do
# it 'calculates scores of each team in current assignment'
# end
# end
describe '#path' do
context 'when both course_id and instructor_id are nil' do
it 'raises an error' do
# assignment=create(:assignment)
assignment.course_id= nil
assignment.instructor_id= nil
expect{assignment.path}.to raise_error(RuntimeError,"The path cannot be created. The assignment must be associated with either a course or an instructor.")
end
end
context 'when course_id is not nil and course_id is larger than 0' do
it 'returns path with course directory path' do
# assignment=create(:assignment)
assignment.course_id= 1
expect(assignment.path).to be == "#{Rails.root}/pg_data/instructor6/csc517/test/final_test"
end
end
context 'when course_id is nil' do
it 'returns path without course directory path' do
# assignment=create(:assignment)
assignment.course_id=nil
expect(assignment.path).to be == "#{Rails.root}/pg_data/instructor6/final_test"
end
end
end
describe '#check_condition' do
context 'when the next due date is nil' do
it 'returns false ' do
# assignment=create(:assignment)
# dead_rigth=create(:deadline_right)
# ass_due_date=create(:assignment_due_date,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
#ass_due_date=AssignmentDueDate.where(:parent_id => assignment.id).first
# ass_due_date.due_at= DateTime.now.in_time_zone - 1.day
expect(assignment.check_condition(:id)).to equal(false)
end
end
# Changing to build gives active record not found error
context 'when the next due date is allowed to review submissions' do
it 'returns true' do
# assignment=create(:assignment)
dead_rigth=create(:deadline_right ,:name=> 'OK')
#dead_rigth.id=3
#dead_rigth.name='OK'
ass_due_date=create(:assignment_due_date,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
#ass_due_date=AssignmentDueDate.where(:parent_id => assignment.id).first
#ass_due_date.due_at= DateTime.now.in_time_zone - 1.day
expect(assignment.check_condition(:id)).to equal(true)
end
end
end
describe '#submission_allowed' do
it 'returns true when the next topic due date is allowed to submit sth'do
# assignment=create(:assignment)
dead_rigth=create(:deadline_right ,:name=> 'OK')
ass_due_date=create(:assignment_due_date,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
expect(assignment.submission_allowed).to equal (true)
end
end
describe '#quiz_allowed' do
it 'returns false when the next topic due date is not allowed to do quiz' do
# assignment=create(:assignment)
dead_rigth=create(:deadline_right ,:name=> 'NO')
ass_due_date=create(:assignment_due_date,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
expect(assignment.submission_allowed).to equal (false)
end
end
describe '#can_review' do
it "returns false when the next assignment due date is not allowed to review other's work" do
# assignment=create(:assignment)
dead_rigth=create(:deadline_right ,:name=> 'NO')
ass_due_date=create(:assignment_due_date,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
expect(assignment.submission_allowed).to equal (false)
end
end
describe '#metareview_allowed' do
it 'returns true when the next assignment due date is not allowed to do metareview' do
# assignment=create(:assignment)
dead_rigth=create(:deadline_right ,:name=> 'NO')
ass_due_date=create(:assignment_due_date,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
expect(!assignment.submission_allowed).to equal (true)
end
end
# Does not work without create
describe '#delete' do
context 'when there is at least one review response in current assignment' do
it 'raises an error messge and current assignment cannot be deleted' do
@assignment = create(:assignment)
@review_response_map = create(:review_response_map, assignment: @assignment)
expect{@assignent.delete}.to raise_error(NoMethodError,'undefined method `delete\' for nil:NilClass')
end
end
context 'when there is no review response in current assignment and at least one teammate review response in current assignment' do
it 'raises an error messge and current assignment cannot be deleted' do
@assignment = create(:assignment)
@assignment_team = create(:assignment_team, assignment: @assignment)
@team_user = create(:team_user,team: @assignment_team)
expect{@assignent.delete}.to raise_error(NoMethodError,'undefined method `delete\' for nil:NilClass')
end
end
context 'when ReviewResponseMap and TeammateReviewResponseMap can be deleted successfully' do
it 'deletes other corresponding db records and current assignment' do
# @assignment = create(:assignment)
# @assignment_team = create(:assignment_team, assignment: @assignment)
# @team_user = create(:team_user,team: @assignment_team)
expect(!assignment.delete.blank?).to eql(true)
end
end
end
describe '#microtask?' do
context 'when microtask is not nil' do
it 'returns microtask status (false by default)' do
assignment = build(:assignment, microtask: true)
# assignment = create(:assignment)
expect(assignment.microtask?).to eql(true)
end
end
context 'when microtask is nil' do
it 'returns false' do
assignment = build(:assignment, microtask: nil)
expect(assignment.microtask?).to eql(false)
end
end
end
describe '#add_participant' do
context 'when user is nil' do
it 'raises an error' do
# @assignment = create(:assignment)
expect{assignment.add_participant('',true,true,true)}.to raise_error(NoMethodError)
end
end
# Get undefined method 'url_for' if we dont use create
context 'when the user is already a participant of current assignment' do
it 'raises an error' do
@assignment = create(:assignment)
@user = create(:student)
@participant = create(:participant, user: @user)
expect{@assignment.add_participant(@user.name,true,true,true)}.to raise_error(RuntimeError)
end
end
context 'when AssignmentParticipant was created successfully' do
it 'returns true' do
@assignment = create(:assignment)
@user = create(:student)
expect(assignment.add_participant(@user.name,true,true,true)).to eql(true)
end
end
end
describe '#create_node' do
it 'will save node' do
# @assignment = create(:assignment)
expect(assignment.create_node).to eql(true)
end
end
describe '#number_of_current_round' do
context 'when next_due_date is nil' do
it 'returns 0' do
# @assignment = create(:assignment)
expect(assignment.number_of_current_round(nil)).to eql(0)
end
end
# Create is required here also
context 'when next_due_date is not nil' do
it 'returns the round of next_due_date' do
@assignment = create(:assignment)
@deadline_right = create(:deadline_right)
@assignment_due_date = create(:assignment_due_date, assignment: @assignment, parent_id: @deadline_right.id, review_allowed_id: @deadline_right.id, review_of_review_allowed_id: @deadline_right.id, submission_allowed_id: @deadline_right.id)
@assignment_due_date.due_at = DateTime.now.in_time_zone + 1.day
expect(@assignment.number_of_current_round(nil)>0).to eql(true)
end
end
end
#Active record mysql record not unique error
describe '#current_stage_name' do
context 'when assignment has staggered deadline' do
context 'topic_id is nil' do
it 'returns Unknow' do
assignment = create(:assignment, staggered_deadline: true)
expect(assignment.current_stage_name(nil)).to eql("Unknown")
end
end
context 'topic_id is not nil' do
it 'returns Unknow' do
assignment = create(:assignment, staggered_deadline: true)
@topic = create(:topic, assignment: assignment )
expect(assignment.current_stage_name(@topic.id)).to eql("Finished")
end
end
end
context 'when assignment does not have staggered deadline' do
context "when due date is not equal to 'Finished', due date is not nil and its deadline name is not nil" do
it 'returns the deadline name of current due date'
end
end
end
describe '#microtask?' do
it 'checks whether assignment is a micro task' do
assignment = build(:assignment, microtask: true)
expect(assignment.microtask?).to equal(true)
end
end
describe '#varying_rubrics_by_round?' do
it 'returns true if the number of 2nd round questionnaire(s) is larger or equal 1'
end
describe '#link_for_current_stage' do
context 'when current assignment has staggered deadline and topic id is nil' do
it 'returns nil' do
assignment = build( :assignment, staggered_deadline: true )
expect(assignment.link_for_current_stage(nil)).to eq(nil)
end
end
context 'when current assignment does not have staggered deadline' do
context 'when due date is a TopicDueDate' do
it 'returns nil'
end
context 'when due_date is not nil, not finished and is not a TopicDueDate' do
it 'returns description url of current due date'
end
end
end
describe '#stage_deadline' do
context 'when topic id is nil and current assignment has staggered deadline' do
it 'returns Unknown' do
# assignment=create(:assignment)
assignment.staggered_deadline=true
expect(assignment.stage_deadline()).to eq("Unknown")
end
end
context 'when current assignment does not have staggered deadline' do
context 'when due date is nil' do
it 'returns nil' do
# assignment=create(:assignment)
# dead_rigth=create(:deadline_right)
# ass_due_date=create(:assignment_due_date,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id,:due_at=> DateTime.now.in_time_zone - 1.day)
#ass_due_date.due_at= DateTime.now.in_time_zone - 1.day
expect(assignment.stage_deadline).not_to be_nil
end
end
# We do require create over here
context 'when due date is not nil and due date is not equal to Finished' do
it 'returns due date' do
# assignment=create(:assignment)
dead_rigth=create(:deadline_right)
ass_due_date=create(:assignment_due_date,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
#ass_due_date.due_at= DateTime.now.in_time_zone - 1.day
expect(assignment.stage_deadline).to eq(ass_due_date.due_at.to_s)
end
end
end
end
# We need create here
describe '#num_review_rounds' do
it 'returns max round number in all due dates of current assignment' do
# assignment=create(:assignment)
dead_rigth=create(:deadline_right)
create(:assignment_due_date,:round=>1,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
create(:assignment_due_date,:round=>2,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
create(:assignment_due_date,:round=>3,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
expect(assignment.num_review_rounds).to equal(3)
end
end
describe '#find_current_stage' do
context 'when next due date is nil' do
it 'returns Finished'do
# assignment=create(:assignment)
dead_rigth=create(:deadline_right)
ass_due_date=create(:assignment_due_date,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id,:due_at=> DateTime.now.in_time_zone - 1.day)
expect(assignment.find_current_stage()).to eq("Finished")
end
end
context 'when next due date is nil' do
it 'returns next due date object' do
assignment=create(:assignment)
dead_rigth=create(:deadline_right)
ass_due_date=create(:assignment_due_date,:parent_id => assignment.id,:review_allowed_id=>dead_rigth.id,:review_of_review_allowed_id=>dead_rigth.id,:submission_allowed_id=>dead_rigth.id)
expect(assignment.find_current_stage()).to eq(ass_due_date)
end
end
end
# MySql error if create not used
describe '#review_questionnaire_id' do
it 'returns review_questionnaire_id' do
@assignment = create(:assignment)
@questionnaire = create(:questionnaire)
@assignment_questionnaire = create(:assignment_questionnaire, assignment:@assignment, questionnaire: @questionnaire)
expect(@assignment.review_questionnaire_id>0).to eql(true)
end
end
describe 'has correct csv values?' do
before(:each) do
create(:assignment)
create(:assignment_team, name: 'team1')
@student = create(:student, name: 'student1')
create(:participant, user: @student)
create(:questionnaire)
create(:question)
create(:review_response_map)
create(:response)
@options = {'team_id' => 'true', 'team_name' => 'true',
'reviewer' => 'true', 'question' => 'true',
'question_id' => 'true', 'comment_id' => 'true',
'comments' => 'true', 'score' => 'true'}
end
def generated_csv(t_assignment, t_options)
delimiter = ','
CSV.generate(col_sep: delimiter) do |csv|
csv << Assignment.export_headers(t_assignment.id)
csv << Assignment.export_details_fields(t_options)
Assignment.export_details(csv, t_assignment.id, t_options)
end
end
it 'checks_if_csv has the correct data' do
create(:answer, comments: 'Test comment')
expected_csv = File.read('spec/features/assignment_export_details/expected_details_csv.txt')
expect(generated_csv(assignment, @options)).to eq(expected_csv)
end
it 'checks csv with some options' do
create(:answer, comments: 'Test comment')
@options['team_id'] = 'false'
@options['question_id'] = 'false'
@options['comment_id'] = 'false'
expected_csv = File.read('spec/features/assignment_export_details/expected_details_some_options_csv.txt')
expect(generated_csv(assignment, @options)).to eq(expected_csv)
end
it 'checks csv with no data' do
expected_csv = File.read('spec/features/assignment_export_details/expected_details_no_data_csv.txt')
expect(generated_csv(assignment, @options)).to eq(expected_csv)
end
it 'checks csv with data and no options' do
create(:answer, comments: 'Test comment')
@options['team_id'] = 'false'
@options['team_name'] = 'false'
@options['reviewer'] = 'false'
@options['question'] = 'false'
@options['question_id'] = 'false'
@options['comment_id'] = 'false'
@options['comments'] = 'false'
@options['score'] = 'false'
expected_csv = File.read('spec/features/assignment_export_details/expected_details_no_options_csv.txt')
expect(generated_csv(assignment, @options)).to eq(expected_csv)
end
end
end
# ---- file boundary artifact (concatenated specs); Enterprise spec follows ----
# frozen_string_literal: true
require 'spec_helper'
describe Enterprise do
describe "sending emails" do
describe "on creation" do
let!(:user) { create(:user) }
let!(:enterprise) { create(:enterprise, owner: user) }
it "sends a welcome email" do
expect do
create(:enterprise, owner: user)
end.to enqueue_job ActionMailer::DeliveryJob
expect(enqueued_jobs.last.to_s).to match "welcome"
end
end
end
describe "associations" do
it { is_expected.to belong_to(:owner) }
it { is_expected.to have_many(:supplied_products) }
it { is_expected.to have_many(:distributed_orders) }
it { is_expected.to belong_to(:address) }
it "destroys enterprise roles upon its own demise" do
e = create(:enterprise)
u = create(:user)
u.enterprise_roles.build(enterprise: e).save!
role = e.enterprise_roles.first
e.destroy
expect(EnterpriseRole.where(id: role.id)).to be_empty
end
xit "destroys supplied products upon destroy" do
s = create(:supplier_enterprise)
p = create(:simple_product, supplier: s)
s.destroy
expect(Spree::Product.where(id: p.id)).to be_empty
end
it "destroys relationships upon destroy" do
e = create(:enterprise)
e_other = create(:enterprise)
er1 = create(:enterprise_relationship, parent: e, child: e_other)
er2 = create(:enterprise_relationship, child: e, parent: e_other)
e.destroy
expect(EnterpriseRelationship.where(id: [er1, er2])).to be_empty
end
describe "relationships to other enterprises" do
let(:e) { create(:distributor_enterprise) }
let(:p) { create(:supplier_enterprise) }
let(:c) { create(:distributor_enterprise) }
let!(:er1) { create(:enterprise_relationship, parent_id: p.id, child_id: e.id) }
let!(:er2) { create(:enterprise_relationship, parent_id: e.id, child_id: c.id) }
it "finds relatives" do
expect(e.relatives).to match_array [p, c]
end
it "finds relatives_including_self" do
expect(e.relatives_including_self).to include e
end
it "scopes relatives to visible distributors" do
enterprise = build_stubbed(:distributor_enterprise)
expect(enterprise).to receive(:relatives_including_self).and_return(relatives = [])
expect(relatives).to receive(:is_distributor).and_return relatives
enterprise.distributors
end
it "scopes relatives to visible producers" do
enterprise = build_stubbed(:distributor_enterprise)
expect(enterprise).to receive(:relatives_including_self).and_return(relatives = [])
expect(relatives).to receive(:is_primary_producer).and_return relatives
enterprise.suppliers
end
end
describe "ownership" do
let(:u1) { create(:user) }
let(:u2) { create(:user) }
let!(:e) { create(:enterprise, owner: u1 ) }
it "adds new owner to list of managers" do
expect(e.owner).to eq u1
expect(e.users).to include u1
expect(e.users).to_not include u2
e.owner = u2
e.save!
e.reload
expect(e.owner).to eq u2
expect(e.users).to include u1, u2
end
it "validates ownership limit" do
expect(u1.enterprise_limit).to be 5
expect(u1.owned_enterprises.reload).to eq [e]
4.times { create(:enterprise, owner: u1) }
e2 = create(:enterprise, owner: u2)
expect {
e2.owner = u1
e2.save!
}.to raise_error ActiveRecord::RecordInvalid, "Validation failed: #{u1.email} is not permitted to own any more enterprises (limit is 5)."
end
end
end
describe "validations" do
it { is_expected.to validate_presence_of(:name) }
it do
create(:distributor_enterprise)
is_expected.to validate_uniqueness_of(:permalink)
end
it "requires an owner" do
enterprise = build_stubbed(:enterprise, owner: nil)
expect(enterprise).not_to be_valid
expect(enterprise.errors[:owner].first).to eq "can't be blank"
end
describe "name uniqueness" do
let(:owner) { create(:user, email: 'owner@example.com') }
let!(:enterprise) { create(:enterprise, name: 'Enterprise', owner: owner) }
it "prevents duplicate names for new records" do
e = Enterprise.new name: enterprise.name
expect(e).to_not be_valid
expect(e.errors[:name].first).to include I18n.t('enterprise_name_error', email: owner.email)
end
it "prevents duplicate names for existing records" do
e = create(:enterprise, name: 'foo')
e.name = enterprise.name
expect(e).to_not be_valid
expect(e.errors[:name].first).to include I18n.t('enterprise_name_error', email: owner.email)
end
it "does not prohibit the saving of an enterprise with no name clash" do
expect(enterprise).to be_valid
end
it "sets the enterprise contact to the owner by default" do
expect(enterprise.contact).to eq enterprise.owner
end
end
describe "preferred_shopfront_taxon_order" do
it "empty strings are valid" do
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "")
expect(enterprise).to be_valid
end
it "a single integer is valid" do
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "11")
expect(enterprise).to be_valid
end
it "comma delimited integers are valid" do
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "1,2,3")
expect(enterprise).to be_valid
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "1,22,333")
expect(enterprise).to be_valid
end
it "commas at the beginning and end are disallowed" do
enterprise = build(:enterprise, preferred_shopfront_taxon_order: ",1,2,3")
expect(enterprise).to be_invalid
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "1,2,3,")
expect(enterprise).to be_invalid
end
it "any other characters are invalid" do
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "a1,2,3")
expect(enterprise).to be_invalid
enterprise = build(:enterprise, preferred_shopfront_taxon_order: ".1,2,3")
expect(enterprise).to be_invalid
enterprise = build(:enterprise, preferred_shopfront_taxon_order: " 1,2,3")
expect(enterprise).to be_invalid
end
end
end
describe "callbacks" do
it "restores permalink to original value when it is changed and invalid" do
e1 = create(:enterprise, permalink: "taken")
e2 = create(:enterprise, permalink: "not_taken")
e2.permalink = "taken"
e2.save
expect(e2.permalink).to eq "not_taken"
end
end
describe "scopes" do
describe 'visible' do
it 'find visible enterprises' do
d1 = create(:distributor_enterprise, visible: false)
s1 = create(:supplier_enterprise)
expect(Enterprise.visible).to eq([s1])
end
end
describe "activated" do
let!(:unconfirmed_user) { create(:user, confirmed_at: nil, enterprise_limit: 2) }
let!(:inactive_enterprise) { create(:enterprise, sells: "unspecified") }
let!(:active_enterprise) { create(:enterprise, sells: "none") }
it "finds enterprises that have a sells property other than 'unspecified'" do
activated_enterprises = Enterprise.activated
expect(activated_enterprises).to include active_enterprise
expect(activated_enterprises).to_not include inactive_enterprise
end
end
describe "ready_for_checkout" do
let!(:e) { create(:enterprise) }
it "does not show enterprises with no payment methods" do
create(:shipping_method, distributors: [e])
expect(Enterprise.ready_for_checkout).not_to include e
end
it "does not show enterprises with no shipping methods" do
create(:payment_method, distributors: [e])
expect(Enterprise.ready_for_checkout).not_to include e
end
it "does not show enterprises with unavailable payment methods" do
create(:shipping_method, distributors: [e])
create(:payment_method, distributors: [e], active: false)
expect(Enterprise.ready_for_checkout).not_to include e
end
it "shows enterprises with available payment and shipping methods" do
create(:shipping_method, distributors: [e])
create(:payment_method, distributors: [e])
expect(Enterprise.ready_for_checkout).to include e
end
end
describe "not_ready_for_checkout" do
let!(:e) { create(:enterprise) }
it "shows enterprises with no payment methods" do
create(:shipping_method, distributors: [e])
expect(Enterprise.not_ready_for_checkout).to include e
end
it "shows enterprises with no shipping methods" do
create(:payment_method, distributors: [e])
expect(Enterprise.not_ready_for_checkout).to include e
end
it "shows enterprises with unavailable payment methods" do
create(:shipping_method, distributors: [e])
create(:payment_method, distributors: [e], active: false)
expect(Enterprise.not_ready_for_checkout).to include e
end
it "does not show enterprises with available payment and shipping methods" do
create(:shipping_method, distributors: [e])
create(:payment_method, distributors: [e])
expect(Enterprise.not_ready_for_checkout).not_to include e
end
end
describe "#ready_for_checkout?" do
let!(:e) { create(:enterprise) }
it "returns false for enterprises with no payment methods" do
create(:shipping_method, distributors: [e])
expect(e.reload).not_to be_ready_for_checkout
end
it "returns false for enterprises with no shipping methods" do
create(:payment_method, distributors: [e])
expect(e.reload).not_to be_ready_for_checkout
end
it "returns false for enterprises with unavailable payment methods" do
create(:shipping_method, distributors: [e])
create(:payment_method, distributors: [e], active: false)
expect(e.reload).not_to be_ready_for_checkout
end
it "returns true for enterprises with available payment and shipping methods" do
create(:shipping_method, distributors: [e])
create(:payment_method, distributors: [e])
expect(e.reload).to be_ready_for_checkout
end
end
describe "distributors_with_active_order_cycles" do
it "finds active distributors by order cycles" do
s = create(:supplier_enterprise)
d = create(:distributor_enterprise)
p = create(:product)
create(:simple_order_cycle, suppliers: [s], distributors: [d], variants: [p.master])
expect(Enterprise.distributors_with_active_order_cycles).to eq([d])
end
it "should not find inactive distributors by order cycles" do
s = create(:supplier_enterprise)
d = create(:distributor_enterprise)
p = create(:product)
create(:simple_order_cycle, orders_open_at: 10.days.from_now, orders_close_at: 17.days.from_now, suppliers: [s], distributors: [d], variants: [p.master])
expect(Enterprise.distributors_with_active_order_cycles).not_to include d
end
end
describe "supplying_variant_in" do
it "finds producers by supply of master variant" do
s = create(:supplier_enterprise)
p = create(:simple_product, supplier: s)
expect(Enterprise.supplying_variant_in([p.master])).to eq([s])
end
it "finds producers by supply of variant" do
s = create(:supplier_enterprise)
p = create(:simple_product, supplier: s)
v = create(:variant, product: p)
expect(Enterprise.supplying_variant_in([v])).to eq([s])
end
it "returns multiple enterprises when given multiple variants" do
s1 = create(:supplier_enterprise)
s2 = create(:supplier_enterprise)
p1 = create(:simple_product, supplier: s1)
p2 = create(:simple_product, supplier: s2)
expect(Enterprise.supplying_variant_in([p1.master, p2.master])).to match_array [s1, s2]
end
it "does not return duplicates" do
s = create(:supplier_enterprise)
p1 = create(:simple_product, supplier: s)
p2 = create(:simple_product, supplier: s)
expect(Enterprise.supplying_variant_in([p1.master, p2.master])).to eq([s])
end
end
describe "distributing_products" do
let(:distributor) { create(:distributor_enterprise) }
let(:product) { create(:product) }
it "returns enterprises distributing via an order cycle" do
order_cycle = create(:simple_order_cycle, distributors: [distributor], variants: [product.master])
expect(Enterprise.distributing_products(product.id)).to eq([distributor])
end
it "does not return duplicate enterprises" do
another_product = create(:product)
order_cycle = create(:simple_order_cycle, distributors: [distributor], variants: [product.master, another_product.master])
expect(Enterprise.distributing_products([product.id, another_product.id])).to eq([distributor])
end
end
describe "managed_by" do
it "shows only enterprises for given user" do
user = create(:user)
user.spree_roles = []
e1 = create(:enterprise)
e2 = create(:enterprise)
e1.enterprise_roles.build(user: user).save
enterprises = Enterprise.managed_by user
expect(enterprises.count).to eq(1)
expect(enterprises).to include e1
end
it "shows all enterprises for admin user" do
user = create(:admin_user)
e1 = create(:enterprise)
e2 = create(:enterprise)
enterprises = Enterprise.managed_by user
expect(enterprises.count).to eq(2)
expect(enterprises).to include e1
expect(enterprises).to include e2
end
end
end
describe "callbacks" do
describe "after creation" do
let(:owner) { create(:user, enterprise_limit: 10) }
let(:hub1) { create(:distributor_enterprise, owner: owner) }
let(:hub2) { create(:distributor_enterprise, owner: owner) }
let(:hub3) { create(:distributor_enterprise, owner: owner) }
let(:producer1) { create(:supplier_enterprise, owner: owner) }
let(:producer2) { create(:supplier_enterprise, owner: owner) }
describe "when a producer is created" do
before do
hub1
hub2
end
it "creates links from the new producer to all hubs owned by the same user, granting add_to_order_cycle and create_variant_overrides permissions" do
producer1
should_have_enterprise_relationship from: producer1, to: hub1, with: [:add_to_order_cycle, :create_variant_overrides]
should_have_enterprise_relationship from: producer1, to: hub2, with: [:add_to_order_cycle, :create_variant_overrides]
end
it "does not create any other links" do
expect do
producer1
end.to change(EnterpriseRelationship, :count).by(2)
end
end
describe "when a new hub is created" do
it "it creates links to the hub, from all producers owned by the same user, granting add_to_order_cycle and create_variant_overrides permissions" do
producer1
producer2
hub1
should_have_enterprise_relationship from: producer1, to: hub1, with: [:add_to_order_cycle, :create_variant_overrides]
should_have_enterprise_relationship from: producer2, to: hub1, with: [:add_to_order_cycle, :create_variant_overrides]
end
it "creates links from the new hub to all hubs owned by the same user, granting add_to_order_cycle permission" do
hub1
hub2
hub3
should_have_enterprise_relationship from: hub2, to: hub1, with: [:add_to_order_cycle]
should_have_enterprise_relationship from: hub3, to: hub1, with: [:add_to_order_cycle]
should_have_enterprise_relationship from: hub3, to: hub2, with: [:add_to_order_cycle]
end
it "does not create any other links" do
producer1
producer2
expect { hub1 }.to change(EnterpriseRelationship, :count).by(2) # 2 producer links
expect { hub2 }.to change(EnterpriseRelationship, :count).by(3) # 2 producer links + 1 hub link
expect { hub3 }.to change(EnterpriseRelationship, :count).by(4) # 2 producer links + 2 hub links
end
end
# Asserts that an EnterpriseRelationship exists from opts[:from] to opts[:to].
# When opts[:with] is :all_permissions, the full permission set is expected;
# when it is an array of symbols, exactly those permissions are expected.
def should_have_enterprise_relationship(opts = {})
  relationship = EnterpriseRelationship.where(parent_id: opts[:from], child_id: opts[:to]).last
  expect(relationship).not_to be_nil

  return unless opts.key?(:with)

  granted = relationship.permissions.map(&:name)
  if opts[:with] == :all_permissions
    expect(granted).to match_array ['add_to_order_cycle', 'manage_products', 'edit_profile', 'create_variant_overrides']
  else
    expect(granted).to match_array opts[:with].map(&:to_s)
  end
end
end
end
# distributed_variants reports every variant — master included — that the
# enterprise distributes through an order cycle.
describe "finding variants distributed by the enterprise" do
  it "finds variants, including master, distributed by order cycle" do
    shop = create(:distributor_enterprise)
    product = create(:product)
    first_variant = product.variants.first
    create(:simple_order_cycle, distributors: [shop], variants: [first_variant])

    expect(shop.distributed_variants).to match_array [product.master, first_variant]
  end
end
# Taxon aggregation: distributed_taxons / current_distributed_taxons /
# supplied_taxons collect the taxons of the relevant products.
describe "taxons" do
let(:distributor) { create(:distributor_enterprise) }
let(:supplier) { create(:supplier_enterprise) }
let(:taxon1) { create(:taxon) }
let(:taxon2) { create(:taxon) }
let(:taxon3) { create(:taxon) }
# product1/2 carry taxon1 (and taxon2); product3 only has a primary taxon.
let(:product1) { create(:simple_product, primary_taxon: taxon1, taxons: [taxon1]) }
let(:product2) { create(:simple_product, primary_taxon: taxon1, taxons: [taxon1, taxon2]) }
let(:product3) { create(:simple_product, primary_taxon: taxon3) }
let(:oc) { create(:order_cycle) }
# Outgoing exchange from supplier to distributor in the order cycle.
let(:ex) { create(:exchange, order_cycle: oc, incoming: false, sender: supplier, receiver: distributor) }
it "gets all taxons of all distributed products" do
allow(Spree::Product).to receive(:in_distributor).and_return [product1, product2]
expect(distributor.distributed_taxons).to match_array [taxon1, taxon2]
end
# Only products whose variants are in the (open) order cycle count here.
it "gets all taxons of all distributed products in open order cycles" do
allow(Spree::Product).to receive(:in_distributor).and_return [product1, product2, product3]
ex.variants << product1.variants.first
ex.variants << product3.variants.first
expect(distributor.current_distributed_taxons).to match_array [taxon1, taxon3]
end
it "gets all taxons of all supplied products" do
allow(Spree::Product).to receive(:in_supplier).and_return [product1, product2]
expect(supplier.supplied_taxons).to match_array [taxon1, taxon2]
end
end
# URL-ish fields are normalised on assignment: a leading "http(s)://" scheme
# is stripped; values without a scheme are left untouched.
describe "presentation of attributes" do
  let(:distributor) do
    build_stubbed(
      :distributor_enterprise,
      website: "http://www.google.com",
      facebook: "www.facebook.com/roger",
      linkedin: "https://linkedin.com"
    )
  end

  it "strips http from url fields" do
    expect(distributor.website).to eq("www.google.com")
    expect(distributor.facebook).to eq("www.facebook.com/roger")
    expect(distributor.linkedin).to eq("linkedin.com")
  end
end
# set_producer_property creates a producer property with the given
# presentation name and value.
describe "producer properties" do
  let(:supplier) { create(:supplier_enterprise) }

  it "sets producer properties" do
    supplier.set_producer_property 'Organic Certified', 'NASAA 12345'

    properties = supplier.producer_properties
    expect(properties.count).to eq(1)
    expect(properties.first.value).to eq('NASAA 12345')
    expect(properties.first.property.presentation).to eq('Organic Certified')
  end
end
# Category matrix derived from (is_primary_producer, sells):
#   producer     + "any"  -> :producer_hub
#   producer     + "own"  -> :producer_shop
#   producer     + "none" -> :producer
#   non-producer + "any"/"own" -> :hub
#   non-producer + "none" -> :hub_profile
describe "provide enterprise category" do
let(:producer_sell_all) { build_stubbed(:enterprise, is_primary_producer: true, sells: "any") }
let(:producer_sell_own) { build_stubbed(:enterprise, is_primary_producer: true, sells: "own") }
let(:producer_sell_none) { build_stubbed(:enterprise, is_primary_producer: true, sells: "none") }
let(:non_producer_sell_all) { build_stubbed(:enterprise, is_primary_producer: false, sells: "any") }
let(:non_producer_sell_own) { build_stubbed(:enterprise, is_primary_producer: false, sells: "own") }
let(:non_producer_sell_none) { build_stubbed(:enterprise, is_primary_producer: false, sells: "none") }
it "should output enterprise categories" do
expect(producer_sell_all.is_primary_producer).to eq(true)
expect(producer_sell_all.sells).to eq("any")
expect(producer_sell_all.category).to eq(:producer_hub)
expect(producer_sell_own.category).to eq(:producer_shop)
expect(producer_sell_none.category).to eq(:producer)
expect(non_producer_sell_all.category).to eq(:hub)
expect(non_producer_sell_own.category).to eq(:hub)
expect(non_producer_sell_none.category).to eq(:hub_profile)
end
end
# Permalink handling: Enterprise.find_available_permalink turns a name into a
# unique, parameterized permalink, appending a numeric suffix when needed, and
# initialize_permalink assigns it on a new record.
describe "finding and automatically assigning a permalink" do
  let(:enterprise) { build_stubbed(:enterprise, name: "Name To Turn Into A Permalink") }

  it "assigns permalink when initialized" do
    allow(Enterprise).to receive(:find_available_permalink).and_return("available_permalink")
    expect(Enterprise).to receive(:find_available_permalink).with("Name To Turn Into A Permalink")

    expect(
      lambda { enterprise.send(:initialize_permalink) }
    ).to change {
      enterprise.permalink
    }.to(
      "available_permalink"
    )
  end

  describe "finding a permalink" do
    # Pre-existing permalinks that the lookup must avoid colliding with.
    let!(:enterprise1) { create(:enterprise, permalink: "permalink") }
    let!(:enterprise2) { create(:enterprise, permalink: "permalink1") }

    it "parameterizes the value provided" do
      expect(Enterprise.find_available_permalink("Some Unused Permalink")).to eq "some-unused-permalink"
    end

    # Typo fixed in example name: "parametized" -> "parameterized".
    it "sets the permalink to 'my-enterprise' if parameterized permalink is blank" do
      expect(Enterprise.find_available_permalink("")).to eq "my-enterprise"
      expect(Enterprise.find_available_permalink("$$%{$**}$%}")).to eq "my-enterprise"
    end

    # Typo fixed in example name: "finds and" -> "finds an".
    it "finds an index value based on existing permalinks" do
      expect(Enterprise.find_available_permalink("permalink")).to eq "permalink2"
    end

    it "ignores permalinks with characters after the index value" do
      create(:enterprise, permalink: "permalink2xxx")
      expect(Enterprise.find_available_permalink("permalink")).to eq "permalink2"
    end

    it "finds available permalink similar to existing" do
      create(:enterprise, permalink: "permalink2xxx")
      expect(Enterprise.find_available_permalink("permalink2")).to eq "permalink2"
    end

    it "finds gaps in the indices of existing permalinks" do
      create(:enterprise, permalink: "permalink3")
      expect(Enterprise.find_available_permalink("permalink")).to eq "permalink2"
    end
  end
end
# plus_relatives_and_oc_producers must only ever return producers; a
# non-producer distributor participating in the order cycle is excluded.
# Typo fixed in example name: "non-produders " -> "non-producers".
describe "#plus_relatives_and_oc_producers" do
  it "does not find non-producers" do
    supplier = create(:supplier_enterprise)
    distributor = create(:distributor_enterprise, is_primary_producer: false)
    product = create(:product)
    order_cycle = create(
      :simple_order_cycle,
      suppliers: [supplier],
      distributors: [distributor],
      variants: [product.master]
    )

    expect(distributor.plus_relatives_and_oc_producers(order_cycle)).to eq([supplier])
  end
end
end
fix enterprise permalink spec
# frozen_string_literal: true
require 'spec_helper'
describe Enterprise do
# Creating an enterprise enqueues a welcome email for delivery.
describe "sending emails" do
  describe "on creation" do
    let!(:user) { create(:user) }
    let!(:enterprise) { create(:enterprise, owner: user) }

    it "sends a welcome email" do
      expect { create(:enterprise, owner: user) }.to enqueue_job ActionMailer::DeliveryJob
      expect(enqueued_jobs.last.to_s).to match "welcome"
    end
  end
end
describe "associations" do
# Declarative association checks (shoulda-matchers one-liners).
it { is_expected.to belong_to(:owner) }
it { is_expected.to have_many(:supplied_products) }
it { is_expected.to have_many(:distributed_orders) }
it { is_expected.to belong_to(:address) }
# Destroying an enterprise removes its EnterpriseRole join records.
it "destroys enterprise roles upon its own demise" do
e = create(:enterprise)
u = create(:user)
u.enterprise_roles.build(enterprise: e).save!
role = e.enterprise_roles.first
e.destroy
expect(EnterpriseRole.where(id: role.id)).to be_empty
end
# NOTE(review): disabled example (xit) — presumably supplied products should
# cascade on destroy; confirm why this is skipped before re-enabling.
xit "destroys supplied products upon destroy" do
s = create(:supplier_enterprise)
p = create(:simple_product, supplier: s)
s.destroy
expect(Spree::Product.where(id: p.id)).to be_empty
end
# Relationships in either direction go away with the enterprise.
it "destroys relationships upon destroy" do
e = create(:enterprise)
e_other = create(:enterprise)
er1 = create(:enterprise_relationship, parent: e, child: e_other)
er2 = create(:enterprise_relationship, child: e, parent: e_other)
e.destroy
expect(EnterpriseRelationship.where(id: [er1, er2])).to be_empty
end
describe "relationships to other enterprises" do
# e sits between parent p and child c in the relationship graph.
let(:e) { create(:distributor_enterprise) }
let(:p) { create(:supplier_enterprise) }
let(:c) { create(:distributor_enterprise) }
let!(:er1) { create(:enterprise_relationship, parent_id: p.id, child_id: e.id) }
let!(:er2) { create(:enterprise_relationship, parent_id: e.id, child_id: c.id) }
it "finds relatives" do
expect(e.relatives).to match_array [p, c]
end
it "finds relatives_including_self" do
expect(e.relatives_including_self).to include e
end
# distributors/suppliers delegate through relatives_including_self and then
# narrow with the relevant scope (verified via message expectations).
it "scopes relatives to visible distributors" do
enterprise = build_stubbed(:distributor_enterprise)
expect(enterprise).to receive(:relatives_including_self).and_return(relatives = [])
expect(relatives).to receive(:is_distributor).and_return relatives
enterprise.distributors
end
it "scopes relatives to visible producers" do
enterprise = build_stubbed(:distributor_enterprise)
expect(enterprise).to receive(:relatives_including_self).and_return(relatives = [])
expect(relatives).to receive(:is_primary_producer).and_return relatives
enterprise.suppliers
end
end
describe "ownership" do
let(:u1) { create(:user) }
let(:u2) { create(:user) }
let!(:e) { create(:enterprise, owner: u1 ) }
# Changing the owner keeps previous managers and adds the new owner.
it "adds new owner to list of managers" do
expect(e.owner).to eq u1
expect(e.users).to include u1
expect(e.users).to_not include u2
e.owner = u2
e.save!
e.reload
expect(e.owner).to eq u2
expect(e.users).to include u1, u2
end
# An owner may not exceed their enterprise_limit (default 5).
it "validates ownership limit" do
expect(u1.enterprise_limit).to be 5
expect(u1.owned_enterprises.reload).to eq [e]
4.times { create(:enterprise, owner: u1) }
e2 = create(:enterprise, owner: u2)
expect {
e2.owner = u1
e2.save!
}.to raise_error ActiveRecord::RecordInvalid, "Validation failed: #{u1.email} is not permitted to own any more enterprises (limit is 5)."
end
end
end
describe "validations" do
it { is_expected.to validate_presence_of(:name) }
# Permalink uniqueness needs an existing record to validate against.
it do
create(:distributor_enterprise)
is_expected.to validate_uniqueness_of(:permalink)
end
it "requires an owner" do
enterprise = build_stubbed(:enterprise, owner: nil)
expect(enterprise).not_to be_valid
expect(enterprise.errors[:owner].first).to eq "can't be blank"
end
describe "name uniqueness" do
let(:owner) { create(:user, email: 'owner@example.com') }
let!(:enterprise) { create(:enterprise, name: 'Enterprise', owner: owner) }
# Duplicate names are rejected; the error message names the existing owner.
it "prevents duplicate names for new records" do
e = Enterprise.new name: enterprise.name
expect(e).to_not be_valid
expect(e.errors[:name].first).to include I18n.t('enterprise_name_error', email: owner.email)
end
it "prevents duplicate names for existing records" do
e = create(:enterprise, name: 'foo')
e.name = enterprise.name
expect(e).to_not be_valid
expect(e.errors[:name].first).to include I18n.t('enterprise_name_error', email: owner.email)
end
it "does not prohibit the saving of an enterprise with no name clash" do
expect(enterprise).to be_valid
end
it "sets the enterprise contact to the owner by default" do
expect(enterprise.contact).to eq enterprise.owner
end
end
# preferred_shopfront_taxon_order must be empty or comma-separated integers,
# without leading/trailing commas or any other characters.
describe "preferred_shopfront_taxon_order" do
it "empty strings are valid" do
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "")
expect(enterprise).to be_valid
end
it "a single integer is valid" do
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "11")
expect(enterprise).to be_valid
end
it "comma delimited integers are valid" do
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "1,2,3")
expect(enterprise).to be_valid
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "1,22,333")
expect(enterprise).to be_valid
end
it "commas at the beginning and end are disallowed" do
enterprise = build(:enterprise, preferred_shopfront_taxon_order: ",1,2,3")
expect(enterprise).to be_invalid
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "1,2,3,")
expect(enterprise).to be_invalid
end
it "any other characters are invalid" do
enterprise = build(:enterprise, preferred_shopfront_taxon_order: "a1,2,3")
expect(enterprise).to be_invalid
enterprise = build(:enterprise, preferred_shopfront_taxon_order: ".1,2,3")
expect(enterprise).to be_invalid
enterprise = build(:enterprise, preferred_shopfront_taxon_order: " 1,2,3")
expect(enterprise).to be_invalid
end
end
end
# An invalid permalink change (colliding with an existing one) is silently
# reverted to the record's previous value on save.
describe "callbacks" do
  it "restores permalink to original value when it is changed and invalid" do
    # The colliding enterprise only needs to exist; its local variable was unused.
    create(:enterprise, permalink: "taken")
    e2 = create(:enterprise, permalink: "not_taken")
    e2.permalink = "taken"
    e2.save
    expect(e2.reload.permalink).to eq "not_taken"
  end
end
describe "scopes" do
# Enterprise.visible excludes enterprises flagged visible: false.
describe 'visible' do
  it 'find visible enterprises' do
    # The invisible enterprise only needs to exist; its local variable was unused.
    create(:distributor_enterprise, visible: false)
    s1 = create(:supplier_enterprise)
    expect(Enterprise.visible).to eq([s1])
  end
end
# Enterprise.activated excludes enterprises whose sells value is still
# "unspecified".
describe "activated" do
  # NOTE(review): the unconfirmed user looks like background fixture noise for
  # this scope — confirm it is still required.
  let!(:unconfirmed_user) { create(:user, confirmed_at: nil, enterprise_limit: 2) }
  let!(:inactive_enterprise) { create(:enterprise, sells: "unspecified") }
  let!(:active_enterprise) { create(:enterprise, sells: "none") }

  it "finds enterprises that have a sells property other than 'unspecified'" do
    activated = Enterprise.activated
    expect(activated).to include active_enterprise
    expect(activated).to_not include inactive_enterprise
  end
end
# The ready_for_checkout scope requires at least one shipping method and one
# *active* payment method.
describe "ready_for_checkout" do
  let!(:shop) { create(:enterprise) }

  it "does not show enterprises with no payment methods" do
    create(:shipping_method, distributors: [shop])
    expect(Enterprise.ready_for_checkout).not_to include shop
  end

  it "does not show enterprises with no shipping methods" do
    create(:payment_method, distributors: [shop])
    expect(Enterprise.ready_for_checkout).not_to include shop
  end

  it "does not show enterprises with unavailable payment methods" do
    create(:shipping_method, distributors: [shop])
    create(:payment_method, distributors: [shop], active: false)
    expect(Enterprise.ready_for_checkout).not_to include shop
  end

  it "shows enterprises with available payment and shipping methods" do
    create(:shipping_method, distributors: [shop])
    create(:payment_method, distributors: [shop])
    expect(Enterprise.ready_for_checkout).to include shop
  end
end
# Mirror of ready_for_checkout: the complement scope catches every enterprise
# missing a shipping method or an active payment method.
describe "not_ready_for_checkout" do
  let!(:shop) { create(:enterprise) }

  it "shows enterprises with no payment methods" do
    create(:shipping_method, distributors: [shop])
    expect(Enterprise.not_ready_for_checkout).to include shop
  end

  it "shows enterprises with no shipping methods" do
    create(:payment_method, distributors: [shop])
    expect(Enterprise.not_ready_for_checkout).to include shop
  end

  it "shows enterprises with unavailable payment methods" do
    create(:shipping_method, distributors: [shop])
    create(:payment_method, distributors: [shop], active: false)
    expect(Enterprise.not_ready_for_checkout).to include shop
  end

  it "does not show enterprises with available payment and shipping methods" do
    create(:shipping_method, distributors: [shop])
    create(:payment_method, distributors: [shop])
    expect(Enterprise.not_ready_for_checkout).not_to include shop
  end
end
# Instance-level predicate matching the ready_for_checkout scope semantics.
describe "#ready_for_checkout?" do
  let!(:shop) { create(:enterprise) }

  it "returns false for enterprises with no payment methods" do
    create(:shipping_method, distributors: [shop])
    expect(shop.reload).not_to be_ready_for_checkout
  end

  it "returns false for enterprises with no shipping methods" do
    create(:payment_method, distributors: [shop])
    expect(shop.reload).not_to be_ready_for_checkout
  end

  it "returns false for enterprises with unavailable payment methods" do
    create(:shipping_method, distributors: [shop])
    create(:payment_method, distributors: [shop], active: false)
    expect(shop.reload).not_to be_ready_for_checkout
  end

  it "returns true for enterprises with available payment and shipping methods" do
    create(:shipping_method, distributors: [shop])
    create(:payment_method, distributors: [shop])
    expect(shop.reload).to be_ready_for_checkout
  end
end
# Only distributors participating in a currently-open order cycle count as
# active.
describe "distributors_with_active_order_cycles" do
  it "finds active distributors by order cycles" do
    supplier = create(:supplier_enterprise)
    distributor = create(:distributor_enterprise)
    product = create(:product)
    create(:simple_order_cycle, suppliers: [supplier], distributors: [distributor], variants: [product.master])

    expect(Enterprise.distributors_with_active_order_cycles).to eq([distributor])
  end

  it "should not find inactive distributors by order cycles" do
    supplier = create(:supplier_enterprise)
    distributor = create(:distributor_enterprise)
    product = create(:product)
    # This order cycle has not opened yet, so its distributor is inactive.
    create(:simple_order_cycle, orders_open_at: 10.days.from_now, orders_close_at: 17.days.from_now,
                                suppliers: [supplier], distributors: [distributor], variants: [product.master])

    expect(Enterprise.distributors_with_active_order_cycles).not_to include distributor
  end
end
# Enterprise.supplying_variant_in maps a set of variants (master or not) back
# to the distinct set of supplier enterprises.
describe "supplying_variant_in" do
it "finds producers by supply of master variant" do
s = create(:supplier_enterprise)
p = create(:simple_product, supplier: s)
expect(Enterprise.supplying_variant_in([p.master])).to eq([s])
end
it "finds producers by supply of variant" do
s = create(:supplier_enterprise)
p = create(:simple_product, supplier: s)
v = create(:variant, product: p)
expect(Enterprise.supplying_variant_in([v])).to eq([s])
end
it "returns multiple enterprises when given multiple variants" do
s1 = create(:supplier_enterprise)
s2 = create(:supplier_enterprise)
p1 = create(:simple_product, supplier: s1)
p2 = create(:simple_product, supplier: s2)
expect(Enterprise.supplying_variant_in([p1.master, p2.master])).to match_array [s1, s2]
end
# Two products from the same supplier must not yield the supplier twice.
it "does not return duplicates" do
s = create(:supplier_enterprise)
p1 = create(:simple_product, supplier: s)
p2 = create(:simple_product, supplier: s)
expect(Enterprise.supplying_variant_in([p1.master, p2.master])).to eq([s])
end
end
# Enterprise.distributing_products returns the distinct distributors whose
# order cycles carry the given product ids.
describe "distributing_products" do
  let(:distributor) { create(:distributor_enterprise) }
  let(:product) { create(:product) }

  it "returns enterprises distributing via an order cycle" do
    # The order cycle only needs to exist; the local variable was unused.
    create(:simple_order_cycle, distributors: [distributor], variants: [product.master])
    expect(Enterprise.distributing_products(product.id)).to eq([distributor])
  end

  it "does not return duplicate enterprises" do
    another_product = create(:product)
    create(:simple_order_cycle, distributors: [distributor], variants: [product.master, another_product.master])
    expect(Enterprise.distributing_products([product.id, another_product.id])).to eq([distributor])
  end
end
# Enterprise.managed_by scopes the list to enterprises a user manages;
# admin users see everything.
describe "managed_by" do
  it "shows only enterprises for given user" do
    user = create(:user)
    user.spree_roles = []
    e1 = create(:enterprise)
    # A second enterprise exists purely to prove it is filtered out;
    # the previously-assigned local variable was unused.
    create(:enterprise)
    e1.enterprise_roles.build(user: user).save

    enterprises = Enterprise.managed_by user
    expect(enterprises.count).to eq(1)
    expect(enterprises).to include e1
  end

  it "shows all enterprises for admin user" do
    user = create(:admin_user)
    e1 = create(:enterprise)
    e2 = create(:enterprise)

    enterprises = Enterprise.managed_by user
    expect(enterprises.count).to eq(2)
    expect(enterprises).to include e1
    expect(enterprises).to include e2
  end
end
end
# After an enterprise is created, EnterpriseRelationship records are set up
# automatically between enterprises sharing the same owner.
describe "callbacks" do
  describe "after creation" do
    let(:owner) { create(:user, enterprise_limit: 10) }
    let(:hub1) { create(:distributor_enterprise, owner: owner) }
    let(:hub2) { create(:distributor_enterprise, owner: owner) }
    let(:hub3) { create(:distributor_enterprise, owner: owner) }
    let(:producer1) { create(:supplier_enterprise, owner: owner) }
    let(:producer2) { create(:supplier_enterprise, owner: owner) }

    describe "when a producer is created" do
      before { [hub1, hub2] } # force both hub lets to exist first

      it "creates links from the new producer to all hubs owned by the same user, granting add_to_order_cycle and create_variant_overrides permissions" do
        producer1
        should_have_enterprise_relationship from: producer1, to: hub1, with: %i[add_to_order_cycle create_variant_overrides]
        should_have_enterprise_relationship from: producer1, to: hub2, with: %i[add_to_order_cycle create_variant_overrides]
      end

      it "does not create any other links" do
        expect { producer1 }.to change(EnterpriseRelationship, :count).by(2)
      end
    end

    describe "when a new hub is created" do
      it "it creates links to the hub, from all producers owned by the same user, granting add_to_order_cycle and create_variant_overrides permissions" do
        producer1
        producer2
        hub1
        should_have_enterprise_relationship from: producer1, to: hub1, with: %i[add_to_order_cycle create_variant_overrides]
        should_have_enterprise_relationship from: producer2, to: hub1, with: %i[add_to_order_cycle create_variant_overrides]
      end

      it "creates links from the new hub to all hubs owned by the same user, granting add_to_order_cycle permission" do
        hub1
        hub2
        hub3
        should_have_enterprise_relationship from: hub2, to: hub1, with: %i[add_to_order_cycle]
        should_have_enterprise_relationship from: hub3, to: hub1, with: %i[add_to_order_cycle]
        should_have_enterprise_relationship from: hub3, to: hub2, with: %i[add_to_order_cycle]
      end

      it "does not create any other links" do
        producer1
        producer2
        expect { hub1 }.to change(EnterpriseRelationship, :count).by(2) # 2 producer links
        expect { hub2 }.to change(EnterpriseRelationship, :count).by(3) # 2 producer links + 1 hub link
        expect { hub3 }.to change(EnterpriseRelationship, :count).by(4) # 2 producer links + 2 hub links
      end
    end

    # Helper: asserts an EnterpriseRelationship from opts[:from] to opts[:to];
    # optionally checks the exact granted permissions.
    def should_have_enterprise_relationship(opts = {})
      relationship = EnterpriseRelationship.where(parent_id: opts[:from], child_id: opts[:to]).last
      expect(relationship).not_to be_nil

      return unless opts.key?(:with)

      granted = relationship.permissions.map(&:name)
      if opts[:with] == :all_permissions
        expect(granted).to match_array ['add_to_order_cycle', 'manage_products', 'edit_profile', 'create_variant_overrides']
      else
        expect(granted).to match_array opts[:with].map(&:to_s)
      end
    end
  end
end
# distributed_variants reports every variant — master included — that the
# enterprise distributes through an order cycle.
describe "finding variants distributed by the enterprise" do
  it "finds variants, including master, distributed by order cycle" do
    shop = create(:distributor_enterprise)
    product = create(:product)
    first_variant = product.variants.first
    create(:simple_order_cycle, distributors: [shop], variants: [first_variant])

    expect(shop.distributed_variants).to match_array [product.master, first_variant]
  end
end
# Taxon aggregation: distributed_taxons / current_distributed_taxons /
# supplied_taxons collect the taxons of the relevant products.
describe "taxons" do
let(:distributor) { create(:distributor_enterprise) }
let(:supplier) { create(:supplier_enterprise) }
let(:taxon1) { create(:taxon) }
let(:taxon2) { create(:taxon) }
let(:taxon3) { create(:taxon) }
# product1/2 carry taxon1 (and taxon2); product3 only has a primary taxon.
let(:product1) { create(:simple_product, primary_taxon: taxon1, taxons: [taxon1]) }
let(:product2) { create(:simple_product, primary_taxon: taxon1, taxons: [taxon1, taxon2]) }
let(:product3) { create(:simple_product, primary_taxon: taxon3) }
let(:oc) { create(:order_cycle) }
# Outgoing exchange from supplier to distributor in the order cycle.
let(:ex) { create(:exchange, order_cycle: oc, incoming: false, sender: supplier, receiver: distributor) }
it "gets all taxons of all distributed products" do
allow(Spree::Product).to receive(:in_distributor).and_return [product1, product2]
expect(distributor.distributed_taxons).to match_array [taxon1, taxon2]
end
# Only products whose variants are in the (open) order cycle count here.
it "gets all taxons of all distributed products in open order cycles" do
allow(Spree::Product).to receive(:in_distributor).and_return [product1, product2, product3]
ex.variants << product1.variants.first
ex.variants << product3.variants.first
expect(distributor.current_distributed_taxons).to match_array [taxon1, taxon3]
end
it "gets all taxons of all supplied products" do
allow(Spree::Product).to receive(:in_supplier).and_return [product1, product2]
expect(supplier.supplied_taxons).to match_array [taxon1, taxon2]
end
end
# URL-ish fields are normalised on assignment: a leading "http(s)://" scheme
# is stripped; values without a scheme are left untouched.
describe "presentation of attributes" do
  let(:distributor) do
    build_stubbed(
      :distributor_enterprise,
      website: "http://www.google.com",
      facebook: "www.facebook.com/roger",
      linkedin: "https://linkedin.com"
    )
  end

  it "strips http from url fields" do
    expect(distributor.website).to eq("www.google.com")
    expect(distributor.facebook).to eq("www.facebook.com/roger")
    expect(distributor.linkedin).to eq("linkedin.com")
  end
end
# set_producer_property creates a producer property with the given
# presentation name and value.
describe "producer properties" do
  let(:supplier) { create(:supplier_enterprise) }

  it "sets producer properties" do
    supplier.set_producer_property 'Organic Certified', 'NASAA 12345'

    properties = supplier.producer_properties
    expect(properties.count).to eq(1)
    expect(properties.first.value).to eq('NASAA 12345')
    expect(properties.first.property.presentation).to eq('Organic Certified')
  end
end
# Category matrix derived from (is_primary_producer, sells):
#   producer     + "any"  -> :producer_hub
#   producer     + "own"  -> :producer_shop
#   producer     + "none" -> :producer
#   non-producer + "any"/"own" -> :hub
#   non-producer + "none" -> :hub_profile
describe "provide enterprise category" do
let(:producer_sell_all) { build_stubbed(:enterprise, is_primary_producer: true, sells: "any") }
let(:producer_sell_own) { build_stubbed(:enterprise, is_primary_producer: true, sells: "own") }
let(:producer_sell_none) { build_stubbed(:enterprise, is_primary_producer: true, sells: "none") }
let(:non_producer_sell_all) { build_stubbed(:enterprise, is_primary_producer: false, sells: "any") }
let(:non_producer_sell_own) { build_stubbed(:enterprise, is_primary_producer: false, sells: "own") }
let(:non_producer_sell_none) { build_stubbed(:enterprise, is_primary_producer: false, sells: "none") }
it "should output enterprise categories" do
expect(producer_sell_all.is_primary_producer).to eq(true)
expect(producer_sell_all.sells).to eq("any")
expect(producer_sell_all.category).to eq(:producer_hub)
expect(producer_sell_own.category).to eq(:producer_shop)
expect(producer_sell_none.category).to eq(:producer)
expect(non_producer_sell_all.category).to eq(:hub)
expect(non_producer_sell_own.category).to eq(:hub)
expect(non_producer_sell_none.category).to eq(:hub_profile)
end
end
# Permalink handling: Enterprise.find_available_permalink turns a name into a
# unique, parameterized permalink, appending a numeric suffix when needed, and
# initialize_permalink assigns it on a new record.
describe "finding and automatically assigning a permalink" do
  let(:enterprise) { build_stubbed(:enterprise, name: "Name To Turn Into A Permalink") }

  it "assigns permalink when initialized" do
    allow(Enterprise).to receive(:find_available_permalink).and_return("available_permalink")
    expect(Enterprise).to receive(:find_available_permalink).with("Name To Turn Into A Permalink")

    expect(
      lambda { enterprise.send(:initialize_permalink) }
    ).to change {
      enterprise.permalink
    }.to(
      "available_permalink"
    )
  end

  describe "finding a permalink" do
    # Pre-existing permalinks that the lookup must avoid colliding with.
    let!(:enterprise1) { create(:enterprise, permalink: "permalink") }
    let!(:enterprise2) { create(:enterprise, permalink: "permalink1") }

    it "parameterizes the value provided" do
      expect(Enterprise.find_available_permalink("Some Unused Permalink")).to eq "some-unused-permalink"
    end

    # Typo fixed in example name: "parametized" -> "parameterized".
    it "sets the permalink to 'my-enterprise' if parameterized permalink is blank" do
      expect(Enterprise.find_available_permalink("")).to eq "my-enterprise"
      expect(Enterprise.find_available_permalink("$$%{$**}$%}")).to eq "my-enterprise"
    end

    # Typo fixed in example name: "finds and" -> "finds an".
    it "finds an index value based on existing permalinks" do
      expect(Enterprise.find_available_permalink("permalink")).to eq "permalink2"
    end

    it "ignores permalinks with characters after the index value" do
      create(:enterprise, permalink: "permalink2xxx")
      expect(Enterprise.find_available_permalink("permalink")).to eq "permalink2"
    end

    it "finds available permalink similar to existing" do
      create(:enterprise, permalink: "permalink2xxx")
      expect(Enterprise.find_available_permalink("permalink2")).to eq "permalink2"
    end

    it "finds gaps in the indices of existing permalinks" do
      create(:enterprise, permalink: "permalink3")
      expect(Enterprise.find_available_permalink("permalink")).to eq "permalink2"
    end
  end
end
# plus_relatives_and_oc_producers must only ever return producers; a
# non-producer distributor participating in the order cycle is excluded.
# Typo fixed in example name: "non-produders " -> "non-producers".
describe "#plus_relatives_and_oc_producers" do
  it "does not find non-producers" do
    supplier = create(:supplier_enterprise)
    distributor = create(:distributor_enterprise, is_primary_producer: false)
    product = create(:product)
    order_cycle = create(
      :simple_order_cycle,
      suppliers: [supplier],
      distributors: [distributor],
      variants: [product.master]
    )

    expect(distributor.plus_relatives_and_oc_producers(order_cycle)).to eq([supplier])
  end
end
end
|
# Shared examples: builds a custom MiqReport over Host or VM with a custom
# attribute association and verifies paged_view_search returns the attribute
# name/value columns.
# NOTE(review): FactoryGirl is the legacy name of FactoryBot — confirm which
# the suite provides before renaming.
shared_examples "custom_report_with_custom_attributes" do |base_report, custom_attribute_field|
let(:options) { {:targets_hash => true, :userid => "admin"} }
# e.g. :miq_custom_attribute -> "miq_custom_attributes"
let(:custom_attributes_field) { custom_attribute_field.to_s.pluralize }
before do
@user = FactoryGirl.create(:user_with_group)
# create custom attributes
@key = 'key1'
@value = 'value1'
# Resource type follows the base_report parameter (Host or VMware VM).
@resource = base_report == "Host" ? FactoryGirl.create(:host) : FactoryGirl.create(:vm_vmware)
FactoryGirl.create(custom_attribute_field, :resource => @resource, :name => @key, :value => @value)
end
let(:report) do
MiqReport.new(
:name => "Custom VM report",
:title => "Custom VM report",
:rpt_group => "Custom",
:rpt_type => "Custom",
:db => base_report == "Host" ? "Host" : "ManageIQ::Providers::InfraManager::Vm",
:cols => %w(name),
:include => {custom_attributes_field.to_s => {"columns" => %w(name value)}},
:col_order => %w(miq_custom_attributes.name miq_custom_attributes.value name),
:headers => ["EVM Custom Attribute Name", "EVM Custom Attribute Value", "Name"],
:order => "Ascending",
:sortby => ["miq_custom_attributes.name"]
)
end
it "creates custom report based on #{base_report} with #{custom_attribute_field} field of custom attributes" do
expect { @results, _attrs = report.paged_view_search(options) }.not_to raise_error
# Result rows are keyed by "<association>.<column>".
custom_attributes_name = "#{custom_attributes_field}.name"
custom_attributes_value = "#{custom_attributes_field}.value"
expect(@results.data.first[custom_attributes_name]).to eq(@key)
expect(@results.data.first[custom_attributes_value]).to eq(@value)
end
end
describe MiqReport do
# A custom VM report filtered by registry key/value via MiqExpression should
# match only the VM carrying that registry item.
context "report with filtering in Registry" do
let(:options) { {:targets_hash => true, :userid => "admin"} }
let(:miq_task) { FactoryGirl.create(:miq_task) }
before do
@user = FactoryGirl.create(:user_with_group)
@registry = FactoryGirl.create(:registry_item, :name => "HKLM\\SOFTWARE\\WindowsFirewall : EnableFirewall",
:data => 0)
@vm = FactoryGirl.create(:vm_vmware, :registry_items => [@registry])
EvmSpecHelper.local_miq_server
end
let(:report) do
MiqReport.new(:name => "Custom VM report", :title => "Custom VM report", :rpt_group => "Custom",
:rpt_type => "Custom", :db => "Vm", :cols => %w(name),
:conditions => MiqExpression.new("=" => {"regkey" => "HKLM\\SOFTWARE\\WindowsFirewall",
"regval" => "EnableFirewall", "value" => "0"}),
:include => {"registry_items" => {"columns" => %w(data name value_name)}},
:col_order => %w(name registry_items.data registry_items.name registry_items.value_name),
:headers => ["Name", "Registry Data", "Registry Name", "Registry Value Name"],
:order => "Ascending")
end
it "can generate a report filtered by registry items" do
# Queue then run the async generation path directly against the task.
report.queue_generate_table(:userid => @user.userid)
report._async_generate_table(miq_task.id, :userid => @user.userid, :mode => "async",
:report_source => "Requested by user")
# Drop volatile ids before comparing row data.
report_result = report.table.data.map do |x|
x.data.delete("id")
x.data
end
expect(report_result.count).to eq(1)
expect(report_result.first["name"]).to eq(@vm.name)
end
end
context "report with virtual dynamic custom attributes" do
  let(:options) { {:targets_hash => true, :userid => "admin"} }
  let(:custom_column_key_1) { 'kubernetes.io/hostname' }
  let(:custom_column_key_2) { 'manageiq.org' }
  let(:custom_column_key_3) { 'ATTR_Name_3' }
  let(:custom_column_value) { 'value1' }
  let(:user) { FactoryGirl.create(:user_with_group) }
  let(:ems) { FactoryGirl.create(:ems_vmware) }
  let!(:vm_1) { FactoryGirl.create(:vm_vmware) }
  let!(:vm_2) { FactoryGirl.create(:vm_vmware, :retired => false, :ext_management_system => ems) }
  # Virtual column names are the custom-attribute keys prefixed with
  # CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX.
  let(:virtual_column_key_1) { "#{CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX}kubernetes.io/hostname" }
  let(:virtual_column_key_2) { "#{CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX}manageiq.org" }
  let(:virtual_column_key_3) { "#{CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX}ATTR_Name_3" }
  let(:miq_task) { FactoryGirl.create(:miq_task) }
  # Attach one custom attribute to vm_1 and two to vm_2 before each example.
  subject! do
    FactoryGirl.create(:miq_custom_attribute, :resource => vm_1, :name => custom_column_key_1,
                       :value => custom_column_value)
    FactoryGirl.create(:miq_custom_attribute, :resource => vm_2, :name => custom_column_key_2,
                       :value => custom_column_value)
    FactoryGirl.create(:miq_custom_attribute, :resource => vm_2, :name => custom_column_key_3,
                       :value => custom_column_value)
  end
  before do
    EvmSpecHelper.local_miq_server
  end
  let(:report) do
    MiqReport.new(
      :name => "Custom VM report", :title => "Custom VM report", :rpt_group => "Custom", :rpt_type => "Custom",
      :db => "ManageIQ::Providers::InfraManager::Vm",
      :cols => %w(name virtual_custom_attribute_kubernetes.io/hostname virtual_custom_attribute_manageiq.org),
      :include => {:custom_attributes => {}},
      :col_order => %w(name virtual_custom_attribute_kubernetes.io/hostname virtual_custom_attribute_manageiq.org),
      # FIX: the third column is the manageiq.org attribute, so its header must
      # be custom_column_key_2 (it was mistakenly duplicated as key_1).
      :headers => ["Name", custom_column_key_1, custom_column_key_2],
      :order => "Ascending"
    )
  end
  it "generates report with dynamic custom attributes" do
    report.queue_generate_table(:userid => user.userid)
    report._async_generate_table(miq_task.id, :userid => user.userid, :mode => "async",
                                 :report_source => "Requested by user")
    report_result = report.table.data.map do |x|
      x.data.delete("id")
      x.data
    end
    # Each VM row exposes only its own attribute; the other column is nil.
    expected_results = [{"name" => vm_1.name, virtual_column_key_1 => custom_column_value,
                         virtual_column_key_2 => nil},
                        {"name" => vm_2.name, virtual_column_key_1 => nil,
                         virtual_column_key_2 => custom_column_value}]
    expect(report_result).to match_array(expected_results)
  end
  let(:exp) { MiqExpression.new("IS NOT EMPTY" => {"field" => "#{vm_1.type}-#{virtual_column_key_1}"}) }
  it "generates report with dynamic custom attributes with MiqExpression filtering" do
    # Filtering on virtual_column_key_1 leaves only vm_1 in the result set.
    report.conditions = exp
    report.queue_generate_table(:userid => user.userid)
    report._async_generate_table(miq_task.id, :userid => user.userid, :mode => "async",
                                 :report_source => "Requested by user")
    report_result = report.table.data.map do |x|
      x.data.delete("id")
      x.data
    end
    expected_results = ["name" => vm_1.name, virtual_column_key_1 => custom_column_value, virtual_column_key_2 => nil]
    expect(report_result).to match_array(expected_results)
  end
  let(:exp_3) do
    MiqExpression.new("and" => [{"=" => { "field" => "#{vm_2.type}-active", "value" => "true"}},
                                {"or" => [{"IS NOT EMPTY" => { "field" => "#{vm_2.type}-name", "value" => ""}},
                                          {"IS NOT EMPTY" => { "field" => "#{vm_2.type}-#{virtual_column_key_3}"}}]}])
  end
  it "generates report with dynamic custom attributes with filtering with field which is not listed in cols" do
    # virtual_column_key_3 is referenced only in the filter, not in :cols;
    # filtering must still work and match vm_2.
    report.conditions = exp_3
    report.queue_generate_table(:userid => user.userid)
    report._async_generate_table(miq_task.id, :userid => user.userid, :mode => "async",
                                 :report_source => "Requested by user")
    report_result = report.table.data.map do |x|
      x.data.delete("id")
      x.data
    end
    expected_results = ["name" => vm_2.name, virtual_column_key_1 => nil, virtual_column_key_2 => custom_column_value]
    expect(report_result).to match_array(expected_results)
  end
end
# Run the shared custom-attribute report examples for every combination of
# base model (Host / Vm) and custom-attribute factory type.
{
  "Host and MiqCustomAttributes" => ["Host", :miq_custom_attribute],
  "Vm and MiqCustomAttributes"   => ["Vm", :miq_custom_attribute],
  "Host and EmsCustomAttributes" => ["Host", :ems_custom_attribute],
  "Vm and EmsCustomAttributes"   => ["Vm", :ems_custom_attribute]
}.each do |description, (base_report, attribute_type)|
  context(description) do
    include_examples "custom_report_with_custom_attributes", base_report, attribute_type
  end
end
it "attr_accessors are serializable via yaml" do
  # MiqReport#table is an attr_accessor (not a persisted column); dumping a
  # report to YAML and loading it back must preserve the assigned table.
  result = [{"id" => 5, "vmm_vendor" => "vmware", "vmm_vendor_display" => "VMware", "vmm_product" => "ESXi", "ipaddress" => "192.168.252.13", "vmm_buildnumber" => "260247", "vmm_version" => "4.1.0", "name" => "VI4ESXM1.manageiq.com"}, {"id" => 3, "vmm_vendor" => "VMware", "vmm_product" => "ESXi", "ipaddress" => "192.168.252.9", "vmm_buildnumber" => "348481", "vmm_version" => "4.1.0", "name" => "vi4esxm2.manageiq.com"}, {"id" => 4, "vmm_vendor" => "VMware", "vmm_product" => "ESX", "ipaddress" => "192.168.252.10", "vmm_buildnumber" => "502767", "vmm_version" => "4.1.0", "name" => "vi4esxm3.manageiq.com"}, {"id" => 1, "vmm_vendor" => "VMware", "vmm_product" => "ESXi", "ipaddress" => "192.168.252.4", "vmm_buildnumber" => "504850", "vmm_version" => "4.0.0", "name" => "per410a-t5.manageiq.com"}]
  column_names = ["name", "ipaddress", "vmm_vendor", "vmm_vendor_display", "vmm_product", "vmm_version", "vmm_buildnumber", "id"]
  fake_ruport_data_table = {:data => result, :column_names => column_names}
  before = MiqReport.new
  before.table = fake_ruport_data_table
  # Round-trip through YAML serialization.
  after = YAML.load(YAML.dump(before))
  expect(after.table).to eq(fake_ruport_data_table)
end
it '.get_expressions_by_model' do
  # Reports with nil conditions -- either a NULL column or a YAML-serialized
  # nil -- must be excluded from get_expressions_by_model's result.
  FactoryGirl.create(:miq_report, :conditions => nil)
  rep_nil = FactoryGirl.create(:miq_report)
  # FIXME: find a way to do this in a factory
  serialized_nil = "--- !!null \n...\n"
  # Write the serialized nil directly; going through the model would
  # normalize the value.
  ActiveRecord::Base.connection.execute("update miq_reports set conditions='#{serialized_nil}' where id=#{rep_nil.id}")
  rep_ok = FactoryGirl.create(:miq_report, :conditions => "SOMETHING")
  reports = MiqReport.get_expressions_by_model('Vm')
  # Only the report with real conditions comes back, keyed name => id.
  expect(reports).to eq(rep_ok.name => rep_ok.id)
end
it "paged_view_search on vmdb_* tables" do
  # Create EVM tables/indexes and hourly metric data...
  table = FactoryGirl.create(:vmdb_table_evm, :name => "accounts")
  index = FactoryGirl.create(:vmdb_index, :name => "accounts_pkey", :vmdb_table => table)
  FactoryGirl.create(:vmdb_metric, :resource => index, :timestamp => Time.now.utc, :capture_interval_name => 'hourly', :size => 102, :rows => 102, :pages => 102, :wasted_bytes => 102, :percent_bloat => 102)
  report_args = {
    "db" => "VmdbIndex",
    "cols" => ["name"],
    "include" => {"vmdb_table" => {"columns" => ["type"]}, "latest_hourly_metric" => {"columns" => ["rows", "size", "wasted_bytes", "percent_bloat"]}},
    "col_order" => ["name", "latest_hourly_metric.rows", "latest_hourly_metric.size", "latest_hourly_metric.wasted_bytes", "latest_hourly_metric.percent_bloat"],
    "col_formats" => [nil, nil, :bytes_human, :bytes_human, nil],
  }
  report = MiqReport.new(report_args)
  # Filter indexes down to those belonging to an EVM table.
  search_expression = MiqExpression.new("and" => [{"=" => {"value" => "VmdbTableEvm", "field" => "VmdbIndex.vmdb_table-type"}}])
  results, = report.paged_view_search(:filter => search_expression)
  # Integer metrics come back as integers, byte metrics as floats.
  expect(results.data.collect(&:data)).to eq(
    [{
      "name" => "accounts_pkey",
      "vmdb_table.type" => "VmdbTableEvm",
      "latest_hourly_metric.rows" => 102,
      "latest_hourly_metric.size" => 102,
      "latest_hourly_metric.wasted_bytes" => 102.0,
      "latest_hourly_metric.percent_bloat" => 102.0,
      "id" => index.id
    }]
  )
end
context "#paged_view_search" do
it "filters vms in folders" do
  # Build a folder hierarchy (datacenters -> usa -> nyc) under a host and
  # place vm1 in "usa", vm2 in "nyc".
  host = FactoryGirl.create(:host)
  vm1 = FactoryGirl.create(:vm_vmware, :host => host)
  allow(vm1).to receive(:archived?).and_return(false)
  vm2 = FactoryGirl.create(:vm_vmware, :host => host)
  allow(vm2).to receive(:archived?).and_return(false)
  allow(Vm).to receive(:find_by).and_return(vm1)
  root = FactoryGirl.create(:ems_folder, :name => "datacenters")
  root.parent = host
  usa = FactoryGirl.create(:ems_folder, :name => "usa")
  usa.parent = root
  nyc = FactoryGirl.create(:ems_folder, :name => "nyc")
  nyc.parent = usa
  vm1.with_relationship_type("ems_metadata") { vm1.parent = usa }
  vm2.with_relationship_type("ems_metadata") { vm2.parent = nyc }
  report = MiqReport.new(:db => "Vm")
  # Direct children only: "usa" contains just vm1 ...
  results, = report.paged_view_search(:parent => usa)
  expect(results.data.collect { |rec| rec.data['id'] }).to eq [vm1.id]
  # ... and "datacenters" has no VMs as direct children.
  results, = report.paged_view_search(:parent => root)
  expect(results.data.collect { |rec| rec.data['id'] }).to eq []
  # With :association => :all_vms the search descends the whole subtree.
  results, = report.paged_view_search(:parent => root, :association => :all_vms)
  expect(results.data.collect { |rec| rec.data['id'] }).to match_array [vm1.id, vm2.id]
end
it "paging with order" do
  # With two VMs sorted descending by id, page 2 at one record per page must
  # yield the smaller id (i.e. the first id in ascending order).
  created_ids = [FactoryGirl.create(:vm_vmware).id, FactoryGirl.create(:vm_vmware).id].sort
  report = MiqReport.new(:db => "Vm", :sortby => "id", :order => "Descending")
  results, = report.paged_view_search(:page => 2, :per_page => 1)
  returned_ids = results.data.collect { |rec| rec.data['id'] }
  expect(returned_ids).to eq [created_ids.first]
end
it "target_ids_for_paging caches results" do
  # Pre-seeding :target_ids_for_paging in extras must short-circuit the
  # search: only the cached id is returned even though two VMs exist.
  cached_vm = FactoryGirl.create(:vm_vmware)
  FactoryGirl.create(:vm_vmware)
  report = MiqReport.new(:db => "Vm")
  report.extras = {:target_ids_for_paging => [cached_vm.id], :attrs_for_paging => {}}
  results, = report.paged_view_search(:page => 1, :per_page => 10)
  returned_ids = results.data.collect { |rec| rec.data['id'] }
  expect(returned_ids).to eq [cached_vm.id]
end
it "VMs under Host with order" do
  # Three VMs across two hosts; scoping to host2's "vms" association with a
  # descending name sort must return "c" then "b" (host1's "a" excluded).
  host1 = FactoryGirl.create(:host)
  FactoryGirl.create(:vm_vmware, :host => host1, :name => "a")
  ems = FactoryGirl.create(:ems_vmware)
  host2 = FactoryGirl.create(:host)
  vmb = FactoryGirl.create(:vm_vmware, :host => host2, :name => "b", :ext_management_system => ems)
  vmc = FactoryGirl.create(:vm_vmware, :host => host2, :name => "c", :ext_management_system => ems)
  report = MiqReport.new(:db => "Vm", :sortby => "name", :order => "Descending")
  results, = report.paged_view_search(
    :parent => host2,
    :association => "vms",
    :only => ["name"],
    :page => 1,
    :per_page => 2
  )
  names = results.data.collect(&:name)
  expect(names).to eq [vmc.name, vmb.name]
end
it "user managed filters" do
  vm1 = FactoryGirl.create(:vm_vmware)
  vm1.tag_with("/managed/environment/prod", :ns => "*")
  vm2 = FactoryGirl.create(:vm_vmware)
  vm2.tag_with("/managed/environment/dev", :ns => "*")
  user = FactoryGirl.create(:user_with_group)
  group = user.current_group
  allow(User).to receive_messages(:server_timezone => "UTC")
  # Restrict the user's group to the "prod" environment tag.
  group.entitlement = Entitlement.new
  group.entitlement.set_managed_filters([["/managed/environment/prod"]])
  group.save!
  report = MiqReport.new(:db => "Vm")
  results, attrs = report.paged_view_search(
    :only => ["name"],
    :userid => user.userid,
  )
  # Only the prod-tagged VM is visible to the filtered user.
  expect(results.length).to eq 1
  expect(results.data.collect(&:name)).to eq [vm1.name]
  expect(report.table.length).to eq 1
  # Sort and limit are still applied at the SQL level despite the filter.
  expect(attrs[:apply_sortby_in_search]).to be_truthy
  expect(attrs[:apply_limit_in_sql]).to be_truthy
  expect(attrs[:auth_count]).to eq 1
  expect(attrs[:user_filters]["managed"]).to eq [["/managed/environment/prod"]]
end
it "sortby, order, user filters, where sort column is in a sub-table" do
  user = FactoryGirl.create(:user_with_group)
  group = user.current_group
  vm1 = FactoryGirl.create(:vm_vmware, :name => "VA", :storage => FactoryGirl.create(:storage, :name => "SA"))
  vm2 = FactoryGirl.create(:vm_vmware, :name => "VB", :storage => FactoryGirl.create(:storage, :name => "SB"))
  tag = "/managed/environment/prod"
  group.entitlement = Entitlement.new
  group.entitlement.set_managed_filters([[tag]])
  group.save!
  # Both VMs carry the tag, so the user filter hides nothing; sorting is on
  # the joined storage.name column.
  vm1.tag_with(tag, :ns => "*")
  vm2.tag_with(tag, :ns => "*")
  allow(User).to receive_messages(:server_timezone => "UTC")
  report = MiqReport.new(:db => "Vm", :sortby => %w(storage.name name), :order => "Ascending", :include => {"storage" => {"columns" => ["name"]}})
  options = {
    :only => ["name", "storage.name"],
    :userid => user.userid,
  }
  results, attrs = report.paged_view_search(options)
  # Check both the data and that sort/limit were applied in SQL even though
  # the sort column lives in a joined table.
  expect(results.length).to eq 2
  expect(results.data.first["name"]).to eq "VA"
  expect(results.data.first["storage.name"]).to eq "SA"
  expect(report.table.length).to eq 2
  expect(attrs[:apply_sortby_in_search]).to be_truthy
  expect(attrs[:apply_limit_in_sql]).to be_truthy
  expect(attrs[:auth_count]).to eq 2
  expect(attrs[:user_filters]["managed"]).to eq [[tag]]
end
it "sorting on a virtual column" do
  # host_name is a virtual column (no DB column on vms), so sorting cannot
  # be pushed down to SQL; descending by host_name puts "B" first.
  FactoryGirl.create(:vm_vmware, :name => "B", :host => FactoryGirl.create(:host, :name => "A"))
  FactoryGirl.create(:vm_vmware, :name => "A", :host => FactoryGirl.create(:host, :name => "B"))
  report = MiqReport.new(:db => "Vm", :sortby => %w(host_name name), :order => "Descending")
  options = {
    :only => %w(name host_name),
    :page => 2,
  }
  results, _attrs = report.paged_view_search(options)
  expect(results.length).to eq 2
  expect(results.data.first["host_name"]).to eq "B"
end
it "expression filtering on a virtual column" do
  FactoryGirl.create(:vm_vmware, :name => "VA", :host => FactoryGirl.create(:host, :name => "HA"))
  FactoryGirl.create(:vm_vmware, :name => "VB", :host => FactoryGirl.create(:host, :name => "HB"))
  report = MiqReport.new(:db => "Vm")
  # Equality filter on host_name, a virtual column of Vm.
  filter = YAML.load '--- !ruby/object:MiqExpression
  exp:
    "=":
      field: Vm-host_name
      value: "HA"
  '
  results, _attrs = report.paged_view_search(:only => %w(name host_name), :filter => filter)
  expect(results.length).to eq 1
  expect(results.data.first["name"]).to eq "VA"
  expect(results.data.first["host_name"]).to eq "HA"
end
it "expression filtering on a virtual column and user filters" do
  user = FactoryGirl.create(:user_with_group)
  group = user.current_group
  _vm1 = FactoryGirl.create(:vm_vmware, :name => "VA", :host => FactoryGirl.create(:host, :name => "HA"))
  vm2 = FactoryGirl.create(:vm_vmware, :name => "VB", :host => FactoryGirl.create(:host, :name => "HB"))
  vm3 = FactoryGirl.create(:vm_vmware, :name => "VAA", :host => FactoryGirl.create(:host, :name => "HAA"))
  tag = "/managed/environment/prod"
  group.entitlement = Entitlement.new
  group.entitlement.set_managed_filters([[tag]])
  group.save!
  # vm1's host.name starts with HA but isn't tagged
  vm2.tag_with(tag, :ns => "*")
  vm3.tag_with(tag, :ns => "*")
  allow(User).to receive_messages(:server_timezone => "UTC")
  report = MiqReport.new(:db => "Vm")
  filter = YAML.load '--- !ruby/object:MiqExpression
  exp:
    "starts with":
      field: Vm-host_name
      value: "HA"
  '
  # Only vm3 satisfies both the expression filter and the tag filter.
  results, attrs = report.paged_view_search(:only => %w(name host_name), :userid => user.userid, :filter => filter)
  expect(results.length).to eq 1
  expect(results.data.first["name"]).to eq "VAA"
  expect(results.data.first["host_name"]).to eq "HAA"
  expect(attrs[:user_filters]["managed"]).to eq [[tag]]
end
it "filtering on a virtual reflection" do
  vm1 = FactoryGirl.create(:vm_vmware, :name => "VA")
  vm2 = FactoryGirl.create(:vm_vmware, :name => "VB")
  rp1 = FactoryGirl.create(:resource_pool, :name => "RPA")
  rp2 = FactoryGirl.create(:resource_pool, :name => "RPB")
  rp1.add_child(vm1)
  rp2.add_child(vm2)
  report = MiqReport.new(:db => "Vm")
  # parent_resource_pool is a virtual reflection reached via relationships.
  filter = YAML.load '--- !ruby/object:MiqExpression
  exp:
    "starts with":
      field: Vm.parent_resource_pool-name
      value: "RPA"
  '
  results, _attrs = report.paged_view_search(:only => %w(name), :filter => filter)
  expect(results.length).to eq 1
  expect(results.data.first["name"]).to eq "VA"
end
it "virtual columns included in cols" do
  FactoryGirl.create(:vm_vmware, :host => FactoryGirl.create(:host, :name => "HA", :vmm_product => "ESX"))
  FactoryGirl.create(:vm_vmware, :host => FactoryGirl.create(:host, :name => "HB", :vmm_product => "ESX"))
  # v_host_vmm_product surfaces the host's vmm_product as a virtual column
  # on the VM row.
  report = MiqReport.new(
    :name => "VMs",
    :title => "Virtual Machines",
    :db => "Vm",
    :cols => %w(name host_name v_host_vmm_product),
    :include => {"host" => {"columns" => %w(name vmm_product)}},
    :col_order => %w(name host.name host.vmm_product),
    :headers => ["Name", "Host", "Host VMM Product"],
    :order => "Ascending",
    :sortby => ["host_name"],
  )
  options = {
    :targets_hash => true,
    :userid => "admin"
  }
  results, _attrs = report.paged_view_search(options)
  expect(results.length).to eq 2
  expect(results.data.collect { |rec| rec.data["host_name"] }).to eq(%w(HA HB))
  expect(results.data.collect { |rec| rec.data["v_host_vmm_product"] }).to eq(%w(ESX ESX))
end
end
describe "#generate_table" do
it "with has_many through" do
  ems = FactoryGirl.create(:ems_vmware_with_authentication)
  user = FactoryGirl.create(:user_with_group)
  group = user.current_group
  template = FactoryGirl.create(:template_vmware, :ext_management_system => ems)
  vm = FactoryGirl.create(:vm_vmware, :ext_management_system => ems)
  hardware = FactoryGirl.create(:hardware, :vm => vm)
  FactoryGirl.create(:disk, :hardware => hardware, :disk_type => "thin")
  options = {
    :vm_name => vm.name,
    :vm_target_name => vm.name,
    :provision_type => "vmware",
    :src_vm_id => [template.id, template.name]
  }
  # A finished provision links the template (source) to the VM (destination),
  # which makes the VM reachable through template.miq_provision_vms.
  provision = FactoryGirl.create(
    :miq_provision_vmware,
    :destination => vm,
    :source => template,
    :userid => user.userid,
    :request_type => 'template',
    :state => 'finished',
    :status => 'Ok',
    :options => options
  )
  template.miq_provisions_from_template << provision
  template.save
  # Sanity-check the association before generating the report.
  expect(template.miq_provision_vms.count).to be > 0
  expect(template.miq_provision_vms.count(&:thin_provisioned)).to be > 0
  report = MiqReport.create(
    :name => "VMs based on Disk Type",
    :title => "VMs using thin provisioned disks",
    :rpt_group => "Custom",
    :rpt_type => "Custom",
    :db => "MiqTemplate",
    :cols => [],
    :include => {"miq_provision_vms" => {"columns" => ["name"]}},
    :col_order => ["miq_provision_vms.name"],
    :headers => ["Name"],
    :template_type => "report",
    :miq_group_id => group.id,
    :user_id => user.userid,
    :conditions => MiqExpression.new(
      {"FIND" => {"search" => {"=" => {"field" => "MiqTemplate.miq_provision_vms-thin_provisioned", "value" => "true"}}, "checkall" => {"=" => {"field" => "MiqTemplate.miq_provision_vms-thin_provisioned", "value" => "true"}}}},
      nil
    )
  )
  report.generate_table
  expect(report.table.data.collect { |rec| rec.data['miq_provision_vms.name'] }).to eq([vm.name])
end
# Base performance report shared by the nested contexts below; `interval`
# and `conditions` are expected to be defined by each enclosing context.
let(:report) do
  MiqReport.new(
    :name => "All Departments with Performance", :title => "All Departments with Performance for last week",
    :db => "VmPerformance",
    :cols => %w(resource_name max_cpu_usage_rate_average cpu_usage_rate_average),
    :include => {"vm" => {"columns" => ["v_annotation"]}, "host" => {"columns" => ["name"]}},
    :col_order => ["ems_cluster.name", "vm.v_annotation", "host.name"],
    :headers => ["Cluster", "VM Annotations - Notes", "Host Name"],
    :order => "Ascending",
    :group => "c",
    # Look back one week (604_800 seconds) up to now.
    :db_options => {:start_offset => 604_800, :end_offset => 0, :interval => interval},
    :conditions => conditions)
end
context "daily reports" do
  let(:interval) { "daily" }
  context "with conditions where is joining with another table" do
    # The condition references VmPerformance.host-name, which forces a join
    # to the hosts table during report generation.
    let(:conditions) do
      YAML.load '--- !ruby/object:MiqExpression
      exp:
        IS NOT EMPTY:
          field: VmPerformance.host-name
      context_type:'
    end
    it "should not raise an exception" do
      # Regression guard: the joined condition used to blow up generation.
      expect do
        report.generate_table(:userid => "admin",
                              :mode => "async",
                              :report_source => "Requested by user")
      end.not_to raise_error
    end
  end
end
context "performance reports" do
  let(:report) do
    # Daily rollup report including the over-time-period metric columns.
    MiqReport.new(
      :title => "vim_perf_daily.yaml",
      :db => "VimPerformanceDaily",
      :cols => %w(timestamp cpu_usagemhz_rate_average max_derived_cpu_available),
      :include => { "metric_rollup" => {
        "columns" => %w(cpu_usagemhz_rate_average_high_over_time_period
                        cpu_usagemhz_rate_average_low_over_time_period
                        derived_memory_used_high_over_time_period
                        derived_memory_used_low_over_time_period)}})
  end
  # NOTE(review): assumes @server is set by an outer before block (e.g.
  # @server = EvmSpecHelper.local_miq_server) -- not visible in this chunk.
  let(:ems) { FactoryGirl.create(:ems_vmware, :zone => @server.zone) }
  it "runs report" do
    # Smoke test: generation should complete without raising.
    report.generate_table(:userid => "admin")
  end
end
context "Tenant Quota Report" do
  include Spec::Support::QuotaHelper
  let!(:tenant_without_quotas) { FactoryGirl.create(:tenant, :name=>"tenant_without_quotas") }
  # Include only tenants that actually have quotas defined.
  let(:skip_condition) do
    YAML.load '--- !ruby/object:MiqExpression
    exp:
      ">":
        count: tenants.tenant_quotas
        value: 0'
  end
  let(:report) do
    include = {"tenant_quotas" => {"columns" => %w(name total used allocated available)}}
    cols = ["name", "tenant_quotas.name", "tenant_quotas.total", "tenant_quotas.used", "tenant_quotas.allocated",
            "tenant_quotas.available"]
    # FIX: exactly one header per column (6). The list previously contained a
    # duplicated "Total Quota" entry, giving 7 headers for 6 columns.
    headers = ["Tenant Name", "Quota Name", "Total Quota", "In Use", "Allocated", "Available"]
    FactoryGirl.create(:miq_report, :title => "Tenant Quotas", :order => 'Ascending', :rpt_group => "Custom",
                       :priority => 231, :rpt_type => 'Custom', :db => 'Tenant', :include => include, :cols => cols,
                       :col_order => cols, :template_type => "report", :headers => headers,
                       :conditions => skip_condition, :sortby => ["tenant_quotas.name"])
  end
  let(:user_admin) { FactoryGirl.create(:user, :role => "super_administrator") }
  # One right-aligned table cell for a formatted quota value.
  def generate_table_cell(formatted_value)
    "<td style=\"text-align:right\">#{formatted_value}</td>"
  end
  # Expected HTML for one data row; rows alternate the row0/row1 CSS class.
  def generate_html_row(is_even, tenant_name, formatted_values)
    row = []
    row << "<tr class='row#{is_even ? '0' : '1'}-nocursor'><td>#{tenant_name}</td>"
    [:name, :total, :used, :allocated, :available].each do |metric|
      row << generate_table_cell(formatted_values[metric])
    end
    row << "</tr>"
    row.join
  end
  before do
    setup_model
    # dummy child tenant
    FactoryGirl.create(:tenant, :parent => @tenant)
    # remove quotas that QuotaHelper already initialized
    @tenant.tenant_quotas = []
    @tenant.tenant_quotas.create :name => :cpu_allocated, :value => 2
    @tenant.tenant_quotas.create :name => :mem_allocated, :value => 4_294_967_296
    @tenant.tenant_quotas.create :name => :storage_allocated, :value => 4_294_967_296
    @tenant.tenant_quotas.create :name => :templates_allocated, :value => 4
    @tenant.tenant_quotas.create :name => :vms_allocated, :value => 4
    # Build the expected HTML rows, one per quota, sorted by quota name.
    @expected_html_rows = []
    formatted_values = {:name => "Allocated Virtual CPUs", :total => "2 Count", :used => "0 Count",
                        :allocated => "0 Count", :available => "2 Count"}
    @expected_html_rows.push(generate_html_row(true, @tenant.name, formatted_values))
    formatted_values = {:name => "Allocated Memory in GB", :total => "4.0 GB", :used => "1.0 GB",
                        :allocated => "0.0 GB", :available => "3.0 GB"}
    @expected_html_rows.push(generate_html_row(false, @tenant.name, formatted_values))
    formatted_values = {:name => "Allocated Storage in GB", :total => "4.0 GB",
                        :used => "#{1_000_000.0 / 1.gigabyte} GB", :allocated => "0.0 GB",
                        :available => "#{(4.gigabytes - 1_000_000.0) / 1.gigabyte} GB"}
    @expected_html_rows.push(generate_html_row(true, @tenant.name, formatted_values))
    formatted_values = {:name => "Allocated Number of Templates", :total => "4 Count", :used => "1 Count",
                        :allocated => "0 Count", :available => "3 Count"}
    @expected_html_rows.push(generate_html_row(false, @tenant.name, formatted_values))
    formatted_values = {:name => "Allocated Number of Virtual Machines", :total => "4 Count", :used => "1 Count",
                        :allocated => "0 Count", :available => "3 Count"}
    @expected_html_rows.push(generate_html_row(true, @tenant.name, formatted_values))
    User.current_user = user_admin
  end
  it "returns expected html outputs with formatted values" do
    allow(User).to receive(:server_timezone).and_return("UTC")
    report.generate_table
    expect(report.build_html_rows).to match_array(@expected_html_rows)
  end
  it "returns only rows for tenant with any tenant_quotas" do
    allow(User).to receive(:server_timezone).and_return("UTC")
    report.generate_table
    # 6th row would be for tenant_without_quotas, but skipped now because of skip_condition, so we expecting 5
    expect(report.table.data.count).to eq(5)
  end
end
end
describe ".ascending?" do
  # ascending? treats a nil order as ascending; only an explicit
  # "Descending" order makes it false.
  it "handles nil" do
    expect(MiqReport.new(:order => nil)).to be_ascending
  end
  it "handles ascending" do
    expect(MiqReport.new(:order => "Ascending")).to be_ascending
  end
  it "handles descending" do
    expect(MiqReport.new(:order => "Descending")).not_to be_ascending
  end
end
describe ".ascending=" do
  # The writer must round-trip through the ascending? predicate.
  it "handles nil" do
    expect(MiqReport.new.tap { |r| r.ascending = true }).to be_ascending
  end
  it "handles ascending" do
    expect(MiqReport.new.tap { |r| r.ascending = false }).not_to be_ascending
  end
end
describe ".sort_col" do
  # sort_col resolves the index of the first sortby column within the
  # report's columns, defaulting to 0 when no sortby is configured.
  it "uses sort_by if available" do
    sorted_report = MiqReport.new(
      :db => "Host",
      :cols => %w(name hostname smart),
      :col_order => %w(name hostname smart),
      :sortby => ["hostname"]
    )
    expect(sorted_report.sort_col).to eq(1)
  end
  it "falls back to first column" do
    unsorted_report = MiqReport.new(
      :db => "Host",
      :cols => %w(name hostname smart),
      :col_order => %w(name hostname smart),
    )
    expect(unsorted_report.sort_col).to eq(0)
  end
end
describe ".cols" do
  it "loads given value" do
    report = MiqReport.new(
      :cols => %w(name)
    )
    expect(report.cols).to eq(%w(name))
  end
  it "falls back to col_order" do
    # Only main-table (non-dotted) columns from col_order are kept.
    report = MiqReport.new(
      :col_order => %w(miq_custom_attributes.name miq_custom_attributes.value name)
    )
    expect(report.cols).to eq(%w(name))
  end
  it "allows manipulation" do
    report = MiqReport.new(
      :col_order => %w(miq_custom_attributes.name miq_custom_attributes.value name),
    )
    # In-place mutation of the derived cols array must persist across reads.
    report.cols << "name2"
    expect(report.cols).to eq(%w(name name2))
  end
end
end
# Follow-up tests and fixes for chargeback with custom attributes as columns.
# Shared examples (duplicate copy from a later concatenated commit): build a
# custom report on +base_report+ ("Host", or VM otherwise) joined to custom
# attributes of the given factory type and verify the attribute name/value
# pair appears in the search results.
shared_examples "custom_report_with_custom_attributes" do |base_report, custom_attribute_field|
  let(:options) { {:targets_hash => true, :userid => "admin"} }
  # e.g. :miq_custom_attribute -> "miq_custom_attributes" (association name)
  let(:custom_attributes_field) { custom_attribute_field.to_s.pluralize }
  before do
    @user = FactoryGirl.create(:user_with_group)
    # create custom attributes
    @key = 'key1'
    @value = 'value1'
    @resource = base_report == "Host" ? FactoryGirl.create(:host) : FactoryGirl.create(:vm_vmware)
    FactoryGirl.create(custom_attribute_field, :resource => @resource, :name => @key, :value => @value)
  end
  let(:report) do
    # NOTE(review): :col_order and :sortby hardcode miq_custom_attributes even
    # for the :ems_custom_attribute variant -- confirm this is intended
    # (presumably both attribute types share the miq_custom_attributes table).
    MiqReport.new(
      :name => "Custom VM report",
      :title => "Custom VM report",
      :rpt_group => "Custom",
      :rpt_type => "Custom",
      :db => base_report == "Host" ? "Host" : "ManageIQ::Providers::InfraManager::Vm",
      :cols => %w(name),
      :include => {custom_attributes_field.to_s => {"columns" => %w(name value)}},
      :col_order => %w(miq_custom_attributes.name miq_custom_attributes.value name),
      :headers => ["EVM Custom Attribute Name", "EVM Custom Attribute Value", "Name"],
      :order => "Ascending",
      :sortby => ["miq_custom_attributes.name"]
    )
  end
  it "creates custom report based on #{base_report} with #{custom_attribute_field} field of custom attributes" do
    # paged_view_search must succeed and expose the attribute columns under
    # the association-qualified keys.
    expect { @results, _attrs = report.paged_view_search(options) }.not_to raise_error
    custom_attributes_name = "#{custom_attributes_field}.name"
    custom_attributes_value = "#{custom_attributes_field}.value"
    expect(@results.data.first[custom_attributes_name]).to eq(@key)
    expect(@results.data.first[custom_attributes_value]).to eq(@value)
  end
end
describe MiqReport do
# Report over Vms joined to their RegistryItems, filtered with a
# registry-style MiqExpression ("=" on regkey/regval/value).
context "report with filtering in Registry" do
let(:options) { {:targets_hash => true, :userid => "admin"} }
let(:miq_task) { FactoryGirl.create(:miq_task) }
before do
@user = FactoryGirl.create(:user_with_group)
# Registry item whose key/value the report condition below matches.
@registry = FactoryGirl.create(:registry_item, :name => "HKLM\\SOFTWARE\\WindowsFirewall : EnableFirewall",
:data => 0)
@vm = FactoryGirl.create(:vm_vmware, :registry_items => [@registry])
EvmSpecHelper.local_miq_server
end
let(:report) do
MiqReport.new(:name => "Custom VM report", :title => "Custom VM report", :rpt_group => "Custom",
:rpt_type => "Custom", :db => "Vm", :cols => %w(name),
:conditions => MiqExpression.new("=" => {"regkey" => "HKLM\\SOFTWARE\\WindowsFirewall",
"regval" => "EnableFirewall", "value" => "0"}),
:include => {"registry_items" => {"columns" => %w(data name value_name)}},
:col_order => %w(name registry_items.data registry_items.name registry_items.value_name),
:headers => ["Name", "Registry Data", "Registry Name", "Registry Value Name"],
:order => "Ascending")
end
it "can generate a report filtered by registry items" do
report.queue_generate_table(:userid => @user.userid)
# Run the queued generation synchronously so the table is populated.
report._async_generate_table(miq_task.id, :userid => @user.userid, :mode => "async",
:report_source => "Requested by user")
report_result = report.table.data.map do |x|
x.data.delete("id")
x.data
end
expect(report_result.count).to eq(1)
expect(report_result.first["name"]).to eq(@vm.name)
end
end
# Reports whose columns are dynamic virtual custom attributes
# (virtual_custom_attribute_<name>), including MiqExpression filtering on
# those virtual columns.
context "report with virtual dynamic custom attributes" do
  let(:options) { {:targets_hash => true, :userid => "admin"} }
  let(:custom_column_key_1) { 'kubernetes.io/hostname' }
  let(:custom_column_key_2) { 'manageiq.org' }
  let(:custom_column_key_3) { 'ATTR_Name_3' }
  let(:custom_column_value) { 'value1' }
  let(:user) { FactoryGirl.create(:user_with_group) }
  let(:ems) { FactoryGirl.create(:ems_vmware) }
  let!(:vm_1) { FactoryGirl.create(:vm_vmware) }
  let!(:vm_2) { FactoryGirl.create(:vm_vmware, :retired => false, :ext_management_system => ems) }
  let(:virtual_column_key_1) { "#{CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX}kubernetes.io/hostname" }
  let(:virtual_column_key_2) { "#{CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX}manageiq.org" }
  let(:virtual_column_key_3) { "#{CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX}ATTR_Name_3" }
  let(:miq_task) { FactoryGirl.create(:miq_task) }

  # vm_1 carries key_1; vm_2 carries key_2 and key_3.
  subject! do
    FactoryGirl.create(:miq_custom_attribute, :resource => vm_1, :name => custom_column_key_1,
                       :value => custom_column_value)
    FactoryGirl.create(:miq_custom_attribute, :resource => vm_2, :name => custom_column_key_2,
                       :value => custom_column_value)
    FactoryGirl.create(:miq_custom_attribute, :resource => vm_2, :name => custom_column_key_3,
                       :value => custom_column_value)
  end

  before do
    EvmSpecHelper.local_miq_server
  end

  let(:report) do
    MiqReport.new(
      :name => "Custom VM report", :title => "Custom VM report", :rpt_group => "Custom", :rpt_type => "Custom",
      :db => "ManageIQ::Providers::InfraManager::Vm",
      :cols => %w(name virtual_custom_attribute_kubernetes.io/hostname virtual_custom_attribute_manageiq.org),
      :include => {:custom_attributes => {}},
      :col_order => %w(name virtual_custom_attribute_kubernetes.io/hostname virtual_custom_attribute_manageiq.org),
      # One header per column: the second custom-attribute column was
      # mistakenly labelled custom_column_key_1 twice in the original.
      :headers => ["Name", custom_column_key_1, custom_column_key_2],
      :order => "Ascending"
    )
  end

  it "generates report with dynamic custom attributes" do
    report.queue_generate_table(:userid => user.userid)
    report._async_generate_table(miq_task.id, :userid => user.userid, :mode => "async",
                                 :report_source => "Requested by user")
    report_result = report.table.data.map do |x|
      x.data.delete("id")
      x.data
    end
    expected_results = [{"name" => vm_1.name, virtual_column_key_1 => custom_column_value,
                         virtual_column_key_2 => nil},
                        {"name" => vm_2.name, virtual_column_key_1 => nil,
                         virtual_column_key_2 => custom_column_value}]
    expect(report_result).to match_array(expected_results)
  end

  let(:exp) { MiqExpression.new("IS NOT EMPTY" => {"field" => "#{vm_1.type}-#{virtual_column_key_1}"}) }

  it "generates report with dynamic custom attributes with MiqExpression filtering" do
    report.conditions = exp
    report.queue_generate_table(:userid => user.userid)
    report._async_generate_table(miq_task.id, :userid => user.userid, :mode => "async",
                                 :report_source => "Requested by user")
    report_result = report.table.data.map do |x|
      x.data.delete("id")
      x.data
    end
    # Only vm_1 has the key_1 attribute, so it is the sole row.
    expected_results = ["name" => vm_1.name, virtual_column_key_1 => custom_column_value, virtual_column_key_2 => nil]
    expect(report_result).to match_array(expected_results)
  end

  let(:exp_3) do
    MiqExpression.new("and" => [{"=" => { "field" => "#{vm_2.type}-active", "value" => "true"}},
                                {"or" => [{"IS NOT EMPTY" => { "field" => "#{vm_2.type}-name", "value" => ""}},
                                          {"IS NOT EMPTY" => { "field" => "#{vm_2.type}-#{virtual_column_key_3}"}}]}])
  end

  it "generates report with dynamic custom attributes with filtering with field which is not listed in cols" do
    report.conditions = exp_3
    report.queue_generate_table(:userid => user.userid)
    report._async_generate_table(miq_task.id, :userid => user.userid, :mode => "async",
                                 :report_source => "Requested by user")
    report_result = report.table.data.map do |x|
      x.data.delete("id")
      x.data
    end
    # key_3 is only filtered on, never selected; vm_2 is active (has an EMS).
    expected_results = ["name" => vm_2.name, virtual_column_key_1 => nil, virtual_column_key_2 => custom_column_value]
    expect(report_result).to match_array(expected_results)
  end
end
# Exercise the shared custom-attribute report examples across every
# base-model / attribute-type combination.
[
  ["Host", :miq_custom_attribute, "MiqCustomAttributes"],
  ["Vm",   :miq_custom_attribute, "MiqCustomAttributes"],
  ["Host", :ems_custom_attribute, "EmsCustomAttributes"],
  ["Vm",   :ems_custom_attribute, "EmsCustomAttributes"]
].each do |base_model, attribute_factory, label|
  context "#{base_model} and #{label}" do
    include_examples "custom_report_with_custom_attributes", base_model, attribute_factory
  end
end
# The table attribute (held via attr_accessor, not a DB column) must
# survive a YAML dump/load round-trip unchanged.
it "attr_accessors are serializable via yaml" do
result = [{"id" => 5, "vmm_vendor" => "vmware", "vmm_vendor_display" => "VMware", "vmm_product" => "ESXi", "ipaddress" => "192.168.252.13", "vmm_buildnumber" => "260247", "vmm_version" => "4.1.0", "name" => "VI4ESXM1.manageiq.com"}, {"id" => 3, "vmm_vendor" => "VMware", "vmm_product" => "ESXi", "ipaddress" => "192.168.252.9", "vmm_buildnumber" => "348481", "vmm_version" => "4.1.0", "name" => "vi4esxm2.manageiq.com"}, {"id" => 4, "vmm_vendor" => "VMware", "vmm_product" => "ESX", "ipaddress" => "192.168.252.10", "vmm_buildnumber" => "502767", "vmm_version" => "4.1.0", "name" => "vi4esxm3.manageiq.com"}, {"id" => 1, "vmm_vendor" => "VMware", "vmm_product" => "ESXi", "ipaddress" => "192.168.252.4", "vmm_buildnumber" => "504850", "vmm_version" => "4.0.0", "name" => "per410a-t5.manageiq.com"}]
column_names = ["name", "ipaddress", "vmm_vendor", "vmm_vendor_display", "vmm_product", "vmm_version", "vmm_buildnumber", "id"]
# A plain hash standing in for a Ruport::Data::Table.
fake_ruport_data_table = {:data => result, :column_names => column_names}
before = MiqReport.new
before.table = fake_ruport_data_table
after = YAML.load(YAML.dump(before))
expect(after.table).to eq(fake_ruport_data_table)
end
# get_expressions_by_model must skip reports whose conditions are nil —
# both a真 NULL column and a YAML-serialized nil.
it '.get_expressions_by_model' do
FactoryGirl.create(:miq_report, :conditions => nil)
rep_nil = FactoryGirl.create(:miq_report)
# FIXME: find a way to do this in a factory
serialized_nil = "--- !!null \n...\n"
# Force a YAML-serialized nil directly into the column, bypassing AR.
ActiveRecord::Base.connection.execute("update miq_reports set conditions='#{serialized_nil}' where id=#{rep_nil.id}")
rep_ok = FactoryGirl.create(:miq_report, :conditions => "SOMETHING")
reports = MiqReport.get_expressions_by_model('Vm')
# Only the report with real conditions is returned.
expect(reports).to eq(rep_ok.name => rep_ok.id)
end
# paged_view_search works against the vmdb_* introspection tables,
# including columns pulled through the latest_hourly_metric association.
it "paged_view_search on vmdb_* tables" do
# Create EVM tables/indexes and hourly metric data...
table = FactoryGirl.create(:vmdb_table_evm, :name => "accounts")
index = FactoryGirl.create(:vmdb_index, :name => "accounts_pkey", :vmdb_table => table)
FactoryGirl.create(:vmdb_metric, :resource => index, :timestamp => Time.now.utc, :capture_interval_name => 'hourly', :size => 102, :rows => 102, :pages => 102, :wasted_bytes => 102, :percent_bloat => 102)
report_args = {
"db" => "VmdbIndex",
"cols" => ["name"],
"include" => {"vmdb_table" => {"columns" => ["type"]}, "latest_hourly_metric" => {"columns" => ["rows", "size", "wasted_bytes", "percent_bloat"]}},
"col_order" => ["name", "latest_hourly_metric.rows", "latest_hourly_metric.size", "latest_hourly_metric.wasted_bytes", "latest_hourly_metric.percent_bloat"],
"col_formats" => [nil, nil, :bytes_human, :bytes_human, nil],
}
report = MiqReport.new(report_args)
# Filter indexes by the STI type of their owning table.
search_expression = MiqExpression.new("and" => [{"=" => {"value" => "VmdbTableEvm", "field" => "VmdbIndex.vmdb_table-type"}}])
results, = report.paged_view_search(:filter => search_expression)
expect(results.data.collect(&:data)).to eq(
[{
"name" => "accounts_pkey",
"vmdb_table.type" => "VmdbTableEvm",
"latest_hourly_metric.rows" => 102,
"latest_hourly_metric.size" => 102,
"latest_hourly_metric.wasted_bytes" => 102.0,
"latest_hourly_metric.percent_bloat" => 102.0,
"id" => index.id
}]
)
end
context "#paged_view_search" do
# Scoping by a :parent folder returns only direct children unless an
# :association such as :all_vms widens the search to the whole subtree.
it "filters vms in folders" do
host = FactoryGirl.create(:host)
vm1 = FactoryGirl.create(:vm_vmware, :host => host)
allow(vm1).to receive(:archived?).and_return(false)
vm2 = FactoryGirl.create(:vm_vmware, :host => host)
allow(vm2).to receive(:archived?).and_return(false)
allow(Vm).to receive(:find_by).and_return(vm1)
root = FactoryGirl.create(:ems_folder, :name => "datacenters")
root.parent = host
usa = FactoryGirl.create(:ems_folder, :name => "usa")
usa.parent = root
nyc = FactoryGirl.create(:ems_folder, :name => "nyc")
nyc.parent = usa
# vm1 sits directly under "usa"; vm2 is one level deeper under "nyc".
vm1.with_relationship_type("ems_metadata") { vm1.parent = usa }
vm2.with_relationship_type("ems_metadata") { vm2.parent = nyc }
report = MiqReport.new(:db => "Vm")
results, = report.paged_view_search(:parent => usa)
expect(results.data.collect { |rec| rec.data['id'] }).to eq [vm1.id]
# root has no direct VM children, only folders.
results, = report.paged_view_search(:parent => root)
expect(results.data.collect { |rec| rec.data['id'] }).to eq []
results, = report.paged_view_search(:parent => root, :association => :all_vms)
expect(results.data.collect { |rec| rec.data['id'] }).to match_array [vm1.id, vm2.id]
end
it "paging with order" do
  first_vm = FactoryGirl.create(:vm_vmware)
  second_vm = FactoryGirl.create(:vm_vmware)
  sorted_ids = [first_vm.id, second_vm.id].sort

  # Descending by id with one record per page: page 2 must hold the
  # smaller of the two ids.
  report = MiqReport.new(:db => "Vm", :sortby => "id", :order => "Descending")
  results, = report.paged_view_search(:page => 2, :per_page => 1)

  expect(results.data.collect { |rec| rec.data['id'] }).to eq [sorted_ids.first]
end
it "target_ids_for_paging caches results" do
  cached_vm = FactoryGirl.create(:vm_vmware)
  FactoryGirl.create(:vm_vmware) # exists in the DB but is absent from the cached id list

  report = MiqReport.new(:db => "Vm")
  # Pre-seeded extras act as a cache; the search must honour it instead of
  # re-querying for targets.
  report.extras = {:target_ids_for_paging => [cached_vm.id], :attrs_for_paging => {}}

  results, = report.paged_view_search(:page => 1, :per_page => 10)
  expect(results.data.collect { |rec| rec.data['id'] }).to eq [cached_vm.id]
end
# Sorting applies within the :parent/:association scope — only host2's
# VMs are considered, returned in descending name order.
it "VMs under Host with order" do
host1 = FactoryGirl.create(:host)
FactoryGirl.create(:vm_vmware, :host => host1, :name => "a")
ems = FactoryGirl.create(:ems_vmware)
host2 = FactoryGirl.create(:host)
vmb = FactoryGirl.create(:vm_vmware, :host => host2, :name => "b", :ext_management_system => ems)
vmc = FactoryGirl.create(:vm_vmware, :host => host2, :name => "c", :ext_management_system => ems)
report = MiqReport.new(:db => "Vm", :sortby => "name", :order => "Descending")
results, = report.paged_view_search(
:parent => host2,
:association => "vms",
:only => ["name"],
:page => 1,
:per_page => 2
)
names = results.data.collect(&:name)
expect(names).to eq [vmc.name, vmb.name]
end
# RBAC: the user's group managed-tag filter restricts results, and the
# returned attrs describe how the filtering/sorting was applied.
it "user managed filters" do
vm1 = FactoryGirl.create(:vm_vmware)
vm1.tag_with("/managed/environment/prod", :ns => "*")
vm2 = FactoryGirl.create(:vm_vmware)
vm2.tag_with("/managed/environment/dev", :ns => "*")
user = FactoryGirl.create(:user_with_group)
group = user.current_group
allow(User).to receive_messages(:server_timezone => "UTC")
group.entitlement = Entitlement.new
group.entitlement.set_managed_filters([["/managed/environment/prod"]])
group.save!
report = MiqReport.new(:db => "Vm")
results, attrs = report.paged_view_search(
:only => ["name"],
:userid => user.userid,
)
# Only the prod-tagged VM is visible to this user.
expect(results.length).to eq 1
expect(results.data.collect(&:name)).to eq [vm1.name]
expect(report.table.length).to eq 1
expect(attrs[:apply_sortby_in_search]).to be_truthy
expect(attrs[:apply_limit_in_sql]).to be_truthy
expect(attrs[:auth_count]).to eq 1
expect(attrs[:user_filters]["managed"]).to eq [["/managed/environment/prod"]]
end
# Sorting on a joined sub-table column (storage.name) must still allow
# the sort and limit to be pushed down to SQL under RBAC filtering.
it "sortby, order, user filters, where sort column is in a sub-table" do
user = FactoryGirl.create(:user_with_group)
group = user.current_group
vm1 = FactoryGirl.create(:vm_vmware, :name => "VA", :storage => FactoryGirl.create(:storage, :name => "SA"))
vm2 = FactoryGirl.create(:vm_vmware, :name => "VB", :storage => FactoryGirl.create(:storage, :name => "SB"))
tag = "/managed/environment/prod"
group.entitlement = Entitlement.new
group.entitlement.set_managed_filters([[tag]])
group.save!
vm1.tag_with(tag, :ns => "*")
vm2.tag_with(tag, :ns => "*")
allow(User).to receive_messages(:server_timezone => "UTC")
report = MiqReport.new(:db => "Vm", :sortby => %w(storage.name name), :order => "Ascending", :include => {"storage" => {"columns" => ["name"]}})
options = {
:only => ["name", "storage.name"],
:userid => user.userid,
}
results, attrs = report.paged_view_search(options)
# Verify both the row data and that sorting/limiting stayed in SQL while
# the RBAC filter and auth count were reported correctly.
expect(results.length).to eq 2
expect(results.data.first["name"]).to eq "VA"
expect(results.data.first["storage.name"]).to eq "SA"
expect(report.table.length).to eq 2
expect(attrs[:apply_sortby_in_search]).to be_truthy
expect(attrs[:apply_limit_in_sql]).to be_truthy
expect(attrs[:auth_count]).to eq 2
expect(attrs[:user_filters]["managed"]).to eq [[tag]]
end
# host_name is a virtual column, so the sort happens in Ruby; note both
# rows come back even though :page => 2 was requested.
it "sorting on a virtual column" do
FactoryGirl.create(:vm_vmware, :name => "B", :host => FactoryGirl.create(:host, :name => "A"))
FactoryGirl.create(:vm_vmware, :name => "A", :host => FactoryGirl.create(:host, :name => "B"))
report = MiqReport.new(:db => "Vm", :sortby => %w(host_name name), :order => "Descending")
options = {
:only => %w(name host_name),
:page => 2,
}
results, _attrs = report.paged_view_search(options)
expect(results.length).to eq 2
expect(results.data.first["host_name"]).to eq "B"
end
# An "=" MiqExpression on the virtual column host_name is evaluated in
# Ruby and still narrows the result set.
it "expression filtering on a virtual column" do
FactoryGirl.create(:vm_vmware, :name => "VA", :host => FactoryGirl.create(:host, :name => "HA"))
FactoryGirl.create(:vm_vmware, :name => "VB", :host => FactoryGirl.create(:host, :name => "HB"))
report = MiqReport.new(:db => "Vm")
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
"=":
field: Vm-host_name
value: "HA"
'
results, _attrs = report.paged_view_search(:only => %w(name host_name), :filter => filter)
expect(results.length).to eq 1
expect(results.data.first["name"]).to eq "VA"
expect(results.data.first["host_name"]).to eq "HA"
end
# Virtual-column expression filter combined with RBAC tag filters: a row
# must satisfy both to be returned.
it "expression filtering on a virtual column and user filters" do
user = FactoryGirl.create(:user_with_group)
group = user.current_group
_vm1 = FactoryGirl.create(:vm_vmware, :name => "VA", :host => FactoryGirl.create(:host, :name => "HA"))
vm2 = FactoryGirl.create(:vm_vmware, :name => "VB", :host => FactoryGirl.create(:host, :name => "HB"))
vm3 = FactoryGirl.create(:vm_vmware, :name => "VAA", :host => FactoryGirl.create(:host, :name => "HAA"))
tag = "/managed/environment/prod"
group.entitlement = Entitlement.new
group.entitlement.set_managed_filters([[tag]])
group.save!
# vm1's host.name starts with HA but isn't tagged
vm2.tag_with(tag, :ns => "*")
vm3.tag_with(tag, :ns => "*")
allow(User).to receive_messages(:server_timezone => "UTC")
report = MiqReport.new(:db => "Vm")
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
"starts with":
field: Vm-host_name
value: "HA"
'
results, attrs = report.paged_view_search(:only => %w(name host_name), :userid => user.userid, :filter => filter)
# Only vm3 is both tagged prod and matches "starts with HA".
expect(results.length).to eq 1
expect(results.data.first["name"]).to eq "VAA"
expect(results.data.first["host_name"]).to eq "HAA"
expect(attrs[:user_filters]["managed"]).to eq [[tag]]
end
# Filtering through a virtual reflection (parent_resource_pool) — the
# association is relationship-based, not a SQL column.
it "filtering on a virtual reflection" do
vm1 = FactoryGirl.create(:vm_vmware, :name => "VA")
vm2 = FactoryGirl.create(:vm_vmware, :name => "VB")
rp1 = FactoryGirl.create(:resource_pool, :name => "RPA")
rp2 = FactoryGirl.create(:resource_pool, :name => "RPB")
rp1.add_child(vm1)
rp2.add_child(vm2)
report = MiqReport.new(:db => "Vm")
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
"starts with":
field: Vm.parent_resource_pool-name
value: "RPA"
'
results, _attrs = report.paged_view_search(:only => %w(name), :filter => filter)
expect(results.length).to eq 1
expect(results.data.first["name"]).to eq "VA"
end
# Virtual columns (host_name, v_host_vmm_product) listed in :cols are
# materialized per row alongside the joined host columns.
it "virtual columns included in cols" do
FactoryGirl.create(:vm_vmware, :host => FactoryGirl.create(:host, :name => "HA", :vmm_product => "ESX"))
FactoryGirl.create(:vm_vmware, :host => FactoryGirl.create(:host, :name => "HB", :vmm_product => "ESX"))
report = MiqReport.new(
:name => "VMs",
:title => "Virtual Machines",
:db => "Vm",
:cols => %w(name host_name v_host_vmm_product),
:include => {"host" => {"columns" => %w(name vmm_product)}},
:col_order => %w(name host.name host.vmm_product),
:headers => ["Name", "Host", "Host VMM Product"],
:order => "Ascending",
:sortby => ["host_name"],
)
options = {
:targets_hash => true,
:userid => "admin"
}
results, _attrs = report.paged_view_search(options)
expect(results.length).to eq 2
expect(results.data.collect { |rec| rec.data["host_name"] }).to eq(%w(HA HB))
expect(results.data.collect { |rec| rec.data["v_host_vmm_product"] }).to eq(%w(ESX ESX))
end
end
describe "#generate_table" do
# Template report through the has_many-:through miq_provision_vms
# association, with a FIND/checkall condition on the associated VMs'
# virtual thin_provisioned column.
it "with has_many through" do
ems = FactoryGirl.create(:ems_vmware_with_authentication)
user = FactoryGirl.create(:user_with_group)
group = user.current_group
template = FactoryGirl.create(:template_vmware, :ext_management_system => ems)
vm = FactoryGirl.create(:vm_vmware, :ext_management_system => ems)
hardware = FactoryGirl.create(:hardware, :vm => vm)
# Thin disk makes vm.thin_provisioned true, satisfying the condition.
FactoryGirl.create(:disk, :hardware => hardware, :disk_type => "thin")
options = {
:vm_name => vm.name,
:vm_target_name => vm.name,
:provision_type => "vmware",
:src_vm_id => [template.id, template.name]
}
# Finished provision ties the template (source) to the vm (destination).
provision = FactoryGirl.create(
:miq_provision_vmware,
:destination => vm,
:source => template,
:userid => user.userid,
:request_type => 'template',
:state => 'finished',
:status => 'Ok',
:options => options
)
template.miq_provisions_from_template << provision
template.save
expect(template.miq_provision_vms.count).to be > 0
expect(template.miq_provision_vms.count(&:thin_provisioned)).to be > 0
report = MiqReport.create(
:name => "VMs based on Disk Type",
:title => "VMs using thin provisioned disks",
:rpt_group => "Custom",
:rpt_type => "Custom",
:db => "MiqTemplate",
:cols => [],
:include => {"miq_provision_vms" => {"columns" => ["name"]}},
:col_order => ["miq_provision_vms.name"],
:headers => ["Name"],
:template_type => "report",
:miq_group_id => group.id,
:user_id => user.userid,
:conditions => MiqExpression.new(
{"FIND" => {"search" => {"=" => {"field" => "MiqTemplate.miq_provision_vms-thin_provisioned", "value" => "true"}}, "checkall" => {"=" => {"field" => "MiqTemplate.miq_provision_vms-thin_provisioned", "value" => "true"}}}},
nil
)
)
report.generate_table
expect(report.table.data.collect { |rec| rec.data['miq_provision_vms.name'] }).to eq([vm.name])
end
# Base VmPerformance report; +interval+ and +conditions+ are supplied by
# the nested contexts via their own lets.
let(:report) do
MiqReport.new(
:name => "All Departments with Performance", :title => "All Departments with Performance for last week",
:db => "VmPerformance",
:cols => %w(resource_name max_cpu_usage_rate_average cpu_usage_rate_average),
:include => {"vm" => {"columns" => ["v_annotation"]}, "host" => {"columns" => ["name"]}},
:col_order => ["ems_cluster.name", "vm.v_annotation", "host.name"],
:headers => ["Cluster", "VM Annotations - Notes", "Host Name"],
:order => "Ascending",
:group => "c",
# Last 7 days (in seconds) up to now.
:db_options => {:start_offset => 604_800, :end_offset => 0, :interval => interval},
:conditions => conditions)
end
context "daily reports" do
let(:interval) { "daily" }
context "with conditions where is joining with another table" do
# "IS NOT EMPTY" on VmPerformance.host-name forces a join to hosts;
# regression check that generate_table copes with the joined filter.
let(:conditions) do
YAML.load '--- !ruby/object:MiqExpression
exp:
IS NOT EMPTY:
field: VmPerformance.host-name
context_type:'
end
it "should not raise an exception" do
expect do
report.generate_table(:userid => "admin",
:mode => "async",
:report_source => "Requested by user")
end.not_to raise_error
end
end
end
context "performance reports" do
# Smoke test: a VimPerformanceDaily report with metric_rollup columns
# generates without error.
let(:report) do
MiqReport.new(
:title => "vim_perf_daily.yaml",
:db => "VimPerformanceDaily",
:cols => %w(timestamp cpu_usagemhz_rate_average max_derived_cpu_available),
:include => { "metric_rollup" => {
"columns" => %w(cpu_usagemhz_rate_average_high_over_time_period
cpu_usagemhz_rate_average_low_over_time_period
derived_memory_used_high_over_time_period
derived_memory_used_low_over_time_period)}})
end
# NOTE(review): @server is never assigned in this context and :ems is not
# referenced by the example below (lets are lazy) — this appears to be
# dead fixture code; confirm before relying on it.
let(:ems) { FactoryGirl.create(:ems_vmware, :zone => @server.zone) }
it "runs report" do
report.generate_table(:userid => "admin")
end
end
context "Tenant Quota Report" do
include Spec::Support::QuotaHelper
# Tenant that the skip_condition below must filter out (it has no quotas).
let!(:tenant_without_quotas) { FactoryGirl.create(:tenant, :name=>"tenant_without_quotas") }
# Only tenants with at least one tenant_quota row are reported.
let(:skip_condition) do
YAML.load '--- !ruby/object:MiqExpression
exp:
">":
count: tenants.tenant_quotas
value: 0'
end
# Tenant-quota report: one row per quota, sorted by quota name.
let(:report) do
  include = {"tenant_quotas" => {"columns" => %w(name total used allocated available)}}
  cols = ["name", "tenant_quotas.name", "tenant_quotas.total", "tenant_quotas.used", "tenant_quotas.allocated",
          "tenant_quotas.available"]
  # One header per entry in +cols+; the original listed "Total Quota"
  # twice, yielding seven headers for six columns.
  headers = ["Tenant Name", "Quota Name", "Total Quota", "In Use", "Allocated", "Available"]
  FactoryGirl.create(:miq_report, :title => "Tenant Quotas", :order => 'Ascending', :rpt_group => "Custom",
                     :priority => 231, :rpt_type => 'Custom', :db => 'Tenant', :include => include, :cols => cols,
                     :col_order => cols, :template_type => "report", :headers => headers,
                     :conditions => skip_condition, :sortby => ["tenant_quotas.name"])
end
# Super-admin user so report generation is not RBAC-filtered.
let(:user_admin) { FactoryGirl.create(:user, :role => "super_administrator") }
# Render one right-aligned HTML table cell for a formatted metric value.
def generate_table_cell(formatted_value)
  %(<td style="text-align:right">#{formatted_value}</td>)
end
# Build the expected HTML <tr> for one quota row.
# is_even toggles the alternating row CSS class; formatted_values maps
# each quota metric (:name/:total/:used/:allocated/:available) to its
# human-formatted string.
def generate_html_row(is_even, tenant_name, formatted_values)
  cells = [:name, :total, :used, :allocated, :available].map do |metric|
    generate_table_cell(formatted_values[metric])
  end
  "<tr class='row#{is_even ? '0' : '1'}-nocursor'><td>#{tenant_name}</td>#{cells.join}</tr>"
end
# Build five quota rows for @tenant and the exact HTML rows the report is
# expected to render (row CSS classes alternate even/odd).
before do
setup_model
# dummy child tenant
FactoryGirl.create(:tenant, :parent => @tenant)
# remove quotas that QuotaHelper already initialized
@tenant.tenant_quotas = []
@tenant.tenant_quotas.create :name => :cpu_allocated, :value => 2
@tenant.tenant_quotas.create :name => :mem_allocated, :value => 4_294_967_296
@tenant.tenant_quotas.create :name => :storage_allocated, :value => 4_294_967_296
@tenant.tenant_quotas.create :name => :templates_allocated, :value => 4
@tenant.tenant_quotas.create :name => :vms_allocated, :value => 4
@expected_html_rows = []
formatted_values = {:name => "Allocated Virtual CPUs", :total => "2 Count", :used => "0 Count",
:allocated => "0 Count", :available => "2 Count"}
@expected_html_rows.push(generate_html_row(true, @tenant.name, formatted_values))
formatted_values = {:name => "Allocated Memory in GB", :total => "4.0 GB", :used => "1.0 GB",
:allocated => "0.0 GB", :available => "3.0 GB"}
@expected_html_rows.push(generate_html_row(false, @tenant.name, formatted_values))
formatted_values = {:name => "Allocated Storage in GB", :total => "4.0 GB",
:used => "#{1_000_000.0 / 1.gigabyte} GB", :allocated => "0.0 GB",
:available => "#{(4.gigabytes - 1_000_000.0) / 1.gigabyte} GB"}
@expected_html_rows.push(generate_html_row(true, @tenant.name, formatted_values))
formatted_values = {:name => "Allocated Number of Templates", :total => "4 Count", :used => "1 Count",
:allocated => "0 Count", :available => "3 Count"}
@expected_html_rows.push(generate_html_row(false, @tenant.name, formatted_values))
formatted_values = {:name => "Allocated Number of Virtual Machines", :total => "4 Count", :used => "1 Count",
:allocated => "0 Count", :available => "3 Count"}
@expected_html_rows.push(generate_html_row(true, @tenant.name, formatted_values))
User.current_user = user_admin
end
# Rendered rows must match the fixtures built in the before block.
it "returns expected html outputs with formatted values" do
allow(User).to receive(:server_timezone).and_return("UTC")
report.generate_table
expect(report.build_html_rows).to match_array(@expected_html_rows)
end
it "returns only rows for tenant with any tenant_quotas" do
allow(User).to receive(:server_timezone).and_return("UTC")
report.generate_table
# 6th row would be for tenant_without_quotas, but skipped now because of skip_condition, so we expecting 5
expect(report.table.data.count).to eq(5)
end
end
end
describe ".ascending?" do
  # nil order counts as ascending; only an explicit "Descending" does not.
  [
    [nil,          "nil",        true],
    ["Ascending",  "ascending",  true],
    ["Descending", "descending", false]
  ].each do |order_value, label, expected_ascending|
    it "handles #{label}" do
      report = MiqReport.new(:order => order_value)
      if expected_ascending
        expect(report).to be_ascending
      else
        expect(report).not_to be_ascending
      end
    end
  end
end
describe ".ascending=" do
  it "handles nil" do
    MiqReport.new.tap do |report|
      report.ascending = true
      expect(report).to be_ascending
    end
  end

  it "handles ascending" do
    MiqReport.new.tap do |report|
      report.ascending = false
      expect(report).not_to be_ascending
    end
  end
end
describe ".sort_col" do
  # Helper: a Host report over three columns, optionally with a sortby.
  def host_report(extra_attrs = {})
    MiqReport.new({
      :db        => "Host",
      :cols      => %w(name hostname smart),
      :col_order => %w(name hostname smart)
    }.merge(extra_attrs))
  end

  it "uses sort_by if available" do
    # "hostname" sits at index 1 of cols.
    expect(host_report(:sortby => ["hostname"]).sort_col).to eq(1)
  end

  it "falls back to first column" do
    expect(host_report.sort_col).to eq(0)
  end
end
describe ".cols" do
  it "loads given value" do
    expect(MiqReport.new(:cols => %w(name)).cols).to eq(%w(name))
  end

  it "falls back to col_order" do
    # Only unqualified (non-association) columns survive the fallback.
    report = MiqReport.new(:col_order => %w(miq_custom_attributes.name miq_custom_attributes.value name))
    expect(report.cols).to eq(%w(name))
  end

  it "allows manipulation" do
    report = MiqReport.new(:col_order => %w(miq_custom_attributes.name miq_custom_attributes.value name))
    report.cols << "name2"
    expect(report.cols).to eq(%w(name name2))
  end
end
context "chargeback reports" do
# Shared chargeback fixtures: a rate with one fixed-compute hourly tier,
# the common report params, and the seeded chargeback reference data.
let(:hourly_rate) { 0.01 }
let(:hourly_variable_tier_rate) { {:variable_rate => hourly_rate.to_s} }
let(:detail_params) { {:chargeback_rate_detail_fixed_compute_cost => { :tiers => [hourly_variable_tier_rate] } } }
let!(:chargeback_rate) do
FactoryGirl.create(:chargeback_rate, :detail_params => detail_params)
end
let(:report_params) do
{
:rpt_group => "Custom",
:rpt_type => "Custom",
:include => { :custom_attributes => {} },
:group => "y",
:template_type => "report",
}
end
before do
# Chargeback needs region, measures, chargeable fields and rates seeded.
MiqRegion.seed
ChargebackRateDetailMeasure.seed
ChargeableField.seed
ChargebackRate.seed
EvmSpecHelper.create_guid_miq_server_zone
end
context "chargeback based on container images" do
let(:label_name) { "version" }
let(:label_value) { "1.0.0" }
# docker_labels section => the label surfaces as a virtual custom attribute column.
let(:label) { FactoryGirl.build(:custom_attribute, :name => label_name, :value => label_value, :section => 'docker_labels') }
let(:label_report_column) { "virtual_custom_attribute_#{label_name}" }
let(:report) do
MiqReport.new(
report_params.merge(
:db => "ChargebackContainerImage",
:cols => ["start_date", "display_range", "project_name", "image_name", label_report_column],
:col_order => ["project_name", "image_name", "display_range", label_report_column],
:headers => ["Project Name", "Image Name", "Date Range", nil],
:sortby => ["project_name", "image_name", "start_date"],
:db_options => { :rpt_type => "ChargebackContainerImage",
:options => { :interval => "daily",
:interval_size => 28,
:end_interval_offset => 1,
:provider_id => "all",
:entity_id => "all",
:include_metrics => true,
:groupby => "date",
:groupby_tag => nil }},
:col_options => ChargebackContainerImage.report_col_options
)
)
end
it "runs a report with a custom attribute" do
ems = FactoryGirl.create(:ems_openshift)
image = FactoryGirl.create(:container_image, :ext_management_system => ems)
image.docker_labels << label
project_name = "my project"
project = FactoryGirl.create(:container_project, :name => project_name, :ext_management_system => ems)
group = FactoryGirl.create(:container_group, :ext_management_system => ems, :container_project => project)
container = FactoryGirl.create(:kubernetes_container, :container_group => group, :container_image => image)
container.metric_rollups << FactoryGirl.create(:metric_rollup_vm_hr,
:with_data,
:timestamp => 1.day.ago,
:resource_id => container.id,
:resource_name => container.name,
:parent_ems_id => ems.id,
:tag_names => "")
# Rate assigned by docker label, so the image's usage gets charged.
ChargebackRate.set_assignments(:compute, [{ :cb_rate => chargeback_rate, :label => [label, "container_image"] }])
rpt = report.generate_table(:userid => "admin")
row = rpt[project_name][:row]
expect(row[label_report_column]).to eq(label_value)
end
end
context "chargeback based on container projects" do
let(:label_name) { "version" }
let(:label_value) { "1.0.0" }
# 'labels' section => surfaced as a virtual custom attribute column.
let(:label) { FactoryGirl.build(:custom_attribute, :name => label_name, :value => label_value, :section => 'labels') }
let(:label_report_column) { "virtual_custom_attribute_#{label_name}" }
let(:report) do
MiqReport.new(
report_params.merge(
:db => "ChargebackContainerProject",
:cols => ["start_date", "display_range", "project_name", label_report_column],
:col_order => ["project_name", "display_range", label_report_column],
:headers => ["Project Name", "Date Range", nil],
:sortby => ["project_name", "start_date"],
:db_options => {:rpt_type => "ChargebackContainerProject",
:options => { :interval => "daily",
:interval_size => 28,
:end_interval_offset => 1,
:provider_id => "all",
:entity_id => "all",
:include_metrics => true,
:groupby => "date",
:groupby_tag => nil }},
:col_options => ChargebackContainerProject.report_col_options
)
)
end
it "runs a report with a custom attribute" do
ems = FactoryGirl.create(:ems_openshift)
project_name = "my project"
project = FactoryGirl.create(:container_project, :name => project_name, :ext_management_system => ems, :created_on => 2.days.ago)
project.labels << label
project.metric_rollups << FactoryGirl.create(:metric_rollup_vm_hr,
:with_data,
:timestamp => 1.day.ago,
:resource_id => project.id,
:resource_name => project.name,
:parent_ems_id => ems.id,
:tag_names => "")
# Rate assigned at the provider level covers the project.
ChargebackRate.set_assignments(:compute, [{ :cb_rate => chargeback_rate, :object => ems }])
rpt = report.generate_table(:userid => "admin")
row = rpt[project_name][:row]
expect(row[label_report_column]).to eq(label_value)
end
end
context "chargeback based on vms" do
let(:label_name) { "version" }
let(:label_value) { "1.0.0" }
# 'labels' section => surfaced as a virtual custom attribute column.
let(:label) { FactoryGirl.build(:custom_attribute, :name => label_name, :value => label_value, :section => 'labels') }
let(:label_report_column) { "virtual_custom_attribute_#{label_name}" }
let(:report) do
MiqReport.new(
report_params.merge(
:db => "ChargebackVm",
:cols => ["start_date", "display_range", "vm_name", label_report_column],
:col_order => ["vm_name", "display_range", label_report_column],
:headers => ["Vm Name", "Date Range", nil],
:sortby => ["vm_name", "start_date"],
:db_options => {:rpt_type => "ChargebackVm",
:options => { :interval => "daily",
:interval_size => 28,
:end_interval_offset => 1,
:provider_id => "all",
:entity_id => "all",
:include_metrics => true,
:groupby => "date",
:groupby_tag => nil,
:tag => '/managed/environment/prod'}},
:col_options => ChargebackVm.report_col_options
)
)
end
it "runs a report with a custom attribute" do
ems = FactoryGirl.create(:ems_vmware)
# environment/prod classification; the rate is assigned by this tag.
cat = FactoryGirl.create(:classification, :description => "Environment", :name => "environment", :single_value => true, :show => true)
c = FactoryGirl.create(:classification, :name => "prod", :description => "Production", :parent_id => cat.id)
tag = Tag.find_by(:name => "/managed/environment/prod")
temp = {:cb_rate => chargeback_rate, :tag => [c, "vm"]}
ChargebackRate.set_assignments(:compute, [temp])
vm_name = "test_vm"
vm1 = FactoryGirl.create(:vm_vmware, :name => vm_name, :evm_owner => FactoryGirl.create(:user_admin), :ems_ref => "ems_ref",
:created_on => 2.days.ago)
vm1.tag_with(tag.name, :ns => '*')
vm1.labels << label
# Full topology (host/storage/cluster) so metric rollups resolve parents.
host1 = FactoryGirl.create(:host, :hardware => FactoryGirl.create(:hardware, :memory_mb => 8124, :cpu_total_cores => 1, :cpu_speed => 9576), :vms => [vm1])
storage = FactoryGirl.create(:storage_target_vmware)
host1.storages << storage
ems_cluster = FactoryGirl.create(:ems_cluster, :ext_management_system => ems)
ems_cluster.hosts << host1
vm1.metric_rollups << FactoryGirl.create(:metric_rollup_vm_hr,
:with_data,
:timestamp => 1.day.ago,
:resource_id => vm1.id,
:resource_name => vm1.name,
:tag_names => "environment/prod",
:parent_host_id => host1.id,
:parent_ems_cluster_id => ems_cluster.id,
:parent_ems_id => ems.id,
:parent_storage_id => storage.id)
rpt = report.generate_table(:userid => "admin")
row = rpt[vm_name][:row]
expect(row[label_report_column]).to eq(label_value)
end
end
end
end
# ---- (stray file-concatenation artifact; boundary between spec files) ----
#class SearchableDummyModel < ActiveRecord::Base
# def self.columns() @columns ||= []; end
#
# def self.column(name, sql_type=nil, default=nil, null=true)
# columns << ActiveRecord::ConnectionAdapters::Column.new(name.to_s, default, sql_type.to_s, null)
# end
#
# column :email, :string
# column :name, :string
#end
# Namespace used by the specs below to register dynamically created model
# constants (see the 'is named' example).
module SearchableTestModels
# Intentionally empty placeholder.
module Nothing
end
end
# Test-double concern exercising two method-delivery mechanisms:
# * ClassMethods is auto-extended onto includers by ActiveSupport::Concern,
#   so includers gain the class-level #has_sidewalk?.
# * The hand-written self.included hook executes `def has_pavement?` whose
#   lexical definee is Walkable itself, so includers gain it as an instance
#   method once the first include has run.
# NOTE(review): mixing a manual self.included with Concern is unusual; kept
# deliberately because the specs below assert exactly this behaviour.
module Walkable
extend ActiveSupport::Concern
def self.included(base)
def has_pavement?; 'paved' end
end
module ClassMethods
def has_sidewalk?; 'luke sidewalker' end
end
end
# Plain (non-AR) test model: a street with a mutable name and district.
class Strasse
  # attr_accessor generates the same reader/writer pairs the original
  # spelled out by hand.
  attr_accessor :district, :name

  # Class-level discriminator asserted by the specs.
  def self.type
    'street'
  end
end
# A street address: a Strasse plus a house number.
class Address < Strasse
  attr_accessor :number

  # @param street_name [String] stored via Strasse's @name
  # @param house_number [String] stored as @number
  def initialize(street_name, house_number)
    @name = street_name
    @number = house_number
  end

  def to_s
    "Address is #{name} #{number}, #{district}"
  end
end
# Specs for the Searchengine Searchable concern, run against a stubbed
# ActiveRecord model so no database schema is needed.
describe 'Searchability' do
before(:each) do
# Fresh anonymous AR class per example; stub_const restores the constant
# after each example.
stub_const 'Dummy', Class.new(ActiveRecord::Base)
Dummy.class_eval do
# Fake the column metadata AR would normally load from the schema.
def self.columns() @columns ||= []; end
def self.column(name, sql_type=nil, default=nil, null=true)
columns << ActiveRecord::ConnectionAdapters::Column.new(name.to_s, default, sql_type.to_s, null)
end
column :email, :string
column :name, :string
end
end
context 'on a dummy model' do
before(:each) do
Dummy.class_eval do
include Searchengine::Concerns::Models::Searchable
end
# SearchableDummyModel.class_eval do
# # searchable as: 'Dumdum' do |dum| # `as: 'Dumdum'` part is optional
# # def greet; 'hi'
# # field: :email, type: 'email'
# # field: :name # defaults to string
# # end
# end
# stub_const 'Dummy', SearchableDummyModel
end
it 'exposes the searchability descriptor' do
expect(Dummy).to respond_to(:searchable_as)
end
context "sets the searchindex name" do
it 'to the default name on #searchable' do
# Index name is derived from the model name when none is given.
expect{
Dummy.searchable { p 'hi'}
}.to change{
Dummy.search_index_name
}.from(nil).to include("#{Dummy.name}Index")
end
it 'to the specified name' do
expect{
Dummy.searchable_as('Attrappe') { p 'ho' }
}.to change{
Dummy.search_index_name
}.from(nil).to include('AttrappeIndex')
end
end
it 'responds to #email' do
expect(Dummy.new).to respond_to(:email)
end
it 'has a model that respects the concerns' do
Strasse.class_eval do
include Walkable
end
# Class method comes via Concern's ClassMethods; the instance method comes
# via Walkable's hand-written included hook.
expect(Strasse).to respond_to :has_sidewalk?
expect(Strasse).not_to respond_to :has_pavement?
expect(Strasse.new).to respond_to :has_pavement?
end
it 'interrogates objects' do
old_address = 'Eichendorffstraße 18'
new_address = 'Novalisstraße 12'
# Anonymous subclass mirroring Address but with a different #to_s.
klass = Class.new(Strasse)
klass.class_eval do
def initialize(a,b)
@name=a
@number=b
end
def to_s; "Adresse est #{name} #{@number}, #{district}" end
end
first = Address.new(*old_address.split)
expect(first.class.superclass).to equal(Strasse)
expect(first.number).to eq(old_address.split.last)
expect(first.name).to eq(old_address.split.first)
expect(first.class.type).to eq(Strasse.type)
second = klass.new(*new_address.split)
expect(second.class.superclass).to equal(Strasse)
expect(second.name).to eq(new_address.split.first)
expect(second.class.type).to eq(Strasse.type)
end
it 'is named' do
# Registers a new constant under the SearchableTestModels namespace.
SearchableTestModels.const_set("Dummy", Class.new(Strasse))
#p SearchableTestModels::Dummy
end
end
end
Add a dummy test covering Chewy::Index#descendants
#class SearchableDummyModel < ActiveRecord::Base
# def self.columns() @columns ||= []; end
#
# def self.column(name, sql_type=nil, default=nil, null=true)
# columns << ActiveRecord::ConnectionAdapters::Column.new(name.to_s, default, sql_type.to_s, null)
# end
#
# column :email, :string
# column :name, :string
#end
# Namespace for dynamically created model constants used by the specs below.
module SearchableTestModels
# Intentionally empty placeholder.
module Nothing
end
end
# Test-double concern exercising two method-delivery mechanisms:
# * ClassMethods is auto-extended onto includers by ActiveSupport::Concern,
#   giving includers the class-level #has_sidewalk?.
# * The hand-written self.included hook executes `def has_pavement?` whose
#   lexical definee is Walkable itself, so includers gain it as an instance
#   method once the first include has run.
# NOTE(review): mixing a manual self.included with Concern is unusual; kept
# deliberately because the specs below assert exactly this behaviour.
module Walkable
extend ActiveSupport::Concern
def self.included(base)
def has_pavement?; 'paved' end
end
module ClassMethods
def has_sidewalk?; 'luke sidewalker' end
end
end
# Plain (non-AR) test model: a street with a mutable name and district.
class Strasse
  # attr_accessor generates the same reader/writer pairs the original
  # spelled out by hand.
  attr_accessor :district, :name

  # Class-level discriminator asserted by the specs.
  def self.type
    'street'
  end
end
# A street address: a Strasse plus a house number.
class Address < Strasse
  attr_accessor :number

  # @param street_name [String] stored via Strasse's @name
  # @param house_number [String] stored as @number
  def initialize(street_name, house_number)
    @name = street_name
    @number = house_number
  end

  def to_s
    "Address is #{name} #{number}, #{district}"
  end
end
# Specs for the Searchengine Searchable concern, run against a stubbed
# ActiveRecord model so no database schema is needed.
describe 'Searchability' do
before(:each) do
# Fresh anonymous AR class per example; stub_const restores the constant
# after each example.
stub_const 'Dummy', Class.new(ActiveRecord::Base)
Dummy.class_eval do
# Fake the column metadata AR would normally load from the schema.
def self.columns() @columns ||= []; end
def self.column(name, sql_type=nil, default=nil, null=true)
columns << ActiveRecord::ConnectionAdapters::Column.new(name.to_s, default, sql_type.to_s, null)
end
column :email, :string
column :name, :string
end
end
context 'on a dummy model' do
before(:each) do
Dummy.class_eval do
include Searchengine::Concerns::Models::Searchable
end
# SearchableDummyModel.class_eval do
# # searchable as: 'Dumdum' do |dum| # `as: 'Dumdum'` part is optional
# # def greet; 'hi'
# # field: :email, type: 'email'
# # field: :name # defaults to string
# # end
# end
# stub_const 'Dummy', SearchableDummyModel
end
it 'exposes the searchability descriptor' do
expect(Dummy).to respond_to(:searchable_as)
end
context "sets the searchindex name" do
it 'to the default name on #searchable' do
# Index name is derived from the model name when none is given.
expect{
Dummy.searchable { p 'hi'}
}.to change{
Dummy.search_index_name
}.from(nil).to include("#{Dummy.name}Index")
end
it 'to the specified name' do
expect{
Dummy.searchable_as('Attrappe') { p 'ho' }
}.to change{
Dummy.search_index_name
}.from(nil).to include('AttrappeIndex')
end
end
context 'with an index' do
it 'ensures the index is known to Chewy' do
Dummy.searchable { }
# Fix: removed a leftover debug print that dumped all Chewy::Index
# descendant names to stdout on every run.
expect(Chewy::Index.descendants).to include(Dummy.search_index)
end
end
it 'responds to #email' do
expect(Dummy.new).to respond_to(:email)
end
it 'has a model that respects the concerns' do
Strasse.class_eval do
include Walkable
end
# Class method comes via Concern's ClassMethods; the instance method comes
# via Walkable's hand-written included hook.
expect(Strasse).to respond_to :has_sidewalk?
expect(Strasse).not_to respond_to :has_pavement?
expect(Strasse.new).to respond_to :has_pavement?
end
it 'interrogates objects' do
old_address = 'Eichendorffstraße 18'
new_address = 'Novalisstraße 12'
# Anonymous subclass mirroring Address but with a different #to_s.
klass = Class.new(Strasse)
klass.class_eval do
def initialize(a,b)
@name=a
@number=b
end
def to_s; "Adresse est #{name} #{@number}, #{district}" end
end
first = Address.new(*old_address.split)
expect(first.class.superclass).to equal(Strasse)
expect(first.number).to eq(old_address.split.last)
expect(first.name).to eq(old_address.split.first)
expect(first.class.type).to eq(Strasse.type)
second = klass.new(*new_address.split)
expect(second.class.superclass).to equal(Strasse)
expect(second.name).to eq(new_address.split.first)
expect(second.class.type).to eq(Strasse.type)
end
end
end
|
require 'spec_helper'
# Specs for ObservableObject: a transparent proxy that invokes an event
# handler whenever the wrapped object (or, with deep_wrap, any nested
# sub-object) is mutated.
describe ObservableObject do
# Collects every object the event handler was invoked with.
let(:event_obj_list) { Array.new }
let(:event) { Proc.new { |obj| event_obj_list << obj } }
it 'has version (smoke test)' do
expect(ObservableObject::VERSION).to be_a(String)
end
it 'returns the object itself for unwrappable objects' do
# Immediate values cannot be proxied, so wrap/deep_wrap must return the
# very same object (identical __id__).
[:symbol, 1, 1.0, true, false, nil].each do |obj|
expect(ObservableObject.wrap(obj).__id__).to eq(obj.__id__)
expect(ObservableObject.deep_wrap(obj).__id__).to eq(obj.__id__)
end
end
it 'is "equal to" the original object (wrap)' do
ObservableObjectTest::NonBasicObjects.each do |obj|
wrapped = ObservableObject.wrap(obj) { |p| ; }
expect(wrapped).to eq(obj)
end
end
it 'is "equal to" the original object (deep_wrap)' do
ObservableObjectTest::NonBasicObjects.each do |obj|
wrapped = ObservableObject.deep_wrap(obj) { |p| ; }
expect(wrapped).to eq(obj)
end
end
it 'can be used as a key in Hashes instead of the original object (wrap)' do
# Requires the proxy to delegate #hash and #eql? faithfully.
hash = Hash.new
ObservableObjectTest::NonBasicObjects.each do |obj|
# puts "-> #{obj}"
wrapped = ObservableObject.wrap(obj) { |p| ; }
expect(wrapped.hash).to eq(obj.hash)
expect(wrapped.eql?(obj)).to be true
hash[obj] = obj.__id__
expect(hash[wrapped]).to eq(obj.__id__)
end
end
it 'can be used as a key in Hashes instead of the original object (deep_wrap)' do
hash = Hash.new
ObservableObjectTest::NonBasicObjects.each do |obj|
# puts "-> #{obj}"
wrapped = ObservableObject.deep_wrap(obj) { |p| ; }
expect(wrapped.hash).to eq(obj.hash)
expect(wrapped.eql?(obj)).to be true
hash[obj] = obj.__id__
expect(hash[wrapped]).to eq(obj.__id__)
end
end
it 'calls event handler when the object is modified' do
obj = [[1],[2]]
wrapped = ObservableObject.deep_wrap(obj,&event)
# Three mutations of a nested sub-array -> three handler invocations.
wrapped[0] << 100
wrapped[0] << 50
wrapped[0].sort!
expect(event_obj_list.count).to eq(3)
expect(event_obj_list.all? { |x| x == wrapped }).to be true
end
it 'calls event handler after a sub-object is replaced and then modified' do
obj = [[1],[2]]
wrapped = ObservableObject.deep_wrap(obj,&event)
# Newly assigned sub-objects must be wrapped too, at any nesting depth.
wrapped[0] = [3]
wrapped[0] << 50
wrapped[0].sort!
wrapped[1] = [['a','b'],'c']
wrapped[1][0][1] = 'd'
expect(event_obj_list.count).to eq(5)
expect(event_obj_list.all? { |x| x == wrapped }).to be true
end
it 'calls event handler after a sub-object is added and then modified' do
obj = [[1],[2]]
wrapped = ObservableObject.deep_wrap(obj,&event)
wrapped[1] << ['a','b']
wrapped[1].last.push('c')
expect(event_obj_list.count).to eq(2)
expect(event_obj_list.all? { |x| x == wrapped }).to be true
end
# TODO: fix the case below, if there is an efficient solution. Not a bug, annoyance.
# it 'does not call event handler after an already deleted sub-object is modified' do
# obj = [[1,2],[3,4]]
# wrapped = ObservableObject.deep_wrap(obj,&event)
# a = wrapped[0]
# a.compact!
# wrapped.delete_at(0)
# a[0] = 100
# expect(event_obj_list.count).to eq(2)
# expect(event_obj_list.all? { |x| x == wrapped }).to be true
# end
end
Improve test coverage: !=, !, method_defined?
require 'spec_helper'
# Specs for ObservableObject: a transparent proxy that invokes an event
# handler whenever the wrapped object (or, with deep_wrap, any nested
# sub-object) is mutated.
describe ObservableObject do
# Collects every object the event handler was invoked with.
let(:event_obj_list) { Array.new }
let(:event) { Proc.new { |obj| event_obj_list << obj } }
it 'has version (smoke test)' do
expect(ObservableObject::VERSION).to be_a(String)
end
it 'returns the object itself for unwrappable objects' do
# Immediate values cannot be proxied, so wrap/deep_wrap must return the
# very same object (identical __id__).
[:symbol, 1, 1.0, true, false, nil].each do |obj|
expect(ObservableObject.wrap(obj).__id__).to eq(obj.__id__)
expect(ObservableObject.deep_wrap(obj).__id__).to eq(obj.__id__)
end
end
it 'is "equal to" the original object (wrap)' do
ObservableObjectTest::NonBasicObjects.each do |obj|
wrapped = ObservableObject.wrap(obj) { |p| ; }
expect(wrapped).to eq(obj)
end
end
it 'is "equal to" the original object (deep_wrap)' do
ObservableObjectTest::NonBasicObjects.each do |obj|
wrapped = ObservableObject.deep_wrap(obj) { |p| ; }
expect(wrapped).to eq(obj)
end
end
it 'provides correct "!" operator' do
# BasicObject proxies must forward ! explicitly; verify truthiness matches.
ObservableObjectTest::NonBasicObjects.each do |obj|
wrapped = ObservableObject.wrap(obj) { |p| ; }
expect(!wrapped).to eq(!obj)
end
end
it 'provides correct "!=" operator' do
ObservableObjectTest::NonBasicObjects.each do |obj|
wrapped = ObservableObject.wrap(obj) { |p| ; }
expect(wrapped != obj).to be false
end
end
it 'provides correct methods' do
# The proxy must answer respond_to? exactly like the wrapped object for
# every instance method of the object's class.
ObservableObjectTest::NonBasicObjects.each do |obj|
# puts "-> #{obj}"
wrapped = ObservableObject.wrap(obj) { |p| ; }
expect(obj.class.instance_methods.all? do |mname|
# puts "----> #{mname}"
wrapped.respond_to?(mname) == obj.respond_to?(mname)
end).to be true
end
end
it 'can be used as a key in Hashes instead of the original object (wrap)' do
# Requires the proxy to delegate #hash and #eql? faithfully.
hash = Hash.new
ObservableObjectTest::NonBasicObjects.each do |obj|
# puts "-> #{obj}"
wrapped = ObservableObject.wrap(obj) { |p| ; }
expect(wrapped.hash).to eq(obj.hash)
expect(wrapped.eql?(obj)).to be true
hash[obj] = obj.__id__
expect(hash[wrapped]).to eq(obj.__id__)
end
end
it 'can be used as a key in Hashes instead of the original object (deep_wrap)' do
hash = Hash.new
ObservableObjectTest::NonBasicObjects.each do |obj|
# puts "-> #{obj}"
wrapped = ObservableObject.deep_wrap(obj) { |p| ; }
expect(wrapped.hash).to eq(obj.hash)
expect(wrapped.eql?(obj)).to be true
hash[obj] = obj.__id__
expect(hash[wrapped]).to eq(obj.__id__)
end
end
it 'calls event handler when the object is modified' do
obj = [[1],[2]]
wrapped = ObservableObject.deep_wrap(obj,&event)
# Three mutations of a nested sub-array -> three handler invocations.
wrapped[0] << 100
wrapped[0] << 50
wrapped[0].sort!
expect(event_obj_list.count).to eq(3)
expect(event_obj_list.all? { |x| x == wrapped }).to be true
end
it 'calls event handler after a sub-object is replaced and then modified' do
obj = [[1],[2]]
wrapped = ObservableObject.deep_wrap(obj,&event)
# Newly assigned sub-objects must be wrapped too, at any nesting depth.
wrapped[0] = [3]
wrapped[0] << 50
wrapped[0].sort!
wrapped[1] = [['a','b'],'c']
wrapped[1][0][1] = 'd'
expect(event_obj_list.count).to eq(5)
expect(event_obj_list.all? { |x| x == wrapped }).to be true
end
it 'calls event handler after a sub-object is added and then modified' do
obj = [[1],[2]]
wrapped = ObservableObject.deep_wrap(obj,&event)
wrapped[1] << ['a','b']
wrapped[1].last.push('c')
expect(event_obj_list.count).to eq(2)
expect(event_obj_list.all? { |x| x == wrapped }).to be true
end
# TODO: fix the case below, if there is an efficient solution. Not a bug, annoyance.
# it 'does not call event handler after an already deleted sub-object is modified' do
# obj = [[1,2],[3,4]]
# wrapped = ObservableObject.deep_wrap(obj,&event)
# a = wrapped[0]
# a.compact!
# wrapped.delete_at(0)
# a[0] = 100
# expect(event_obj_list.count).to eq(2)
# expect(event_obj_list.all? { |x| x == wrapped }).to be true
# end
end
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with this
# work for additional information regarding copyright ownership. The ASF
# licenses this file to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helpers'))
describe 'ArchiveTask', :shared=>true do
before do
@dir = File.expand_path('test')
@files = %w{Test1.txt Text2.html}.map { |file| File.expand_path(file, @dir) }.
each { |file| write file, content_for(file) }
@empty_dirs = %w{EmptyDir1 EmptyDir2}.map { |file| File.expand_path(file, @dir) }.
each { |file| mkdir file }
end
# Not too smart, we just create some content based on file name to make sure you read what you write.
# Deterministic content derived from the file's basename, so specs can verify
# that whatever was written into an archive is what comes back out.
def content_for(file)
  format('Content for %s', File.basename(file))
end
# Create an archive not using the archive task, this way we do have a file in existence, but we don't
# have an already invoked task. Yield an archive task to the block which can use it to include files,
# set options, etc.
def create_without_task
# Build to a temp name, invoke, then rename: @archive ends up on disk while
# no already-invoked task is registered under its own name.
archive(@archive + '.tmp').tap do |task|
yield task if block_given?
task.invoke
mv task.name, @archive
end
end
# Builds a source zip of @files for the merge examples and yields its task.
# NOTE(review): unlike create_without_task, the yield here is unconditional,
# so every caller must supply a block.
def create_for_merge
zip(@archive + '.src').include(@files).tap do |task|
yield task
end
end
it 'should point to archive file' do
archive(@archive).name.should eql(@archive)
end
it 'should create file' do
lambda { archive(@archive).invoke }.should change { File.exist?(@archive) }.to(true)
end
it 'should create empty archive if no files included' do
archive(@archive).invoke
inspect_archive { |archive| archive.should be_empty }
end
it 'should raise error when include() is called with nil values' do
lambda { archive(@archive).include(nil) }.should raise_error
lambda { archive(@archive).include([nil]) }.should raise_error
end
it 'should create empty archive if called #clean method' do
archive(@archive).include(@files).clean.invoke
inspect_archive { |archive| archive.should be_empty }
end
it 'should archive all included files' do
archive(@archive).include(@files).invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
inspect_archive.size.should eql(@files.size)
end
it 'should archive file tasks' do
tasks = @files.map { |fn| file(fn) }
archive(@archive).include(tasks).invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
inspect_archive.size.should eql(@files.size)
end
it 'should invoke and archive file tasks' do
file = file('included') { write 'included' }
lambda { archive(@archive).include(file).invoke }.should change { File.exist?(file.to_s) }.to(true)
inspect_archive.keys.should include('included')
end
it 'should archive artifacts' do
write 'library-1.0.txt', 'library-1.0'
artifact("org.example:library:txt:1.0").from 'library-1.0.txt'
archive(@archive).include("org.example:library:txt:1.0").invoke
inspect_archive.keys.should include('library-1.0.txt')
end
it 'should archive project artifacts' do
define 'p1' do
project.version = '1.0'
package(:zip)
end
archive(@archive).include(project('p1')).invoke
inspect_archive.keys.should include('p1-1.0.zip')
end
it 'should include entry for directory' do
archive(@archive).include(@dir).invoke
inspect_archive { |archive| @files.each { |f| archive['test/' + File.basename(f)].should eql(content_for(f)) } }
end
it 'should not archive any excluded files' do
archive(@archive).include(@files).exclude(@files.last).invoke
inspect_archive do |archive|
archive.keys.should include(File.basename(@files.first))
archive.keys.should_not include(File.basename(@files.last))
end
end
it 'should not archive any excluded files in included directories' do
archive(@archive).include(@dir).exclude(@files.last).invoke
inspect_archive do |archive|
archive.keys.should include('test/' + File.basename(@files.first))
archive.keys.should_not include('test/' + File.basename(@files.last))
end
end
it 'should not archive any excluded files when using :from/:as' do
archive(@archive).include(:from=>@dir).exclude(@files.last).invoke
inspect_archive do |archive|
archive.keys.should include(File.basename(@files.first))
archive.keys.should_not include(File.basename(@files.last))
end
end
it 'should raise error when using :from with nil value' do
lambda {
archive(@archive).include(:from=>nil)
}.should raise_error
end
it 'should exclude entire directory and all its children' do
mkpath "#{@dir}/sub"
write "#{@dir}/sub/test"
archive(@archive).include(@dir).exclude("#{@dir}/sub").invoke
inspect_archive do |archive|
archive.keys.select { |file| file =~ /sub/ }.should be_empty
end
end
it 'should not archive any excluded files when pattern is *.ext' do
write "test/file.txt"
write "test/file.swf"
archive(@archive).include(@dir).exclude('**/*.swf').invoke
inspect_archive do |archive|
archive.keys.should include('test/file.txt')
archive.keys.should_not include('test/file.swf')
end
end
it 'should archive files into specified path' do
archive(@archive).include(@files, :path=>'code').invoke
inspect_archive { |archive| @files.each { |f| archive['code/' + File.basename(f)].should eql(content_for(f)) } }
end
# NOTE(review): two examples that previously lived here ('should include entry
# for directory' and 'should archive files into specified path') were exact
# duplicates of the identically named examples defined earlier in this shared
# group, and have been removed.
it 'should archive directories into specified path' do
archive(@archive).include(@dir, :path=>'code').invoke
inspect_archive { |archive| @files.each { |f| archive['code/test/' + File.basename(f)].should eql(content_for(f)) } }
end
it 'should understand . in path' do
archive(@archive).path('.').should == archive(@archive).path('')
archive(@archive).path('foo').path('.').should == archive(@archive).path('foo')
end
it 'should understand .. in path' do
archive(@archive).path('..').should == archive(@archive).path('')
archive(@archive).path('foo').path('..').should == archive(@archive).path('')
archive(@archive).path('foo/bar').path('..').should == archive(@archive).path('foo')
end
it 'should understand leading / in path' do
archive(@archive).path('/').should == archive(@archive).path('')
archive(@archive).path('foo/bar').path('/').should == archive(@archive).path('')
end
it 'should archive file into specified name' do
archive(@archive).include(@files.first, :as=>'test/sample').invoke
inspect_archive { |archive| @files.each { |f| archive['test/sample'].should eql(content_for(@files.first)) } }
end
it 'should archive directory into specified alias, without using "."' do
archive(@archive).include(@dir, :as=>'.').invoke
inspect_archive { |archive| archive.keys.should_not include(".") }
end
it 'should archive directories into specified alias, even if it has the same name' do
# Fix: removed a leftover `p "dir #{@dir}"` debug print that polluted spec
# output on every run.
archive(@archive).include(@dir, :as=>File.basename(@dir)).invoke
inspect_archive { |archive|
archive.keys.should_not include "#{File.basename(@dir)}"
}
end
it 'should archive file into specified name/path' do
archive(@archive).include(@files.first, :as=>'test/sample', :path=>'path').invoke
inspect_archive { |archive| @files.each { |f| archive['path/test/sample'].should eql(content_for(@files.first)) } }
end
it 'should archive files starting with dot' do
write 'test/.config', '# configuration'
archive(@archive).include('test').invoke
inspect_archive { |archive| @files.each { |f| archive['test/.config'].should eql('# configuration') } }
end
it 'should archive directory into specified name' do
archive(@archive).include(@dir, :as=>'code').invoke
inspect_archive { |archive| @files.each { |f| archive['code/' + File.basename(f)].should eql(content_for(f)) } }
end
it 'should archive directory into specified name/path' do
archive(@archive).include(@dir, :as=>'code', :path=>'path').invoke
inspect_archive { |archive| @files.each { |f| archive['path/code/' + File.basename(f)].should eql(content_for(f)) } }
end
it 'should archive directory contents' do
archive(@archive).include(@dir, :as=>'.').invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
end
it 'should archive directory contents into specified path' do
archive(@archive).include(@dir, :as=>'.', :path=>'path').invoke
inspect_archive { |archive| @files.each { |f| archive['path/' + File.basename(f)].should eql(content_for(f)) } }
end
it 'should not allow two files with the :as argument' do
lambda { archive(@archive).include(@files.first, @files.last, :as=>'test/sample') }.should raise_error(RuntimeError, /one file/)
end
it 'should expand another archive file' do
create_for_merge do |src|
archive(@archive).merge(src)
archive(@archive).invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
end
end
it 'should expand another archive file with include pattern' do
create_for_merge do |src|
archive(@archive).merge(src).include(File.basename(@files.first))
archive(@archive).invoke
inspect_archive do |archive|
archive[File.basename(@files.first)].should eql(content_for(@files.first))
archive[File.basename(@files.last)].should be_nil
end
end
end
it 'should expand another archive file with exclude pattern' do
create_for_merge do |src|
archive(@archive).merge(src).exclude(File.basename(@files.first))
archive(@archive).invoke
inspect_archive do |archive|
@files[1..-1].each { |f| archive[File.basename(f)].should eql(content_for(f)) }
archive[File.basename(@files.first)].should be_nil
end
end
end
it 'should expand another archive file with nested exclude pattern' do
@files = %w{Test1.txt Text2.html}.map { |file| File.join(@dir, "foo", file) }.
each { |file| write file, content_for(file) }
zip(@archive + '.src').include(@dir).tap do |task|
archive(@archive).merge(task).exclude('test/*')
archive(@archive).invoke
inspect_archive.should be_empty
end
end
it 'should expand another archive file into path' do
create_for_merge do |src|
archive(@archive).path('test').merge(src)
archive(@archive).invoke
inspect_archive { |archive| @files.each { |f| archive['test/' + File.basename(f)].should eql(content_for(f)) } }
end
end
it 'should expand another archive file into path with :path option' do
create_for_merge do |src|
archive(@archive).merge(src, :path=>'test')
archive(@archive).invoke
inspect_archive { |archive| @files.each { |f| archive['test/' + File.basename(f)].should eql(content_for(f)) } }
end
end
it "should expand another archive file into path with :path=>'/'" do
create_for_merge do |src|
archive(@archive).merge(src, :path=>'/')
archive(@archive).invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
end
end
it 'should expand another archive file into path with merge option' do
create_for_merge do |src|
archive(@archive).include(src, :merge=>true)
archive(@archive).invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
end
end
it 'should update if one of the files is recent' do
create_without_task { |archive| archive.include(@files) }
# Touch archive file to some point in the past. This effectively makes
# all included files newer.
File.utime Time.now - 100, Time.now - 100, @archive
archive(@archive).include(@files).invoke
File.stat(@archive).mtime.should be_close(Time.now, 10)
end
it 'should do nothing if all files are uptodate' do
create_without_task { |archive| archive.include(@files) }
# By touching all files in the past, there's nothing new to update.
(@files + [@archive]).each { |f| File.utime Time.now - 100, Time.now - 100, f }
archive(@archive).include(@files).invoke
File.stat(@archive).mtime.should be_close(Time.now - 100, 10)
end
# Same trigger as the mtime-based example above, but this one verifies the
# archive CONTENT is refreshed. Renamed so the two examples no longer share
# the description 'should update if one of the files is recent'.
it 'should update archive content when an included file changes' do
create_without_task { |archive| archive.include(@files) }
# Change files, we expect to see new content.
write @files.first, '/* Refreshed */'
File.utime(Time.now - 100, Time.now - 100, @archive) # Touch archive file to some point in the past.
archive(@archive).include(@files).invoke
inspect_archive { |archive| archive[File.basename(@files.first)].should eql('/* Refreshed */') }
end
it 'should create new archive when updating' do
create_without_task { |archive| archive.include(@files) }
File.utime(Time.now - 100, Time.now - 100, @archive) # Touch archive file to some point in the past.
archive(@archive).include(@files[1..-1]).invoke
inspect_archive.size.should be(@files.size - 1)
end
it 'should not accept invalid options' do
archive(@archive).include(@files)
lambda { archive(@archive).with :option=>true }.should raise_error
end
end
# Runs the shared ArchiveTask examples against the plain (uncompressed) tar task.
describe TarTask do
it_should_behave_like 'ArchiveTask'
before { @archive = File.expand_path('test.tar') }
define_method(:archive) { |file| tar(file) }
# Reads @archive back with Minitar and returns {entry_name => content};
# directory entries are keyed with a trailing '/'. Yields the hash when a
# block is given, and returns it either way.
def inspect_archive
entries = {}
Archive::Tar::Minitar.open @archive, 'r' do |reader|
reader.each { |entry| entries[entry.directory ? "#{entry.name}/" : entry.name] = entry.read }
end
yield entries if block_given?
entries
end
end
# Runs the shared ArchiveTask examples against the gzip-compressed tar task
# (.tgz); tar() picks compression from the file extension.
describe TarTask, ' gzipped' do
it_should_behave_like 'ArchiveTask'
before { @archive = File.expand_path('test.tgz') }
define_method(:archive) { |file| tar(file) }
# Like the plain TarTask helper, but unwraps the gzip stream before handing
# it to Minitar. Returns {entry_name => content}; directories keyed with '/'.
def inspect_archive
entries = {}
Zlib::GzipReader.open @archive do |gzip|
Archive::Tar::Minitar.open gzip, 'r' do |reader|
reader.each { |entry| entries[entry.directory ? "#{entry.name}/" : entry.name] = entry.read }
end
end
yield entries if block_given?
entries
end
end
# Runs the shared ArchiveTask examples against the zip task, plus zip-only
# behaviour (empty directories, permissions).
describe ZipTask do
it_should_behave_like 'ArchiveTask'
before { @archive = File.expand_path('test.zip') }
define_method(:archive) { |file| zip(file) }
# Every example's output is validated for corruption after it runs.
after do
checkZip(@archive)
end
# Check for possible corruption using Java's ZipInputStream since it's stricter than rubyzip
def checkZip(file)
return unless File.exist?(file)
zip = Java.java.util.zip.ZipInputStream.new(Java.java.io.FileInputStream.new(file))
# Walking every entry is enough: a corrupt archive raises from getNextEntry.
while entry = zip.getNextEntry do
# just iterate over all entries
end
zip.close()
end
# Reads @archive back with rubyzip and returns {entry_name => content}.
# Yields the hash when a block is given, and returns it either way.
def inspect_archive
entries = {}
Zip::ZipFile.open @archive do |zip|
zip.entries.each do |entry|
# Ignore the / directory created for empty ZIPs when using java.util.zip.
entries[entry.to_s] = zip.read(entry) unless entry.to_s == '/'
end
end
yield entries if block_given?
entries
end
it 'should include empty dirs' do
archive(@archive).include(@dir)
archive(@archive).invoke
inspect_archive do |archive|
archive.keys.should include('test/EmptyDir1/')
end
end
it 'should include empty dirs from Dir' do
archive(@archive).include(Dir["#{@dir}/*"])
archive(@archive).invoke
inspect_archive do |archive|
archive.keys.should include('EmptyDir1/')
end
end
it 'should work with path object' do
archive(@archive).path('code').include(@files)
archive(@archive).invoke
inspect_archive { |archive| archive.keys.should include('code/') }
end
it 'should have path object that includes empty dirs' do
archive(@archive).path('code').include(Dir["#{@dir}/*"])
archive(@archive).invoke
inspect_archive do |archive|
archive.keys.should include('code/EmptyDir1/')
end
end
# chmod is not reliable on Windows
unless Buildr::Util.win_os?
it 'should preserve file permissions' do
# with JRuby it's important to use absolute paths with File.chmod()
# http://jira.codehaus.org/browse/JRUBY-3300
hello = File.expand_path('src/main/bin/hello')
write hello, 'echo hi'
File.chmod(0777, hello)
fail("Failed to set permission on #{hello}") unless (File.stat(hello).mode & 0777) == 0777
zip('foo.zip').include('src/main/bin/*').invoke
unzip('target' => 'foo.zip').extract
(File.stat('target/hello').mode & 0777).should == 0777
end
end
end
describe Unzip do
before do
@zip = File.expand_path('test.zip')
@dir = File.expand_path('test')
@files = %w{Test1.txt Text2.html}.map { |file| File.join(@dir, file) }.
each { |file| write file, content_for(file) }
@target = File.expand_path('target')
@targz = File.expand_path('test.tar.gz')
@targz2 = File.expand_path('test.tgz')
end
# Not too smart, we just create some content based on file name to
# make sure you read what you write.
# Deterministic content derived from the file's basename, so extraction
# results can be verified against what was originally written.
def content_for(file)
  'Content for ' + File.basename(file)
end
# Builds a .tar.gz (@targz) containing @files — or the given args — then yields.
def with_tar(*args)
tar(@targz).include(*args.empty? ? @files : args).invoke
yield
end
# Same as with_tar but builds the .tgz variant (@targz2) used by the
# extension-detection examples.
def with_tar_too(*args)
tar(@targz2).include(*args.empty? ? @files : args).invoke
yield
end
# Builds a .zip (@zip) containing @files — or the given args — then yields.
def with_zip(*args)
zip(@zip).include(*args.empty? ? @files : args).invoke
yield
end
# --- Basic extraction behavior across zip / .tar.gz / .tgz ---
it 'should touch target directory' do
with_zip do
mkdir @target
# Back-date the directory so we can detect that extraction touches it.
File.utime(Time.now - 10, Time.now - 10, @target)
unzip(@target=>@zip).target.invoke
end
File.stat(@target).mtime.should be_close(Time.now, 2)
end
it 'should expand files' do
with_zip do
unzip(@target=>@zip).target.invoke
@files.each { |f| File.read(File.join(@target, File.basename(f))).should eql(content_for(f)) }
end
end
it 'should expand files from a tar.gz file' do
with_tar do
unzip(@target=>@targz).target.invoke
@files.each { |f| File.read(File.join(@target, File.basename(f))).should eql(content_for(f)) }
end
end
it 'should expand files from a .tgz file' do
with_tar_too do
unzip(@target=>@targz2).target.invoke
@files.each { |f| File.read(File.join(@target, File.basename(f))).should eql(content_for(f)) }
end
end
it 'should expand all files' do
with_zip do
unzip(@target=>@zip).target.invoke
FileList[File.join(@target, '*')].size.should be(@files.size)
end
end
it 'should expand all files from a .tar.gz file' do
with_tar do
unzip(@target=>@targz).target.invoke
FileList[File.join(@target, '*')].size.should be(@files.size)
end
end
# --- include/exclude patterns restrict what gets extracted ---
it 'should expand only included files' do
with_zip do
only = File.basename(@files.first)
unzip(@target=>@zip).include(only).target.invoke
FileList[File.join(@target, '*')].should include(File.expand_path(only, @target))
FileList[File.join(@target, '*')].size.should be(1)
end
end
it 'should expand only included files from a .tar.gz file' do
with_tar do
only = File.basename(@files.first)
unzip(@target=>@targz).include(only).target.invoke
FileList[File.join(@target, '*')].should include(File.expand_path(only, @target))
FileList[File.join(@target, '*')].size.should be(1)
end
end
it 'should expand all but excluded files' do
with_zip do
except = File.basename(@files.first)
unzip(@target=>@zip).exclude(except).target.invoke
FileList[File.join(@target, '*')].should_not include(File.expand_path(except, @target))
FileList[File.join(@target, '*')].size.should be(@files.size - 1)
end
end
it 'should expand all but excluded files with a .tar.gz file' do
with_tar do
except = File.basename(@files.first)
unzip(@target=>@targz).exclude(except).target.invoke
FileList[File.join(@target, '*')].should_not include(File.expand_path(except, @target))
FileList[File.join(@target, '*')].size.should be(@files.size - 1)
end
end
# --- Patterns against nested archive paths, and from_path-relative matching ---
it 'should include with nested path patterns' do
with_zip @files, :path=>'test/path' do
only = File.basename(@files.first)
# A bare basename does not match entries nested under test/path.
unzip(@target=>@zip).include(only).target.invoke
FileList[File.join(@target, '*')].should be_empty
# Each sub-case resets Rake's task registry and the target dir.
Rake::Task.clear ; rm_rf @target
unzip(@target=>@zip).include('test/path/' + only).target.invoke
FileList[File.join(@target, 'test/path/*')].size.should be(1)
Rake::Task.clear ; rm_rf @target
unzip(@target=>@zip).include('test/**/*').target.invoke
FileList[File.join(@target, 'test/path/*')].size.should be(2)
Rake::Task.clear ; rm_rf @target
unzip(@target=>@zip).include('test/*').target.invoke
FileList[File.join(@target, 'test/path/*')].size.should be(2)
end
end
it 'should include with nested path patterns with a .tar.gz file' do
with_tar @files, :path=>'test/path' do
only = File.basename(@files.first)
unzip(@target=>@targz).include(only).target.invoke
FileList[File.join(@target, '*')].should be_empty
Rake::Task.clear ; rm_rf @target
unzip(@target=>@targz).include('test/path/' + only).target.invoke
FileList[File.join(@target, 'test/path/*')].size.should be(1)
Rake::Task.clear ; rm_rf @target
unzip(@target=>@targz).include('test/**/*').target.invoke
FileList[File.join(@target, 'test/path/*')].size.should be(2)
end
end
it 'should include with relative path' do
with_zip @files, :path=>'test/path' do
only = File.basename(@files.first)
unzip(@target=>@zip).tap { |unzip| unzip.from_path('test').include(only) }.target.invoke
FileList[File.join(@target, '*')].should be_empty
Rake::Task.clear ; rm_rf @target
unzip(@target=>@zip).tap { |unzip| unzip.from_path('test').include('test/*') }.target.invoke
FileList[File.join(@target, 'path/*')].should be_empty
Rake::Task.clear ; rm_rf @target
# NOTE(review): 'path/*' + only yields e.g. 'path/*Test1.txt'; '*' also
# matches the empty string so this matches 'path/Test1.txt', but
# 'path/' + only was presumably intended — TODO confirm.
unzip(@target=>@zip).tap { |unzip| unzip.from_path('test').include('path/*' + only) }.target.invoke
FileList[File.join(@target, 'path/*')].size.should be(1)
Rake::Task.clear ; rm_rf @target
unzip(@target=>@zip).tap { |unzip| unzip.from_path('test').include('path/*') }.target.invoke
FileList[File.join(@target, 'path/*')].size.should be(2)
end
end
it 'should include with relative path with a .tar.gz file' do
with_tar @files, :path=>'test/path' do
only = File.basename(@files.first)
unzip(@target=>@targz).tap { |unzip| unzip.from_path('test').include(only) }.target.invoke
FileList[File.join(@target, '*')].should be_empty
Rake::Task.clear ; rm_rf @target
unzip(@target=>@targz).tap { |unzip| unzip.from_path('test').include('test/*') }.target.invoke
FileList[File.join(@target, 'path/*')].should be_empty
Rake::Task.clear ; rm_rf @target
# NOTE(review): same 'path/*' + only pattern oddity as the zip variant above.
unzip(@target=>@targz).tap { |unzip| unzip.from_path('test').include('path/*' + only) }.target.invoke
FileList[File.join(@target, 'path/*')].size.should be(1)
Rake::Task.clear ; rm_rf @target
unzip(@target=>@targz).tap { |unzip| unzip.from_path('test').include('path/*') }.target.invoke
FileList[File.join(@target, 'path/*')].size.should be(2)
end
end
# --- from_path with exclusions, and the task's small fluent API ---
it 'should exclude with relative path' do
with_zip @files, :path=>'test' do
except = File.basename(@files.first)
unzip(@target=>@zip).tap { |unzip| unzip.from_path('test').exclude(except) }.target.invoke
FileList[File.join(@target, '*')].should include(File.join(@target, File.basename(@files[1])))
FileList[File.join(@target, '*')].size.should be(@files.size - 1)
end
end
it 'should exclude with relative path on a tar.gz file' do
with_tar @files, :path=>'test' do
except = File.basename(@files.first)
unzip(@target=>@targz).tap { |unzip| unzip.from_path('test').exclude(except) }.target.invoke
FileList[File.join(@target, '*')].should include(File.join(@target, File.basename(@files[1])))
FileList[File.join(@target, '*')].size.should be(@files.size - 1)
end
end
it "should handle relative paths without any includes or excludes" do
# Archive has src/ and lib/ roots; extracting from_path('lib') should yield
# only the two lib files.
lib_files = %w{Test3.so Test4.rb}.
map { |file| File.join(@dir, file) }.
each { |file| write file, content_for(file) }
zip(@zip).include(@files, :path => 'src').include(lib_files, :path => 'lib').invoke
unzip(@target=>@zip).tap { |unzip| unzip.from_path('lib') }.target.invoke
FileList[File.join(@target, '**/*')].should have(2).files
end
it "should handle relative paths without any includes or excludes with a tar.gz file" do
lib_files = %w{Test3.so Test4.rb}.
map { |file| File.join(@dir, file) }.
each { |file| write file, content_for(file) }
tar(@targz).include(@files, :path => 'src').include(lib_files, :path => 'lib').invoke
unzip(@target=>@targz).tap { |unzip| unzip.from_path('lib') }.target.invoke
FileList[File.join(@target, '**/*')].should have(2).files
end
it 'should return itself from root method' do
task = unzip(@target=>@zip)
task.root.should be(task)
task.from_path('foo').root.should be(task)
end
it 'should return target task from target method' do
task = unzip(@target=>@zip)
task.target.should be(file(@target))
task.from_path('foo').target.should be(file(@target))
end
it 'should alias from_path as path' do
task = unzip(@target=>@zip)
task.from_path('foo').should be(task.path('foo'))
end
end
cleanup
git-svn-id: d8f3215415546ce936cf3b822120ca56e5ebeaa0@1021594 13f79535-47bb-0310-9956-ffa450edef68
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with this
# work for additional information regarding copyright ownership. The ASF
# licenses this file to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helpers'))
describe 'ArchiveTask', :shared=>true do
# Fixture setup: two files with known content and two empty directories,
# all under the shared test/ dir.
before do
@dir = File.expand_path('test')
@files = %w{Test1.txt Text2.html}.map { |file| File.expand_path(file, @dir) }.
each { |file| write file, content_for(file) }
# Empty dirs let format-specific groups assert directory-entry handling.
@empty_dirs = %w{EmptyDir1 EmptyDir2}.map { |file| File.expand_path(file, @dir) }.
each { |file| mkdir file }
end
# Not too smart, we just create some content based on file name to make sure you read what you write.
# Maps a file path to deterministic content derived from its basename, so an
# example can check that archive reads match the original writes.
def content_for(file)
  format('Content for %s', File.basename(file))
end
# Create an archive not using the archive task, this way we do have a file in existence, but we don't
# have an already invoked task. Yield an archive task to the block which can use it to include files,
# set options, etc.
# Builds the archive under a temp name and renames it into place, so the file
# exists on disk but no already-invoked task points at it.  Yields the task
# first so callers can add includes/options before invocation.
def create_without_task
archive(@archive + '.tmp').tap do |task|
yield task if block_given?
task.invoke
# Rename so @archive exists without an invoked task of the same name.
mv task.name, @archive
end
end
# Creates a source zip of @files (named @archive + '.src') and yields the
# task to the block, typically so it can be merged into another archive.
def create_for_merge
zip(@archive + '.src').include(@files).tap do |task|
yield task
end
end
# --- Basic task identity, creation, and inclusion behavior ---
it 'should point to archive file' do
archive(@archive).name.should eql(@archive)
end
it 'should create file' do
lambda { archive(@archive).invoke }.should change { File.exist?(@archive) }.to(true)
end
it 'should create empty archive if no files included' do
archive(@archive).invoke
inspect_archive { |archive| archive.should be_empty }
end
it 'should raise error when include() is called with nil values' do
lambda { archive(@archive).include(nil) }.should raise_error
lambda { archive(@archive).include([nil]) }.should raise_error
end
it 'should create empty archive if called #clean method' do
archive(@archive).include(@files).clean.invoke
inspect_archive { |archive| archive.should be_empty }
end
it 'should archive all included files' do
archive(@archive).include(@files).invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
# And nothing more: entry count equals the number of included files.
inspect_archive.size.should eql(@files.size)
end
# --- Including file tasks, artifacts and project packages ---
it 'should archive file tasks' do
tasks = @files.map { |fn| file(fn) }
archive(@archive).include(tasks).invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
inspect_archive.size.should eql(@files.size)
end
it 'should invoke and archive file tasks' do
# The file task has an action; archiving must invoke it to produce the file.
file = file('included') { write 'included' }
lambda { archive(@archive).include(file).invoke }.should change { File.exist?(file.to_s) }.to(true)
inspect_archive.keys.should include('included')
end
it 'should archive artifacts' do
write 'library-1.0.txt', 'library-1.0'
artifact("org.example:library:txt:1.0").from 'library-1.0.txt'
# An artifact spec string is resolved and its backing file archived.
archive(@archive).include("org.example:library:txt:1.0").invoke
inspect_archive.keys.should include('library-1.0.txt')
end
it 'should archive project artifacts' do
define 'p1' do
project.version = '1.0'
package(:zip)
end
# Including a project pulls in its packaged artifacts.
archive(@archive).include(project('p1')).invoke
inspect_archive.keys.should include('p1-1.0.zip')
end
# --- Directory inclusion and exclusion patterns ---
it 'should include entry for directory' do
archive(@archive).include(@dir).invoke
# Directory contents are stored under the directory's basename ('test/').
inspect_archive { |archive| @files.each { |f| archive['test/' + File.basename(f)].should eql(content_for(f)) } }
end
it 'should not archive any excluded files' do
archive(@archive).include(@files).exclude(@files.last).invoke
inspect_archive do |archive|
archive.keys.should include(File.basename(@files.first))
archive.keys.should_not include(File.basename(@files.last))
end
end
it 'should not archive any excluded files in included directories' do
archive(@archive).include(@dir).exclude(@files.last).invoke
inspect_archive do |archive|
archive.keys.should include('test/' + File.basename(@files.first))
archive.keys.should_not include('test/' + File.basename(@files.last))
end
end
it 'should not archive any excluded files when using :from/:as' do
archive(@archive).include(:from=>@dir).exclude(@files.last).invoke
inspect_archive do |archive|
archive.keys.should include(File.basename(@files.first))
archive.keys.should_not include(File.basename(@files.last))
end
end
it 'should raise error when using :from with nil value' do
lambda {
archive(@archive).include(:from=>nil)
}.should raise_error
end
it 'should exclude entire directory and all its children' do
mkpath "#{@dir}/sub"
write "#{@dir}/sub/test"
archive(@archive).include(@dir).exclude("#{@dir}/sub").invoke
inspect_archive do |archive|
# Nothing under sub/ may appear in the archive.
archive.keys.select { |file| file =~ /sub/ }.should be_empty
end
end
it 'should not archive any excluded files when pattern is *.ext' do
write "test/file.txt"
write "test/file.swf"
archive(@archive).include(@dir).exclude('**/*.swf').invoke
inspect_archive do |archive|
archive.keys.should include('test/file.txt')
archive.keys.should_not include('test/file.swf')
end
end
it 'should archive files into specified path' do
archive(@archive).include(@files, :path=>'code').invoke
inspect_archive { |archive| @files.each { |f| archive['code/' + File.basename(f)].should eql(content_for(f)) } }
end
# NOTE(review): two examples that were byte-for-byte duplicates of
# 'should include entry for directory' and 'should archive files into
# specified path' (both already defined earlier in this shared example group)
# were removed here.  Duplicated examples add run time without adding
# coverage and produce ambiguous duplicate descriptions in reports.
# --- Path object semantics: '.', '..' and leading '/' normalize sensibly ---
it 'should archive directories into specified path' do
archive(@archive).include(@dir, :path=>'code').invoke
# The directory keeps its own name nested under the given path.
inspect_archive { |archive| @files.each { |f| archive['code/test/' + File.basename(f)].should eql(content_for(f)) } }
end
it 'should understand . in path' do
archive(@archive).path('.').should == archive(@archive).path('')
archive(@archive).path('foo').path('.').should == archive(@archive).path('foo')
end
it 'should understand .. in path' do
archive(@archive).path('..').should == archive(@archive).path('')
archive(@archive).path('foo').path('..').should == archive(@archive).path('')
archive(@archive).path('foo/bar').path('..').should == archive(@archive).path('foo')
end
it 'should understand leading / in path' do
archive(@archive).path('/').should == archive(@archive).path('')
archive(@archive).path('foo/bar').path('/').should == archive(@archive).path('')
end
# Including a single file with :as stores it under the given name instead of
# its basename.
it 'should archive file into specified name' do
  archive(@archive).include(@files.first, :as=>'test/sample').invoke
  # The original looped over @files while ignoring the block parameter,
  # asserting the identical expectation once per file; a single assertion
  # is equivalent.
  inspect_archive { |archive| archive['test/sample'].should eql(content_for(@files.first)) }
end
# :as=>'.' flattens a directory into the archive root; no literal '.' entry.
it 'should archive directory into specified alias, without using "."' do
archive(@archive).include(@dir, :as=>'.').invoke
inspect_archive { |archive| archive.keys.should_not include(".") }
end
# Aliasing a directory to its own name must not create a bare directory entry.
it 'should archive directories into specified alias, even if it has the same name' do
archive(@archive).include(@dir, :as=>File.basename(@dir)).invoke
inspect_archive { |archive|
archive.keys.should_not include "#{File.basename(@dir)}"
}
end
# :as nests under :path, producing path/<as-name>.
it 'should archive file into specified name/path' do
  archive(@archive).include(@files.first, :as=>'test/sample', :path=>'path').invoke
  # The original iterated @files while ignoring the block parameter; the
  # expectation is independent of the loop variable, so assert it once.
  inspect_archive { |archive| archive['path/test/sample'].should eql(content_for(@files.first)) }
end

# Hidden (dot-prefixed) files must not be skipped when archiving a directory.
it 'should archive files starting with dot' do
  write 'test/.config', '# configuration'
  archive(@archive).include('test').invoke
  # Same redundant @files.each loop removed here: one assertion suffices.
  inspect_archive { |archive| archive['test/.config'].should eql('# configuration') }
end
# --- Directory renaming and flattening via :as / :path combinations ---
it 'should archive directory into specified name' do
archive(@archive).include(@dir, :as=>'code').invoke
inspect_archive { |archive| @files.each { |f| archive['code/' + File.basename(f)].should eql(content_for(f)) } }
end
it 'should archive directory into specified name/path' do
archive(@archive).include(@dir, :as=>'code', :path=>'path').invoke
inspect_archive { |archive| @files.each { |f| archive['path/code/' + File.basename(f)].should eql(content_for(f)) } }
end
it 'should archive directory contents' do
# :as=>'.' puts the directory's contents at the archive root.
archive(@archive).include(@dir, :as=>'.').invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
end
it 'should archive directory contents into specified path' do
archive(@archive).include(@dir, :as=>'.', :path=>'path').invoke
inspect_archive { |archive| @files.each { |f| archive['path/' + File.basename(f)].should eql(content_for(f)) } }
end
it 'should not allow two files with the :as argument' do
# :as renames a single file; passing two is rejected with a clear message.
lambda { archive(@archive).include(@files.first, @files.last, :as=>'test/sample') }.should raise_error(RuntimeError, /one file/)
end
# --- Merging (expanding) another archive's contents into this one ---
it 'should expand another archive file' do
create_for_merge do |src|
archive(@archive).merge(src)
archive(@archive).invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
end
end
it 'should expand another archive file with include pattern' do
create_for_merge do |src|
# Only entries matching the include pattern are pulled from the source.
archive(@archive).merge(src).include(File.basename(@files.first))
archive(@archive).invoke
inspect_archive do |archive|
archive[File.basename(@files.first)].should eql(content_for(@files.first))
archive[File.basename(@files.last)].should be_nil
end
end
end
it 'should expand another archive file with exclude pattern' do
create_for_merge do |src|
archive(@archive).merge(src).exclude(File.basename(@files.first))
archive(@archive).invoke
inspect_archive do |archive|
@files[1..-1].each { |f| archive[File.basename(f)].should eql(content_for(f)) }
archive[File.basename(@files.first)].should be_nil
end
end
end
it 'should expand another archive file with nested exclude pattern' do
# Rebuild fixtures under test/foo so the exclusion 'test/*' covers everything.
@files = %w{Test1.txt Text2.html}.map { |file| File.join(@dir, "foo", file) }.
each { |file| write file, content_for(file) }
zip(@archive + '.src').include(@dir).tap do |task|
archive(@archive).merge(task).exclude('test/*')
archive(@archive).invoke
inspect_archive.should be_empty
end
end
it 'should expand another archive file into path' do
create_for_merge do |src|
# Merging via a path object nests the merged entries under that path.
archive(@archive).path('test').merge(src)
archive(@archive).invoke
inspect_archive { |archive| @files.each { |f| archive['test/' + File.basename(f)].should eql(content_for(f)) } }
end
end
it 'should expand another archive file into path with :path option' do
create_for_merge do |src|
archive(@archive).merge(src, :path=>'test')
archive(@archive).invoke
inspect_archive { |archive| @files.each { |f| archive['test/' + File.basename(f)].should eql(content_for(f)) } }
end
end
it "should expand another archive file into path with :path=>'/'" do
create_for_merge do |src|
# '/' means the archive root, same as no path.
archive(@archive).merge(src, :path=>'/')
archive(@archive).invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
end
end
it 'should expand another archive file into path with merge option' do
create_for_merge do |src|
# include(src, :merge=>true) is the alternative spelling of merge(src).
archive(@archive).include(src, :merge=>true)
archive(@archive).invoke
inspect_archive { |archive| @files.each { |f| archive[File.basename(f)].should eql(content_for(f)) } }
end
end
# --- Incremental update behavior (based on file mtimes) ---
it 'should update if one of the files is recent' do
create_without_task { |archive| archive.include(@files) }
# Touch archive file to some point in the past. This effectively makes
# all included files newer.
File.utime Time.now - 100, Time.now - 100, @archive
archive(@archive).include(@files).invoke
# A rebuild happened, so the archive's mtime is fresh.
File.stat(@archive).mtime.should be_close(Time.now, 10)
end
it 'should do nothing if all files are uptodate' do
create_without_task { |archive| archive.include(@files) }
# By touching all files in the past, there's nothing new to update.
(@files + [@archive]).each { |f| File.utime Time.now - 100, Time.now - 100, f }
archive(@archive).include(@files).invoke
# No rebuild: the archive keeps its back-dated mtime.
File.stat(@archive).mtime.should be_close(Time.now - 100, 10)
end
# Verifies that an update picks up *changed file content*, not just mtimes.
# Renamed: this example previously reused the description 'should update if
# one of the files is recent' verbatim (already used by an earlier example),
# which makes failure reports ambiguous.
it 'should update content if one of the files changed' do
  create_without_task { |archive| archive.include(@files) }
  # Change files, we expect to see new content.
  write @files.first, '/* Refreshed */'
  File.utime(Time.now - 100, Time.now - 100, @archive) # Touch archive file to some point in the past.
  archive(@archive).include(@files).invoke
  inspect_archive { |archive| archive[File.basename(@files.first)].should eql('/* Refreshed */') }
end
# Updating rebuilds from scratch: entries dropped from the include list
# disappear rather than lingering from the previous build.
it 'should create new archive when updating' do
create_without_task { |archive| archive.include(@files) }
File.utime(Time.now - 100, Time.now - 100, @archive) # Touch archive file to some point in the past.
archive(@archive).include(@files[1..-1]).invoke
inspect_archive.size.should be(@files.size - 1)
end
it 'should not accept invalid options' do
archive(@archive).include(@files)
lambda { archive(@archive).with :option=>true }.should raise_error
end
end
# Runs the shared ArchiveTask examples against the tar implementation.
describe TarTask do
it_should_behave_like 'ArchiveTask'
before { @archive = File.expand_path('test.tar') }
define_method(:archive) { |file| tar(file) }
# Reads the tar into a { name => content } hash; directory entries get a
# trailing slash so examples can assert on them distinctly.
def inspect_archive
entries = {}
Archive::Tar::Minitar.open @archive, 'r' do |reader|
reader.each { |entry| entries[entry.directory ? "#{entry.name}/" : entry.name] = entry.read }
end
yield entries if block_given?
entries
end
end
# Same shared examples, but the .tgz fixture exercises the gzip wrapping.
describe TarTask, ' gzipped' do
it_should_behave_like 'ArchiveTask'
before { @archive = File.expand_path('test.tgz') }
define_method(:archive) { |file| tar(file) }
# Like TarTask#inspect_archive, but unwraps the gzip layer first.
def inspect_archive
entries = {}
Zlib::GzipReader.open @archive do |gzip|
Archive::Tar::Minitar.open gzip, 'r' do |reader|
reader.each { |entry| entries[entry.directory ? "#{entry.name}/" : entry.name] = entry.read }
end
end
yield entries if block_given?
entries
end
end
# Runs the shared ArchiveTask examples against the zip implementation, plus
# zip-specific behavior (empty directory entries, permission preservation).
describe ZipTask do
  it_should_behave_like 'ArchiveTask'
  before { @archive = File.expand_path('test.zip') }
  define_method(:archive) { |file| zip(file) }

  after do
    check_zip(@archive)
  end

  # Check for possible corruption using Java's ZipInputStream, since it is
  # stricter than rubyzip.  (Renamed from checkZip to check_zip for Ruby's
  # snake_case convention; only used within this example group.)
  def check_zip(file)
    return unless File.exist?(file)
    zip = Java.java.util.zip.ZipInputStream.new(Java.java.io.FileInputStream.new(file))
    # Drain every entry: ZipInputStream raises on a corrupt archive.  The
    # original bound each entry to an unused local; only the side effect of
    # reading is needed.
    while zip.getNextEntry
    end
    zip.close
  end

  # Reads the zip into a { name => content } hash for assertions.
  def inspect_archive
    entries = {}
    Zip::ZipFile.open @archive do |zip|
      zip.entries.each do |entry|
        # Ignore the / directory created for empty ZIPs when using java.util.zip.
        entries[entry.to_s] = zip.read(entry) unless entry.to_s == '/'
      end
    end
    yield entries if block_given?
    entries
  end

  it 'should include empty dirs' do
    archive(@archive).include(@dir)
    archive(@archive).invoke
    inspect_archive do |archive|
      # Directory entries carry a trailing slash.
      archive.keys.should include('test/EmptyDir1/')
    end
  end

  it 'should include empty dirs from Dir' do
    archive(@archive).include(Dir["#{@dir}/*"])
    archive(@archive).invoke
    inspect_archive do |archive|
      archive.keys.should include('EmptyDir1/')
    end
  end

  it 'should work with path object' do
    archive(@archive).path('code').include(@files)
    archive(@archive).invoke
    inspect_archive { |archive| archive.keys.should include('code/') }
  end

  it 'should have path object that includes empty dirs' do
    archive(@archive).path('code').include(Dir["#{@dir}/*"])
    archive(@archive).invoke
    inspect_archive do |archive|
      archive.keys.should include('code/EmptyDir1/')
    end
  end

  # chmod is not reliable on Windows
  unless Buildr::Util.win_os?
    it 'should preserve file permissions' do
      # with JRuby it's important to use absolute paths with File.chmod()
      # http://jira.codehaus.org/browse/JRUBY-3300
      hello = File.expand_path('src/main/bin/hello')
      write hello, 'echo hi'
      File.chmod(0777, hello)
      fail("Failed to set permission on #{hello}") unless (File.stat(hello).mode & 0777) == 0777
      zip('foo.zip').include('src/main/bin/*').invoke
      unzip('target' => 'foo.zip').extract
      (File.stat('target/hello').mode & 0777).should == 0777
    end
  end
end
describe Unzip do
# Fixture setup: two source files with known content, plus paths for zip,
# .tar.gz and .tgz archives and the extraction target directory.
before do
@zip = File.expand_path('test.zip')
@dir = File.expand_path('test')
@files = %w{Test1.txt Text2.html}.map { |file| File.join(@dir, file) }.
each { |file| write file, content_for(file) }
@target = File.expand_path('target')
@targz = File.expand_path('test.tar.gz')
@targz2 = File.expand_path('test.tgz')
end
# Not too smart, we just create some content based on file name to
# make sure you read what you write.
# Produces deterministic, per-file fixture content keyed on the basename, so
# extraction results can be compared against what was originally written.
def content_for(file)
  ['Content for', File.basename(file)].join(' ')
end
# Builds a .tar.gz archive (default content: @files) then yields to the example.
def with_tar(*args)
tar(@targz).include(*args.empty? ? @files : args).invoke
yield
end
# Same as with_tar but targets the .tgz fixture path.
def with_tar_too(*args)
tar(@targz2).include(*args.empty? ? @files : args).invoke
yield
end
# Builds a zip archive (default content: @files) then yields to the example.
def with_zip(*args)
zip(@zip).include(*args.empty? ? @files : args).invoke
yield
end
# --- Basic extraction behavior across zip / .tar.gz / .tgz ---
it 'should touch target directory' do
with_zip do
mkdir @target
# Back-date the directory so extraction's touch is observable.
File.utime(Time.now - 10, Time.now - 10, @target)
unzip(@target=>@zip).target.invoke
end
File.stat(@target).mtime.should be_close(Time.now, 2)
end
it 'should expand files' do
with_zip do
unzip(@target=>@zip).target.invoke
@files.each { |f| File.read(File.join(@target, File.basename(f))).should eql(content_for(f)) }
end
end
it 'should expand files from a tar.gz file' do
with_tar do
unzip(@target=>@targz).target.invoke
@files.each { |f| File.read(File.join(@target, File.basename(f))).should eql(content_for(f)) }
end
end
it 'should expand files from a .tgz file' do
with_tar_too do
unzip(@target=>@targz2).target.invoke
@files.each { |f| File.read(File.join(@target, File.basename(f))).should eql(content_for(f)) }
end
end
it 'should expand all files' do
with_zip do
unzip(@target=>@zip).target.invoke
FileList[File.join(@target, '*')].size.should be(@files.size)
end
end
it 'should expand all files from a .tar.gz file' do
with_tar do
unzip(@target=>@targz).target.invoke
FileList[File.join(@target, '*')].size.should be(@files.size)
end
end
# --- include/exclude patterns restrict what gets extracted ---
it 'should expand only included files' do
with_zip do
only = File.basename(@files.first)
unzip(@target=>@zip).include(only).target.invoke
FileList[File.join(@target, '*')].should include(File.expand_path(only, @target))
FileList[File.join(@target, '*')].size.should be(1)
end
end
it 'should expand only included files from a .tar.gz file' do
with_tar do
only = File.basename(@files.first)
unzip(@target=>@targz).include(only).target.invoke
FileList[File.join(@target, '*')].should include(File.expand_path(only, @target))
FileList[File.join(@target, '*')].size.should be(1)
end
end
it 'should expand all but excluded files' do
with_zip do
except = File.basename(@files.first)
unzip(@target=>@zip).exclude(except).target.invoke
FileList[File.join(@target, '*')].should_not include(File.expand_path(except, @target))
FileList[File.join(@target, '*')].size.should be(@files.size - 1)
end
end
it 'should expand all but excluded files with a .tar.gz file' do
with_tar do
except = File.basename(@files.first)
unzip(@target=>@targz).exclude(except).target.invoke
FileList[File.join(@target, '*')].should_not include(File.expand_path(except, @target))
FileList[File.join(@target, '*')].size.should be(@files.size - 1)
end
end
it 'should include with nested path patterns' do
with_zip @files, :path=>'test/path' do
only = File.basename(@files.first)
unzip(@target=>@zip).include(only).target.invoke
FileList[File.join(@target, '*')].should be_empty
Rake::Task.clear ; rm_rf @target
unzip(@target=>@zip).include('test/path/' + only).target.invoke
FileList[File.join(@target, 'test/path/*')].size.should be(1)
Rake::Task.clear ; rm_rf @target
unzip(@target=>@zip).include('test/**/*').target.invoke
FileList[File.join(@target, 'test/path/*')].size.should be(2)
Rake::Task.clear ; rm_rf @target
unzip(@target=>@zip).include('test/*').target.invoke
FileList[File.join(@target, 'test/path/*')].size.should be(2)
end
end
it 'should include with nested path patterns with a .tar.gz file' do
with_tar @files, :path=>'test/path' do
only = File.basename(@files.first)
unzip(@target=>@targz).include(only).target.invoke
FileList[File.join(@target, '*')].should be_empty
Rake::Task.clear ; rm_rf @target
unzip(@target=>@targz).include('test/path/' + only).target.invoke
FileList[File.join(@target, 'test/path/*')].size.should be(1)
Rake::Task.clear ; rm_rf @target
unzip(@target=>@targz).include('test/**/*').target.invoke
FileList[File.join(@target, 'test/path/*')].size.should be(2)
end
end
it 'should include with relative path' do
with_zip @files, :path=>'test/path' do
only = File.basename(@files.first)
unzip(@target=>@zip).tap { |unzip| unzip.from_path('test').include(only) }.target.invoke
FileList[File.join(@target, '*')].should be_empty
Rake::Task.clear ; rm_rf @target
unzip(@target=>@zip).tap { |unzip| unzip.from_path('test').include('test/*') }.target.invoke
FileList[File.join(@target, 'path/*')].should be_empty
Rake::Task.clear ; rm_rf @target
unzip(@target=>@zip).tap { |unzip| unzip.from_path('test').include('path/*' + only) }.target.invoke
FileList[File.join(@target, 'path/*')].size.should be(1)
Rake::Task.clear ; rm_rf @target
unzip(@target=>@zip).tap { |unzip| unzip.from_path('test').include('path/*') }.target.invoke
FileList[File.join(@target, 'path/*')].size.should be(2)
end
end
it 'should include with relative path with a .tar.gz file' do
with_tar @files, :path=>'test/path' do
only = File.basename(@files.first)
unzip(@target=>@targz).tap { |unzip| unzip.from_path('test').include(only) }.target.invoke
FileList[File.join(@target, '*')].should be_empty
Rake::Task.clear ; rm_rf @target
unzip(@target=>@targz).tap { |unzip| unzip.from_path('test').include('test/*') }.target.invoke
FileList[File.join(@target, 'path/*')].should be_empty
Rake::Task.clear ; rm_rf @target
unzip(@target=>@targz).tap { |unzip| unzip.from_path('test').include('path/*' + only) }.target.invoke
FileList[File.join(@target, 'path/*')].size.should be(1)
Rake::Task.clear ; rm_rf @target
unzip(@target=>@targz).tap { |unzip| unzip.from_path('test').include('path/*') }.target.invoke
FileList[File.join(@target, 'path/*')].size.should be(2)
end
end
it 'should exclude with relative path' do
with_zip @files, :path=>'test' do
except = File.basename(@files.first)
unzip(@target=>@zip).tap { |unzip| unzip.from_path('test').exclude(except) }.target.invoke
FileList[File.join(@target, '*')].should include(File.join(@target, File.basename(@files[1])))
FileList[File.join(@target, '*')].size.should be(@files.size - 1)
end
end
it 'should exclude with relative path on a tar.gz file' do
with_tar @files, :path=>'test' do
except = File.basename(@files.first)
unzip(@target=>@targz).tap { |unzip| unzip.from_path('test').exclude(except) }.target.invoke
FileList[File.join(@target, '*')].should include(File.join(@target, File.basename(@files[1])))
FileList[File.join(@target, '*')].size.should be(@files.size - 1)
end
end
it "should handle relative paths without any includes or excludes" do
lib_files = %w{Test3.so Test4.rb}.
map { |file| File.join(@dir, file) }.
each { |file| write file, content_for(file) }
zip(@zip).include(@files, :path => 'src').include(lib_files, :path => 'lib').invoke
unzip(@target=>@zip).tap { |unzip| unzip.from_path('lib') }.target.invoke
FileList[File.join(@target, '**/*')].should have(2).files
end
it "should handle relative paths without any includes or excludes with a tar.gz file" do
lib_files = %w{Test3.so Test4.rb}.
map { |file| File.join(@dir, file) }.
each { |file| write file, content_for(file) }
tar(@targz).include(@files, :path => 'src').include(lib_files, :path => 'lib').invoke
unzip(@target=>@targz).tap { |unzip| unzip.from_path('lib') }.target.invoke
FileList[File.join(@target, '**/*')].should have(2).files
end
it 'should return itself from root method' do
task = unzip(@target=>@zip)
task.root.should be(task)
task.from_path('foo').root.should be(task)
end
it 'should return target task from target method' do
task = unzip(@target=>@zip)
task.target.should be(file(@target))
task.from_path('foo').target.should be(file(@target))
end
it 'should alias from_path as path' do
task = unzip(@target=>@zip)
task.from_path('foo').should be(task.path('foo'))
end
end
|
require_relative '../spec_helper'

# Specs for Plotrb::Transform. The constructor dispatches to a per-type
# setup method, so each nested describe exercises one transform type's
# attribute DSL. Examples without a block are pending placeholders.
describe 'Transform' do
  describe '#initialize' do
    subject { Plotrb::Transform }
    it 'calls respective type method to initialize attributes' do
      # Stub #send so the unknown type :foo does not raise here.
      subject.any_instance.should_receive(:send).with(:foo)
      subject.new(:foo)
    end
    it 'raises error if type is not recognized' do
      expect { subject.new(:foo) }.to raise_error(NoMethodError)
    end
  end

  describe '#array' do
    subject { Plotrb::Transform.new(:array) }
    it 'responds to #take' do
      subject.take('foo_field', 'bar_field')
      subject.fields.should match_array(['foo_field', 'bar_field'])
    end
  end

  describe '#copy' do
    subject { Plotrb::Transform.new(:copy) }
    it 'responds to #take' do
      subject.take('foo_field', 'bar_field').from('some_data').as('foo', 'bar')
      subject.fields.should match_array(['foo_field', 'bar_field'])
    end
    it 'raises error if as and fields are of different size'
  end

  describe '#cross' do
    subject { Plotrb::Transform.new(:cross) }
    it 'raises error if the secondary data does not exist'
  end

  describe '#facet' do
    subject { Plotrb::Transform.new(:facet) }
    it 'responds to #group_by' do
      subject.group_by('foo', 'bar')
      subject.keys.should match_array(['foo', 'bar'])
    end
  end

  describe '#filter' do
    subject { Plotrb::Transform.new(:filter) }
    it 'adds variable d if not present in the test expression'
  end

  describe '#flatten' do
    subject { Plotrb::Transform.new(:flatten) }
  end

  describe '#fold' do
    subject { Plotrb::Transform.new(:fold) }
    it 'responds to #into' do
      subject.into('foo', 'bar')
      subject.fields.should match_array(['foo', 'bar'])
    end
  end

  describe '#formula' do
    subject { Plotrb::Transform.new(:formula) }
    it 'responds to #apply and #into' do
      subject.apply('some_expression').into('some_field')
      subject.field.should == 'some_field'
      subject.expr.should == 'some_expression'
    end
  end

  describe '#slice' do
    subject { Plotrb::Transform.new(:slice) }
    it 'slices by a single value'
    it 'slices by a range'
    it 'slices by special positions'
    it 'raises error otherwise'
  end

  describe '#sort' do
    subject { Plotrb::Transform.new(:sort) }
    it 'adds - in front for reverse sort'
  end

  describe '#stats' do
    subject { Plotrb::Transform.new(:stats) }
    it 'responds to #from, #include_median, and #store_stats' do
      subject.from('foo').include_median.store_stats
      subject.value.should == 'foo'
      subject.median.should be_true
      subject.assign.should be_true
    end
  end

  describe '#truncate' do
    subject { Plotrb::Transform.new(:truncate) }
    it 'responds to #from, #to, and #max_length' do
      subject.from('foo').to('bar').max_length(5)
      subject.value.should == 'foo'
      subject.output.should == 'bar'
      subject.limit.should == 5
    end
    it 'responds to #in_position' do
      subject.in_front
      subject.position.should == :front
    end
  end

  describe '#unique' do
    subject { Plotrb::Transform.new(:unique) }
    it 'responds to #from and #to' do
      subject.from('foo').to('bar')
      subject.field.should == 'foo'
      subject.as.should == 'bar'
    end
  end

  describe '#window' do
    subject { Plotrb::Transform.new(:window) }
  end

  describe '#zip' do
    subject { Plotrb::Transform.new(:zip) }
    it 'responds to #match and #against' do
      subject.with('foo').as('bar').match('foo_field').against('bar_field')
      subject.key.should == 'foo_field'
      subject.with_key.should == 'bar_field'
    end
  end

  describe '#force' do
    subject { Plotrb::Transform.new(:force) }
  end

  describe '#geo' do
    subject { Plotrb::Transform.new(:geo) }
  end

  describe '#geopath' do
    subject { Plotrb::Transform.new(:geopath) }
  end

  describe '#link' do
    subject { Plotrb::Transform.new(:link) }
  end

  describe '#pie' do
    subject { Plotrb::Transform.new(:pie) }
  end

  describe '#stack' do
    subject { Plotrb::Transform.new(:stack) }
  end

  describe '#treemap' do
    subject { Plotrb::Transform.new(:treemap) }
  end

  describe '#wordcloud' do
    subject { Plotrb::Transform.new(:wordcloud) }
  end
end
Pass tests for Transform.
require_relative '../spec_helper'

# Specs for Plotrb::Transform. Field-name normalization (prefixing with
# "data.") happens in private process_* hooks, which these examples invoke
# directly via #send. Examples without a block are pending placeholders.
describe 'Transform' do
  describe '#initialize' do
    subject { Plotrb::Transform }
    it 'calls respective type method to initialize attributes' do
      # Stub #send so the unknown type :foo does not raise here.
      subject.any_instance.should_receive(:send).with(:foo)
      subject.new(:foo)
    end
    it 'raises error if type is not recognized' do
      expect { subject.new(:foo) }.to raise_error(NoMethodError)
    end
  end

  describe '#array' do
    subject { Plotrb::Transform.new(:array) }
    it 'responds to #take' do
      subject.take('foo', 'bar')
      # process_array_fields prefixes plain field names with "data.".
      subject.send(:process_array_fields)
      subject.fields.should match_array(['data.foo', 'data.bar'])
    end
  end

  describe '#copy' do
    subject { Plotrb::Transform.new(:copy) }
    it 'responds to #take' do
      subject.take('foo_field', 'bar_field').from('some_data').as('foo', 'bar')
      subject.fields.should match_array(['foo_field', 'bar_field'])
    end
    it 'raises error if as and fields are of different size' do
      subject.take('foo', 'bar').from('data').as('baz')
      expect { subject.send(:process_copy_as) }.to raise_error(ArgumentError)
    end
  end

  describe '#cross' do
    subject { Plotrb::Transform.new(:cross) }
    it 'raises error if the secondary data does not exist' do
      subject.with('foo')
      # Simulate a lookup miss for the secondary data set.
      ::Plotrb::Kernel.stub(:find_data).and_return(nil)
      expect { subject.send(:process_cross_with) }.to raise_error(ArgumentError)
    end
  end

  describe '#facet' do
    subject { Plotrb::Transform.new(:facet) }
    it 'responds to #group_by' do
      subject.group_by('foo', 'bar')
      subject.send(:process_facet_keys)
      subject.keys.should match_array(['data.foo', 'data.bar'])
    end
  end

  describe '#filter' do
    subject { Plotrb::Transform.new(:filter) }
    it 'adds variable d if not present in the test expression' do
      subject.test('x>10')
      expect { subject.send(:process_filter_test) }.to raise_error(ArgumentError)
    end
  end

  describe '#flatten' do
    subject { Plotrb::Transform.new(:flatten) }
  end

  describe '#fold' do
    subject { Plotrb::Transform.new(:fold) }
    it 'responds to #into' do
      subject.into('foo', 'bar')
      subject.send(:process_fold_fields)
      subject.fields.should match_array(['data.foo', 'data.bar'])
    end
  end

  describe '#formula' do
    subject { Plotrb::Transform.new(:formula) }
    it 'responds to #apply and #into' do
      subject.apply('some_expression').into('some_field')
      subject.field.should == 'some_field'
      subject.expr.should == 'some_expression'
    end
  end

  describe '#slice' do
    subject { Plotrb::Transform.new(:slice) }
    it 'slices by a single value'
    it 'slices by a range'
    it 'slices by special positions'
    it 'raises error otherwise'
  end

  describe '#sort' do
    subject { Plotrb::Transform.new(:sort) }
    it 'adds - in front for reverse sort'
  end

  describe '#stats' do
    subject { Plotrb::Transform.new(:stats) }
    it 'responds to #from, #include_median, and #store_stats' do
      subject.from('foo').include_median.store_stats
      subject.value.should == 'foo'
      subject.median.should be_true
      subject.assign.should be_true
    end
  end

  describe '#truncate' do
    subject { Plotrb::Transform.new(:truncate) }
    it 'responds to #from, #to, and #max_length' do
      subject.from('foo').to('bar').max_length(5)
      subject.send(:process_truncate_value)
      subject.value.should == 'data.foo'
      subject.output.should == 'bar'
      subject.limit.should == 5
    end
    it 'responds to #in_position' do
      subject.in_front
      subject.position.should == :front
    end
  end

  describe '#unique' do
    subject { Plotrb::Transform.new(:unique) }
    it 'responds to #from and #to' do
      subject.from('foo').to('bar')
      subject.send(:process_unique_field)
      subject.field.should == 'data.foo'
      subject.as.should == 'bar'
    end
  end

  describe '#window' do
    subject { Plotrb::Transform.new(:window) }
  end

  describe '#zip' do
    subject { Plotrb::Transform.new(:zip) }
    it 'responds to #match and #against' do
      subject.with('foo').as('bar').match('foo_field').against('bar_field')
      subject.send(:process_zip_key)
      subject.send(:process_zip_with_key)
      subject.key.should == 'data.foo_field'
      subject.with_key.should == 'data.bar_field'
    end
  end

  describe '#force' do
    subject { Plotrb::Transform.new(:force) }
  end

  describe '#geo' do
    subject { Plotrb::Transform.new(:geo) }
  end

  describe '#geopath' do
    subject { Plotrb::Transform.new(:geopath) }
  end

  describe '#link' do
    subject { Plotrb::Transform.new(:link) }
  end

  describe '#pie' do
    subject { Plotrb::Transform.new(:pie) }
  end

  describe '#stack' do
    subject { Plotrb::Transform.new(:stack) }
  end

  describe '#treemap' do
    subject { Plotrb::Transform.new(:treemap) }
  end

  describe '#wordcloud' do
    subject { Plotrb::Transform.new(:wordcloud) }
  end
end
|
require 'rails_helper'

# Request specs for the V1 sign-in endpoint (POST /v1/sign_in).
# A valid email/password pair returns 200 with the user's access token;
# any invalid credential returns 422 with a generic error message.
RSpec.describe "V1::Sessions", type: :request do
  describe "POST /v1/sign_in" do
    before { @user = FactoryGirl.create(:confirmed_user) }

    context "when resource is valid" do
      it "responds with 200" do
        post "/v1/sign_in", "user[email]" => @user.email, "user[password]" => @user.password
        expect(response).to have_http_status(200)
        body = JSON.parse(response.body)
        expect(body["access_token"]).to eq(@user.access_token)
      end
    end

    context "when resource has invalid email" do
      # Fixed typo in the example description: "reponds" -> "responds".
      it "responds with 422" do
        post "/v1/sign_in", "user[email]" => "trex", "user[password]" => @user.password
        expect(response).to have_http_status(422)
        body = JSON.parse(response.body)
        # Error message is deliberately generic (no credential leakage).
        expect(body["error"]).to eq("Invalid login attempt")
      end
    end

    context "when resource has invalid password" do
      # Fixed typo in the example description: "reponds" -> "responds".
      it "responds with 422" do
        post "/v1/sign_in", "user[email]" => @user.email, "user[password]" => "trex"
        expect(response).to have_http_status(422)
        body = JSON.parse(response.body)
        expect(body["error"]).to eq("Invalid login attempt")
      end
    end
  end
end
Fix typo in spec descriptions: changed "reponds" to "responds".
require 'rails_helper'

# Request specs for the V1 sign-in endpoint (POST /v1/sign_in).
# A valid email/password pair returns 200 with the user's access token;
# any invalid credential returns 422 with a generic error message.
RSpec.describe "V1::Sessions", type: :request do
  describe "POST /v1/sign_in" do
    before { @user = FactoryGirl.create(:confirmed_user) }

    context "when resource is valid" do
      it "responds with 200" do
        post "/v1/sign_in", "user[email]" => @user.email, "user[password]" => @user.password
        expect(response).to have_http_status(200)
        body = JSON.parse(response.body)
        # Successful sign-in returns the user's access token in the body.
        expect(body["access_token"]).to eq(@user.access_token)
      end
    end

    context "when resource has invalid email" do
      it "responds with 422" do
        post "/v1/sign_in", "user[email]" => "trex", "user[password]" => @user.password
        expect(response).to have_http_status(422)
        body = JSON.parse(response.body)
        # Error message is deliberately generic (no credential leakage).
        expect(body["error"]).to eq("Invalid login attempt")
      end
    end

    context "when resource has invalid password" do
      it "responds with 422" do
        post "/v1/sign_in", "user[email]" => @user.email, "user[password]" => "trex"
        expect(response).to have_http_status(422)
        body = JSON.parse(response.body)
        expect(body["error"]).to eq("Invalid login attempt")
      end
    end
  end
end
|
require File.expand_path(File.dirname(__FILE__) + '/common')

# Selenium specs for the legacy calendar page, covering event CRUD in the
# teacher view and read-only behavior in the student view.
describe "calendar" do
  it_should_behave_like "in-process server selenium tests"

  # Navigate to /calendar and wait for its AJAX requests to settle.
  def go_to_calendar
    get "/calendar"
    wait_for_ajaximations
  end

  context "teacher view" do
    before (:each) do
      course_with_teacher_logged_in
    end

    it "should create an event" do
      new_event_name = 'new event'
      go_to_calendar
      # Click today's cell to open the quick-add form.
      f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day").click
      replace_content(f('#calendar_event_title'), new_event_name)
      submit_form('#edit_calendar_event_form')
      wait_for_ajaximations
      f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day").should include_text(new_event_name)
      CalendarEvent.find_by_title(new_event_name).should be_present
    end

    it "should edit an event" do
      edit_name = 'edited cal title'
      calendar_event_model(:title => "new event", :start_at => Time.now)
      go_to_calendar
      f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day .calendar_event").click
      f('.edit_event_link').click
      replace_content(f('#calendar_event_title'), edit_name)
      submit_form('#edit_calendar_event_form')
      wait_for_ajaximations
      CalendarEvent.find_by_title(edit_name).should be_present
    end

    it "should delete an event" do
      event_title = 'new event'
      calendar_event_model(:title => event_title, :start_at => Time.now)
      go_to_calendar
      # Retried: the delete link/alert can be flaky right after page load.
      keep_trying_until do
        f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day .calendar_event").click
        f('.delete_event_link').click
        driver.switch_to.alert.should_not be nil
        driver.switch_to.alert.accept
        true
      end
      wait_for_ajaximations
      keep_trying_until do
        f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day").should_not include_text(event_title)
        # Deletion is soft: the record stays but moves to 'deleted'.
        CalendarEvent.find_by_title(event_title).workflow_state.should == 'deleted'
      end
    end

    it "should view the full details of an event" do
      event_title = 'new event'
      calendar_event_model(:title => event_title, :start_at => Time.now)
      go_to_calendar
      f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day .calendar_event").click
      expect_new_page_load { f('.view_event_link').click }
      f('#full_calendar_event .title').should include_text(event_title)
    end

    it "should drag and drop an event" do
      pending('drag and drop not working correctly')
      event_title = 'new event'
      calendar_event_model(:title => event_title, :start_at => Time.now)
      go_to_calendar
      event_date = f("##{Time.now.strftime("day_%Y_%m_%d")}").attribute(:id)
      # The cell id embeds the date as day_YYYY_MM_DD; chars 4..13 hold it.
      parsed_event_date = Date.parse(event_date[4..13].gsub('_', '/'))
      driver.action.drag_and_drop_by(f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day .calendar_event"), f("##{(parsed_event_date - 1.day).strftime("day_%Y_%m_%d")} .calendar_day")).perform
      wait_for_ajaximations
      f("##{(parsed_event_date - 1.day).strftime("day_%Y_%m_%d")} .calendar_day").should include_text(event_title)
    end

    it "should view undated events" do
      event_title = 'undated event'
      calendar_event_model(:title => event_title)
      go_to_calendar
      undated_link = f('.show_undated_link')
      keep_trying_until { undated_link.text.should == 'show 1 undated events' }
      undated_link.click
      wait_for_ajaximations
      f('.calendar_undated').should include_text(event_title)
    end

    it "should validate the ical feed display" do
      go_to_calendar
      f('.calendar_feed_link').click
      f('#calendar_feed_box').should be_displayed
    end

    it "should show the wiki sidebar when looking at the full event page" do
      go_to_calendar
      f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day").click
      click_option('#edit_calendar_event_form .context_id', "course_#{@course.id}", :value)
      expect_new_page_load { f("#edit_calendar_event_form .more_options_link").click }
      keep_trying_until { f("#editor_tabs").should be_displayed }
    end

    it "should only display events for selected course" do
      @course.name = 'first course'
      @course.save!
      due_date = Time.now.utc
      first_assignment = @course.assignments.create(:name => 'first assignment', :due_at => due_date)
      first_course = @course
      student = @user
      # Set up a second active course with its own assignment on the same day.
      second_course = course_model({:name => 'second course'})
      second_course.offer!
      enrollment = second_course.enroll_student(student)
      enrollment.workflow_state = 'active'
      enrollment.save!
      second_course.reload
      second_assignment = second_course.assignments.create(:name => 'second assignment', :due_at => due_date)
      go_to_calendar
      #verify both assignments are visible
      unless is_checked("#group_course_#{first_course.id}")
        f("#group_course_#{first_course.id}").click
      end
      unless is_checked("#group_course_#{second_course.id}")
        f("#group_course_#{second_course.id}").click
      end
      date_holder_id = "day_#{due_date.year}_#{due_date.strftime('%m')}_#{due_date.strftime('%d')}"
      f("##{date_holder_id} #event_assignment_#{first_assignment.id}").should be_displayed
      f("##{date_holder_id} #event_assignment_#{second_assignment.id}").should be_displayed
      #verify first assignment is visible and not the second
      f("#group_course_#{second_course.id}").click
      f("##{date_holder_id} #event_assignment_#{first_assignment.id}").should be_displayed
      f("##{date_holder_id} #event_assignment_#{second_assignment.id}").should_not be_displayed
    end
  end

  context "student view" do
    before (:each) do
      course_with_student_logged_in
    end

    it "should not show students the description of an assignment that is locked" do
      assignment = @course.assignments.create(:name => "locked assignment", :description => "this is secret", :due_at => Time.now + 2.days, :unlock_at => Time.now + 1.day)
      assignment.locked_for?(@user).should_not be_nil
      go_to_calendar
      f("#event_assignment_#{assignment.id}").click
      wait_for_ajax_requests
      details = f("#event_details")
      details.find_element(:css, ".description").should_not include_text('secret')
      details.find_element(:css, ".lock_explanation").should include_text("This assignment is locked")
    end

    it "should allow flipping through months" do
      go_to_calendar
      month_name = f(".calendar_month .month_name").text
      # One month back, then two forward: both should differ from the start.
      f(".calendar_month .prev_month_link").click
      wait_for_ajax_requests
      f(".calendar_month .month_name").text.should_not == month_name
      f(".calendar_month .next_month_link").click
      wait_for_ajax_requests
      f(".calendar_month .next_month_link").click
      wait_for_ajax_requests
      f(".calendar_month .month_name").text.should_not == month_name
    end

    it "should navigate the mini calendar" do
      go_to_calendar
      current_month_name = f('.mini-cal-month-and-year .month_name').text
      f('.mini-cal-header .next_month_link').click
      f('.mini-cal-month-and-year .month_name').text.should_not == current_month_name
      f('.mini-cal-header .prev_month_link').click
      fj('.mini-cal-month-and-year .month_name').text.should == current_month_name #fj to avoid selenium caching
    end

    it "should navigate the main calendar when the mini calendar is navigated" do
      go_to_calendar
      f('.mini-cal-header .next_month_link').click
      ff('.mini_calendar_day .day_number')[10].click
      keep_trying_until { f('.calendar_month .month_name').text.should == f('.mini-cal-month-and-year .month_name').text }
    end

    it "should open an event dialog on calendar from URL" do
      event_title = 'Test Event 123'
      start_time = 3.months.ago
      end_time = start_time + 1.hour
      calendar_event_model(:title => event_title, :start_at => start_time, :end_at => end_time)
      # Deep-link directly to the event; the dialog should open on load.
      get "/calendar?event_id=#{@event.id}&include_contexts=course_#{@course.id}"
      wait_for_ajax_requests
      keep_trying_until { fj(".ui-dialog").should be_displayed } #using fj to bypass selenium cache
      fj(".ui-dialog .title").text.should == event_title
    end
  end
end
spec: pending should delete an event in cal1
Change-Id: I6ec451fe6807da26cfacc4b0e9b41ad47267dc2b
Reviewed-on: https://gerrit.instructure.com/27311
Product-Review: Kyle Rosenbaum <21fbfd7cc2c11bcaa808c4b1a250e52398b19668@instructure.com>
Reviewed-by: Kyle Rosenbaum <21fbfd7cc2c11bcaa808c4b1a250e52398b19668@instructure.com>
QA-Review: Kyle Rosenbaum <21fbfd7cc2c11bcaa808c4b1a250e52398b19668@instructure.com>
Tested-by: Jenkins <d95b56ce41a2e1ac4cecdd398defd7414407cc08@instructure.com>
require File.expand_path(File.dirname(__FILE__) + '/common')

# Selenium specs for the legacy calendar page, covering event CRUD in the
# teacher view and read-only behavior in the student view.
describe "calendar" do
  it_should_behave_like "in-process server selenium tests"

  # Navigate to /calendar and wait for its AJAX requests to settle.
  def go_to_calendar
    get "/calendar"
    wait_for_ajaximations
  end

  context "teacher view" do
    before (:each) do
      course_with_teacher_logged_in
    end

    it "should create an event" do
      new_event_name = 'new event'
      go_to_calendar
      # Click today's cell to open the quick-add form.
      f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day").click
      replace_content(f('#calendar_event_title'), new_event_name)
      submit_form('#edit_calendar_event_form')
      wait_for_ajaximations
      f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day").should include_text(new_event_name)
      CalendarEvent.find_by_title(new_event_name).should be_present
    end

    it "should edit an event" do
      edit_name = 'edited cal title'
      calendar_event_model(:title => "new event", :start_at => Time.now)
      go_to_calendar
      f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day .calendar_event").click
      f('.edit_event_link').click
      replace_content(f('#calendar_event_title'), edit_name)
      submit_form('#edit_calendar_event_form')
      wait_for_ajaximations
      CalendarEvent.find_by_title(edit_name).should be_present
    end

    it "should delete an event" do
      # Marked pending until the client-side failure on delete is resolved.
      pending('js failure when calendar event is deleted')
      event_title = 'new event'
      calendar_event_model(:title => event_title, :start_at => Time.now)
      go_to_calendar
      f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day .calendar_event").click
      f('.delete_event_link').click
      driver.switch_to.alert.should_not be nil
      accept_alert
      wait_for_ajax_requests
      keep_trying_until do
        f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day").should_not include_text(event_title)
        # Deletion is soft: the record stays but moves to 'deleted'.
        CalendarEvent.find_by_title(event_title).workflow_state.should == 'deleted'
      end
    end

    it "should view the full details of an event" do
      event_title = 'new event'
      calendar_event_model(:title => event_title, :start_at => Time.now)
      go_to_calendar
      f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day .calendar_event").click
      expect_new_page_load { f('.view_event_link').click }
      f('#full_calendar_event .title').should include_text(event_title)
    end

    it "should drag and drop an event" do
      pending('drag and drop not working correctly')
      event_title = 'new event'
      calendar_event_model(:title => event_title, :start_at => Time.now)
      go_to_calendar
      event_date = f("##{Time.now.strftime("day_%Y_%m_%d")}").attribute(:id)
      # The cell id embeds the date as day_YYYY_MM_DD; chars 4..13 hold it.
      parsed_event_date = Date.parse(event_date[4..13].gsub('_', '/'))
      driver.action.drag_and_drop_by(f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day .calendar_event"), f("##{(parsed_event_date - 1.day).strftime("day_%Y_%m_%d")} .calendar_day")).perform
      wait_for_ajaximations
      f("##{(parsed_event_date - 1.day).strftime("day_%Y_%m_%d")} .calendar_day").should include_text(event_title)
    end

    it "should view undated events" do
      event_title = 'undated event'
      calendar_event_model(:title => event_title)
      go_to_calendar
      undated_link = f('.show_undated_link')
      keep_trying_until { undated_link.text.should == 'show 1 undated events' }
      undated_link.click
      wait_for_ajaximations
      f('.calendar_undated').should include_text(event_title)
    end

    it "should validate the ical feed display" do
      go_to_calendar
      f('.calendar_feed_link').click
      f('#calendar_feed_box').should be_displayed
    end

    it "should show the wiki sidebar when looking at the full event page" do
      go_to_calendar
      f("##{Time.now.strftime("day_%Y_%m_%d")} .calendar_day").click
      click_option('#edit_calendar_event_form .context_id', "course_#{@course.id}", :value)
      expect_new_page_load { f("#edit_calendar_event_form .more_options_link").click }
      keep_trying_until { f("#editor_tabs").should be_displayed }
    end

    it "should only display events for selected course" do
      @course.name = 'first course'
      @course.save!
      due_date = Time.now.utc
      first_assignment = @course.assignments.create(:name => 'first assignment', :due_at => due_date)
      first_course = @course
      student = @user
      # Set up a second active course with its own assignment on the same day.
      second_course = course_model({:name => 'second course'})
      second_course.offer!
      enrollment = second_course.enroll_student(student)
      enrollment.workflow_state = 'active'
      enrollment.save!
      second_course.reload
      second_assignment = second_course.assignments.create(:name => 'second assignment', :due_at => due_date)
      go_to_calendar
      #verify both assignments are visible
      unless is_checked("#group_course_#{first_course.id}")
        f("#group_course_#{first_course.id}").click
      end
      unless is_checked("#group_course_#{second_course.id}")
        f("#group_course_#{second_course.id}").click
      end
      date_holder_id = "day_#{due_date.year}_#{due_date.strftime('%m')}_#{due_date.strftime('%d')}"
      f("##{date_holder_id} #event_assignment_#{first_assignment.id}").should be_displayed
      f("##{date_holder_id} #event_assignment_#{second_assignment.id}").should be_displayed
      #verify first assignment is visible and not the second
      f("#group_course_#{second_course.id}").click
      f("##{date_holder_id} #event_assignment_#{first_assignment.id}").should be_displayed
      f("##{date_holder_id} #event_assignment_#{second_assignment.id}").should_not be_displayed
    end
  end

  context "student view" do
    before (:each) do
      course_with_student_logged_in
    end

    it "should not show students the description of an assignment that is locked" do
      assignment = @course.assignments.create(:name => "locked assignment", :description => "this is secret", :due_at => Time.now + 2.days, :unlock_at => Time.now + 1.day)
      assignment.locked_for?(@user).should_not be_nil
      go_to_calendar
      f("#event_assignment_#{assignment.id}").click
      wait_for_ajax_requests
      details = f("#event_details")
      details.find_element(:css, ".description").should_not include_text('secret')
      details.find_element(:css, ".lock_explanation").should include_text("This assignment is locked")
    end

    it "should allow flipping through months" do
      go_to_calendar
      month_name = f(".calendar_month .month_name").text
      # One month back, then two forward: both should differ from the start.
      f(".calendar_month .prev_month_link").click
      wait_for_ajax_requests
      f(".calendar_month .month_name").text.should_not == month_name
      f(".calendar_month .next_month_link").click
      wait_for_ajax_requests
      f(".calendar_month .next_month_link").click
      wait_for_ajax_requests
      f(".calendar_month .month_name").text.should_not == month_name
    end

    it "should navigate the mini calendar" do
      go_to_calendar
      current_month_name = f('.mini-cal-month-and-year .month_name').text
      f('.mini-cal-header .next_month_link').click
      f('.mini-cal-month-and-year .month_name').text.should_not == current_month_name
      f('.mini-cal-header .prev_month_link').click
      fj('.mini-cal-month-and-year .month_name').text.should == current_month_name #fj to avoid selenium caching
    end

    it "should navigate the main calendar when the mini calendar is navigated" do
      go_to_calendar
      f('.mini-cal-header .next_month_link').click
      ff('.mini_calendar_day .day_number')[10].click
      keep_trying_until { f('.calendar_month .month_name').text.should == f('.mini-cal-month-and-year .month_name').text }
    end

    it "should open an event dialog on calendar from URL" do
      event_title = 'Test Event 123'
      start_time = 3.months.ago
      end_time = start_time + 1.hour
      calendar_event_model(:title => event_title, :start_at => start_time, :end_at => end_time)
      # Deep-link directly to the event; the dialog should open on load.
      get "/calendar?event_id=#{@event.id}&include_contexts=course_#{@course.id}"
      wait_for_ajax_requests
      keep_trying_until { fj(".ui-dialog").should be_displayed } #using fj to bypass selenium cache
      fj(".ui-dialog .title").text.should == event_title
    end
  end
end
# Extensions namespace
module Middleman
  module Extensions
    # Automatic Image Sizes extension
    module AutomaticImageSizes
      # Setup extension
      class << self
        # Once registered
        def registered(app)
          # Include 3rd-party fastimage library
          require "middleman-more/extensions/automatic_image_sizes/fastimage"
          # Include methods
          app.send :include, InstanceMethods
        end
        alias :included :registered
      end

      # Automatic Image Sizes Instance Methods
      module InstanceMethods
        # Override default image_tag helper to automatically calculate and
        # include image dimensions.
        #
        # @param [String] path
        # @param [Hash] params
        # @return [String]
        def image_tag(path, params={})
          # Only probe local files: skip when the caller already supplied a
          # width or height, or when the path is an external URL.
          if !params.has_key?(:width) && !params.has_key?(:height) && !path.include?("://")
            params[:alt] ||= ""
            begin
              # NOTE(review): absolute paths (leading "/") still get
              # images_dir prepended by File.join here — confirm whether
              # that is the intended behavior for this version.
              real_path = File.join(source, images_dir, path)
              full_path = File.expand_path(real_path, root)
              # File.exist? replaces the deprecated File.exists? alias.
              if File.exist? full_path
                dimensions = ::FastImage.size(full_path, :raise_on_failure => true)
                params[:width] = dimensions[0]
                params[:height] = dimensions[1]
              end
            rescue
              # Best-effort: if FastImage cannot read the file, emit the tag
              # without dimensions rather than failing the render.
            end
          end
          super(path, params)
        end
      end
    end
  end
end
Make :automatic_image_sizes work for absolute image paths
# Extensions namespace
module Middleman
  module Extensions
    # Automatic Image Sizes extension: fills in :width/:height attributes on
    # image_tag output by reading the image file from disk with FastImage.
    module AutomaticImageSizes
      # Setup extension
      class << self
        # Once registered
        def registered(app)
          # Include 3rd-party fastimage library
          require "middleman-more/extensions/automatic_image_sizes/fastimage"
          # Include methods
          app.send :include, InstanceMethods
        end
        alias :included :registered
      end

      # Automatic Image Sizes Instance Methods
      module InstanceMethods
        # Override default image_tag helper to automatically calculate and include
        # image dimensions.
        #
        # Handles both paths relative to images_dir and absolute (leading "/")
        # source paths; remote URLs and tags with explicit dimensions are left alone.
        #
        # @param [String] path
        # @param [Hash] params
        # @return [String]
        def image_tag(path, params={})
          if !params.has_key?(:width) && !params.has_key?(:height) && !path.include?("://")
            params[:alt] ||= ""
            real_path = path
            # Paths not starting with "/" are relative to the images directory.
            real_path = File.join(images_dir, real_path) unless real_path =~ %r{^/}
            full_path = File.join(source_dir, real_path)
            # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2).
            if File.exist? full_path
              begin
                width, height = ::FastImage.size(full_path, :raise_on_failure => true)
                params[:width]  = width
                params[:height] = height
              rescue
                warn "Couldn't determine dimensions for image #{path}: #{$!.message}"
              end
            end
          end
          super(path, params)
        end
      end
    end
  end
end
|
require 'formula'
# Homebrew formula for the Heroku Toolbelt standalone CLI client (v2.33.5).
class HerokuToolbelt < Formula
  homepage 'https://toolbelt.heroku.com/other'
  url 'http://assets.heroku.com.s3.amazonaws.com/heroku-client/heroku-client-2.33.5.tgz'
  sha1 'da42fd5c23b54bc5e8239e9b4b0beb8524dc4d5f'

  def install
    # Ship the entire client tree under libexec and expose a thin shell
    # wrapper in bin that execs the real binary.
    libexec.install Dir["*"]
    (bin/'heroku').write <<-EOS.undent
      #!/usr/bin/env sh
      exec "#{libexec}/bin/heroku" "$@"
    EOS
  end

  def test
    # Smoke test: the wrapper script runs and reports its version.
    system "#{bin}/heroku", "version"
  end
end
heroku-toolbelt 2.34.0
Closes #17240.
Signed-off-by: Adam Vandenberg <34c2b6407fd5a10249a15d699d40f9ed1782e98c@gmail.com>
require 'formula'
# Homebrew formula for the Heroku Toolbelt standalone CLI client (v2.34.0).
class HerokuToolbelt < Formula
  homepage 'https://toolbelt.heroku.com/other'
  url 'http://assets.heroku.com.s3.amazonaws.com/heroku-client/heroku-client-2.34.0.tgz'
  sha1 'cc2616af9940516f3c265a27e740ae7b3c36eef8'

  def install
    # Ship the entire client tree under libexec and expose a thin shell
    # wrapper in bin that execs the real binary.
    libexec.install Dir["*"]
    (bin/'heroku').write <<-EOS.undent
      #!/usr/bin/env sh
      exec "#{libexec}/bin/heroku" "$@"
    EOS
  end

  def test
    # Smoke test: the wrapper script runs and reports its version.
    system "#{bin}/heroku", "version"
  end
end
|
jsonschema2pojo 0.4.13 (new formula)
Closes #41949.
Signed-off-by: Baptiste Fontaine <bfee279af59f3e3f71f7ce1fa037ea7b90f93cbf@yahoo.fr>
# Homebrew formula for jsonschema2pojo, a JSON Schema -> Java type generator.
class Jsonschema2pojo < Formula
  desc "Generates Java types from JSON Schema (or example JSON)"
  homepage "http://www.jsonschema2pojo.org/"
  url "https://github.com/joelittlejohn/jsonschema2pojo/releases/download/jsonschema2pojo-0.4.13/jsonschema2pojo-0.4.13.tar.gz"
  sha256 "b7002d929645dbadd6367ff2ac8a69bb0978538d4ad4f46a195d645b5d341d21"

  depends_on :java => "1.6+"

  def install
    # Keep the CLI jar together with its lib/ directory under libexec and
    # generate a bin stub that launches the jar.
    libexec.install %w[jsonschema2pojo-cli-0.4.13.jar lib]
    bin.write_jar_script libexec/"jsonschema2pojo-cli-0.4.13.jar", "jsonschema2pojo"
  end

  test do
    # Generate Java from a minimal three-property schema and verify the
    # expected source file is produced.
    json = <<-EOS.undent.chomp
      {
        "type":"object",
        "properties": {
          "foo": {
            "type": "string"
          },
          "bar": {
            "type": "integer"
          },
          "baz": {
            "type": "boolean"
          }
        }
      }
    EOS
    (testpath/"src/jsonschema.json").write json
    system "#{bin}/jsonschema2pojo", "-s", testpath/"src", "-t", testpath/"out"
    assert (testpath/"out/Jsonschema.java").exist?
  end
end
|
TAP_MIGRATIONS = {
"adobe-air-sdk" => "homebrew/binary",
"afuse" => "homebrew/fuse",
"aimage" => "homebrew/boneyard",
"aplus" => "homebrew/boneyard",
"apple-gcc42" => "homebrew/dupes",
"appswitch" => "homebrew/binary",
"archivemount" => "homebrew/fuse",
"atari++" => "homebrew/x11",
"auctex" => "homebrew/tex",
"authexec" => "homebrew/boneyard",
"avfs" => "homebrew/fuse",
"aws-iam-tools" => "homebrew/boneyard",
"awsenv" => "Luzifer/tools",
"bbcp" => "homebrew/head-only",
"bcwipe" => "homebrew/boneyard",
"bindfs" => "homebrew/fuse",
"blackbox" => "homebrew/boneyard",
"bochs" => "homebrew/x11",
"boost149" => "homebrew/versions",
"cantera" => "homebrew/science",
"cardpeek" => "homebrew/x11",
"catdoc" => "homebrew/boneyard",
"cdf" => "homebrew/boneyard",
"cdimgtools" => "homebrew/boneyard",
"celt" => "homebrew/boneyard",
"chktex" => "homebrew/tex",
"clam" => "homebrew/boneyard",
"clay" => "homebrew/boneyard",
"cloudfoundry-cli" => "pivotal/tap",
"clusterit" => "homebrew/x11",
"cmucl" => "homebrew/binary",
"comparepdf" => "homebrew/boneyard",
"connect" => "homebrew/boneyard",
"coremod" => "homebrew/boneyard",
"curlftpfs" => "homebrew/x11",
"cwm" => "homebrew/x11",
"dart" => "dart-lang/dart",
"datamash" => "homebrew/science",
"dbslayer" => "homebrew/boneyard",
"ddd" => "homebrew/x11",
"denyhosts" => "homebrew/boneyard",
"dgtal" => "homebrew/science",
"djmount" => "homebrew/fuse",
"dmenu" => "homebrew/x11",
"dotwrp" => "homebrew/science",
"drizzle" => "homebrew/boneyard",
"drush" => "homebrew/php",
"dsniff" => "homebrew/boneyard",
"dupx" => "homebrew/boneyard",
"dwm" => "homebrew/x11",
"dzen2" => "homebrew/x11",
"easy-tag" => "homebrew/x11",
"echoping" => "homebrew/boneyard",
"electric-fence" => "homebrew/boneyard",
"encfs" => "homebrew/fuse",
"ext2fuse" => "homebrew/fuse",
"ext4fuse" => "homebrew/fuse",
"fceux" => "homebrew/games",
"feh" => "homebrew/x11",
"ffts" => "homebrew/boneyard",
"figtoipe" => "homebrew/head-only",
"fox" => "homebrew/x11",
"freeglut" => "homebrew/x11",
"freerdp" => "homebrew/x11",
"fsv" => "homebrew/boneyard",
"fuse-zip" => "homebrew/fuse",
"fuse4x" => "homebrew/fuse",
"fuse4x-kext" => "homebrew/fuse",
"gant" => "homebrew/boneyard",
"gcsfuse" => "homebrew/fuse",
"gdrive" => "homebrew/boneyard",
"geany" => "homebrew/x11",
"geda-gaf" => "homebrew/x11",
"geeqie" => "homebrew/x11",
"geomview" => "homebrew/x11",
"gerbv" => "homebrew/x11",
"ggobi" => "homebrew/x11",
"giblib" => "homebrew/x11",
"git-encrypt" => "homebrew/boneyard",
"git-flow-clone" => "homebrew/boneyard",
"git-latexdiff" => "homebrew/tex",
"gitfs" => "homebrew/fuse",
"glade" => "homebrew/x11",
"gle" => "homebrew/x11",
"gnumeric" => "homebrew/x11",
"gnunet" => "homebrew/boneyard",
"gobby" => "homebrew/x11",
"googlecl" => "homebrew/boneyard",
"gpredict" => "homebrew/x11",
"gptfdisk" => "homebrew/boneyard",
"grace" => "homebrew/x11",
"grads" => "homebrew/binary",
"graylog2-server" => "homebrew/boneyard",
"gromacs" => "homebrew/science",
"gsmartcontrol" => "homebrew/x11",
"gtk-chtheme" => "homebrew/x11",
"gtkglarea" => "homebrew/boneyard",
"gtksourceviewmm" => "homebrew/x11",
"gtksourceviewmm3" => "homebrew/x11",
"gtkwave" => "homebrew/x11",
"guilt" => "homebrew/boneyard",
"gv" => "homebrew/x11",
"hatari" => "homebrew/x11",
"helios" => "spotify/public",
"hexchat" => "homebrew/x11",
"hllib" => "homebrew/boneyard",
"honeyd" => "homebrew/boneyard",
"hugs98" => "homebrew/boneyard",
"hwloc" => "homebrew/science",
"ifuse" => "homebrew/fuse",
"imake" => "homebrew/x11",
"inkscape" => "homebrew/x11",
"iojs" => "homebrew/versions",
"ipe" => "homebrew/boneyard",
"ipopt" => "homebrew/science",
"iptux" => "homebrew/x11",
"itsol" => "homebrew/science",
"iulib" => "homebrew/boneyard",
"jscoverage" => "homebrew/boneyard",
"jsl" => "homebrew/binary",
"jstalk" => "homebrew/boneyard",
"justniffer" => "homebrew/boneyard",
"kbtin" => "homebrew/boneyard",
"kerl" => "homebrew/head-only",
"kernagic" => "homebrew/x11",
"kismet" => "homebrew/boneyard",
"klavaro" => "homebrew/x11",
"kumofs" => "homebrew/boneyard",
"latex-mk" => "homebrew/tex",
"libdlna" => "homebrew/boneyard",
"libgtextutils" => "homebrew/science",
"libqxt" => "homebrew/boneyard",
"librets" => "homebrew/boneyard",
"libspotify" => "homebrew/binary",
"lilypond" => "homebrew/tex",
"lmutil" => "homebrew/binary",
"magit" => "homebrew/emacs",
"meld" => "homebrew/x11",
"mesalib-glw" => "homebrew/x11",
"mess" => "homebrew/games",
"metalua" => "homebrew/boneyard",
"mit-scheme" => "homebrew/x11",
"mlkit" => "homebrew/boneyard",
"mlton" => "homebrew/boneyard",
"morse" => "homebrew/x11",
"mp3fs" => "homebrew/fuse",
"mpio" => "homebrew/boneyard",
"mscgen" => "homebrew/x11",
"msgpack-rpc" => "homebrew/boneyard",
"mupdf" => "homebrew/x11",
"mysql-connector-odbc" => "homebrew/boneyard",
"mysql-proxy" => "homebrew/boneyard",
"mysqlreport" => "homebrew/boneyard",
"net6" => "homebrew/boneyard",
"newick-utils" => "homebrew/science",
"nlopt" => "homebrew/science",
"ntfs-3g" => "homebrew/fuse",
"octave" => "homebrew/science",
"opencv" => "homebrew/science",
"openfst" => "homebrew/science",
"opengrm-ngram" => "homebrew/science",
"ori" => "homebrew/fuse",
"p11-kit" => "homebrew/boneyard",
"pan" => "homebrew/boneyard",
"paq8px" => "homebrew/boneyard",
"par2tbb" => "homebrew/boneyard",
"pari" => "homebrew/x11",
"pathfinder" => "homebrew/boneyard",
"pcb" => "homebrew/x11",
"pdf-tools" => "homebrew/emacs",
"pdf2image" => "homebrew/x11",
"pdfjam" => "homebrew/tex",
"pdftoipe" => "homebrew/head-only",
"pebble-sdk" => "pebble/pebble-sdk",
"pgplot" => "homebrew/x11",
"pixie" => "homebrew/x11",
"pjsip" => "homebrew/boneyard",
"pocl" => "homebrew/science",
"pplatex" => "homebrew/tex",
"prooftree" => "homebrew/x11",
"pulse" => "homebrew/boneyard",
"pyenv-pip-rehash" => "homebrew/boneyard",
"pyxplot" => "homebrew/x11",
"qfits" => "homebrew/boneyard",
"qi" => "homebrew/boneyard",
"qiv" => "homebrew/boneyard",
"qrupdate" => "homebrew/science",
"rdesktop" => "homebrew/x11",
"rocket" => "homebrew/boneyard",
"rofs-filtered" => "homebrew/fuse",
"rtags" => "homebrew/boneyard",
"rxvt-unicode" => "homebrew/x11",
"s3-backer" => "homebrew/fuse",
"s3fs" => "homebrew/fuse",
"salt" => "homebrew/science",
"scantailor" => "homebrew/x11",
"sdelta3" => "homebrew/boneyard",
"sedna" => "homebrew/boneyard",
"shark" => "homebrew/science",
"shell.fm" => "homebrew/boneyard",
"simple-mtpfs" => "homebrew/fuse",
"sitecopy" => "homebrew/boneyard",
"slicot" => "homebrew/science",
"smartsim" => "homebrew/x11",
"solfege" => "homebrew/boneyard",
"sptk" => "homebrew/x11",
"sshfs" => "homebrew/fuse",
"stormfs" => "homebrew/fuse",
"sundials" => "homebrew/science",
"swi-prolog" => "homebrew/x11",
"sxiv" => "homebrew/x11",
"sylpheed" => "homebrew/x11",
"syslog-ng" => "homebrew/boneyard",
"tabbed" => "homebrew/x11",
"terminator" => "homebrew/x11",
"tetgen" => "homebrew/science",
"texmacs" => "homebrew/boneyard",
"texwrapper" => "homebrew/tex",
"ticcutils" => "homebrew/science",
"tiger-vnc" => "homebrew/x11",
"timbl" => "homebrew/science",
"tmap" => "homebrew/boneyard",
"transmission-remote-gtk" => "homebrew/x11",
"tup" => "homebrew/fuse",
"uim" => "homebrew/x11",
"ume" => "homebrew/games",
"upnp-router-control" => "homebrew/x11",
"urweb" => "homebrew/boneyard",
"ushare" => "homebrew/boneyard",
"viewglob" => "homebrew/boneyard",
"vobcopy" => "homebrew/boneyard",
"wdfs" => "homebrew/fuse",
"whereami" => "homebrew/boneyard",
"wkhtmltopdf" => "homebrew/boneyard",
"wmctrl" => "homebrew/x11",
"wopr" => "homebrew/science",
"wps2odt" => "homebrew/boneyard",
"x3270" => "homebrew/x11",
"xar" => "homebrew/dupes",
"xastir" => "homebrew/x11",
"xchat" => "homebrew/x11",
"xclip" => "homebrew/x11",
"xdotool" => "homebrew/x11",
"xdu" => "homebrew/x11",
"xmount" => "homebrew/fuse",
"xournal" => "homebrew/x11",
"xpa" => "homebrew/x11",
"xpdf" => "homebrew/x11",
"xplot" => "homebrew/x11",
"xspringies" => "homebrew/x11",
"yarp" => "homebrew/x11",
"ydict" => "homebrew/boneyard",
"zenity" => "homebrew/x11",
}
rtags: 2.1, back from boneyard
This reverts commit 5bcc7b1f3fd4992e67736bb3cd97141e82c94685 and updates
rtags to version 2.1. It had been moved to the boneyard because the tag
for version 2 had been deleted.
Closes Homebrew/homebrew#48800.
Signed-off-by: Andrew Janke <02e0a999c50b1f88df7a8f5a04e1b76b35ea6a88@apjanke.net>
TAP_MIGRATIONS = {
"adobe-air-sdk" => "homebrew/binary",
"afuse" => "homebrew/fuse",
"aimage" => "homebrew/boneyard",
"aplus" => "homebrew/boneyard",
"apple-gcc42" => "homebrew/dupes",
"appswitch" => "homebrew/binary",
"archivemount" => "homebrew/fuse",
"atari++" => "homebrew/x11",
"auctex" => "homebrew/tex",
"authexec" => "homebrew/boneyard",
"avfs" => "homebrew/fuse",
"aws-iam-tools" => "homebrew/boneyard",
"awsenv" => "Luzifer/tools",
"bbcp" => "homebrew/head-only",
"bcwipe" => "homebrew/boneyard",
"bindfs" => "homebrew/fuse",
"blackbox" => "homebrew/boneyard",
"bochs" => "homebrew/x11",
"boost149" => "homebrew/versions",
"cantera" => "homebrew/science",
"cardpeek" => "homebrew/x11",
"catdoc" => "homebrew/boneyard",
"cdf" => "homebrew/boneyard",
"cdimgtools" => "homebrew/boneyard",
"celt" => "homebrew/boneyard",
"chktex" => "homebrew/tex",
"clam" => "homebrew/boneyard",
"clay" => "homebrew/boneyard",
"cloudfoundry-cli" => "pivotal/tap",
"clusterit" => "homebrew/x11",
"cmucl" => "homebrew/binary",
"comparepdf" => "homebrew/boneyard",
"connect" => "homebrew/boneyard",
"coremod" => "homebrew/boneyard",
"curlftpfs" => "homebrew/x11",
"cwm" => "homebrew/x11",
"dart" => "dart-lang/dart",
"datamash" => "homebrew/science",
"dbslayer" => "homebrew/boneyard",
"ddd" => "homebrew/x11",
"denyhosts" => "homebrew/boneyard",
"dgtal" => "homebrew/science",
"djmount" => "homebrew/fuse",
"dmenu" => "homebrew/x11",
"dotwrp" => "homebrew/science",
"drizzle" => "homebrew/boneyard",
"drush" => "homebrew/php",
"dsniff" => "homebrew/boneyard",
"dupx" => "homebrew/boneyard",
"dwm" => "homebrew/x11",
"dzen2" => "homebrew/x11",
"easy-tag" => "homebrew/x11",
"echoping" => "homebrew/boneyard",
"electric-fence" => "homebrew/boneyard",
"encfs" => "homebrew/fuse",
"ext2fuse" => "homebrew/fuse",
"ext4fuse" => "homebrew/fuse",
"fceux" => "homebrew/games",
"feh" => "homebrew/x11",
"ffts" => "homebrew/boneyard",
"figtoipe" => "homebrew/head-only",
"fox" => "homebrew/x11",
"freeglut" => "homebrew/x11",
"freerdp" => "homebrew/x11",
"fsv" => "homebrew/boneyard",
"fuse-zip" => "homebrew/fuse",
"fuse4x" => "homebrew/fuse",
"fuse4x-kext" => "homebrew/fuse",
"gant" => "homebrew/boneyard",
"gcsfuse" => "homebrew/fuse",
"gdrive" => "homebrew/boneyard",
"geany" => "homebrew/x11",
"geda-gaf" => "homebrew/x11",
"geeqie" => "homebrew/x11",
"geomview" => "homebrew/x11",
"gerbv" => "homebrew/x11",
"ggobi" => "homebrew/x11",
"giblib" => "homebrew/x11",
"git-encrypt" => "homebrew/boneyard",
"git-flow-clone" => "homebrew/boneyard",
"git-latexdiff" => "homebrew/tex",
"gitfs" => "homebrew/fuse",
"glade" => "homebrew/x11",
"gle" => "homebrew/x11",
"gnumeric" => "homebrew/x11",
"gnunet" => "homebrew/boneyard",
"gobby" => "homebrew/x11",
"googlecl" => "homebrew/boneyard",
"gpredict" => "homebrew/x11",
"gptfdisk" => "homebrew/boneyard",
"grace" => "homebrew/x11",
"grads" => "homebrew/binary",
"graylog2-server" => "homebrew/boneyard",
"gromacs" => "homebrew/science",
"gsmartcontrol" => "homebrew/x11",
"gtk-chtheme" => "homebrew/x11",
"gtkglarea" => "homebrew/boneyard",
"gtksourceviewmm" => "homebrew/x11",
"gtksourceviewmm3" => "homebrew/x11",
"gtkwave" => "homebrew/x11",
"guilt" => "homebrew/boneyard",
"gv" => "homebrew/x11",
"hatari" => "homebrew/x11",
"helios" => "spotify/public",
"hexchat" => "homebrew/x11",
"hllib" => "homebrew/boneyard",
"honeyd" => "homebrew/boneyard",
"hugs98" => "homebrew/boneyard",
"hwloc" => "homebrew/science",
"ifuse" => "homebrew/fuse",
"imake" => "homebrew/x11",
"inkscape" => "homebrew/x11",
"iojs" => "homebrew/versions",
"ipe" => "homebrew/boneyard",
"ipopt" => "homebrew/science",
"iptux" => "homebrew/x11",
"itsol" => "homebrew/science",
"iulib" => "homebrew/boneyard",
"jscoverage" => "homebrew/boneyard",
"jsl" => "homebrew/binary",
"jstalk" => "homebrew/boneyard",
"justniffer" => "homebrew/boneyard",
"kbtin" => "homebrew/boneyard",
"kerl" => "homebrew/head-only",
"kernagic" => "homebrew/x11",
"kismet" => "homebrew/boneyard",
"klavaro" => "homebrew/x11",
"kumofs" => "homebrew/boneyard",
"latex-mk" => "homebrew/tex",
"libdlna" => "homebrew/boneyard",
"libgtextutils" => "homebrew/science",
"libqxt" => "homebrew/boneyard",
"librets" => "homebrew/boneyard",
"libspotify" => "homebrew/binary",
"lilypond" => "homebrew/tex",
"lmutil" => "homebrew/binary",
"magit" => "homebrew/emacs",
"meld" => "homebrew/x11",
"mesalib-glw" => "homebrew/x11",
"mess" => "homebrew/games",
"metalua" => "homebrew/boneyard",
"mit-scheme" => "homebrew/x11",
"mlkit" => "homebrew/boneyard",
"mlton" => "homebrew/boneyard",
"morse" => "homebrew/x11",
"mp3fs" => "homebrew/fuse",
"mpio" => "homebrew/boneyard",
"mscgen" => "homebrew/x11",
"msgpack-rpc" => "homebrew/boneyard",
"mupdf" => "homebrew/x11",
"mysql-connector-odbc" => "homebrew/boneyard",
"mysql-proxy" => "homebrew/boneyard",
"mysqlreport" => "homebrew/boneyard",
"net6" => "homebrew/boneyard",
"newick-utils" => "homebrew/science",
"nlopt" => "homebrew/science",
"ntfs-3g" => "homebrew/fuse",
"octave" => "homebrew/science",
"opencv" => "homebrew/science",
"openfst" => "homebrew/science",
"opengrm-ngram" => "homebrew/science",
"ori" => "homebrew/fuse",
"p11-kit" => "homebrew/boneyard",
"pan" => "homebrew/boneyard",
"paq8px" => "homebrew/boneyard",
"par2tbb" => "homebrew/boneyard",
"pari" => "homebrew/x11",
"pathfinder" => "homebrew/boneyard",
"pcb" => "homebrew/x11",
"pdf-tools" => "homebrew/emacs",
"pdf2image" => "homebrew/x11",
"pdfjam" => "homebrew/tex",
"pdftoipe" => "homebrew/head-only",
"pebble-sdk" => "pebble/pebble-sdk",
"pgplot" => "homebrew/x11",
"pixie" => "homebrew/x11",
"pjsip" => "homebrew/boneyard",
"pocl" => "homebrew/science",
"pplatex" => "homebrew/tex",
"prooftree" => "homebrew/x11",
"pulse" => "homebrew/boneyard",
"pyenv-pip-rehash" => "homebrew/boneyard",
"pyxplot" => "homebrew/x11",
"qfits" => "homebrew/boneyard",
"qi" => "homebrew/boneyard",
"qiv" => "homebrew/boneyard",
"qrupdate" => "homebrew/science",
"rdesktop" => "homebrew/x11",
"rocket" => "homebrew/boneyard",
"rofs-filtered" => "homebrew/fuse",
"rxvt-unicode" => "homebrew/x11",
"s3-backer" => "homebrew/fuse",
"s3fs" => "homebrew/fuse",
"salt" => "homebrew/science",
"scantailor" => "homebrew/x11",
"sdelta3" => "homebrew/boneyard",
"sedna" => "homebrew/boneyard",
"shark" => "homebrew/science",
"shell.fm" => "homebrew/boneyard",
"simple-mtpfs" => "homebrew/fuse",
"sitecopy" => "homebrew/boneyard",
"slicot" => "homebrew/science",
"smartsim" => "homebrew/x11",
"solfege" => "homebrew/boneyard",
"sptk" => "homebrew/x11",
"sshfs" => "homebrew/fuse",
"stormfs" => "homebrew/fuse",
"sundials" => "homebrew/science",
"swi-prolog" => "homebrew/x11",
"sxiv" => "homebrew/x11",
"sylpheed" => "homebrew/x11",
"syslog-ng" => "homebrew/boneyard",
"tabbed" => "homebrew/x11",
"terminator" => "homebrew/x11",
"tetgen" => "homebrew/science",
"texmacs" => "homebrew/boneyard",
"texwrapper" => "homebrew/tex",
"ticcutils" => "homebrew/science",
"tiger-vnc" => "homebrew/x11",
"timbl" => "homebrew/science",
"tmap" => "homebrew/boneyard",
"transmission-remote-gtk" => "homebrew/x11",
"tup" => "homebrew/fuse",
"uim" => "homebrew/x11",
"ume" => "homebrew/games",
"upnp-router-control" => "homebrew/x11",
"urweb" => "homebrew/boneyard",
"ushare" => "homebrew/boneyard",
"viewglob" => "homebrew/boneyard",
"vobcopy" => "homebrew/boneyard",
"wdfs" => "homebrew/fuse",
"whereami" => "homebrew/boneyard",
"wkhtmltopdf" => "homebrew/boneyard",
"wmctrl" => "homebrew/x11",
"wopr" => "homebrew/science",
"wps2odt" => "homebrew/boneyard",
"x3270" => "homebrew/x11",
"xar" => "homebrew/dupes",
"xastir" => "homebrew/x11",
"xchat" => "homebrew/x11",
"xclip" => "homebrew/x11",
"xdotool" => "homebrew/x11",
"xdu" => "homebrew/x11",
"xmount" => "homebrew/fuse",
"xournal" => "homebrew/x11",
"xpa" => "homebrew/x11",
"xpdf" => "homebrew/x11",
"xplot" => "homebrew/x11",
"xspringies" => "homebrew/x11",
"yarp" => "homebrew/x11",
"ydict" => "homebrew/boneyard",
"zenity" => "homebrew/x11",
}
|
require 'beaker-rspec'
require 'beaker-puppet'
require 'beaker/puppet_install_helper'
require 'beaker/module_install_helper'
require 'pry'

# Provision every Beaker host with a puppet agent before any spec runs.
install_puppet_agent_on(hosts)

RSpec.configure do |c|
  # Project root
  proj_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))

  # Readable test descriptions
  c.formatter = :documentation

  # Configure all nodes in nodeset
  c.before :suite do
    # Install module
    puppet_module_install(source: proj_root, module_name: 'virtualbox')
    hosts.each do |host|
      # Exit code 1 is tolerated: the module may already be installed.
      on host, puppet('module', 'install', 'puppetlabs-stdlib'), acceptable_exit_codes: [0, 1]
      on host, puppet('module', 'install', 'puppetlabs-apt'), acceptable_exit_codes: [0, 1]
      on host, puppet('module', 'install', 'stahnma-epel'), acceptable_exit_codes: [0, 1]
      on host, puppet('module', 'install', 'darin-zypprepo'), acceptable_exit_codes: [0, 1]
    end
  end
end
Clean up acceptance spec helper
require 'beaker-rspec'
require 'beaker-puppet'
require 'beaker/puppet_install_helper'
require 'beaker/module_install_helper'

# Provision puppet on the hosts unless explicitly disabled
# (BEAKER_provision=no reuses already-provisioned hosts).
run_puppet_install_helper unless ENV['BEAKER_provision'] == 'no'

RSpec.configure do |c|
  # Readable test descriptions
  c.formatter = :documentation

  # Configure all nodes in nodeset
  c.before :suite do
    # Install the module under test and its declared dependencies.
    install_module
    install_module_dependencies
  end
end
|
require 'beaker-rspec'

hosts.each do |host|
  # Install Puppet
  install_puppet()

  # Install ruby-augeas
  case fact('osfamily')
  when 'Debian'
    install_package host, 'libaugeas-ruby'
  when 'RedHat'
    install_package host, 'net-tools'
    install_package host, 'gcc'
    install_package host, 'ruby-devel'
    install_package host, 'augeas-devel'
    # Build the ruby-augeas gem from source (needs the gcc/devel packages above).
    on host, 'gem install ruby-augeas --no-ri --no-rdoc'
    on host, 'yum makecache'
  else
    # Unsupported platform: bail out before any specs run.
    puts 'Sorry, this osfamily is not supported.'
    exit
  end
end

RSpec.configure do |c|
  # Project root
  proj_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))

  # Readable test descriptions
  c.formatter = :documentation

  # Configure all nodes in nodeset
  c.before :suite do
    # Install module and dependencies
    puppet_module_install(:source => proj_root, :module_name => 'tomcat')
    hosts.each do |host|
      # Exit code 1 is tolerated: the module may already be installed.
      on host, puppet('module','install','camptocamp-archive'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','camptocamp-systemd'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','herculesteam-augeasproviders_core'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','herculesteam-augeasproviders_shellvar'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','puppetlabs-inifile'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','puppetlabs-stdlib'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','theforeman-concat_native'), { :acceptable_exit_codes => [0,1] }
    end
  end
end
Fix for acceptance tests
require 'beaker-rspec'

hosts.each do |host|
  # Install Puppet
  install_puppet()

  # Install ruby-augeas
  case fact('osfamily')
  when 'Debian'
    install_package host, 'libaugeas-ruby'
  when 'RedHat'
    install_package host, 'net-tools'
    install_package host, 'gcc'
    install_package host, 'ruby-devel'
    install_package host, 'augeas-devel'
    # Build the ruby-augeas gem from source (needs the gcc/devel packages above).
    on host, 'gem install ruby-augeas --no-ri --no-rdoc'
    on host, 'yum makecache'
  else
    # Unsupported platform: bail out before any specs run.
    puts 'Sorry, this osfamily is not supported.'
    exit
  end
end

RSpec.configure do |c|
  # Project root
  proj_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))

  # Readable test descriptions
  c.formatter = :documentation

  # Configure all nodes in nodeset
  c.before :suite do
    # Install module and dependencies
    puppet_module_install(:source => proj_root, :module_name => 'tomcat')
    hosts.each do |host|
      # Exit code 1 is tolerated: the module may already be installed.
      on host, puppet('module','install','camptocamp-archive'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','camptocamp-systemd'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','herculesteam-augeasproviders_core'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','herculesteam-augeasproviders_shellvar'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','puppetlabs-concat'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','puppetlabs-inifile'), { :acceptable_exit_codes => [0,1] }
      on host, puppet('module','install','puppetlabs-stdlib'), { :acceptable_exit_codes => [0,1] }
    end
  end
end
|
# rubocop:disable Style/FileName
require 'capybara/email/rspec'
Cleanup
require 'capybara/email/rspec'
|
# Shared example group: expects `subject.call` to raise the described error
# class with a message equal to `message` (both `subject` and `message` must
# be provided by the including example group).
RSpec.shared_context 'an error' do
  it do
    expect { subject.call }.to raise_error do |raised|
      # be_a also accepts subclasses of described_class.
      expect(raised).to be_a(described_class)
      expect(raised.message).to eql(message)
    end
  end
end
Upgrade assertion level
# Shared example group: expects `subject.call` to raise the described error
# class with a message equal to `message` (both `subject` and `message` must
# be provided by the including example group).
RSpec.shared_context 'an error' do
  it do
    expect { subject.call }.to raise_error do |raised|
      # be_an_instance_of requires the exact class — subclasses do not match.
      expect(raised).to be_an_instance_of(described_class)
      expect(raised.message).to eql(message)
    end
  end
end
|
# Compares a check's result (a status, or a [status, message] pair) against
# an expected status and/or message supplied at construction time.
class CheckMatcher
  def initialize(options)
    @expected_status  = options[:status]
    @expected_message = options[:message]
  end

  # True when the result of +check.run_check+ matches both expectations.
  def matches?(check)
    result_status, result_message = [check.run_check].flatten
    status_matches?(result_status) && message_matches?(result_message)
  end

  # A nil expectation matches any status.
  def status_matches?(result_status)
    return true if @expected_status.nil?
    result_status == @expected_status
  end

  # A nil expectation matches any message; a Regexp expectation is matched
  # with =~, anything else by equality.
  def message_matches?(result_message)
    return true if @expected_message.nil?
    if @expected_message.is_a?(Regexp)
      result_message =~ @expected_message
    else
      result_message == @expected_message
    end
  end

  # Appends message details to +error_message+ (mutating it) when the status
  # matched but the message did not, and returns the resulting string.
  def failure_message(actual, error_message)
    result_status, result_message = [actual].flatten
    if status_matches?(result_status)
      error_message << " with message matching #{@expected_message.inspect},"
      error_message << " but was #{result_message.inspect}"
    end
    error_message
  end
end
# Can't call this `fail` since that is a reserved word in Ruby
# Matches when the check reports a :bad status (and, optionally, a message).
RSpec::Matchers.define :fail_check do |message|
  check_matcher = CheckMatcher.new(:status => :bad, :message => message)

  match do |check|
    check_matcher.matches?(actual)
  end

  # FIX: declare the `check` block parameter — the body references `check`,
  # so without it this raised NameError whenever a failing expectation tried
  # to build its failure message.
  failure_message_for_should do |check|
    check_matcher.failure_message(
      check.run_check,
      'expected that the check would fail'
    )
  end

  failure_message_for_should_not do
    'expected that the check would not fail'
  end

  description { 'fail the check' }
end
# Matches when the check reports a :stop status (fail and halt all checking).
RSpec::Matchers.define :stop do |message|
  check_matcher = CheckMatcher.new(:status => :stop, :message => message)

  match do |check|
    check_matcher.matches?(actual)
  end

  # FIX: declare the `check` block parameter — the body references `check`,
  # so without it this raised NameError when building the failure message.
  failure_message_for_should do |check|
    check_matcher.failure_message(
      check.run_check,
      'expected that the check would fail and halt further checking'
    )
  end

  failure_message_for_should_not do
    'expected that the check would not fail with a stopping error'
  end

  description { 'fail and halt further checking' }
end
# Matches when the check reports a :good status (and, optionally, a message).
RSpec::Matchers.define :pass do |message|
  check_matcher = CheckMatcher.new(:status => :good, :message => message)

  match do |check|
    check_matcher.matches?(actual)
  end

  # FIX: declare the `check` block parameter — the body references `check`,
  # so without it this raised NameError when building the failure message.
  failure_message_for_should do |check|
    check_matcher.failure_message(
      check.run_check,
      'expected that the check would pass'
    )
  end

  failure_message_for_should_not do
    'expected that the check would not pass'
  end

  description { 'pass the check' }
end
# Matches when the check reports a :warn status (and, optionally, a message).
RSpec::Matchers.define :warn do |message|
  check_matcher = CheckMatcher.new(:status => :warn, :message => message)

  match do |check|
    # NOTE(review): passes `check` rather than `actual` like the sibling
    # matchers; equivalent here since both name the object under test.
    check_matcher.matches?(check)
  end

  failure_message_for_should do |check|
    check_matcher.failure_message(
      check.run_check,
      'expected that the check would report a warning'
    )
  end

  failure_message_for_should_not do
    'expected that the check would not report a warning'
  end

  description { 'report a warning' }
end
Add missing parameter to block definitions
The custom matcher was missing a parameter in the block definitions for
the failure cases. This wasn't noticed because our test suite was
designed to pass, not fail (and thus this code would never get
executed).
Adding in the `check` parameter solves the problem.
Change-Id: I1b8a8c4164d50add9c4f44be65f78a64deccc093
Reviewed-on: https://gerrit.causes.com/25160
Tested-by: jenkins <d95b56ce41a2e1ac4cecdd398defd7414407cc08@causes.com>
Reviewed-by: Shane da Silva <6f6e68d1df92f30cb4b3ce35260ddf94a402f33d@causes.com>
# Compares a check's result (a status, or a [status, message] pair) against
# an expected status and/or message supplied at construction time.
class CheckMatcher
  def initialize(options)
    @expected_status  = options[:status]
    @expected_message = options[:message]
  end

  # True when the result of +check.run_check+ matches both expectations.
  def matches?(check)
    result_status, result_message = [check.run_check].flatten
    status_matches?(result_status) && message_matches?(result_message)
  end

  # A nil expectation matches any status.
  def status_matches?(result_status)
    return true if @expected_status.nil?
    result_status == @expected_status
  end

  # A nil expectation matches any message; a Regexp expectation is matched
  # with =~, anything else by equality.
  def message_matches?(result_message)
    return true if @expected_message.nil?
    if @expected_message.is_a?(Regexp)
      result_message =~ @expected_message
    else
      result_message == @expected_message
    end
  end

  # Appends message details to +error_message+ (mutating it) when the status
  # matched but the message did not, and returns the resulting string.
  def failure_message(actual, error_message)
    result_status, result_message = [actual].flatten
    if status_matches?(result_status)
      error_message << " with message matching #{@expected_message.inspect},"
      error_message << " but was #{result_message.inspect}"
    end
    error_message
  end
end
# Can't call this `fail` since that is a reserved word in Ruby
# Matches when the check reports a :bad status (and, optionally, a message).
RSpec::Matchers.define :fail_check do |message|
  check_matcher = CheckMatcher.new(:status => :bad, :message => message)

  match do |check|
    check_matcher.matches?(actual)
  end

  # `check` must be declared as a block parameter so the body can call run_check.
  failure_message_for_should do |check|
    check_matcher.failure_message(
      check.run_check,
      'expected that the check would fail'
    )
  end

  failure_message_for_should_not do
    'expected that the check would not fail'
  end

  description { 'fail the check' }
end
# Matches when the check reports a :stop status (fail and halt all checking).
RSpec::Matchers.define :stop do |message|
  check_matcher = CheckMatcher.new(:status => :stop, :message => message)

  match do |check|
    check_matcher.matches?(actual)
  end

  # `check` must be declared as a block parameter so the body can call run_check.
  failure_message_for_should do |check|
    check_matcher.failure_message(
      check.run_check,
      'expected that the check would fail and halt further checking'
    )
  end

  failure_message_for_should_not do
    'expected that the check would not fail with a stopping error'
  end

  description { 'fail and halt further checking' }
end
# Matches when the check reports a :good status (and, optionally, a message).
RSpec::Matchers.define :pass do |message|
  check_matcher = CheckMatcher.new(:status => :good, :message => message)

  match do |check|
    check_matcher.matches?(actual)
  end

  # `check` must be declared as a block parameter so the body can call run_check.
  failure_message_for_should do |check|
    check_matcher.failure_message(
      check.run_check,
      'expected that the check would pass'
    )
  end

  failure_message_for_should_not do
    'expected that the check would not pass'
  end

  description { 'pass the check' }
end
# Matches when the check reports a :warn status (and, optionally, a message).
RSpec::Matchers.define :warn do |message|
  check_matcher = CheckMatcher.new(:status => :warn, :message => message)

  match do |check|
    # NOTE(review): passes `check` rather than `actual` like the sibling
    # matchers; equivalent here since both name the object under test.
    check_matcher.matches?(check)
  end

  failure_message_for_should do |check|
    check_matcher.failure_message(
      check.run_check,
      'expected that the check would report a warning'
    )
  end

  failure_message_for_should_not do
    'expected that the check would not report a warning'
  end

  description { 'report a warning' }
end
|
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')

# Specs for the Backlot HTTP wrapper and its API facade.
describe Tangerine::Backlot do
  authenticate!

  describe Tangerine::Backlot::HTTP do
    subject { Tangerine::Backlot::HTTP }
    its(:base_uri) { should == 'http://api.ooyala.com/partner' }
  end

  describe Tangerine::Backlot::API do
    context 'class methods' do
      subject { Tangerine::Backlot::API }

      describe '.get' do
        let(:response) { mock }
        let(:request) { '/some-request-path' }

        it 'returns the HTTP response' do
          # FIX: dropped the bogus `params` hash — the stub only expects
          # `request`, so passing extra params made the expectation fail.
          Tangerine::Backlot::HTTP.should_receive(:get).with(request).and_return(response)
          subject.get(request).should == response
        end
      end
    end
  end
end
Remove bogus params to fix test
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')

# Specs for the Backlot HTTP wrapper and its API facade.
describe Tangerine::Backlot do
  authenticate!

  describe Tangerine::Backlot::HTTP do
    subject { Tangerine::Backlot::HTTP }
    its(:base_uri) { should == 'http://api.ooyala.com/partner' }
  end

  describe Tangerine::Backlot::API do
    context 'class methods' do
      subject { Tangerine::Backlot::API }

      describe '.get' do
        let(:response) { mock }
        let(:request) { '/some-request-path' }

        it 'returns the HTTP response' do
          # API.get should delegate straight to HTTP.get and return its result.
          Tangerine::Backlot::HTTP.should_receive(:get).with(request).and_return(response)
          subject.get(request).should == response
        end
      end
    end
  end
end
require 'helper'
describe Toy::List do
uses_constants('Game', 'Move')
before do
@list = Game.embedded_list(:moves)
end
let(:list) { @list }
it "has model" do
list.model.should == Game
end
it "has name" do
list.name.should == :moves
end
it "has type" do
list.type.should == Move
end
it "has key" do
list.key.should == :move_attributes
end
it "has instance_variable" do
list.instance_variable.should == :@_moves
end
it "adds list to model" do
Game.embedded_lists.keys.should include(:moves)
end
it "adds attribute to model" do
Game.attributes.keys.should include(:move_attributes)
end
it "adds reader method" do
Game.new.should respond_to(:moves)
end
it "adds writer method" do
Game.new.should respond_to(:moves=)
end
describe "#eql?" do
it "returns true if same class, model, and name" do
list.should eql(list)
end
it "returns false if not same class" do
list.should_not eql({})
end
it "returns false if not same model" do
list.should_not eql(Toy::List.new(Move, :moves))
end
it "returns false if not the same name" do
list.should_not eql(Toy::List.new(Game, :recent_moves))
end
end
describe "setting list type" do
before do
@list = Game.list(:recent_moves, Move)
end
let(:list) { @list }
it "uses type provided instead of inferring from name" do
list.type.should be(Move)
end
it "works properly when reading and writing" do
game = Game.create
move = Move.create
game.recent_moves = [move]
game.recent_moves.should == [move]
end
end
describe "list reader" do
before do
@move = Move.new
@game = Game.create(:move_attributes => [@move.attributes])
end
it "returns instances" do
@game.moves.should == [@move]
end
it "sets reference to parent for each instance" do
@game.moves.each do |move|
move.parent_reference.should == @game
end
end
end
describe "list writer (with instances)" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create(:moves => [@move2])
end
it "set attribute" do
@game.move_attributes.should == [@move2.attributes]
end
it "unmemoizes reader" do
@game.moves.should == [@move2]
@game.moves = [@move1]
@game.moves.should == [@move1]
end
it "sets reference to parent for each instance" do
@game.moves.each do |move|
move.parent_reference.should == @game
end
end
end
describe "list writer (with hashes)" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create(:moves => [@move2.attributes])
end
it "set attribute" do
@game.move_attributes.should == [@move2.attributes]
end
it "unmemoizes reader" do
@game.moves.should == [@move2]
@game.moves = [@move1.attributes]
@game.moves.should == [@move1]
end
it "sets reference to parent for each instance" do
@game.moves.each do |move|
move.parent_reference.should == @game
end
end
end
describe "list#reset" do
before do
@move = Move.new
@game = Game.create(:move_attributes => [@move.attributes])
end
it "unmemoizes the list" do
@game.moves.should == [@move]
@game.moves.reset
Move.should_receive(:load).and_return(@move)
@game.moves.should == [@move]
end
it "should be reset when owner is reloaded" do
@game.moves.should == [@move]
@game.reload
Move.should_receive(:load).and_return(@move)
@game.moves.should == [@move]
end
end
describe "list#push" do
before do
@move = Move.new
@game = Game.create
@game.moves.push(@move)
end
it "adds attributes to attribute" do
@game.move_attributes.should == [@move.attributes]
end
it "raises error if wrong type assigned" do
lambda {
@game.moves.push(Game.new)
}.should raise_error(ArgumentError, "Move expected, but was Game")
end
it "sets reference to parent" do
# right now pushing a move adds a different instance to the proxy
# so i'm checking that it adds reference to both
@game.moves.each do |move|
move.parent_reference.should == @game
end
@move.parent_reference.should == @game
end
it "works with hashes" do
@game.moves = []
move = Move.new
@game.moves.push(move.attributes)
@game.moves.should == [move]
end
end
describe "list#<<" do
before do
@move = Move.new
@game = Game.create
@game.moves << @move
end
it "adds attributes to attribute" do
@game.move_attributes.should == [@move.attributes]
end
it "raises error if wrong type assigned" do
lambda {
@game.moves << Game.new
}.should raise_error(ArgumentError, "Move expected, but was Game")
end
it "sets reference to parent" do
# right now pushing a move adds a different instance to the proxy
# so i'm checking that it adds reference to both
@game.moves.each do |move|
move.parent_reference.should == @game
end
@move.parent_reference.should == @game
end
it "works with hashes" do
@game.moves = []
move = Move.new
@game.moves << move.attributes
@game.moves.should == [move]
end
end
describe "list#concat" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create
@game.moves.concat(@move1, @move2)
end
it "adds attributes to attribute" do
@game.move_attributes.should == [@move1.attributes, @move2.attributes]
end
it "raises error if wrong type assigned" do
lambda {
@game.moves.concat(Game.new, Move.new)
}.should raise_error(ArgumentError, "Move expected, but was Game")
end
it "sets reference to parent" do
# right now pushing a move adds a different instance to the proxy
# so i'm checking that it adds reference to both
@game.moves.each do |move|
move.parent_reference.should == @game
end
@move1.parent_reference.should == @game
@move2.parent_reference.should == @game
end
it "works with hashes" do
@game.moves = []
move = Move.new
@game.moves.concat(move.attributes)
@game.moves.should == [move]
end
end
describe "list#concat (with array)" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create
@game.moves.concat([@move1, @move2])
end
it "adds id to attribute" do
@game.move_attributes.should == [@move1.attributes, @move2.attributes]
end
it "raises error if wrong type assigned" do
lambda {
@game.moves.concat([Game.new, Move.new])
}.should raise_error(ArgumentError, "Move expected, but was Game")
end
it "sets reference to parent" do
# right now pushing a move adds a different instance to the proxy
# so i'm checking that it adds reference to both
@game.moves.each do |move|
move.parent_reference.should == @game
end
@move1.parent_reference.should == @game
@move2.parent_reference.should == @game
end
end
shared_examples_for("embedded_list#create") do
it "creates instance" do
@move.should be_persisted
end
it "assigns reference to parent" do
@move.parent_reference.should == @game
end
it "assigns id" do
@move.id.should_not be_nil
end
it "adds attributes to attribute" do
@game.move_attributes.should == [@move.attributes]
end
it "adds instance to reader" do
@game.moves.should == [@move]
end
end
describe "list#create" do
before do
@game = Game.create
@move = @game.moves.create
end
it_should_behave_like "embedded_list#create"
end
describe "list#create (with attributes)" do
before do
Move.attribute(:move_index, Integer)
@game = Game.create
@move = @game.moves.create(:move_index => 0)
end
it_should_behave_like "embedded_list#create"
it "sets attributes on instance" do
@move.move_index.should == 0
end
end
describe "list#create (invalid)" do
before do
@game = Game.create
@game.moves.should_not_receive(:push)
@game.moves.should_not_receive(:reset)
@game.should_not_receive(:reload)
@game.should_not_receive(:save)
Move.attribute(:move_index, Integer)
Move.validates_presence_of(:move_index)
@move = @game.moves.create
end
it "returns instance" do
@move.should be_instance_of(Move)
end
it "is not persisted" do
@move.should_not be_persisted
end
it "assigns reference to parent" do
@move.parent_reference.should == @game
end
end
describe "list#destroy" do
before do
Move.attribute(:move_index, Integer)
@game = Game.create
@move1 = @game.moves.create(:move_index => 0)
@move2 = @game.moves.create(:move_index => 1)
end
it "should take multiple ids" do
@game.moves.destroy(@move1.id, @move2.id)
@game.moves.should be_empty
@game.reload
@game.moves.should be_empty
end
it "should take an array of ids" do
@game.moves.destroy([@move1.id, @move2.id])
@game.moves.should be_empty
@game.reload
@game.moves.should be_empty
end
it "should take a block to filter on" do
@game.moves.destroy { |move| move.move_index == 1 }
@game.moves.should == [@move1]
@game.reload
@game.moves.should == [@move1]
end
end
describe "list#each" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create(:moves => [@move1, @move2])
end
it "iterates through each instance" do
moves = []
@game.moves.each do |move|
moves << move
end
moves.should == [@move1, @move2]
end
end
describe "enumerating" do
before do
Move.attribute(:move_index, Integer)
@move1 = Move.new(:move_index => 0)
@move2 = Move.new(:move_index => 1)
@game = Game.create(:moves => [@move1, @move2])
end
it "works" do
@game.moves.select { |move| move.move_index > 0 }.should == [@move2]
@game.moves.reject { |move| move.move_index > 0 }.should == [@move1]
end
end
describe "list#include?" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create(:moves => [@move1])
end
it "returns true if instance in association" do
@game.moves.should include(@move1)
end
it "returns false if instance not in association" do
@game.moves.should_not include(@move2)
end
it "returns false for nil" do
@game.moves.should_not include(nil)
end
end
describe "list with block" do
before do
Move.attribute(:old, Boolean)
Game.embedded_list(:moves) do
def old
target.select { |move| move.old? }
end
end
@move_new = Move.create(:old => false)
@move_old = Move.create(:old => true)
@game = Game.create(:moves => [@move_new, @move_old])
end
it "extends block methods onto proxy" do
@game.moves.should respond_to(:old)
@game.moves.old.should == [@move_old]
end
end
describe "list extension with :extensions option" do
before do
old_module = Module.new do
def old
target.select { |m| m.old? }
end
end
recent_proc = Proc.new do
def recent
target.select { |m| !m.old? }
end
end
Move.attribute(:old, Boolean)
Game.embedded_list(:moves, :extensions => [old_module, recent_proc])
@move_new = Move.new(:old => false)
@move_old = Move.new(:old => true)
@game = Game.create(:moves => [@move_new, @move_old])
end
it "extends modules" do
@game.moves.should respond_to(:old)
@game.moves.old.should == [@move_old]
end
it "extends procs" do
@game.moves.should respond_to(:recent)
@game.moves.recent.should == [@move_new]
end
end
end
Ensuring that instances are marked persisted.
require 'helper'
describe Toy::List do
uses_constants('Game', 'Move')
before do
@list = Game.embedded_list(:moves)
end
let(:list) { @list }
it "has model" do
list.model.should == Game
end
it "has name" do
list.name.should == :moves
end
it "has type" do
list.type.should == Move
end
it "has key" do
list.key.should == :move_attributes
end
it "has instance_variable" do
list.instance_variable.should == :@_moves
end
it "adds list to model" do
Game.embedded_lists.keys.should include(:moves)
end
it "adds attribute to model" do
Game.attributes.keys.should include(:move_attributes)
end
it "adds reader method" do
Game.new.should respond_to(:moves)
end
it "adds writer method" do
Game.new.should respond_to(:moves=)
end
describe "#eql?" do
it "returns true if same class, model, and name" do
list.should eql(list)
end
it "returns false if not same class" do
list.should_not eql({})
end
it "returns false if not same model" do
list.should_not eql(Toy::List.new(Move, :moves))
end
it "returns false if not the same name" do
list.should_not eql(Toy::List.new(Game, :recent_moves))
end
end
describe "setting list type" do
before do
@list = Game.list(:recent_moves, Move)
end
let(:list) { @list }
it "uses type provided instead of inferring from name" do
list.type.should be(Move)
end
it "works properly when reading and writing" do
game = Game.create
move = Move.create
game.recent_moves = [move]
game.recent_moves.should == [move]
end
end
describe "list reader" do
before do
@move = Move.new
@game = Game.create(:move_attributes => [@move.attributes])
end
it "returns instances" do
@game.moves.should == [@move]
end
it "sets reference to parent for each instance" do
@game.moves.each do |move|
move.parent_reference.should == @game
end
end
end
describe "list writer (with instances)" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create(:moves => [@move2])
end
it "set attribute" do
@game.move_attributes.should == [@move2.attributes]
end
it "unmemoizes reader" do
@game.moves.should == [@move2]
@game.moves = [@move1]
@game.moves.should == [@move1]
end
it "sets reference to parent for each instance" do
@game.moves.each do |move|
move.parent_reference.should == @game
end
end
end
describe "list writer (with hashes)" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create(:moves => [@move2.attributes])
end
it "set attribute" do
@game.move_attributes.should == [@move2.attributes]
end
it "unmemoizes reader" do
@game.moves.should == [@move2]
@game.moves = [@move1.attributes]
@game.moves.should == [@move1]
end
it "sets reference to parent for each instance" do
@game.moves.each do |move|
move.parent_reference.should == @game
end
end
end
describe "list#reset" do
before do
@move = Move.new
@game = Game.create(:move_attributes => [@move.attributes])
end
it "unmemoizes the list" do
@game.moves.should == [@move]
@game.moves.reset
Move.should_receive(:load).and_return(@move)
@game.moves.should == [@move]
end
it "should be reset when owner is reloaded" do
@game.moves.should == [@move]
@game.reload
Move.should_receive(:load).and_return(@move)
@game.moves.should == [@move]
end
end
describe "list#push" do
before do
@move = Move.new
@game = Game.create
@game.moves.push(@move)
end
it "adds attributes to attribute" do
@game.move_attributes.should == [@move.attributes]
end
it "raises error if wrong type assigned" do
lambda {
@game.moves.push(Game.new)
}.should raise_error(ArgumentError, "Move expected, but was Game")
end
it "sets reference to parent" do
# right now pushing a move adds a different instance to the proxy
# so i'm checking that it adds reference to both
@game.moves.each do |move|
move.parent_reference.should == @game
end
@move.parent_reference.should == @game
end
it "marks instances as persisted when parent saved" do
@game.save
@game.moves.each do |move|
move.should be_persisted
end
end
it "works with hashes" do
@game.moves = []
move = Move.new
@game.moves.push(move.attributes)
@game.moves.should == [move]
end
end
describe "list#<<" do
before do
@move = Move.new
@game = Game.create
@game.moves << @move
end
it "adds attributes to attribute" do
@game.move_attributes.should == [@move.attributes]
end
it "raises error if wrong type assigned" do
lambda {
@game.moves << Game.new
}.should raise_error(ArgumentError, "Move expected, but was Game")
end
it "sets reference to parent" do
# right now pushing a move adds a different instance to the proxy
# so i'm checking that it adds reference to both
@game.moves.each do |move|
move.parent_reference.should == @game
end
@move.parent_reference.should == @game
end
it "marks instances as persisted when parent saved" do
@game.save
@game.moves.each do |move|
move.should be_persisted
end
end
it "works with hashes" do
@game.moves = []
move = Move.new
@game.moves << move.attributes
@game.moves.should == [move]
end
end
describe "list#concat" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create
@game.moves.concat(@move1, @move2)
end
it "adds attributes to attribute" do
@game.move_attributes.should == [@move1.attributes, @move2.attributes]
end
it "raises error if wrong type assigned" do
lambda {
@game.moves.concat(Game.new, Move.new)
}.should raise_error(ArgumentError, "Move expected, but was Game")
end
it "sets reference to parent" do
# right now pushing a move adds a different instance to the proxy
# so i'm checking that it adds reference to both
@game.moves.each do |move|
move.parent_reference.should == @game
end
@move1.parent_reference.should == @game
@move2.parent_reference.should == @game
end
it "marks instances as persisted when parent saved" do
@game.save
@game.moves.each do |move|
move.should be_persisted
end
end
it "works with hashes" do
@game.moves = []
move = Move.new
@game.moves.concat(move.attributes)
@game.moves.should == [move]
end
end
describe "list#concat (with array)" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create
@game.moves.concat([@move1, @move2])
end
it "adds id to attribute" do
@game.move_attributes.should == [@move1.attributes, @move2.attributes]
end
it "raises error if wrong type assigned" do
lambda {
@game.moves.concat([Game.new, Move.new])
}.should raise_error(ArgumentError, "Move expected, but was Game")
end
it "sets reference to parent" do
# right now pushing a move adds a different instance to the proxy
# so i'm checking that it adds reference to both
@game.moves.each do |move|
move.parent_reference.should == @game
end
@move1.parent_reference.should == @game
@move2.parent_reference.should == @game
end
it "marks instances as persisted when parent saved" do
@game.save
@game.moves.each do |move|
move.should be_persisted
end
end
end
shared_examples_for("embedded_list#create") do
it "creates instance" do
@move.should be_persisted
end
it "assigns reference to parent" do
@move.parent_reference.should == @game
end
it "assigns id" do
@move.id.should_not be_nil
end
it "adds attributes to attribute" do
@game.move_attributes.should == [@move.attributes]
end
it "adds instance to reader" do
@game.moves.should == [@move]
end
it "marks instance as persisted" do
@move.should be_persisted
end
end
describe "list#create" do
before do
@game = Game.create
@move = @game.moves.create
end
it_should_behave_like "embedded_list#create"
end
describe "list#create (with attributes)" do
before do
Move.attribute(:move_index, Integer)
@game = Game.create
@move = @game.moves.create(:move_index => 0)
end
it_should_behave_like "embedded_list#create"
it "sets attributes on instance" do
@move.move_index.should == 0
end
end
describe "list#create (invalid)" do
before do
@game = Game.create
@game.moves.should_not_receive(:push)
@game.moves.should_not_receive(:reset)
@game.should_not_receive(:reload)
@game.should_not_receive(:save)
Move.attribute(:move_index, Integer)
Move.validates_presence_of(:move_index)
@move = @game.moves.create
end
it "returns instance" do
@move.should be_instance_of(Move)
end
it "is not persisted" do
@move.should_not be_persisted
end
it "assigns reference to parent" do
@move.parent_reference.should == @game
end
end
describe "list#destroy" do
before do
Move.attribute(:move_index, Integer)
@game = Game.create
@move1 = @game.moves.create(:move_index => 0)
@move2 = @game.moves.create(:move_index => 1)
end
it "should take multiple ids" do
@game.moves.destroy(@move1.id, @move2.id)
@game.moves.should be_empty
@game.reload
@game.moves.should be_empty
end
it "should take an array of ids" do
@game.moves.destroy([@move1.id, @move2.id])
@game.moves.should be_empty
@game.reload
@game.moves.should be_empty
end
it "should take a block to filter on" do
@game.moves.destroy { |move| move.move_index == 1 }
@game.moves.should == [@move1]
@game.reload
@game.moves.should == [@move1]
end
end
describe "list#each" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create(:moves => [@move1, @move2])
end
it "iterates through each instance" do
moves = []
@game.moves.each do |move|
moves << move
end
moves.should == [@move1, @move2]
end
end
describe "enumerating" do
before do
Move.attribute(:move_index, Integer)
@move1 = Move.new(:move_index => 0)
@move2 = Move.new(:move_index => 1)
@game = Game.create(:moves => [@move1, @move2])
end
it "works" do
@game.moves.select { |move| move.move_index > 0 }.should == [@move2]
@game.moves.reject { |move| move.move_index > 0 }.should == [@move1]
end
end
describe "list#include?" do
before do
@move1 = Move.new
@move2 = Move.new
@game = Game.create(:moves => [@move1])
end
it "returns true if instance in association" do
@game.moves.should include(@move1)
end
it "returns false if instance not in association" do
@game.moves.should_not include(@move2)
end
it "returns false for nil" do
@game.moves.should_not include(nil)
end
end
describe "list with block" do
before do
Move.attribute(:old, Boolean)
Game.embedded_list(:moves) do
def old
target.select { |move| move.old? }
end
end
@move_new = Move.create(:old => false)
@move_old = Move.create(:old => true)
@game = Game.create(:moves => [@move_new, @move_old])
end
it "extends block methods onto proxy" do
@game.moves.should respond_to(:old)
@game.moves.old.should == [@move_old]
end
end
describe "list extension with :extensions option" do
before do
old_module = Module.new do
def old
target.select { |m| m.old? }
end
end
recent_proc = Proc.new do
def recent
target.select { |m| !m.old? }
end
end
Move.attribute(:old, Boolean)
Game.embedded_list(:moves, :extensions => [old_module, recent_proc])
@move_new = Move.new(:old => false)
@move_old = Move.new(:old => true)
@game = Game.create(:moves => [@move_new, @move_old])
end
it "extends modules" do
@game.moves.should respond_to(:old)
@game.moves.old.should == [@move_old]
end
it "extends procs" do
@game.moves.should respond_to(:recent)
@game.moves.recent.should == [@move_new]
end
end
end |
# -*- encoding: utf-8 -*-
require 'spec_helper'
describe TTY::Table, 'options' do
let(:rows) { [['a1', 'a2'], ['b1', 'b2']] }
let(:widths) { [] }
let(:aligns) { [] }
let(:object) {
described_class.new rows, :column_widths => widths, :column_aligns => aligns
}
subject { object.to_s; object.renderer }
its(:column_widths) { should == [2,2] }
its(:column_aligns) { should == [] }
context '#column_widths' do
let(:widths) { [10, 10] }
its(:column_widths) { should == widths }
end
context '#column_aligns' do
let(:aligns) { [:center, :center] }
its(:column_aligns) { should == aligns }
end
end
Fix test failing on some rubies.
# -*- encoding: utf-8 -*-
require 'spec_helper'
describe TTY::Table, 'options' do
let(:rows) { [['a1', 'a2'], ['b1', 'b2']] }
let(:widths) { [] }
let(:aligns) { [] }
let(:object) {
described_class.new rows,
:column_widths => widths,
:column_aligns => aligns,
:renderer => :basic
}
subject { object.to_s; object.renderer }
its(:column_widths) { should == [2,2] }
its(:column_aligns) { should == [] }
context '#column_widths' do
let(:widths) { [10, 10] }
its(:column_widths) { should == widths }
end
context '#column_aligns' do
let(:aligns) { [:center, :center] }
its(:column_aligns) { should == aligns }
end
end
|
require 'spec_helper'
SimpleCov.command_name('Example') unless RUBY_VERSION.to_s < '1.9.0'
describe 'Example, Unit' do
let(:clazz) { CukeModeler::Example }
it_should_behave_like 'a feature element'
it_should_behave_like 'a nested element'
it_should_behave_like 'a tagged element'
it_should_behave_like 'a bare bones element'
it_should_behave_like 'a prepopulated element'
it_should_behave_like 'a sourced element'
it_should_behave_like 'a containing element'
it_should_behave_like 'a raw element'
it 'can be parsed from stand alone text' do
source = ['Examples: test example',
'|param| ',
'|value|']
source = source.join("\n")
expect { @element = clazz.new(source) }.to_not raise_error
# Sanity check in case instantiation failed in a non-explosive manner
@element.name.should == 'test example'
end
# todo - add more tests like this to the 'barebones' test set
it 'can be instantiated with the minimum viable Gherkin' do
source = ['Examples:']
source = source.join("\n")
expect { @element = clazz.new(source) }.to_not raise_error
end
it 'provides a descriptive filename when being parsed from stand alone text' do
source = 'bad example text'
expect { clazz.new(source) }.to raise_error(/'cuke_modeler_stand_alone_example\.feature'/)
end
it 'stores the original data generated by the parsing adapter', :gherkin4 => true do
example = clazz.new("Examples: test example\n|param|\n|value|")
raw_data = example.raw_element
expect(raw_data.keys).to match_array([:type, :tags, :location, :keyword, :name, :tableHeader, :tableBody])
expect(raw_data[:type]).to eq(:Examples)
end
it 'stores the original data generated by the parsing adapter', :gherkin3 => true do
example = clazz.new("Examples: test example\n|param|\n|value|")
raw_data = example.raw_element
expect(raw_data.keys).to match_array([:type, :tags, :location, :keyword, :name, :tableHeader, :tableBody])
expect(raw_data[:type]).to eq(:Examples)
end
it 'stores the original data generated by the parsing adapter', :gherkin2 => true do
example = clazz.new("Examples: test example\n|param|\n|value|")
raw_data = example.raw_element
expect(raw_data.keys).to match_array(['keyword', 'name', 'line', 'description', 'id', 'rows'])
expect(raw_data['keyword']).to eq('Examples')
end
before(:each) do
@example = clazz.new
end
it 'has parameters - #parameters' do
@example.should respond_to(:parameters)
end
it 'can get and set its parameters - #parameters, #parameters=' do
@example.parameters = :some_parameters
@example.parameters.should == :some_parameters
@example.parameters = :some_other_parameters
@example.parameters.should == :some_other_parameters
end
it 'starts with no parameters' do
@example.parameters.should == []
end
it 'has rows - #rows' do
@example.should respond_to(:rows)
end
#todo - remove once Hash rows are no longer supported
it 'can get and set its rows - #rows, #rows=' do
@example.rows = :some_rows
@example.rows.should == :some_rows
@example.rows = :some_other_rows
@example.rows.should == :some_other_rows
end
#todo - remove once Hash rows are no longer supported
it 'starts with no rows' do
@example.rows.should == []
end
#todo - remove once Hash rows are no longer supported
it 'stores its rows as an nested array of hashes' do
source = "Examples:\n|param1|param2|\n|value1|value2|"
example = CukeModeler::Example.new(source)
rows = example.rows
rows.is_a?(Array).should be_true
rows.empty?.should be_false
rows.each { |row| row.is_a?(Hash).should be_true }
end
it 'does not include the parameter row as a row' do
source = "Examples:\n|param1|param2|\n|value1|value2|\n|value3|value4|"
example = CukeModeler::Example.new(source)
rows = example.rows
rows.should == [{'param1' => 'value1', 'param2' => 'value2'}, {'param1' => 'value3', 'param2' => 'value4'}]
end
it 'has row elements - #row_elements' do
@example.should respond_to(:row_elements)
end
it 'can get and set its row elements - #row_elements, #row_elements=' do
@example.row_elements = :some_row_elements
@example.row_elements.should == :some_row_elements
@example.row_elements = :some_other_row_elements
@example.row_elements.should == :some_other_row_elements
end
it 'starts with no row elements' do
@example.row_elements.should == []
end
context '#add_row' do
it 'can add a new example row' do
clazz.new.should respond_to(:add_row)
end
it 'can add a new row as a hash' do
source = "Examples:\n|param1|param2|\n|value1|value2|"
example = CukeModeler::Example.new(source)
new_row = {'param1' => 'value3', 'param2' => 'value4'}
example.add_row(new_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}, {'param1' => 'value3', 'param2' => 'value4'}]
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2'], ['value3', 'value4']]
end
it 'can add a new row as an array' do
source = "Examples:\n|param1|param2|\n|value1|value2|"
example = CukeModeler::Example.new(source)
new_row = ['value3', 'value4']
example.add_row(new_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}, {'param1' => 'value3', 'param2' => 'value4'}]
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2'], ['value3', 'value4']]
end
it 'can only use a Hash or an Array to add a new row' do
expect { @example.add_row({}) }.to_not raise_error
expect { @example.add_row([]) }.to_not raise_error
expect { @example.add_row(:a_row) }.to raise_error(ArgumentError)
end
it 'trims whitespace from added rows' do
source = "Examples:\n|param1|param2|\n|value1|value2|"
example = CukeModeler::Example.new(source)
hash_row = {'param1' => 'value3 ', 'param2' => ' value4'}
array_row = ['value5', ' value6 ']
example.add_row(hash_row)
example.add_row(array_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}, {'param1' => 'value3', 'param2' => 'value4'}, {'param1' => 'value5', 'param2' => 'value6'}]
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2'], ['value3', 'value4'], ['value5', 'value6']]
end
end
context '#remove_row' do
it 'can remove an existing example row' do
clazz.new.should respond_to(:remove_row)
end
it 'can remove an existing row as a hash' do
source = "Examples:\n|param1|param2|\n|value1|value2|\n|value3|value4|"
example = CukeModeler::Example.new(source)
old_row = {'param1' => 'value3', 'param2' => 'value4'}
example.remove_row(old_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}]
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2']]
end
it 'can remove an existing row as an array' do
source = "Examples:\n|param1|param2|\n|value1|value2|\n|value3|value4|"
example = CukeModeler::Example.new(source)
old_row = ['value3', 'value4']
example.remove_row(old_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}]
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2']]
end
it 'can only use a Hash or an Array to remove an existing row' do
expect { @example.remove_row({}) }.to_not raise_error
expect { @example.remove_row([]) }.to_not raise_error
expect { @example.remove_row(:a_row) }.to raise_error(ArgumentError)
end
it 'trims whitespace from removed rows' do
source = "Examples:\n|param1|param2|\n|value1|value2|\n|value3|value4|\n|value5|value6|"
example = CukeModeler::Example.new(source)
hash_row = {'param1' => 'value3 ', 'param2' => ' value4'}
array_row = ['value5', ' value6 ']
example.remove_row(hash_row)
example.remove_row(array_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}]
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2']]
end
end
context 'example output edge cases' do
it 'is a String' do
@example.to_s.should be_a(String)
end
it 'can output an empty example' do
expect { @example.to_s }.to_not raise_error
end
it 'can output an example that has only a name' do
@example.name = 'a name'
expect { @example.to_s }.to_not raise_error
end
it 'can output an example that has only a description' do
@example.description_text = 'a description'
expect { @example.to_s }.to_not raise_error
end
it 'can output an example that has only a tags' do
@example.tags = ['a tag']
expect { @example.to_s }.to_not raise_error
end
#todo - remove once Hash rows are no longer supported
it 'can output an example that only has parameters' do
@example.parameters = ['param1']
expect { @example.to_s }.to_not raise_error
end
#todo - remove once Hash rows are no longer supported
it 'can output an example that only has rows' do
@example.rows = [{:param1 => 'row1'}]
expect { @example.to_s }.to_not raise_error
end
end
end
Test fixes.
Broke a generic test into several version specific tests because
different versions of Gherkin have different valid test data.
require 'spec_helper'
SimpleCov.command_name('Example') unless RUBY_VERSION.to_s < '1.9.0'
# Unit tests for CukeModeler::Example. Written in the old RSpec 'should'
# expectation syntax, consistent with the rest of this suite. Tests tagged
# :gherkin2/:gherkin3/:gherkin4 are version-specific variants, because the
# different Gherkin parser versions accept different minimal source and emit
# differently shaped raw data.
describe 'Example, Unit' do
let(:clazz) { CukeModeler::Example }
# Shared example groups covering behavior common to all model elements.
it_should_behave_like 'a feature element'
it_should_behave_like 'a nested element'
it_should_behave_like 'a tagged element'
it_should_behave_like 'a bare bones element'
it_should_behave_like 'a prepopulated element'
it_should_behave_like 'a sourced element'
it_should_behave_like 'a containing element'
it_should_behave_like 'a raw element'
it 'can be parsed from stand alone text' do
source = ['Examples: test example',
'|param| ',
'|value|']
source = source.join("\n")
expect { @element = clazz.new(source) }.to_not raise_error
# Sanity check in case instantiation failed in a non-explosive manner
@element.name.should == 'test example'
end
# todo - add more tests like this to the 'barebones' test set
# Gherkin 4 accepts a bare 'Examples:' line with no table at all.
it 'can be instantiated with the minimum viable Gherkin', :gherkin4 => true do
source = ['Examples:']
source = source.join("\n")
expect { @element = clazz.new(source) }.to_not raise_error
end
# todo - add more tests like this to the 'barebones' test set
# Gherkin 3 requires both a parameter row and at least one value row.
it 'can be instantiated with the minimum viable Gherkin', :gherkin3 => true do
source = ['Examples:',
'|param|',
'|value|']
source = source.join("\n")
expect { @element = clazz.new(source) }.to_not raise_error
end
# todo - add more tests like this to the 'barebones' test set
# Gherkin 2 requires only the parameter row.
it 'can be instantiated with the minimum viable Gherkin', :gherkin2 => true do
source = ['Examples:',
'|param|']
source = source.join("\n")
expect { @element = clazz.new(source) }.to_not raise_error
end
it 'provides a descriptive filename when being parsed from stand alone text' do
source = 'bad example text'
expect { clazz.new(source) }.to raise_error(/'cuke_modeler_stand_alone_example\.feature'/)
end
# Raw-data shape differs by parser version: gherkin 3/4 emit a symbol-keyed
# hash, gherkin 2 emits a string-keyed hash.
it 'stores the original data generated by the parsing adapter', :gherkin4 => true do
example = clazz.new("Examples: test example\n|param|\n|value|")
raw_data = example.raw_element
expect(raw_data.keys).to match_array([:type, :tags, :location, :keyword, :name, :tableHeader, :tableBody])
expect(raw_data[:type]).to eq(:Examples)
end
it 'stores the original data generated by the parsing adapter', :gherkin3 => true do
example = clazz.new("Examples: test example\n|param|\n|value|")
raw_data = example.raw_element
expect(raw_data.keys).to match_array([:type, :tags, :location, :keyword, :name, :tableHeader, :tableBody])
expect(raw_data[:type]).to eq(:Examples)
end
it 'stores the original data generated by the parsing adapter', :gherkin2 => true do
example = clazz.new("Examples: test example\n|param|\n|value|")
raw_data = example.raw_element
expect(raw_data.keys).to match_array(['keyword', 'name', 'line', 'description', 'id', 'rows'])
expect(raw_data['keyword']).to eq('Examples')
end
# Fresh, empty model for each of the attribute-level tests below.
before(:each) do
@example = clazz.new
end
it 'has parameters - #parameters' do
@example.should respond_to(:parameters)
end
it 'can get and set its parameters - #parameters, #parameters=' do
@example.parameters = :some_parameters
@example.parameters.should == :some_parameters
@example.parameters = :some_other_parameters
@example.parameters.should == :some_other_parameters
end
it 'starts with no parameters' do
@example.parameters.should == []
end
it 'has rows - #rows' do
@example.should respond_to(:rows)
end
#todo - remove once Hash rows are no longer supported
it 'can get and set its rows - #rows, #rows=' do
@example.rows = :some_rows
@example.rows.should == :some_rows
@example.rows = :some_other_rows
@example.rows.should == :some_other_rows
end
#todo - remove once Hash rows are no longer supported
it 'starts with no rows' do
@example.rows.should == []
end
#todo - remove once Hash rows are no longer supported
it 'stores its rows as an nested array of hashes' do
source = "Examples:\n|param1|param2|\n|value1|value2|"
example = CukeModeler::Example.new(source)
rows = example.rows
rows.is_a?(Array).should be_true
rows.empty?.should be_false
rows.each { |row| row.is_a?(Hash).should be_true }
end
it 'does not include the parameter row as a row' do
source = "Examples:\n|param1|param2|\n|value1|value2|\n|value3|value4|"
example = CukeModeler::Example.new(source)
rows = example.rows
rows.should == [{'param1' => 'value1', 'param2' => 'value2'}, {'param1' => 'value3', 'param2' => 'value4'}]
end
it 'has row elements - #row_elements' do
@example.should respond_to(:row_elements)
end
it 'can get and set its row elements - #row_elements, #row_elements=' do
@example.row_elements = :some_row_elements
@example.row_elements.should == :some_row_elements
@example.row_elements = :some_other_row_elements
@example.row_elements.should == :some_other_row_elements
end
it 'starts with no row elements' do
@example.row_elements.should == []
end
context '#add_row' do
it 'can add a new example row' do
clazz.new.should respond_to(:add_row)
end
it 'can add a new row as a hash' do
source = "Examples:\n|param1|param2|\n|value1|value2|"
example = CukeModeler::Example.new(source)
new_row = {'param1' => 'value3', 'param2' => 'value4'}
example.add_row(new_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}, {'param1' => 'value3', 'param2' => 'value4'}]
# Slicing from index 1 skips the parameter (header) row element so only
# value rows are compared.
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2'], ['value3', 'value4']]
end
it 'can add a new row as an array' do
source = "Examples:\n|param1|param2|\n|value1|value2|"
example = CukeModeler::Example.new(source)
new_row = ['value3', 'value4']
example.add_row(new_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}, {'param1' => 'value3', 'param2' => 'value4'}]
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2'], ['value3', 'value4']]
end
it 'can only use a Hash or an Array to add a new row' do
expect { @example.add_row({}) }.to_not raise_error
expect { @example.add_row([]) }.to_not raise_error
expect { @example.add_row(:a_row) }.to raise_error(ArgumentError)
end
it 'trims whitespace from added rows' do
source = "Examples:\n|param1|param2|\n|value1|value2|"
example = CukeModeler::Example.new(source)
# Deliberately padded values; the model is expected to store them stripped.
hash_row = {'param1' => 'value3 ', 'param2' => ' value4'}
array_row = ['value5', ' value6 ']
example.add_row(hash_row)
example.add_row(array_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}, {'param1' => 'value3', 'param2' => 'value4'}, {'param1' => 'value5', 'param2' => 'value6'}]
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2'], ['value3', 'value4'], ['value5', 'value6']]
end
end
context '#remove_row' do
it 'can remove an existing example row' do
clazz.new.should respond_to(:remove_row)
end
it 'can remove an existing row as a hash' do
source = "Examples:\n|param1|param2|\n|value1|value2|\n|value3|value4|"
example = CukeModeler::Example.new(source)
old_row = {'param1' => 'value3', 'param2' => 'value4'}
example.remove_row(old_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}]
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2']]
end
it 'can remove an existing row as an array' do
source = "Examples:\n|param1|param2|\n|value1|value2|\n|value3|value4|"
example = CukeModeler::Example.new(source)
old_row = ['value3', 'value4']
example.remove_row(old_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}]
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2']]
end
it 'can only use a Hash or an Array to remove an existing row' do
expect { @example.remove_row({}) }.to_not raise_error
expect { @example.remove_row([]) }.to_not raise_error
expect { @example.remove_row(:a_row) }.to raise_error(ArgumentError)
end
it 'trims whitespace from removed rows' do
source = "Examples:\n|param1|param2|\n|value1|value2|\n|value3|value4|\n|value5|value6|"
example = CukeModeler::Example.new(source)
# Padded values should still match (and remove) their stored, stripped rows.
hash_row = {'param1' => 'value3 ', 'param2' => ' value4'}
array_row = ['value5', ' value6 ']
example.remove_row(hash_row)
example.remove_row(array_row)
#todo - remove once Hash rows are no longer supported
example.rows.should == [{'param1' => 'value1', 'param2' => 'value2'}]
example.row_elements.collect { |row| row.cells }[1..example.row_elements.count].should == [['value1', 'value2']]
end
end
# Output should never raise, no matter how sparsely the model is populated.
context 'example output edge cases' do
it 'is a String' do
@example.to_s.should be_a(String)
end
it 'can output an empty example' do
expect { @example.to_s }.to_not raise_error
end
it 'can output an example that has only a name' do
@example.name = 'a name'
expect { @example.to_s }.to_not raise_error
end
it 'can output an example that has only a description' do
@example.description_text = 'a description'
expect { @example.to_s }.to_not raise_error
end
it 'can output an example that has only a tags' do
@example.tags = ['a tag']
expect { @example.to_s }.to_not raise_error
end
#todo - remove once Hash rows are no longer supported
it 'can output an example that only has parameters' do
@example.parameters = ['param1']
expect { @example.to_s }.to_not raise_error
end
#todo - remove once Hash rows are no longer supported
it 'can output an example that only has rows' do
@example.rows = [{:param1 => 'row1'}]
expect { @example.to_s }.to_not raise_error
end
end
end
|
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
require File.join(File.dirname(__FILE__), '../spec_helper')
# Specs for Astute::Orchestrator. MCollective RPC clients are replaced with
# Mocha mocks (via the mock_rpcclient/mock_mc_result helpers from SpecHelpers),
# so no real agents are contacted. Many tests depend on the exact order of
# stubbed return values (`.returns(...).then.returns(...)`) — treat statement
# order as significant.
describe Astute::Orchestrator do
include SpecHelpers
before(:each) do
@orchestrator = Astute::Orchestrator.new
# Reporter double that silently accepts any call unless a test sets an
# explicit expectation on it.
@reporter = mock('reporter')
@reporter.stub_everything
end
describe '#verify_networks' do
# Builds node fixtures: each node probes vlans 100 and 101 on eth0.
def make_nodes(*uids)
uids.map do |uid|
{
'uid' => uid.to_s,
'networks' => [
{
'iface' => 'eth0',
'vlans' => [100, 101]
}
]
}
end
end
it "must be able to complete" do
nodes = make_nodes(1, 2)
# Node 1 saw vlan 101 frames only from itself; node 2 saw them from both.
# Only vlans confirmed by a peer should survive in node 1's result.
res1 = {
:data => {
:uid => "1",
:neighbours => {
"eth0" => {
"100" => {"1" => ["eth0"], "2" => ["eth0"]},
"101" => {"1" => ["eth0"]}}}},
:sender => "1"}
res2 = {
:data => {
:uid => "2",
:neighbours => {
"eth0" => {
"100" => {"1" => ["eth0"], "2" => ["eth0"]},
"101" => {"1" => ["eth0"], "2" => ["eth0"]}
}}},
:sender => "2"}
valid_res = {:statuscode => 0, :sender => '1'}
mc_res1 = mock_mc_result(res1)
mc_res2 = mock_mc_result(res2)
mc_valid_res = mock_mc_result
rpcclient = mock_rpcclient(nodes)
rpcclient.expects(:get_probing_info).once.returns([mc_res1, mc_res2])
nodes.each do |node|
rpcclient.expects(:discover).with(:nodes => [node['uid']]).at_least_once
# Interfaces are sent as {iface => "vlan,vlan"} JSON, per node.
data_to_send = {}
node['networks'].each{ |net| data_to_send[net['iface']] = net['vlans'].join(",") }
rpcclient.expects(:start_frame_listeners).
with(:interfaces => data_to_send.to_json).
returns([mc_valid_res]*2)
rpcclient.expects(:send_probing_frames).
with(:interfaces => data_to_send.to_json).
returns([mc_valid_res]*2)
end
Astute::Network.expects(:check_dhcp)
Astute::MClient.any_instance.stubs(:rpcclient).returns(rpcclient)
res = @orchestrator.verify_networks(@reporter, 'task_uuid', nodes)
expected = {"nodes" => [{"networks" => [{"iface"=>"eth0", "vlans"=>[100]}], "uid"=>"1"},
{"networks"=>[{"iface"=>"eth0", "vlans"=>[100, 101]}], "uid"=>"2"}]}
res.should eql(expected)
end
it "returns error if nodes list is empty" do
res = @orchestrator.verify_networks(@reporter, 'task_uuid', [])
res.should eql({'status' => 'error', 'error' => "Network verification requires a minimum of two nodes."})
end
it "returns all vlans passed if only one node provided" do
# With a single node there is no peer to verify against, so verification
# is skipped and the input vlans are echoed back.
nodes = make_nodes(1)
res = @orchestrator.verify_networks(@reporter, 'task_uuid', nodes)
expected = {"nodes" => [{"uid"=>"1", "networks" => [{"iface"=>"eth0", "vlans"=>[100, 101]}]}]}
res.should eql(expected)
end
end
it "must be able to return node type" do
nodes = [{'uid' => '1'}]
res = {:data => {:node_type => 'target'},
:sender=>"1"}
mc_res = mock_mc_result(res)
mc_timeout = 5
rpcclient = mock_rpcclient(nodes, mc_timeout)
rpcclient.expects(:get_type).once.returns([mc_res])
types = @orchestrator.node_type(@reporter, 'task_uuid', nodes, mc_timeout)
types.should eql([{"node_type"=>"target", "uid"=>"1"}])
end
it "in remove_nodes, it returns empty list if nodes are not provided" do
res = @orchestrator.remove_nodes(@reporter, 'task_uuid', [])
res.should eql({'nodes' => []})
end
it "remove_nodes cleans nodes and reboots them" do
# Node 1 reboots cleanly; node 2 reports an erase/reboot failure.
removed_hash = {:sender => '1',
:data => {:rebooted => true}}
error_hash = {:sender => '2',
:data => {:rebooted => false, :error_msg => 'Could not reboot'}}
nodes = [{'uid' => 1}, {'uid' => 2}]
rpcclient = mock_rpcclient
mc_removed_res = mock_mc_result(removed_hash)
mc_error_res = mock_mc_result(error_hash)
rpcclient.expects(:erase_node).at_least_once.with(:reboot => true).returns([mc_removed_res, mc_error_res])
res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
res.should eql({'nodes' => [{'uid' => '1'}], 'status' => 'error',
'error_nodes' => [{"uid"=>"2", "error"=>"RPC method 'erase_node' failed "\
"with message: Could not reboot"}]})
end
it "it calls deploy method with valid arguments" do
nodes = [{'uid' => 1}]
attrs = {'a' => 'b'}
# Integer uid from input is expected to be stringified before deployment.
Astute::DeploymentEngine::NailyFact.any_instance.expects(:deploy).
with([{'uid' => '1'}], attrs)
@orchestrator.deploy(@reporter, 'task_uuid', nodes, attrs)
end
it "deploy method raises error if nodes list is empty" do
expect {@orchestrator.deploy(@reporter, 'task_uuid', [], {})}.
to raise_error(/Nodes to deploy are not provided!/)
end
it "remove_nodes try to call MCAgent multiple times on error" do
removed_hash = {:sender => '1',
:data => {:rebooted => true}}
error_hash = {:sender => '2',
:data => {:rebooted => false, :error_msg => 'Could not reboot'}}
nodes = [{'uid' => 1}, {'uid' => 2}]
rpcclient = mock_rpcclient(nodes)
mc_removed_res = mock_mc_result(removed_hash)
mc_error_res = mock_mc_result(error_hash)
retries = Astute.config[:MC_RETRIES]
# Sanity-pin the configured retry count this test's expectations rely on.
retries.should == 5
# Only the failing node ('2') should be re-discovered and retried; the
# first erase_node call covers both nodes, then one retry per attempt.
rpcclient.expects(:discover).with(:nodes => ['2']).times(retries)
rpcclient.expects(:erase_node).times(retries + 1).with(:reboot => true).returns([mc_removed_res, mc_error_res]).then.returns([mc_error_res])
res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
res.should eql({'nodes' => [{'uid' => '1'}], 'status' => 'error',
'error_nodes' => [{"uid"=>"2", "error"=>"RPC method 'erase_node' failed "\
"with message: Could not reboot"}]})
end
it "remove_nodes try to call MCAgent multiple times on no response" do
# Nodes 2 and 3 answer on successive attempts; node 1 never answers and
# should be reported as inaccessible after the retries are exhausted.
removed_hash = {:sender => '2', :data => {:rebooted => true}}
then_removed_hash = {:sender => '3', :data => {:rebooted => true}}
nodes = [{'uid' => 1}, {'uid' => 2}, {'uid' => 3}]
rpcclient = mock_rpcclient(nodes)
mc_removed_res = mock_mc_result(removed_hash)
mc_then_removed_res = mock_mc_result(then_removed_hash)
retries = Astute.config[:MC_RETRIES]
rpcclient.expects(:discover).with(:nodes => %w(1 3)).times(1)
rpcclient.expects(:discover).with(:nodes => %w(1)).times(retries - 1)
rpcclient.expects(:erase_node).times(retries + 1).with(:reboot => true).
returns([mc_removed_res]).then.returns([mc_then_removed_res]).then.returns([])
res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
# Sort for a deterministic comparison; result order is not guaranteed.
res['nodes'] = res['nodes'].sort_by{|n| n['uid'] }
res.should eql({'nodes' => [{'uid' => '2'}, {'uid' => '3'}],
'inaccessible_nodes' => [{'uid'=>'1', 'error'=>'Node not answered by RPC.'}]})
end
it "remove_nodes and returns early if retries were successful" do
removed_hash = {:sender => '1', :data => {:rebooted => true}}
then_removed_hash = {:sender => '2', :data => {:rebooted => true}}
nodes = [{'uid' => 1}, {'uid' => 2}]
rpcclient = mock_rpcclient(nodes)
mc_removed_res = mock_mc_result(removed_hash)
mc_then_removed_res = mock_mc_result(then_removed_hash)
retries = Astute.config[:MC_RETRIES]
# Guard: the early-return behavior under test is only meaningful if the
# configured retry budget exceeds the two attempts actually needed.
retries.should_not == 2
rpcclient.expects(:discover).with(:nodes => %w(2)).times(1)
rpcclient.expects(:erase_node).times(2).with(:reboot => true).
returns([mc_removed_res]).then.returns([mc_then_removed_res])
res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
res['nodes'] = res['nodes'].sort_by{|n| n['uid'] }
res.should eql({'nodes' => [{'uid' => '1'}, {'uid' => '2'}]})
end
# Pending spec (no body): intentionally marked as not-yet-implemented.
it "remove_nodes do not fail if any of nodes failed"
# Shared, frozen provisioning fixture (Cobbler engine credentials plus one
# fully described node) used by the provisioning specs below.
before(:all) do
@data = {
"engine"=>{
"url"=>"http://localhost/cobbler_api",
"username"=>"cobbler",
"password"=>"cobbler"
},
"task_uuid"=>"a5c44b9a-285a-4a0c-ae65-2ed6b3d250f4",
"nodes" => [
{
'uid' => '1',
'profile' => 'centos-x86_64',
"name"=>"controller-1",
'power_type' => 'ssh',
'power_user' => 'root',
'power_pass' => '/root/.ssh/bootstrap.rsa',
'power-address' => '1.2.3.5',
'hostname' => 'name.domain.tld',
'name_servers' => '1.2.3.4 1.2.3.100',
'name_servers_search' => 'some.domain.tld domain.tld',
'netboot_enabled' => '1',
'ks_meta' => 'some_param=1 another_param=2',
'interfaces' => {
'eth0' => {
'mac_address' => '00:00:00:00:00:00',
'static' => '1',
'netmask' => '255.255.255.0',
'ip_address' => '1.2.3.5',
'dns_name' => 'node.mirantis.net',
},
'eth1' => {
'mac_address' => '00:00:00:00:00:01',
'static' => '0',
'netmask' => '255.255.255.0',
'ip_address' => '1.2.3.6',
}
},
'interfaces_extra' => {
'eth0' => {
'peerdns' => 'no',
'onboot' => 'yes',
},
'eth1' => {
'peerdns' => 'no',
'onboot' => 'yes',
}
}
}
]
}.freeze
end
describe '#fast_provision' do
context 'cobler cases' do
it "raise error if cobler settings empty" do
expect {@orchestrator.fast_provision(@reporter, {}, @data['nodes'])}.
to raise_error(StopIteration)
end
end
context 'node state cases' do
before(:each) do
# Stub the Cobbler XML-RPC endpoint: any call is accepted, and a login
# with the fixture credentials yields a token.
remote = mock() do
stubs(:call)
stubs(:call).with('login', 'cobbler', 'cobbler').returns('remotetoken')
end
# NOTE(review): the real XMLRPC::Client constant is saved in @tmp before
# being replaced with a mock, but no visible after-hook restores it —
# confirm cleanup happens elsewhere.
@tmp = XMLRPC::Client
XMLRPC::Client = mock() do
stubs(:new).returns(remote)
end
end
it "raises error if nodes list is empty" do
expect {@orchestrator.fast_provision(@reporter, @data['engine'], {})}.
to raise_error(/Nodes to provision are not provided!/)
end
it "try to reboot nodes from list" do
Astute::Provision::Cobbler.any_instance do
expects(:power_reboot).with('controller-1')
end
@orchestrator.stubs(:check_reboot_nodes).returns([])
@orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes'])
end
# Applies to the remaining tests in this context: power_reboot returns a
# Cobbler task id rather than performing a real reboot.
before(:each) { Astute::Provision::Cobbler.any_instance.stubs(:power_reboot).returns(333) }
context 'node reboot success' do
before(:each) { Astute::Provision::Cobbler.any_instance.stubs(:event_status).
returns([Time.now.to_f, 'controller-1', 'complete'])}
it "does not find failed nodes" do
Astute::Provision::Cobbler.any_instance.stubs(:event_status).
returns([Time.now.to_f, 'controller-1', 'complete'])
@orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes'])
end
it "report about success" do
@reporter.expects(:report).with({'status' => 'ready', 'progress' => 100}).returns(true)
@orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes'])
end
it "sync engine state" do
Astute::Provision::Cobbler.any_instance do
expects(:sync).once
end
@orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes'])
end
end
context 'node reboot fail' do
before(:each) { Astute::Provision::Cobbler.any_instance.stubs(:event_status).
returns([Time.now.to_f, 'controller-1', 'failed'])}
it "should sync engine state" do
Astute::Provision::Cobbler.any_instance do
expects(:sync).once
end
# fast_provision is expected to raise here; the rescue keeps the test
# focused on the sync expectation only.
begin
@orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes'])
rescue
end
end
it "raise error if failed node find" do
expect {@orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes'])}.to raise_error(StopIteration)
end
end
end
end
describe '#provision' do
before(:each) do
# Disable sleeping in test env (doubles the test speed)
def @orchestrator.sleep_not_greater_than(time, &block)
block.call
end
end
it "raises error if nodes list is empty" do
expect {@orchestrator.provision(@reporter, @data['task_uuid'], {})}.
to raise_error(/Nodes to provision are not provided!/)
end
it "prepare provision log for parsing" do
Astute::LogParser::ParseProvisionLogs.any_instance do
expects(:prepare).with(@data['nodes']).once
end
@orchestrator.stubs(:report_about_progress).returns()
@orchestrator.stubs(:node_type).returns([{'uid' => '1', 'node_type' => 'target' }])
@orchestrator.provision(@reporter, @data['task_uuid'], @data['nodes'])
end
it "ignore problem with parsing provision log" do
# Log-parser failures must not abort provisioning.
Astute::LogParser::ParseProvisionLogs.any_instance do
stubs(:prepare).with(@data['nodes']).raises
end
@orchestrator.stubs(:report_about_progress).returns()
@orchestrator.stubs(:node_type).returns([{'uid' => '1', 'node_type' => 'target' }])
@orchestrator.provision(@reporter, @data['task_uuid'], @data['nodes'])
end
it 'provision nodes using mclient' do
@orchestrator.stubs(:report_about_progress).returns()
@orchestrator.expects(:node_type).returns([{'uid' => '1', 'node_type' => 'target' }])
@orchestrator.provision(@reporter, @data['task_uuid'], @data['nodes'])
end
it "fail if timeout of provisioning is exceeded" do
Astute::LogParser::ParseProvisionLogs.any_instance do
stubs(:prepare).returns()
end
# Force the timeout path regardless of how long the call actually takes.
Timeout.stubs(:timeout).raises(Timeout::Error)
msg = 'Timeout of provisioning is exceeded.'
error_mgs = {'status' => 'error', 'error' => msg, 'nodes' => [{ 'uid' => '1',
'status' => 'error',
'error_msg' => msg,
'progress' => 100,
'error_type' => 'provision'}]}
@reporter.expects(:report).with(error_mgs).once
@orchestrator.provision(@reporter, @data['task_uuid'], @data['nodes'])
end
end
describe 'Red-hat checking' do
let(:credentials) do
{
'release_name' => 'RELEASE_NAME',
'redhat' => {
'username' => 'user',
'password' => 'password'
}
}
end
# Wraps a shell-style result hash in the standard mocked MC result list.
def mc_result(result)
[mock_mc_result({:data => result})]
end
# Stubs the remote shell execution to succeed with the given stdout.
def stub_rpc(stdout='')
mock_rpcclient.stubs(:execute).returns(mc_result(:exit_code => 0, :stdout => stdout, :stderr => ''))
end
describe '#check_redhat_credentials' do
it 'Should raise StopIteration in case of errors ' do
stub_rpc("Before\nInvalid username or password\nAfter")
expect do
@orchestrator.check_redhat_credentials(@reporter, @data['task_uuid'], credentials)
end.to raise_error(StopIteration)
end
it 'Should not raise errors ' do
stub_rpc
@orchestrator.check_redhat_credentials(@reporter, @data['task_uuid'], credentials)
end
end
describe '#check_redhat_licenses' do
it 'Should raise StopIteration in case of errors ' do
# Zero licensed hosts counts as an error condition.
stub_rpc('{"openstack_licenses_physical_hosts_count":0}')
expect do
@orchestrator.check_redhat_licenses(@reporter, @data['task_uuid'], credentials)
end.to raise_error(StopIteration)
end
it 'Should not raise errors ' do
stub_rpc('{"openstack_licenses_physical_hosts_count":1}')
@orchestrator.check_redhat_licenses(@reporter, @data['task_uuid'], credentials)
end
end
end
end
Add a test for the check_dhcp call in the network module.
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
require File.join(File.dirname(__FILE__), '../spec_helper')
describe Astute::Orchestrator do
include SpecHelpers
before(:each) do
@orchestrator = Astute::Orchestrator.new
@reporter = mock('reporter')
@reporter.stub_everything
end
describe '#verify_networks' do
def make_nodes(*uids)
uids.map do |uid|
{
'uid' => uid.to_s,
'networks' => [
{
'iface' => 'eth0',
'vlans' => [100, 101]
}
]
}
end
end
it "must be able to complete" do
nodes = make_nodes(1, 2)
res1 = {
:data => {
:uid => "1",
:neighbours => {
"eth0" => {
"100" => {"1" => ["eth0"], "2" => ["eth0"]},
"101" => {"1" => ["eth0"]}}}},
:sender => "1"}
res2 = {
:data => {
:uid => "2",
:neighbours => {
"eth0" => {
"100" => {"1" => ["eth0"], "2" => ["eth0"]},
"101" => {"1" => ["eth0"], "2" => ["eth0"]}
}}},
:sender => "2"}
valid_res = {:statuscode => 0, :sender => '1'}
mc_res1 = mock_mc_result(res1)
mc_res2 = mock_mc_result(res2)
mc_valid_res = mock_mc_result
rpcclient = mock_rpcclient(nodes)
rpcclient.expects(:get_probing_info).once.returns([mc_res1, mc_res2])
nodes.each do |node|
rpcclient.expects(:discover).with(:nodes => [node['uid']]).at_least_once
data_to_send = {}
node['networks'].each{ |net| data_to_send[net['iface']] = net['vlans'].join(",") }
rpcclient.expects(:start_frame_listeners).
with(:interfaces => data_to_send.to_json).
returns([mc_valid_res]*2)
rpcclient.expects(:send_probing_frames).
with(:interfaces => data_to_send.to_json).
returns([mc_valid_res]*2)
end
Astute::Network.expects(:check_dhcp)
Astute::MClient.any_instance.stubs(:rpcclient).returns(rpcclient)
res = @orchestrator.verify_networks(@reporter, 'task_uuid', nodes)
expected = {"nodes" => [{"networks" => [{"iface"=>"eth0", "vlans"=>[100]}], "uid"=>"1"},
{"networks"=>[{"iface"=>"eth0", "vlans"=>[100, 101]}], "uid"=>"2"}]}
res.should eql(expected)
end
it "dhcp check returns expected info" do
nodes = make_nodes(1, 2)
json_output = JSON.dump({'iface'=>'eth1',
'mac'=> 'ff:fa:1f:er:ds:as'})
res1 = {
:data => {:out => json_output},
:sender => "1"}
res2 = {
:data => {:out => json_output},
:sender => "2"}
rpcclient = mock_rpcclient(nodes)
rpcclient.expects(:dhcp_discover).at_least_once.returns([res1, res2])
rpcclient.discover(:nodes => ['1', '2'])
res = Astute::Network.check_dhcp(rpcclient, nodes)
expected = {"nodes" => [{:status=>"ready", :uid=>"1", :data=>{"iface"=>"eth1", "mac"=>"ff:fa:1f:er:ds:as"}},
{:status=>"ready", :uid=>"2", :data=>{"iface"=>"eth1", "mac"=>"ff:fa:1f:er:ds:as"}}]}
res.should eql(expected)
end
it "returns error if nodes list is empty" do
res = @orchestrator.verify_networks(@reporter, 'task_uuid', [])
res.should eql({'status' => 'error', 'error' => "Network verification requires a minimum of two nodes."})
end
it "returns all vlans passed if only one node provided" do
nodes = make_nodes(1)
res = @orchestrator.verify_networks(@reporter, 'task_uuid', nodes)
expected = {"nodes" => [{"uid"=>"1", "networks" => [{"iface"=>"eth0", "vlans"=>[100, 101]}]}]}
res.should eql(expected)
end
end
it "must be able to return node type" do
nodes = [{'uid' => '1'}]
res = {:data => {:node_type => 'target'},
:sender=>"1"}
mc_res = mock_mc_result(res)
mc_timeout = 5
rpcclient = mock_rpcclient(nodes, mc_timeout)
rpcclient.expects(:get_type).once.returns([mc_res])
types = @orchestrator.node_type(@reporter, 'task_uuid', nodes, mc_timeout)
types.should eql([{"node_type"=>"target", "uid"=>"1"}])
end
it "in remove_nodes, it returns empty list if nodes are not provided" do
res = @orchestrator.remove_nodes(@reporter, 'task_uuid', [])
res.should eql({'nodes' => []})
end
it "remove_nodes cleans nodes and reboots them" do
removed_hash = {:sender => '1',
:data => {:rebooted => true}}
error_hash = {:sender => '2',
:data => {:rebooted => false, :error_msg => 'Could not reboot'}}
nodes = [{'uid' => 1}, {'uid' => 2}]
rpcclient = mock_rpcclient
mc_removed_res = mock_mc_result(removed_hash)
mc_error_res = mock_mc_result(error_hash)
rpcclient.expects(:erase_node).at_least_once.with(:reboot => true).returns([mc_removed_res, mc_error_res])
res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
res.should eql({'nodes' => [{'uid' => '1'}], 'status' => 'error',
'error_nodes' => [{"uid"=>"2", "error"=>"RPC method 'erase_node' failed "\
"with message: Could not reboot"}]})
end
it "it calls deploy method with valid arguments" do
nodes = [{'uid' => 1}]
attrs = {'a' => 'b'}
Astute::DeploymentEngine::NailyFact.any_instance.expects(:deploy).
with([{'uid' => '1'}], attrs)
@orchestrator.deploy(@reporter, 'task_uuid', nodes, attrs)
end
it "deploy method raises error if nodes list is empty" do
expect {@orchestrator.deploy(@reporter, 'task_uuid', [], {})}.
to raise_error(/Nodes to deploy are not provided!/)
end
it "remove_nodes try to call MCAgent multiple times on error" do
removed_hash = {:sender => '1',
:data => {:rebooted => true}}
error_hash = {:sender => '2',
:data => {:rebooted => false, :error_msg => 'Could not reboot'}}
nodes = [{'uid' => 1}, {'uid' => 2}]
rpcclient = mock_rpcclient(nodes)
mc_removed_res = mock_mc_result(removed_hash)
mc_error_res = mock_mc_result(error_hash)
retries = Astute.config[:MC_RETRIES]
retries.should == 5
rpcclient.expects(:discover).with(:nodes => ['2']).times(retries)
rpcclient.expects(:erase_node).times(retries + 1).with(:reboot => true).returns([mc_removed_res, mc_error_res]).then.returns([mc_error_res])
res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
res.should eql({'nodes' => [{'uid' => '1'}], 'status' => 'error',
'error_nodes' => [{"uid"=>"2", "error"=>"RPC method 'erase_node' failed "\
"with message: Could not reboot"}]})
end
it "remove_nodes try to call MCAgent multiple times on no response" do
removed_hash = {:sender => '2', :data => {:rebooted => true}}
then_removed_hash = {:sender => '3', :data => {:rebooted => true}}
nodes = [{'uid' => 1}, {'uid' => 2}, {'uid' => 3}]
rpcclient = mock_rpcclient(nodes)
mc_removed_res = mock_mc_result(removed_hash)
mc_then_removed_res = mock_mc_result(then_removed_hash)
retries = Astute.config[:MC_RETRIES]
rpcclient.expects(:discover).with(:nodes => %w(1 3)).times(1)
rpcclient.expects(:discover).with(:nodes => %w(1)).times(retries - 1)
rpcclient.expects(:erase_node).times(retries + 1).with(:reboot => true).
returns([mc_removed_res]).then.returns([mc_then_removed_res]).then.returns([])
res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
res['nodes'] = res['nodes'].sort_by{|n| n['uid'] }
res.should eql({'nodes' => [{'uid' => '2'}, {'uid' => '3'}],
'inaccessible_nodes' => [{'uid'=>'1', 'error'=>'Node not answered by RPC.'}]})
end
it "remove_nodes and returns early if retries were successful" do
removed_hash = {:sender => '1', :data => {:rebooted => true}}
then_removed_hash = {:sender => '2', :data => {:rebooted => true}}
nodes = [{'uid' => 1}, {'uid' => 2}]
rpcclient = mock_rpcclient(nodes)
mc_removed_res = mock_mc_result(removed_hash)
mc_then_removed_res = mock_mc_result(then_removed_hash)
retries = Astute.config[:MC_RETRIES]
retries.should_not == 2
rpcclient.expects(:discover).with(:nodes => %w(2)).times(1)
rpcclient.expects(:erase_node).times(2).with(:reboot => true).
returns([mc_removed_res]).then.returns([mc_then_removed_res])
res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
res['nodes'] = res['nodes'].sort_by{|n| n['uid'] }
res.should eql({'nodes' => [{'uid' => '1'}, {'uid' => '2'}]})
end
it "remove_nodes do not fail if any of nodes failed"
before(:all) do
@data = {
"engine"=>{
"url"=>"http://localhost/cobbler_api",
"username"=>"cobbler",
"password"=>"cobbler"
},
"task_uuid"=>"a5c44b9a-285a-4a0c-ae65-2ed6b3d250f4",
"nodes" => [
{
'uid' => '1',
'profile' => 'centos-x86_64',
"name"=>"controller-1",
'power_type' => 'ssh',
'power_user' => 'root',
'power_pass' => '/root/.ssh/bootstrap.rsa',
'power-address' => '1.2.3.5',
'hostname' => 'name.domain.tld',
'name_servers' => '1.2.3.4 1.2.3.100',
'name_servers_search' => 'some.domain.tld domain.tld',
'netboot_enabled' => '1',
'ks_meta' => 'some_param=1 another_param=2',
'interfaces' => {
'eth0' => {
'mac_address' => '00:00:00:00:00:00',
'static' => '1',
'netmask' => '255.255.255.0',
'ip_address' => '1.2.3.5',
'dns_name' => 'node.mirantis.net',
},
'eth1' => {
'mac_address' => '00:00:00:00:00:01',
'static' => '0',
'netmask' => '255.255.255.0',
'ip_address' => '1.2.3.6',
}
},
'interfaces_extra' => {
'eth0' => {
'peerdns' => 'no',
'onboot' => 'yes',
},
'eth1' => {
'peerdns' => 'no',
'onboot' => 'yes',
}
}
}
]
}.freeze
end
# Specs for Orchestrator#fast_provision: reboots nodes through Cobbler's
# XML-RPC API and reports provisioning state back to the reporter.
describe '#fast_provision' do
  context 'cobler cases' do
    it "raise error if cobler settings empty" do
      expect { @orchestrator.fast_provision(@reporter, {}, @data['nodes']) }.
        to raise_error(StopIteration)
    end
  end

  context 'node state cases' do
    before(:each) do
      # Fake XML-RPC remote: accepts any call, and answers the Cobbler
      # login handshake with a token.
      remote = mock() do
        stubs(:call)
        stubs(:call).with('login', 'cobbler', 'cobbler').returns('remotetoken')
      end
      @tmp = XMLRPC::Client
      XMLRPC::Client = mock() do
        stubs(:new).returns(remote)
      end
    end

    # BUG FIX: the original saved the real class in @tmp but never restored
    # it, leaking the mocked XMLRPC::Client into other example groups.
    after(:each) do
      XMLRPC::Client = @tmp
    end

    it "raises error if nodes list is empty" do
      expect { @orchestrator.fast_provision(@reporter, @data['engine'], {}) }.
        to raise_error(/Nodes to provision are not provided!/)
    end

    it "try to reboot nodes from list" do
      # BUG FIX: `any_instance do ... end` is a no-op in Mocha (the block is
      # silently ignored), so no expectation was ever set.  Expectations
      # must be chained onto the any_instance proxy.
      Astute::Provision::Cobbler.any_instance.
        expects(:power_reboot).with('controller-1').returns(333)
      @orchestrator.stubs(:check_reboot_nodes).returns([])
      @orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes'])
    end

    # Applies to every example in this context (declared after the example
    # above, but RSpec runs all before hooks for each example).
    before(:each) { Astute::Provision::Cobbler.any_instance.stubs(:power_reboot).returns(333) }

    context 'node reboot success' do
      before(:each) { Astute::Provision::Cobbler.any_instance.stubs(:event_status).
                      returns([Time.now.to_f, 'controller-1', 'complete']) }

      it "does not find failed nodes" do
        Astute::Provision::Cobbler.any_instance.stubs(:event_status).
          returns([Time.now.to_f, 'controller-1', 'complete'])
        @orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes'])
      end

      it "report about success" do
        @reporter.expects(:report).with({'status' => 'ready', 'progress' => 100}).returns(true)
        @orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes'])
      end

      it "sync engine state" do
        # BUG FIX: same dead `any_instance do` block as above.
        Astute::Provision::Cobbler.any_instance.expects(:sync).once
        @orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes'])
      end
    end

    context 'node reboot fail' do
      before(:each) { Astute::Provision::Cobbler.any_instance.stubs(:event_status).
                      returns([Time.now.to_f, 'controller-1', 'failed']) }

      it "should sync engine state" do
        # BUG FIX: same dead `any_instance do` block as above.
        Astute::Provision::Cobbler.any_instance.expects(:sync).once
        begin
          @orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes'])
        rescue
        end
      end

      it "raise error if failed node find" do
        expect { @orchestrator.fast_provision(@reporter, @data['engine'], @data['nodes']) }.to raise_error(StopIteration)
      end
    end
  end
end
# Specs for Orchestrator#provision: kicks off provisioning via mcollective
# and feeds the provision logs to the log parser.
describe '#provision' do
  before(:each) do
    # Disable sleeping in test env (doubles the test speed)
    def @orchestrator.sleep_not_greater_than(time, &block)
      block.call
    end
  end

  it "raises error if nodes list is empty" do
    expect { @orchestrator.provision(@reporter, @data['task_uuid'], {}) }.
      to raise_error(/Nodes to provision are not provided!/)
  end

  it "prepare provision log for parsing" do
    # BUG FIX: `any_instance do ... end` is a no-op in Mocha (the block is
    # silently ignored); the expectation must be chained onto the proxy.
    Astute::LogParser::ParseProvisionLogs.any_instance.
      expects(:prepare).with(@data['nodes']).once
    @orchestrator.stubs(:report_about_progress).returns()
    @orchestrator.stubs(:node_type).returns([{'uid' => '1', 'node_type' => 'target' }])
    @orchestrator.provision(@reporter, @data['task_uuid'], @data['nodes'])
  end

  it "ignore problem with parsing provision log" do
    # BUG FIX: same dead `any_instance do` block -- the raising stub was
    # never installed, so the rescue path was not actually exercised.
    Astute::LogParser::ParseProvisionLogs.any_instance.
      stubs(:prepare).with(@data['nodes']).raises
    @orchestrator.stubs(:report_about_progress).returns()
    @orchestrator.stubs(:node_type).returns([{'uid' => '1', 'node_type' => 'target' }])
    @orchestrator.provision(@reporter, @data['task_uuid'], @data['nodes'])
  end

  it 'provision nodes using mclient' do
    @orchestrator.stubs(:report_about_progress).returns()
    @orchestrator.expects(:node_type).returns([{'uid' => '1', 'node_type' => 'target' }])
    @orchestrator.provision(@reporter, @data['task_uuid'], @data['nodes'])
  end

  it "fail if timeout of provisioning is exceeded" do
    # BUG FIX: same dead `any_instance do` block as above.
    Astute::LogParser::ParseProvisionLogs.any_instance.stubs(:prepare).returns()
    Timeout.stubs(:timeout).raises(Timeout::Error)
    msg = 'Timeout of provisioning is exceeded.'
    # (local renamed from the typo'd `error_mgs`)
    expected_report = {'status' => 'error', 'error' => msg, 'nodes' => [{ 'uid' => '1',
                                                                          'status' => 'error',
                                                                          'error_msg' => msg,
                                                                          'progress' => 100,
                                                                          'error_type' => 'provision'}]}
    @reporter.expects(:report).with(expected_report).once
    @orchestrator.provision(@reporter, @data['task_uuid'], @data['nodes'])
  end
end
# Specs for the Red Hat credential / license checks, which run a command
# over mcollective and inspect its output.
describe 'Red-hat checking' do
  let(:credentials) do
    {
      'release_name' => 'RELEASE_NAME',
      'redhat' => {
        'username' => 'user',
        'password' => 'password'
      }
    }
  end

  # Wraps raw agent data in a one-element list of mocked mcollective results.
  def mc_result(result)
    [mock_mc_result({:data => result})]
  end

  # Makes every rpc execute call succeed, emitting +stdout+ as its output.
  def stub_rpc(stdout = '')
    reply = mc_result(:exit_code => 0, :stdout => stdout, :stderr => '')
    mock_rpcclient.stubs(:execute).returns(reply)
  end

  describe '#check_redhat_credentials' do
    it 'Should raise StopIteration in case of errors ' do
      stub_rpc("Before\nInvalid username or password\nAfter")
      expect { @orchestrator.check_redhat_credentials(@reporter, @data['task_uuid'], credentials) }.
        to raise_error(StopIteration)
    end

    it 'Should not raise errors ' do
      stub_rpc
      @orchestrator.check_redhat_credentials(@reporter, @data['task_uuid'], credentials)
    end
  end

  describe '#check_redhat_licenses' do
    it 'Should raise StopIteration in case of errors ' do
      stub_rpc('{"openstack_licenses_physical_hosts_count":0}')
      expect { @orchestrator.check_redhat_licenses(@reporter, @data['task_uuid'], credentials) }.
        to raise_error(StopIteration)
    end

    it 'Should not raise errors ' do
      stub_rpc('{"openstack_licenses_physical_hosts_count":1}')
      @orchestrator.check_redhat_licenses(@reporter, @data['task_uuid'], credentials)
    end
  end
end
end
|
# sub_shell_expansion - class SubShellExpansion - :( ...block...) -returns
# content of the last command's output
# :( ...block... ) -- runs the nested block with its stdout captured and
# expands to that output, newlines collapsed to single spaces.
class SubShellExpansion < SubShell
  # Evaluate the block in a cloned environment whose :out is an in-memory
  # buffer; return everything that was written, as one space-joined string.
  def call env:, frames:
    capture = StringIO.new
    child_env = env._clone
    child_env[:out] = capture
    super env: child_env, frames: frames
    capture.close_write
    capture.rewind
    capture.read.tr("\n", ' ')
  end

  # Expansions are evaluated with command precedence.
  def ordinal
    COMMAND
  end

  def to_s
    ":(#{@block})"
  end
end
Corrected behaviour for subshell expansion to chunk output via split; will be flattened in statement.rb
# sub_shell_expansion - class SubShellExpansion - :( ...block...) -returns
# content of the last command's output
class SubShellExpansion < SubShell
  # Evaluates the nested block with stdout redirected into an in-memory
  # buffer, then returns the captured output chunked into an array of
  # whitespace-separated words (flattened later in statement.rb).
  def call env:, frames:
    my_env = env._clone
    sio = StringIO.new
    my_env[:out] = sio
    super env:my_env, frames:frames
    sio.close_write
    sio.rewind
    # NOTE(review): the gsub is redundant -- String#split with no argument
    # already splits on any whitespace run, newlines included.
    return sio.read.gsub(/\n/, ' ').split
  end
  # Expansions are evaluated with command precedence.
  def ordinal
    COMMAND
  end
  def to_s
    ':(' + @block.to_s + ')'
  end
end
|
# encoding: utf-8
require File.expand_path("../spec_helper", __FILE__)
# Cross-driver specs for the generic Element interface: location, equality,
# data-* attributes, visibility, events and traversal.
describe "Element" do
  before :each do
    browser.goto(WatirSpec.files + "/forms_with_input_elements.html")
  end

  describe ".new" do
    it "finds elements matching the conditions when given a hash of :how => 'what' arguments" do
      browser.checkbox(:name => 'new_user_interests', :title => 'Dancing is fun!').value.should == 'dancing'
      browser.text_field(:class_name => 'name', :index => 1).id.should == 'new_user_last_name'
    end

    it "raises UnknownObjectException with a sane error message when given a hash of :how => 'what' arguments (non-existing object)" do
      lambda { browser.text_field(:index => 100, :name => "foo").id }.should raise_error(UnknownObjectException)
    end

    it "raises ArgumentError if given the wrong number of arguments" do
      container = mock("container").as_null_object
      lambda { Element.new(container, 1,2,3,4) }.should raise_error(ArgumentError)
      lambda { Element.new(container, "foo") }.should raise_error(ArgumentError)
    end
  end

  describe "#== and #eql?" do
    before { browser.goto(WatirSpec.files + "/definition_lists.html") }

    it "returns true if the two elements point to the same DOM element" do
      a = browser.dl(:id => "experience-list")
      b = browser.dl
      a.should == b
      a.should eql(b)
    end

    it "returns false if the two elements are not the same" do
      a = browser.dls[0]
      b = browser.dls[1]
      a.should_not == b
      a.should_not eql(b)
    end

    it "returns false if the other object is not an Element" do
      browser.dl.should_not == 1
    end
  end

  describe "data-* attributes" do
    before { browser.goto("file://" + File.expand_path("html/data_attributes.html", File.dirname(__FILE__))) }

    bug "http://github.com/jarib/celerity/issues#issue/27", :celerity do
      it "finds elements by a data-* attribute" do
        browser.p(:data_type => "ruby-library").should exist
      end

      it "returns the value of a data-* attribute" do
        browser.p.data_type.should == "ruby-library"
      end
    end
  end

  describe "finding with unknown tag name" do
    it "finds an element by xpath" do
      browser.element(:xpath => "//*[@for='new_user_first_name']").should exist
    end

    it "finds an element by arbitrary attribute" do
      browser.element(:id => "new_user").should exist
    end

    it "finds several elements by xpath" do
      browser.elements(:xpath => "//a").length.should == 1
    end

    it "finds finds several elements by arbitrary attribute" do
      browser.elements(:name => /^new_user/).length.should == 30
    end
  end

  describe "#to_subtype" do
    it "returns a more precise subtype of Element" do
      el = browser.element(:xpath => "//input[@type='radio']").to_subtype
      el.should be_kind_of(Watir::Radio)
    end
  end

  describe "#focus" do
    bug "http://github.com/jarib/watir-webdriver/issues/issue/20", [:webdriver, :firefox] do
      it "fires the onfocus event for the given element" do
        tf = browser.text_field(:id, "new_user_occupation")
        tf.value.should == "Developer"
        tf.focus
        browser.div(:id, "onfocus_test").text.should == "changed by onfocus event"
      end
    end
  end

  describe "#fire_event" do
    it "should fire the given event" do
      browser.div(:id, "onfocus_test").text.should be_empty
      browser.text_field(:id, "new_user_occupation").fire_event('onfocus')
      browser.div(:id, "onfocus_test").text.should == "changed by onfocus event"
    end
  end

  describe "#parent" do
    bug "http://github.com/jarib/celerity/issues#issue/28", :celerity do
      it "gets the parent of this element" do
        browser.text_field(:id, "new_user_email").parent.should be_instance_of(FieldSet)
      end
    end
  end

  describe "#visible?" do
    it "returns true if the element is visible" do
      browser.text_field(:id, "new_user_email").should be_visible
    end

    it "returns false if the element is input element where type == 'hidden'" do
      browser.text_field(:id, "new_user_interests_dolls").should_not be_visible
    end

    it "returns false if the element has style='display: none;'" do
      browser.div(:id, 'changed_language').should_not be_visible
    end

    # TYPO FIX: the description string was missing its closing single quote.
    it "returns false if the element has style='visibility: hidden;'" do
      browser.div(:id, 'wants_newsletter').should_not be_visible
    end

    it "returns false if one of the parent elements is hidden" do
      browser.div(:id, 'hidden_parent').should_not be_visible
    end
  end

  describe "#exist?" do
    it "doesn't raise when called on nested elements" do
      browser.div(:id, 'no_such_div').link(:id, 'no_such_id').should_not exist
    end

    it "raises ArgumentError error if selector hash with :xpath has multiple entries" do
      lambda { browser.div(:xpath => "//div", :class => "foo").exists? }.should raise_error(ArgumentError)
    end
  end
end
Cover Element#to_subtype for both <input> and other elements.
# encoding: utf-8
require File.expand_path("../spec_helper", __FILE__)
# Cross-driver specs for the generic Element interface: location, equality,
# data-* attributes, visibility, events and traversal.
describe "Element" do
  before :each do
    browser.goto(WatirSpec.files + "/forms_with_input_elements.html")
  end

  describe ".new" do
    it "finds elements matching the conditions when given a hash of :how => 'what' arguments" do
      browser.checkbox(:name => 'new_user_interests', :title => 'Dancing is fun!').value.should == 'dancing'
      browser.text_field(:class_name => 'name', :index => 1).id.should == 'new_user_last_name'
    end

    it "raises UnknownObjectException with a sane error message when given a hash of :how => 'what' arguments (non-existing object)" do
      lambda { browser.text_field(:index => 100, :name => "foo").id }.should raise_error(UnknownObjectException)
    end

    it "raises ArgumentError if given the wrong number of arguments" do
      container = mock("container").as_null_object
      lambda { Element.new(container, 1,2,3,4) }.should raise_error(ArgumentError)
      lambda { Element.new(container, "foo") }.should raise_error(ArgumentError)
    end
  end

  describe "#== and #eql?" do
    before { browser.goto(WatirSpec.files + "/definition_lists.html") }

    it "returns true if the two elements point to the same DOM element" do
      a = browser.dl(:id => "experience-list")
      b = browser.dl
      a.should == b
      a.should eql(b)
    end

    it "returns false if the two elements are not the same" do
      a = browser.dls[0]
      b = browser.dls[1]
      a.should_not == b
      a.should_not eql(b)
    end

    it "returns false if the other object is not an Element" do
      browser.dl.should_not == 1
    end
  end

  describe "data-* attributes" do
    before { browser.goto("file://" + File.expand_path("html/data_attributes.html", File.dirname(__FILE__))) }

    bug "http://github.com/jarib/celerity/issues#issue/27", :celerity do
      it "finds elements by a data-* attribute" do
        browser.p(:data_type => "ruby-library").should exist
      end

      it "returns the value of a data-* attribute" do
        browser.p.data_type.should == "ruby-library"
      end
    end
  end

  describe "finding with unknown tag name" do
    it "finds an element by xpath" do
      browser.element(:xpath => "//*[@for='new_user_first_name']").should exist
    end

    it "finds an element by arbitrary attribute" do
      browser.element(:id => "new_user").should exist
    end

    it "finds several elements by xpath" do
      browser.elements(:xpath => "//a").length.should == 1
    end

    it "finds finds several elements by arbitrary attribute" do
      browser.elements(:name => /^new_user/).length.should == 30
    end
  end

  describe "#to_subtype" do
    # Covers both the <input>-specific dispatch and the generic tag lookup.
    it "returns a more precise subtype of Element (input element)" do
      el = browser.element(:xpath => "//input[@type='radio']").to_subtype
      el.should be_kind_of(Watir::Radio)
    end

    it "returns a more precise subtype of Element" do
      el = browser.element(:xpath => "//*[@id='messages']").to_subtype
      el.should be_kind_of(Watir::Div)
    end
  end

  describe "#focus" do
    bug "http://github.com/jarib/watir-webdriver/issues/issue/20", [:webdriver, :firefox] do
      it "fires the onfocus event for the given element" do
        tf = browser.text_field(:id, "new_user_occupation")
        tf.value.should == "Developer"
        tf.focus
        browser.div(:id, "onfocus_test").text.should == "changed by onfocus event"
      end
    end
  end

  describe "#fire_event" do
    it "should fire the given event" do
      browser.div(:id, "onfocus_test").text.should be_empty
      browser.text_field(:id, "new_user_occupation").fire_event('onfocus')
      browser.div(:id, "onfocus_test").text.should == "changed by onfocus event"
    end
  end

  describe "#parent" do
    bug "http://github.com/jarib/celerity/issues#issue/28", :celerity do
      it "gets the parent of this element" do
        browser.text_field(:id, "new_user_email").parent.should be_instance_of(FieldSet)
      end
    end
  end

  describe "#visible?" do
    it "returns true if the element is visible" do
      browser.text_field(:id, "new_user_email").should be_visible
    end

    it "returns false if the element is input element where type == 'hidden'" do
      browser.text_field(:id, "new_user_interests_dolls").should_not be_visible
    end

    it "returns false if the element has style='display: none;'" do
      browser.div(:id, 'changed_language').should_not be_visible
    end

    # TYPO FIX: the description string was missing its closing single quote.
    it "returns false if the element has style='visibility: hidden;'" do
      browser.div(:id, 'wants_newsletter').should_not be_visible
    end

    it "returns false if one of the parent elements is hidden" do
      browser.div(:id, 'hidden_parent').should_not be_visible
    end
  end

  describe "#exist?" do
    it "doesn't raise when called on nested elements" do
      browser.div(:id, 'no_such_div').link(:id, 'no_such_id').should_not exist
    end

    it "raises ArgumentError error if selector hash with :xpath has multiple entries" do
      lambda { browser.div(:xpath => "//div", :class => "foo").exists? }.should raise_error(ArgumentError)
    end
  end
end
|
# Step definitions for the ImageTagBuilder feature.
# BUG FIX: cucumber regex captures are always Strings, so every numeric
# capture must be converted with to_i -- assigning raw Strings and then
# comparing them against Integer expectations can never match.
Given(/^I use configuration (\d+)$/) do |_config_number|
  # Capture is currently unused; underscore name documents that (it was
  # misleadingly called `height` before).
  @i = ImageTagBuilder.new
end

Given(/^a window\.devicePixelRatio of (\d+)$/) do |ratio|
  @i.window_device_pixel_ratio = ratio.to_i
end

Given(/^a window\.innerWidth of (\d+)$/) do |width|
  @i.window_inner_width = width.to_i
end

Given(/^a source image that's (\d+)x(\d+)$/) do |width, height|
  @i.source_width = width.to_i
  @i.source_height = height.to_i
end

Then(/^the source image width should be (\d+)$/) do |width|
  expect(@i.source_width).to eq(width.to_i)
end

Then(/^the source image height should be (\d+)$/) do |height|
  expect(@i.source_height).to eq(height.to_i)
end

Then(/^the image call width should be (\d+)$/) do |width|
  expect(@i.image_call_width).to eq(width.to_i)
end

Then(/^the image call height should be (\d+)$/) do |height|
  expect(@i.image_call_height).to eq(height.to_i)
end

Then(/^the width attribute should be (\d+)$/) do |width|
  expect(@i.attribute_width).to eq(width.to_i)
end

Then(/^the height attribute should be (\d+)$/) do |height|
  expect(@i.attribute_height).to eq(height.to_i)
end
Making sure the checks are against integers.
# Step definitions for the ImageTagBuilder feature.  Cucumber regex
# captures arrive as Strings, so numeric values are normalized with to_i.
Given(/^I use configuration (\d+)$/) do |_config_number|
  # Capture is currently unused; underscore name documents that (it was
  # misleadingly called `height` before).
  @i = ImageTagBuilder.new
end

Given(/^a window\.devicePixelRatio of (\d+)$/) do |ratio|
  @i.window_device_pixel_ratio = ratio.to_i
end

Given(/^a window\.innerWidth of (\d+)$/) do |width|
  @i.window_inner_width = width.to_i
end

Given(/^a source image that's (\d+)x(\d+)$/) do |width, height|
  @i.source_width = width.to_i
  @i.source_height = height.to_i
end

Then(/^the source image width should be (\d+)$/) do |width|
  expect(@i.source_width).to eq(width.to_i)
end

Then(/^the source image height should be (\d+)$/) do |height|
  expect(@i.source_height).to eq(height.to_i)
end

Then(/^the image call width should be (\d+)$/) do |width|
  expect(@i.image_call_width).to eq(width.to_i)
end

Then(/^the image call height should be (\d+)$/) do |height|
  expect(@i.image_call_height).to eq(height.to_i)
end

Then(/^the width attribute should be (\d+)$/) do |width|
  expect(@i.attribute_width).to eq(width.to_i)
end

Then(/^the height attribute should be (\d+)$/) do |height|
  expect(@i.attribute_height).to eq(height.to_i)
end
|
Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'spree_product_assembly'
  s.version = '3.0.0'
  # TYPO FIX: 'oportunity' -> 'opportunity' in the user-facing summary.
  s.summary = 'Adds opportunity to make bundle of products to your Spree store'
  s.description = s.summary
  s.required_ruby_version = '>= 2.1.0'

  s.author = 'Roman Smirnov'
  s.email = 'POMAHC@gmail.com'
  s.homepage = 'https://github.com/spree-contrib/spree-product-assembly'

  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- spec/*`.split("\n")
  s.require_path = 'lib'
  s.requirements << 'none'

  s.add_dependency 'spree_backend', '~> 3.0'

  # Development dependencies, alphabetized for easier scanning and merging.
  s.add_development_dependency 'active_model_serializers', '0.9.0.alpha1'
  s.add_development_dependency 'capybara', '~> 2.4'
  s.add_development_dependency 'coffee-rails', '~> 4.0.0'
  s.add_development_dependency 'database_cleaner', '~> 1.4'
  s.add_development_dependency 'factory_girl', '~> 4.4'
  s.add_development_dependency 'ffaker'
  s.add_development_dependency 'launchy'
  s.add_development_dependency 'pg'
  s.add_development_dependency 'poltergeist', '~> 1.6'
  s.add_development_dependency 'rspec-rails', '~> 3.3.0'
  s.add_development_dependency 'sass-rails', '~> 4.0.0'
  s.add_development_dependency 'simplecov'
  s.add_development_dependency 'sqlite3'
end
Alphabetize development dependencies
Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'spree_product_assembly'
  s.version = '3.0.0'
  # TYPO FIX: 'oportunity' -> 'opportunity' in the user-facing summary.
  s.summary = 'Adds opportunity to make bundle of products to your Spree store'
  s.description = s.summary
  s.required_ruby_version = '>= 2.1.0'

  s.author = 'Roman Smirnov'
  s.email = 'POMAHC@gmail.com'
  s.homepage = 'https://github.com/spree-contrib/spree-product-assembly'

  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- spec/*`.split("\n")
  s.require_path = 'lib'
  s.requirements << 'none'

  s.add_dependency 'spree_backend', '~> 3.0'

  # Development dependencies, kept alphabetized.
  s.add_development_dependency 'active_model_serializers', '0.9.0.alpha1'
  s.add_development_dependency 'capybara', '~> 2.4'
  s.add_development_dependency 'coffee-rails', '~> 4.0.0'
  s.add_development_dependency 'database_cleaner', '~> 1.4'
  s.add_development_dependency 'factory_girl', '~> 4.4'
  s.add_development_dependency 'ffaker'
  s.add_development_dependency 'launchy'
  s.add_development_dependency 'pg'
  s.add_development_dependency 'poltergeist', '~> 1.6'
  s.add_development_dependency 'rspec-rails', '~> 3.3.0'
  s.add_development_dependency 'sass-rails', '~> 4.0.0'
  s.add_development_dependency 'simplecov'
  s.add_development_dependency 'sqlite3'
end
|
lib = File.expand_path('../lib/', __FILE__)
$LOAD_PATH.unshift lib unless $LOAD_PATH.include?(lib)
require 'spree_shopify_importer/version'
# rubocop:disable Metrics/BlockLength
Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'spree_shopify_importer'
  s.version = SpreeShopifyImporter::VERSION
  s.summary = 'Import your old Shopify store into spree'
  s.description = 'Import Shopify store to spree for easier migration.'
  s.required_ruby_version = '>= 2.2.7'

  s.authors = ['Viktor Fonic', 'Peter Rybarczyk']
  s.email = %w[viktor.fonic@gmail.com argonuspiotr@gmail.com]
  s.homepage = 'https://github.com/spark-solutions/spree_shopify_importer'
  s.license = 'BSD-3-Clause'

  # s.files = `git ls-files`.split("\n")
  # s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.require_path = 'lib'
  s.requirements << 'none'

  s.add_dependency 'activeresource', '>= 5.0.0'
  s.add_dependency 'activesupport', '>= 5.0.0'
  s.add_dependency 'curb'
  s.add_dependency 'shopify_api', '>= 4.2.2'
  s.add_dependency 'spree_address_book', '>= 3.1.0', '< 4.0'
  s.add_dependency 'spree_core', '>= 3.1.0', '< 4.0'
  s.add_dependency 'spree_extension'

  s.add_development_dependency 'appraisal'
  s.add_development_dependency 'capybara'
  s.add_development_dependency 'capybara-screenshot'
  s.add_development_dependency 'codeclimate-test-reporter'
  s.add_development_dependency 'coffee-rails'
  s.add_development_dependency 'database_cleaner'
  s.add_development_dependency 'factory_bot_rails'
  s.add_development_dependency 'ffaker'
  s.add_development_dependency 'guard'
  s.add_development_dependency 'guard-bundler'
  s.add_development_dependency 'guard-rspec'
  s.add_development_dependency 'guard-rubocop'
  s.add_development_dependency 'guard-spring'
  s.add_development_dependency 'mysql2'
  # FIX: lock pg to the 0.x series -- pg >= 1.0 requires ActiveRecord
  # >= 5.1.5, while spree_core < 4.0 permits older Rails releases.
  s.add_development_dependency 'pg', '~> 0.18'
  s.add_development_dependency 'pry-rails'
  s.add_development_dependency 'rspec-rails'
  s.add_development_dependency 'rubocop-rspec'
  s.add_development_dependency 'sass-rails'
  s.add_development_dependency 'selenium-webdriver'
  s.add_development_dependency 'shoulda-matchers'
  s.add_development_dependency 'simplecov'
  s.add_development_dependency 'spring-commands-rspec'
  s.add_development_dependency 'spring-commands-rubocop'
  s.add_development_dependency 'sqlite3'
  s.add_development_dependency 'vcr'
  s.add_development_dependency 'webmock'
end
# rubocop:enable Metrics/BlockLength
lock pg version
lib = File.expand_path('../lib/', __FILE__)
$LOAD_PATH.unshift lib unless $LOAD_PATH.include?(lib)
require 'spree_shopify_importer/version'
# rubocop:disable Metrics/BlockLength
Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'spree_shopify_importer'
  s.version = SpreeShopifyImporter::VERSION
  s.summary = 'Import your old Shopify store into spree'
  s.description = 'Import Shopify store to spree for easier migration.'
  s.required_ruby_version = '>= 2.2.7'
  s.authors = ['Viktor Fonic', 'Peter Rybarczyk']
  s.email = %w[viktor.fonic@gmail.com argonuspiotr@gmail.com]
  s.homepage = 'https://github.com/spark-solutions/spree_shopify_importer'
  s.license = 'BSD-3-Clause'
  # s.files = `git ls-files`.split("\n")
  # s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.require_path = 'lib'
  s.requirements << 'none'
  # Runtime dependencies (alphabetized).
  s.add_dependency 'activeresource', '>= 5.0.0'
  s.add_dependency 'activesupport', '>= 5.0.0'
  s.add_dependency 'curb'
  s.add_dependency 'shopify_api', '>= 4.2.2'
  s.add_dependency 'spree_address_book', '>= 3.1.0', '< 4.0'
  s.add_dependency 'spree_core', '>= 3.1.0', '< 4.0'
  s.add_dependency 'spree_extension'
  # Development dependencies (alphabetized).
  s.add_development_dependency 'appraisal'
  s.add_development_dependency 'capybara'
  s.add_development_dependency 'capybara-screenshot'
  s.add_development_dependency 'codeclimate-test-reporter'
  s.add_development_dependency 'coffee-rails'
  s.add_development_dependency 'database_cleaner'
  s.add_development_dependency 'factory_bot_rails'
  s.add_development_dependency 'ffaker'
  s.add_development_dependency 'guard'
  s.add_development_dependency 'guard-bundler'
  s.add_development_dependency 'guard-rspec'
  s.add_development_dependency 'guard-rubocop'
  s.add_development_dependency 'guard-spring'
  s.add_development_dependency 'mysql2'
  # NOTE(review): pg locked below 1.0 -- presumably because the Rails
  # versions permitted by spree_core < 4.0 predate pg 1.0 support; confirm
  # before relaxing.
  s.add_development_dependency 'pg', '~> 0.18'
  s.add_development_dependency 'pry-rails'
  s.add_development_dependency 'rspec-rails'
  s.add_development_dependency 'rubocop-rspec'
  s.add_development_dependency 'sass-rails'
  s.add_development_dependency 'selenium-webdriver'
  s.add_development_dependency 'shoulda-matchers'
  s.add_development_dependency 'simplecov'
  s.add_development_dependency 'spring-commands-rspec'
  s.add_development_dependency 'spring-commands-rubocop'
  s.add_development_dependency 'sqlite3'
  s.add_development_dependency 'vcr'
  s.add_development_dependency 'webmock'
end
# rubocop:enable Metrics/BlockLength
|
module ActiveScaffold
module Helpers
# All extra helpers that should be included in the View.
# Also a dumping ground for uncategorized helpers.
module ViewHelpers
include ActiveScaffold::Helpers::IdHelpers
include ActiveScaffold::Helpers::AssociationHelpers
include ActiveScaffold::Helpers::PaginationHelpers
include ActiveScaffold::Helpers::ListColumnHelpers
include ActiveScaffold::Helpers::ShowColumnHelpers
include ActiveScaffold::Helpers::FormColumnHelpers
include ActiveScaffold::Helpers::SearchColumnHelpers
include ActiveScaffold::Helpers::HumanConditionHelpers
##
## Delegates
##
# access to the configuration variable of the current controller
def active_scaffold_config
  controller.class.active_scaffold_config
end

# Delegates to the controller class: configuration for an arbitrary model.
def active_scaffold_config_for(*args)
  controller.class.active_scaffold_config_for(*args)
end

# Delegates to the controller class: locate the controller that scaffolds a
# given model.  May raise ActiveScaffold::ControllerNotFound (see
# controller_path_for_activerecord below).
def active_scaffold_controller_for(*args)
  controller.class.active_scaffold_controller_for(*args)
end
##
## Uncategorized
##
# Returns the controller_path of the ActiveScaffold controller serving
# +klass+, or nil when no such controller exists.
# (Cleaned up: def-level rescue instead of a begin block, and the pointless
# `controller = nil` assignment in the rescue branch is gone.)
def controller_path_for_activerecord(klass)
  active_scaffold_controller_for(klass).controller_path
rescue ActiveScaffold::ControllerNotFound
  nil
end
# This is the template finder logic, keep it updated with however we find stuff in rails
# currently this very similar to the logic in ActionBase::Base.render for options file
# Returns true when +template_name+ resolves in the current lookup context;
# pass partial = true to search for a partial template.
def template_exists?(template_name, partial = false)
  lookup_context.exists? template_name, '', partial
end
# Millisecond-resolution timestamp string, used as a unique-ish temporary id.
def generate_temporary_id
  milliseconds = (Time.now.to_f * 1000).truncate
  milliseconds.to_s
end
# Turns [[label, value]] pairs into an array of <option> tag strings.
# Takes optional parameter of :include_blank to prepend a blank choice.
def option_tags_for(select_options, options = {})
  select_options.insert(0, [as_(:_select_), nil]) if options[:include_blank]
  select_options.collect do |label, value|
    if value.nil?
      # BUG FIX: the original `"<option value="">…"` was two adjacent string
      # literals (the inner "" terminated the string), emitting the invalid
      # markup `<option value=">…`.  Escape the quotes instead.
      "<option value=\"\">#{label}</option>"
    else
      "<option value=\"#{value}\">#{label}</option>"
    end
  end
end
# Should this column be displayed in the subform?
# Non-association columns always appear.  Polymorphic associations never do
# (they might be the reverse association, and one generally doesn't assign
# the association from the polymorphic side).  Otherwise the column appears
# unless it is the reverse of the association back to the parent record.
def in_subform?(column, parent_record)
  return true unless column.association
  return false if column.polymorphic_association?
  !column.association.reverse_for?(parent_record.class)
end
# Builds a multipart form that submits into a hidden iframe, emulating a
# remote (ajax-style) file upload.  Returns the opening form tag followed by
# the iframe markup, marked html_safe.
def form_remote_upload_tag(url_for_options = {}, options = {})
  # Target the hidden iframe so the page itself doesn't navigate on submit.
  options[:target] = action_iframe_id(url_for_options)
  options[:multipart] ||= true
  options[:class] = "#{options[:class]} as_remote_upload".strip
  output=""
  # NOTE(review): only the opening form tag is emitted here -- no </form>;
  # confirm callers are responsible for closing the form.
  output << form_tag(url_for_options, options)
  (output << "<iframe id='#{action_iframe_id(url_for_options)}' name='#{action_iframe_id(url_for_options)}' style='display:none'></iframe>").html_safe
end
# Provides list of javascripts to include with +javascript_include_tag+
# You can use this with your javascripts like
#   <%= javascript_include_tag :defaults, 'your_own_cool_script', active_scaffold_javascripts, :cache => true %>
def active_scaffold_javascripts(frontend = :default)
  ActiveScaffold::Config::Core.javascripts(frontend).map do |script|
    ActiveScaffold::Config::Core.asset_path(script, frontend)
  end
end
# Provides stylesheet paths to include with +stylesheet_link_tag+
def active_scaffold_stylesheets(frontend = :default)
  [ActiveScaffold::Config::Core.asset_path("stylesheet.css", frontend)]
end

# Provides stylesheet paths for IE to include with +stylesheet_link_tag+
# (wrapped in a conditional comment by active_scaffold_includes).
def active_scaffold_ie_stylesheets(frontend = :default)
  [ActiveScaffold::Config::Core.asset_path("stylesheet-ie.css", frontend)]
end
# easy way to include ActiveScaffold assets: emits the javascript include
# tag, the main stylesheet tag and an IE-only stylesheet tag wrapped in a
# conditional comment.  Accepts an optional leading frontend symbol and an
# optional trailing options hash (shared with the *_tag helpers).
def active_scaffold_includes(*args)
  frontend = args.first.is_a?(Symbol) ? args.shift : :default
  options = args.first.is_a?(Hash) ? args.shift : {}
  js = javascript_include_tag(*active_scaffold_javascripts(frontend).push(options))
  css = stylesheet_link_tag(*active_scaffold_stylesheets(frontend).push(options))
  # Give the IE bundle its own cache/concat name so it isn't merged with
  # the main stylesheet bundle.
  options[:cache] += '_ie' if options[:cache].is_a? String
  options[:concat] += '_ie' if options[:concat].is_a? String
  ie_css = stylesheet_link_tag(*active_scaffold_ie_stylesheets(frontend).push(options))
  js + "\n" + css + "\n<!--[if IE]>".html_safe + ie_css + "<![endif]-->\n".html_safe
end
# a general-use loading indicator (the "stuff is happening, please wait" feedback);
# rendered hidden and toggled by the frontend javascript via its DOM id.
def loading_indicator_tag(options)
  image_tag "indicator.gif", :style => "visibility:hidden;", :id => loading_indicator_id(options), :alt => "loading indicator", :class => "loading-indicator"
end
# Creates a javascript-based link that toggles the visibility of some element on the page.
# By default, it toggles the visibility of the sibling after the one it's nested in. You may pass custom javascript logic in options[:of] to change that, though. For example, you could say :of => '$("my_div_id")'.
# You may also flag whether the other element is visible by default or not, and the initial text will adjust accordingly.
def link_to_visibility_toggle(id, options = {})
  options[:default_visible] = true if options[:default_visible].nil?
  # NOTE(review): the labels are overwritten unconditionally, so any
  # caller-supplied :hide_label/:show_label is ignored -- confirm intended.
  options[:hide_label] = as_(:hide)
  options[:show_label] = as_(:show)
  javascript_tag("ActiveScaffold.create_visibility_toggle('#{id}', #{options.to_json});")
end
# Should this action link be hidden?  True when the link's ignore_method
# says so, or when a security-check method exists and denies access.
# (Cleaned up: low-precedence `and`/`or` replaced with `&&`/`||`; the early
# return preserves the original short-circuit so the security check is not
# invoked when the ignore_method already skips the link.)
def skip_action_link(link, *args)
  return true if !link.ignore_method.nil? && controller.respond_to?(link.ignore_method) && controller.send(link.ignore_method, *args)
  (link.security_method_set? || controller.respond_to?(link.security_method)) && !controller.send(link.security_method, *args)
end
# Renders a single action link for +record+: resolves its url options and
# html options, then emits the anchor markup via action_link_html.
def render_action_link(link, url_options, record = nil, html_options = {})
  url_options = action_link_url_options(link, url_options, record)
  html_options = action_link_html_options(link, url_options, record, html_options)
  action_link_html(link, url_options, html_options, record)
end
# Renders an action link inside a link group.  Member links the user is not
# authorized for are rendered as disabled markup (no url) instead of a
# working link.
def render_group_action_link(link, url_options, options, record = nil)
  if link.type == :member && !options[:authorized]
    action_link_html(link, nil, {:class => "disabled #{link.action}#{link.html_options[:class].blank? ? '' : (' ' + link.html_options[:class])}"}, record)
  else
    render_action_link(link, url_options, record)
  end
end
# Builds the url_for options for an action link: applies the link's
# action/controller/parameters, evaluates dynamic parameters, and augments
# the options for nested and STI scenarios.
def action_link_url_options(link, url_options, record, options = {})
  url_options = url_options.clone
  url_options[:action] = link.action
  url_options[:controller] = link.controller.to_s if link.controller
  # Search params only make sense within the current controller; drop them
  # when the link targets a different one.
  url_options.delete(:search) if link.controller and link.controller.to_s != params[:controller]
  url_options.merge! link.parameters if link.parameters
  # @link_record is exposed temporarily so dynamic_parameters procs can see
  # the record while being instance_eval'ed in the view context.
  @link_record = record
  url_options.merge! self.instance_eval(&(link.dynamic_parameters)) if link.dynamic_parameters.is_a?(Proc)
  @link_record = nil
  url_options_for_nested_link(link.column, record, link, url_options, options) if link.nested_link?
  url_options_for_sti_link(link.column, record, link, url_options, options) unless record.nil? || active_scaffold_config.sti_children.nil?
  # Non-GET inline links without a confirm dialog carry the verb in the url.
  url_options[:_method] = link.method if !link.confirm? && link.inline? && link.method != :get
  url_options
end
# Builds the html/data attributes for an action link's anchor tag.
# The link's configured html_options are the base (reverse_merge), with
# the action name as the default css class.
def action_link_html_options(link, url_options, record, html_options)
link_id = get_action_link_id(url_options, record, link.column)
html_options.reverse_merge! link.html_options.merge(:class => link.action)
# Needs to be in html_options to as the adding _method to the url is no longer supported by Rails
html_options[:method] = link.method if link.method != :get
html_options['data-confirm'] = link.confirm(record.try(:to_label)) if link.confirm?
html_options['data-position'] = link.position if link.position and link.inline?
html_options['data-controller'] = link.controller.to_s if link.controller
# inline links are handled by ActiveScaffold's JS via the as_action class
html_options[:class] += ' as_action' if link.inline?
html_options['data-action'] = link.action if link.inline?
if link.popup?
html_options['data-popup'] = true
html_options[:target] = '_blank'
end
html_options[:id] = link_id
# popup and full-page links are plain requests; everything else is AJAX
html_options[:remote] = true unless link.page? || link.popup?
if link.dhtml_confirm?
# non-inline links with a DHTML confirm still need the JS hooks
html_options[:class] += ' as_action' if !link.inline?
html_options[:page_link] = 'true' if !link.inline?
html_options[:dhtml_confirm] = link.dhtml_confirm.value
html_options[:onclick] = link.dhtml_confirm.onclick_function(controller, link_id)
end
# append the configured css classes after all defaults are in place
html_options[:class] += " #{link.html_options[:class]}" unless link.html_options[:class].blank?
html_options
end
# Derives the DOM id for an action link from the url options, the record
# and (for association columns) the association involved, so every link
# on the page gets a unique, stable id.
def get_action_link_id(url_options, record = nil, column = nil)
id = url_options[:id] || url_options[:parent_id]
id = "#{column.association.name}-#{record.id}" if column && column.plural_association?
# for singular associations include the associated record's id when present
# (note: the trailing "end if" guards this whole if/else with the
# singular_association? check)
if record.try(column.association.name.to_sym).present?
id = "#{column.association.name}-#{record.send(column.association.name).id}-#{record.id}"
else
id = "#{column.association.name}-#{record.id}" unless record.nil?
end if column && column.singular_association?
id = "#{id}-#{url_options[:batch_scope].downcase}" if url_options[:batch_scope]
# nested links are prefixed with the (child) controller's id
action_id = "#{id_from_controller(url_options[:controller]) + '-' if url_options[:parent_controller]}#{url_options[:action].to_s}"
action_link_id(action_id, id)
end
# Builds the final anchor tag for an action link.
# +url+ may be nil (disabled link) or a hash of url options; a :link key
# in that hash overrides the link's label.
# Returns an html_safe string, or the literal "Routing Error" when the
# url cannot be routed.
def action_link_html(link, url, html_options, record)
# issue 260, use url_options[:link] if it exists. This prevents DB data from being localized.
label = url.delete(:link) if url.is_a?(Hash)
label ||= link.label
begin
if link.image.nil?
#http://www.continuousthinking.com/2011/09/22/rails-3-1-engine-namespaces-can-creep-into-urls-in-application-layout.html
#its not possible to link from a namespaced controller back to a non-namespaced controller anymore
# seems to be only working with named_routes...
html = link_to(label, url, html_options)
else
html = link_to(image_tag(link.image[:name] , :size => link.image[:size], :alt => label), url, html_options)
end
# if url is nil we would like to generate an anchor without href attribute
url.nil? ? html.sub(/href=".*?"/, '').html_safe : html.html_safe
rescue ActionController::RoutingError => e
Rails.logger.error("ActiveScaffold link_to routing Error: #{e.inspect}")
"Routing Error"
end
end
# Adjusts url options for a nested (association or named-scope) link:
# the current :id becomes :assoc_id, and an embedded-id (:eid) is derived
# from the controller, record and association/scope unless the caller
# asked to reuse an existing eid.
def url_options_for_nested_link(column, record, link, url_options, options = {})
if column && column.association
url_options[:assoc_id] = url_options.delete(:id)
# for singular associations the nested controller gets the associated record's id
url_options[:id] = record.send(column.association.name).id if column.singular_association? && record.send(column.association.name).present?
# controller_id.from(3) strips the "as_" prefix from the scaffold's controller id
link.eid = "#{controller_id.from(3)}_#{record.id}_#{column.association.name}" unless options.has_key?(:reuse_eid)
url_options[:eid] = link.eid
elsif link.parameters && link.parameters[:named_scope]
url_options[:assoc_id] = url_options.delete(:id)
link.eid = "#{controller_id.from(3)}_#{record.id}_#{link.parameters[:named_scope]}" unless options.has_key?(:reuse_eid)
url_options[:eid] = link.eid
end
end
# For an STI child record, retarget the link at the controller that
# manages the record's actual class and remember the current controller
# as :parent_sti. Applies only to links without a column, which is the
# original author's heuristic for "not a singular-association inline
# autolink" (and, per the original comments, breaks when an STI parent
# is itself such an autolink).
def url_options_for_sti_link(column, record, link, url_options, options = {})
  return unless link.column.nil?
  sti_path = controller_path_for_activerecord(record.class)
  return unless sti_path
  url_options[:controller] = sti_path
  url_options[:parent_sti] = controller_path
end
# CSS classes for a list cell: the column's base class, any configured
# css_class (static string, or a Proc called with the value and record),
# plus 'empty' / 'sorted' / 'numeric' markers.
def column_class(column, column_value, record)
  # Memoize the numeric column types so this array is not rebuilt for
  # every rendered cell (matches the later revision of this helper).
  @numeric_classes ||= [:decimal, :float, :integer]
  classes = ["#{column.name}-column"]
  unless column.css_class.nil?
    if column.css_class.is_a?(Proc)
      css_class = column.css_class.call(column_value, record)
      classes << css_class unless css_class.nil?
    else
      classes << column.css_class
    end
  end
  classes << 'empty' if column_empty? column_value
  classes << 'sorted' if active_scaffold_config.list.user.sorting.sorts_on?(column)
  classes << 'numeric' if column.column && @numeric_classes.include?(column.column.type)
  classes.join(' ').rstrip
end
# CSS classes for a column header cell: the base heading class, the sort
# state and direction when the table is sorted on this column, and any
# static css_class configured on the column. Proc css_classes are
# skipped here because there is no cell value to evaluate them against.
def column_heading_class(column, sorting)
  classes = ["#{column.name}-column_heading"]
  classes << "sorted #{sorting.direction_of(column).downcase}" if sorting.sorts_on? column
  css = column.css_class
  classes << css unless css.nil? || css.is_a?(Proc)
  classes.join(' ')
end
# CSS classes for the scaffold's outermost div: scaffold identity,
# the controller's view marker, the configured theme, and a touch-device
# marker when applicable.
def as_main_div_class
  classes = %W[active-scaffold active-scaffold-#{controller_id} #{params[:controller]}-view #{active_scaffold_config.theme}-theme]
  classes << 'as_touch' if touch_device?
  classes.join(' ')
end
# A cell value counts as empty when it is nil, when it responds to
# #empty? and is empty, or when it is a string equal to one of the
# configured "empty" markers (memoized: a space plus the list's
# empty_field_text).
def column_empty?(column_value)
  @empty_column_strings ||= [' ', active_scaffold_config.list.empty_field_text]
  return true if column_value.nil?
  return true if column_value.respond_to?(:empty?) && column_value.empty?
  column_value.is_a?(String) && @empty_column_strings.include?(column_value)
end
# Computes the value for a column's calculation (footer) row.
# A Proc calculate is invoked with the currently listed records;
# otherwise the symbolic calculation (:sum, :count, ...) is delegated to
# ActiveRecord's calculate on the scaffold's relation, scoped by the
# controller's current conditions, joins and includes.
def column_calculation(column)
  if column.calculate.instance_of? Proc
    column.calculate.call(@records)
  else
    conditions = controller.send(:all_conditions)
    includes = active_scaffold_config.list.count_includes
    includes ||= controller.send(:active_scaffold_includes) unless conditions.nil?
    # The original assigned this to an unused local; the value is simply
    # the method's return value.
    beginning_of_chain.calculate(column.calculate, column.name, :conditions => conditions,
                                 :joins => controller.send(:joins_for_collection), :include => includes)
  end
end
# Formats a column's calculation for display. A per-column override
# (render_<name>_<calculate>) may post-process the raw value; symbolic
# calculations are prefixed with their translated name, Procs are not.
def render_column_calculation(column)
calculation = column_calculation(column)
override_formatter = "render_#{column.name}_#{column.calculate}"
calculation = send(override_formatter, calculation) if respond_to? override_formatter
"#{"#{as_(column.calculate)}: " unless column.calculate.is_a? Proc}#{format_column_value nil, column, calculation}"
end
# Show the "add existing" UI only when the column allows it and there is
# at least one candidate record to associate.
def column_show_add_existing(column)
  column.allow_add_existing && options_for_association_count(column.association) > 0
end
# Show the "create new" UI for an association column.
# Plural associations qualify when not read-only; singular associations
# when there is at least one associated candidate. Creation
# authorization on the record's class is always required.
def column_show_add_new(column, associated, record)
  creatable = (column.plural_association? && !column.readonly_association?) ||
              (column.singular_association? && !associated.empty?)
  creatable = false unless record.class.authorized_for?(:crud_type => :create)
  creatable
end
# Strips '?' from predicate-style column names (e.g. :active?) so they
# can be used in DOM ids and css classes.
def clean_column_name(name)
  name.to_s.delete('?')
end
# Flattens a (possibly namespaced) class name into an identifier usable
# in DOM ids: underscore it, then replace path separators.
def clean_class_name(name)
  name.underscore.tr('/', '_')
end
# Renders the error-message box for one or more records/objects, similar
# to Rails' classic error_messages_for but translation-aware.
# Options: :object (record(s) or ivar names), :object_name, :id, :class,
# :header_message, :message, :header_tag, :container_tag (default :div),
# :list_type (default :ul; :br joins messages with <br/> instead).
# Returns '' when there are no errors.
def active_scaffold_error_messages_for(*params)
options = params.extract_options!.symbolize_keys
options.reverse_merge!(:container_tag => :div, :list_type => :ul)
# resolve symbols/strings to controller ivars, then to their model
objects = Array.wrap(options.delete(:object) || params).map do |object|
object = instance_variable_get("@#{object}") unless object.respond_to?(:to_model)
object = convert_to_model(object)
if object.class.respond_to?(:model_name)
options[:object_name] ||= object.class.model_name.human.downcase
end
object
end
objects.compact!
count = objects.inject(0) {|sum, object| sum + object.errors.count }
unless count.zero?
html = {}
# :id/:class default to 'errorExplanation'; an explicitly blank value
# suppresses the attribute entirely
[:id, :class].each do |key|
if options.include?(key)
value = options[key]
html[key] = value unless value.blank?
else
html[key] = 'errorExplanation'
end
end
options[:object_name] ||= params.first
header_message = if options.include?(:header_message)
options[:header_message]
else
as_('errors.template.header', :count => count, :model => options[:object_name].to_s.gsub('_', ' '))
end
message = options.include?(:message) ? options[:message] : as_('errors.template.body')
# NOTE(review): summing arrays relies on ActiveSupport's Enumerable#sum
# accepting non-numeric elements — confirm against the bundled AS version
error_messages = objects.sum do |object|
object.errors.full_messages.map do |msg|
options[:list_type] != :br ? content_tag(:li, msg) : msg
end
end
error_messages = if options[:list_type] == :br
error_messages.join('<br/>').html_safe
else
content_tag(options[:list_type], error_messages.join.html_safe)
end
contents = []
contents << content_tag(options[:header_tag] || :h2, header_message) unless header_message.blank?
contents << content_tag(:p, message) unless message.blank?
contents << error_messages
contents = contents.join.html_safe
options[:container_tag] ? content_tag(options[:container_tag], contents, html) : contents
else
''
end
end
end
end
end
do not recreate numeric classes all the time
module ActiveScaffold
module Helpers
# All extra helpers that should be included in the View.
# Also a dumping ground for uncategorized helpers.
module ViewHelpers
include ActiveScaffold::Helpers::IdHelpers
include ActiveScaffold::Helpers::AssociationHelpers
include ActiveScaffold::Helpers::PaginationHelpers
include ActiveScaffold::Helpers::ListColumnHelpers
include ActiveScaffold::Helpers::ShowColumnHelpers
include ActiveScaffold::Helpers::FormColumnHelpers
include ActiveScaffold::Helpers::SearchColumnHelpers
include ActiveScaffold::Helpers::HumanConditionHelpers
##
## Delegates
##
# access to the configuration variable
def active_scaffold_config
controller.class.active_scaffold_config
end
# per-model configuration lookup, delegated to the controller class
def active_scaffold_config_for(*args)
controller.class.active_scaffold_config_for(*args)
end
# maps a model class to its ActiveScaffold controller, delegated likewise
def active_scaffold_controller_for(*args)
controller.class.active_scaffold_controller_for(*args)
end
##
## Uncategorized
##
# Returns the controller_path of the ActiveScaffold controller that
# manages +klass+, or nil when no such controller exists.
# The original wrapped the whole body in a redundant begin/end, shadowed
# the +controller+ helper with a local, and kept a dead `controller = nil`
# assignment whose only effect was the nil return value.
def controller_path_for_activerecord(klass)
  active_scaffold_controller_for(klass).controller_path
rescue ActiveScaffold::ControllerNotFound
  nil
end
# This is the template finder logic, keep it updated with however we find stuff in rails
# currently this very similar to the logic in ActionBase::Base.render for options file
# Delegates to Rails' lookup_context with an empty prefix; +partial+
# selects partial lookup.
def template_exists?(template_name, partial = false)
lookup_context.exists? template_name, '', partial
end
# Millisecond-resolution timestamp rendered as a decimal string, used as
# a throwaway unique id (e.g. for DOM elements of unsaved records).
def generate_temporary_id
  (Time.now.to_f * 1000).truncate.to_s
end
# Turns [[label, value]] into <option> tags
# Takes optional parameter of :include_blank
# Turns [[label, value], ...] into an array of <option> tag strings.
# With :include_blank a leading prompt option with a nil value is
# prepended (note: this mutates the caller's select_options array).
# FIXME(review): labels/values are interpolated without HTML escaping;
# callers must pass trusted data.
def option_tags_for(select_options, options = {})
  select_options.insert(0, [as_(:_select_), nil]) if options[:include_blank]
  select_options.collect do |label, value|
    if value.nil?
      # BUG FIX: the original read "<option value="">…" — Ruby parses that
      # as two adjacent string literals, emitting a malformed
      # <option value="> tag. Escaped quotes produce the intended value="".
      "<option value=\"\">#{label}</option>"
    else
      "<option value=\"#{value}\">#{label}</option>"
    end
  end
end
# Should this column be displayed in the subform?
# Decide whether a column belongs in a subform for +parent_record+.
# Non-association columns always qualify. Polymorphic associations never
# do (they might be the reverse association, and assignment from the
# polymorphic side is unusual). The reverse association back to the
# parent is excluded so the form does not recurse into itself.
def in_subform?(column, parent_record)
  association = column.association
  return true unless association
  return false if column.polymorphic_association?
  !association.reverse_for?(parent_record.class)
end
# Opens a multipart form that submits into a hidden iframe (classic
# AJAX-style file upload). Returns the opening form tag plus the iframe;
# the caller is responsible for the matching closing tag.
# NOTE(review): `||= true` forces multipart even when the caller passes
# false/nil — confirm that is intended.
def form_remote_upload_tag(url_for_options = {}, options = {})
options[:target] = action_iframe_id(url_for_options)
options[:multipart] ||= true
options[:class] = "#{options[:class]} as_remote_upload".strip
output=""
output << form_tag(url_for_options, options)
(output << "<iframe id='#{action_iframe_id(url_for_options)}' name='#{action_iframe_id(url_for_options)}' style='display:none'></iframe>").html_safe
end
# Provides list of javascripts to include with +javascript_include_tag+
# You can use this with your javascripts like
# <%= javascript_include_tag :defaults, 'your_own_cool_script', active_scaffold_javascripts, :cache => true %>
# Asset paths for the frontend's javascripts, ready to splat into
# javascript_include_tag.
def active_scaffold_javascripts(frontend = :default)
  ActiveScaffold::Config::Core.javascripts(frontend).map do |script|
    ActiveScaffold::Config::Core.asset_path(script, frontend)
  end
end
# Provides stylesheets to include with +stylesheet_link_tag+
def active_scaffold_stylesheets(frontend = :default)
# one-element array so it can be splatted into stylesheet_link_tag
[ActiveScaffold::Config::Core.asset_path("stylesheet.css", frontend)]
end
# Provides stylesheets for IE to include with +stylesheet_link_tag+
def active_scaffold_ie_stylesheets(frontend = :default)
# IE-only stylesheet, wrapped in conditional comments by the caller
[ActiveScaffold::Config::Core.asset_path("stylesheet-ie.css", frontend)]
end
# easy way to include ActiveScaffold assets
# Emits all ActiveScaffold asset tags: javascripts, stylesheets, and the
# IE stylesheet wrapped in <!--[if IE]> conditional comments.
# Accepts an optional leading frontend symbol and an options hash that is
# forwarded to the tag helpers.
def active_scaffold_includes(*args)
frontend = args.first.is_a?(Symbol) ? args.shift : :default
options = args.first.is_a?(Hash) ? args.shift : {}
js = javascript_include_tag(*active_scaffold_javascripts(frontend).push(options))
css = stylesheet_link_tag(*active_scaffold_stylesheets(frontend).push(options))
# order matters: the '_ie' suffix is appended to :cache/:concat only
# after the regular tags above have been rendered
options[:cache] += '_ie' if options[:cache].is_a? String
options[:concat] += '_ie' if options[:concat].is_a? String
ie_css = stylesheet_link_tag(*active_scaffold_ie_stylesheets(frontend).push(options))
js + "\n" + css + "\n<!--[if IE]>".html_safe + ie_css + "<![endif]-->\n".html_safe
end
# a general-use loading indicator (the "stuff is happening, please wait" feedback)
# Spinner image for AJAX feedback; starts hidden and is toggled by
# ActiveScaffold's javascript via its derived DOM id.
def loading_indicator_tag(options)
image_tag "indicator.gif", :style => "visibility:hidden;", :id => loading_indicator_id(options), :alt => "loading indicator", :class => "loading-indicator"
end
# Creates a javascript-based link that toggles the visibility of some element on the page.
# By default, it toggles the visibility of the sibling after the one it's nested in. You may pass custom javascript logic in options[:of] to change that, though. For example, you could say :of => '$("my_div_id")'.
# You may also flag whether the other element is visible by default or not, and the initial text will adjust accordingly.
def link_to_visibility_toggle(id, options = {})
options[:default_visible] = true if options[:default_visible].nil?
options[:hide_label] = as_(:hide)
options[:show_label] = as_(:show)
javascript_tag("ActiveScaffold.create_visibility_toggle('#{id}', #{options.to_json});")
end
def skip_action_link(link, *args)
(!link.ignore_method.nil? && controller.respond_to?(link.ignore_method) && controller.send(link.ignore_method, *args)) || ((link.security_method_set? or controller.respond_to? link.security_method) and !controller.send(link.security_method, *args))
end
def render_action_link(link, url_options, record = nil, html_options = {})
url_options = action_link_url_options(link, url_options, record)
html_options = action_link_html_options(link, url_options, record, html_options)
action_link_html(link, url_options, html_options, record)
end
def render_group_action_link(link, url_options, options, record = nil)
if link.type == :member && !options[:authorized]
action_link_html(link, nil, {:class => "disabled #{link.action}#{link.html_options[:class].blank? ? '' : (' ' + link.html_options[:class])}"}, record)
else
render_action_link(link, url_options, record)
end
end
def action_link_url_options(link, url_options, record, options = {})
url_options = url_options.clone
url_options[:action] = link.action
url_options[:controller] = link.controller.to_s if link.controller
url_options.delete(:search) if link.controller and link.controller.to_s != params[:controller]
url_options.merge! link.parameters if link.parameters
@link_record = record
url_options.merge! self.instance_eval(&(link.dynamic_parameters)) if link.dynamic_parameters.is_a?(Proc)
@link_record = nil
url_options_for_nested_link(link.column, record, link, url_options, options) if link.nested_link?
url_options_for_sti_link(link.column, record, link, url_options, options) unless record.nil? || active_scaffold_config.sti_children.nil?
url_options[:_method] = link.method if !link.confirm? && link.inline? && link.method != :get
url_options
end
def action_link_html_options(link, url_options, record, html_options)
link_id = get_action_link_id(url_options, record, link.column)
html_options.reverse_merge! link.html_options.merge(:class => link.action)
# Needs to be in html_options to as the adding _method to the url is no longer supported by Rails
html_options[:method] = link.method if link.method != :get
html_options['data-confirm'] = link.confirm(record.try(:to_label)) if link.confirm?
html_options['data-position'] = link.position if link.position and link.inline?
html_options['data-controller'] = link.controller.to_s if link.controller
html_options[:class] += ' as_action' if link.inline?
html_options['data-action'] = link.action if link.inline?
if link.popup?
html_options['data-popup'] = true
html_options[:target] = '_blank'
end
html_options[:id] = link_id
html_options[:remote] = true unless link.page? || link.popup?
if link.dhtml_confirm?
html_options[:class] += ' as_action' if !link.inline?
html_options[:page_link] = 'true' if !link.inline?
html_options[:dhtml_confirm] = link.dhtml_confirm.value
html_options[:onclick] = link.dhtml_confirm.onclick_function(controller, link_id)
end
html_options[:class] += " #{link.html_options[:class]}" unless link.html_options[:class].blank?
html_options
end
def get_action_link_id(url_options, record = nil, column = nil)
id = url_options[:id] || url_options[:parent_id]
id = "#{column.association.name}-#{record.id}" if column && column.plural_association?
if record.try(column.association.name.to_sym).present?
id = "#{column.association.name}-#{record.send(column.association.name).id}-#{record.id}"
else
id = "#{column.association.name}-#{record.id}" unless record.nil?
end if column && column.singular_association?
id = "#{id}-#{url_options[:batch_scope].downcase}" if url_options[:batch_scope]
action_id = "#{id_from_controller(url_options[:controller]) + '-' if url_options[:parent_controller]}#{url_options[:action].to_s}"
action_link_id(action_id, id)
end
def action_link_html(link, url, html_options, record)
# issue 260, use url_options[:link] if it exists. This prevents DB data from being localized.
label = url.delete(:link) if url.is_a?(Hash)
label ||= link.label
begin
if link.image.nil?
#http://www.continuousthinking.com/2011/09/22/rails-3-1-engine-namespaces-can-creep-into-urls-in-application-layout.html
#its not possible to link from a namespacedcontroller back to a non namespaced-controller anymore
# seems to be only working with named_routes...
html = link_to(label, url, html_options)
else
html = link_to(image_tag(link.image[:name] , :size => link.image[:size], :alt => label), url, html_options)
end
# if url is nil we would like to generate an anchor without href attribute
url.nil? ? html.sub(/href=".*?"/, '').html_safe : html.html_safe
rescue ActionController::RoutingError => e
Rails.logger.error("ActiveScaffold link_to routing Error: #{e.inspect}")
"Routing Error"
end
end
def url_options_for_nested_link(column, record, link, url_options, options = {})
if column && column.association
url_options[:assoc_id] = url_options.delete(:id)
url_options[:id] = record.send(column.association.name).id if column.singular_association? && record.send(column.association.name).present?
link.eid = "#{controller_id.from(3)}_#{record.id}_#{column.association.name}" unless options.has_key?(:reuse_eid)
url_options[:eid] = link.eid
elsif link.parameters && link.parameters[:named_scope]
url_options[:assoc_id] = url_options.delete(:id)
link.eid = "#{controller_id.from(3)}_#{record.id}_#{link.parameters[:named_scope]}" unless options.has_key?(:reuse_eid)
url_options[:eid] = link.eid
end
end
def url_options_for_sti_link(column, record, link, url_options, options = {})
#need to find out controller of current record type
#and set parameters
# its quite difficult to detect an sti link
# if link.column.nil? we are sure that it is nt an singular association inline autolink
# howver that will not work if a sti parent is an singular association inline autolink
if link.column.nil?
sti_controller_path = controller_path_for_activerecord(record.class)
if sti_controller_path
url_options[:controller] = sti_controller_path
url_options[:parent_sti] = controller_path
end
end
end
# CSS classes for a list cell: the column's base class, any configured
# css_class (static string, or a Proc called with the value and record),
# plus 'empty' / 'sorted' / 'numeric' markers.
def column_class(column, column_value, record)
# memoized so the numeric-types array is not rebuilt for every cell
@numeric_classes ||= [:decimal, :float, :integer]
classes = []
classes << "#{column.name}-column"
if column.css_class.is_a?(Proc)
css_class = column.css_class.call(column_value, record)
classes << css_class unless css_class.nil?
else
classes << column.css_class
end unless column.css_class.nil?
classes << 'empty' if column_empty? column_value
classes << 'sorted' if active_scaffold_config.list.user.sorting.sorts_on?(column)
classes << 'numeric' if column.column && @numeric_classes.include?(column.column.type)
classes.join(' ').rstrip
end
def column_heading_class(column, sorting)
classes = []
classes << "#{column.name}-column_heading"
classes << "sorted #{sorting.direction_of(column).downcase}" if sorting.sorts_on? column
classes << column.css_class unless column.css_class.nil? || column.css_class.is_a?(Proc)
classes.join(' ')
end
def as_main_div_class
classes = ["active-scaffold", "active-scaffold-#{controller_id}", "#{params[:controller]}-view", "#{active_scaffold_config.theme}-theme"]
classes << "as_touch" if touch_device?
classes.join(' ')
end
def column_empty?(column_value)
@empty_column_strings ||= [' ', active_scaffold_config.list.empty_field_text]
empty = column_value.nil?
empty ||= column_value.empty? if column_value.respond_to? :empty?
empty ||= @empty_column_strings.include?(column_value) if String === column_value
return empty
end
def column_calculation(column)
unless column.calculate.instance_of? Proc
conditions = controller.send(:all_conditions)
includes = active_scaffold_config.list.count_includes
includes ||= controller.send(:active_scaffold_includes) unless conditions.nil?
calculation = beginning_of_chain.calculate(column.calculate, column.name, :conditions => conditions,
:joins => controller.send(:joins_for_collection), :include => includes)
else
column.calculate.call(@records)
end
end
def render_column_calculation(column)
calculation = column_calculation(column)
override_formatter = "render_#{column.name}_#{column.calculate}"
calculation = send(override_formatter, calculation) if respond_to? override_formatter
"#{"#{as_(column.calculate)}: " unless column.calculate.is_a? Proc}#{format_column_value nil, column, calculation}"
end
def column_show_add_existing(column)
(column.allow_add_existing and options_for_association_count(column.association) > 0)
end
def column_show_add_new(column, associated, record)
value = (column.plural_association? && !column.readonly_association?) || (column.singular_association? and not associated.empty?)
value = false unless record.class.authorized_for?(:crud_type => :create)
value
end
def clean_column_name(name)
name.to_s.gsub('?', '')
end
def clean_class_name(name)
name.underscore.gsub('/', '_')
end
def active_scaffold_error_messages_for(*params)
options = params.extract_options!.symbolize_keys
options.reverse_merge!(:container_tag => :div, :list_type => :ul)
objects = Array.wrap(options.delete(:object) || params).map do |object|
object = instance_variable_get("@#{object}") unless object.respond_to?(:to_model)
object = convert_to_model(object)
if object.class.respond_to?(:model_name)
options[:object_name] ||= object.class.model_name.human.downcase
end
object
end
objects.compact!
count = objects.inject(0) {|sum, object| sum + object.errors.count }
unless count.zero?
html = {}
[:id, :class].each do |key|
if options.include?(key)
value = options[key]
html[key] = value unless value.blank?
else
html[key] = 'errorExplanation'
end
end
options[:object_name] ||= params.first
header_message = if options.include?(:header_message)
options[:header_message]
else
as_('errors.template.header', :count => count, :model => options[:object_name].to_s.gsub('_', ' '))
end
message = options.include?(:message) ? options[:message] : as_('errors.template.body')
error_messages = objects.sum do |object|
object.errors.full_messages.map do |msg|
options[:list_type] != :br ? content_tag(:li, msg) : msg
end
end
error_messages = if options[:list_type] == :br
error_messages.join('<br/>').html_safe
else
content_tag(options[:list_type], error_messages.join.html_safe)
end
contents = []
contents << content_tag(options[:header_tag] || :h2, header_message) unless header_message.blank?
contents << content_tag(:p, message) unless message.blank?
contents << error_messages
contents = contents.join.html_safe
options[:container_tag] ? content_tag(options[:container_tag], contents, html) : contents
else
''
end
end
end
end
end
|
module ActiveScaffold
module Helpers
# All extra helpers that should be included in the View.
# Also a dumping ground for uncategorized helpers.
module ViewHelpers
include ActiveScaffold::Helpers::IdHelpers
include ActiveScaffold::Helpers::AssociationHelpers
include ActiveScaffold::Helpers::PaginationHelpers
include ActiveScaffold::Helpers::ListColumnHelpers
include ActiveScaffold::Helpers::ShowColumnHelpers
include ActiveScaffold::Helpers::FormColumnHelpers
include ActiveScaffold::Helpers::SearchColumnHelpers
include ActiveScaffold::Helpers::HumanConditionHelpers
##
## Delegates
##
# access to the configuration variable
def active_scaffold_config
controller.class.active_scaffold_config
end
def active_scaffold_config_for(*args)
controller.class.active_scaffold_config_for(*args)
end
def active_scaffold_controller_for(*args)
controller.class.active_scaffold_controller_for(*args)
end
##
## Uncategorized
##
def controller_path_for_activerecord(klass)
begin
controller = active_scaffold_controller_for(klass)
controller.controller_path
rescue ActiveScaffold::ControllerNotFound
controller = nil
end
end
# Splits a partial path into [directory, template]. Paths without a
# directory component are resolved against the current controller's path.
def partial_pieces(partial_path)
  if partial_path.include?('/')
    [File.dirname(partial_path), File.basename(partial_path)]
  else
    [controller.class.controller_path, partial_path]
  end
end
# This is the template finder logic, keep it updated with however we find stuff in rails
# currently this very similar to the logic in ActionBase::Base.render for options file
# TODO: Work with rails core team to find a better way to check for this.
# Not working so far for rais 3.1
# Template lookup is disabled in this revision (see the TODO above: no
# working way to check template existence on Rails 3.1 yet), so this
# always reports the template as missing.
# The original body carried an unused local ('find_template'), a
# commented-out view_paths lookup, and a rescue around code that could
# not raise ActionView::MissingTemplate; the effective behavior was a
# constant false, preserved here.
def template_exists?(template_name, path)
  false
end
def generate_temporary_id
(Time.now.to_f*1000).to_i.to_s
end
# Turns [[label, value]] into <option> tags
# Takes optional parameter of :include_blank
# Turns [[label, value], ...] into an array of <option> tag strings.
# With :include_blank a leading prompt option with a nil value is
# prepended (note: this mutates the caller's select_options array).
# FIXME(review): labels/values are interpolated without HTML escaping;
# callers must pass trusted data.
def option_tags_for(select_options, options = {})
  select_options.insert(0, [as_(:_select_), nil]) if options[:include_blank]
  select_options.collect do |label, value|
    if value.nil?
      # BUG FIX: the original read "<option value="">…" — Ruby parses that
      # as two adjacent string literals, emitting a malformed
      # <option value="> tag. Escaped quotes produce the intended value="".
      "<option value=\"\">#{label}</option>"
    else
      "<option value=\"#{value}\">#{label}</option>"
    end
  end
end
# Should this column be displayed in the subform?
def in_subform?(column, parent_record)
return true unless column.association
# Polymorphic associations can't appear because they *might* be the reverse association, and because you generally don't assign an association from the polymorphic side ... I think.
return false if column.polymorphic_association?
# A column shouldn't be in the subform if it's the reverse association to the parent
return false if column.association.reverse_for?(parent_record.class)
return true
end
def form_remote_upload_tag(url_for_options = {}, options = {})
options[:target] = action_iframe_id(url_for_options)
options[:multipart] ||= true
options[:class] = "#{options[:class]} as_remote_upload".strip
output=""
output << form_tag(url_for_options, options)
(output << "<iframe id='#{action_iframe_id(url_for_options)}' name='#{action_iframe_id(url_for_options)}' style='display:none'></iframe>").html_safe
end
# Provides list of javascripts to include with +javascript_include_tag+
# You can use this with your javascripts like
# <%= javascript_include_tag :defaults, 'your_own_cool_script', active_scaffold_javascripts, :cache => true %>
def active_scaffold_javascripts(frontend = :default)
ActiveScaffold::Config::Core.javascripts(frontend).collect do |name|
ActiveScaffold::Config::Core.asset_path(name, frontend)
end
end
# Provides stylesheets to include with +stylesheet_link_tag+
def active_scaffold_stylesheets(frontend = :default)
[ActiveScaffold::Config::Core.asset_path("stylesheet.css", frontend)]
end
# Provides stylesheets for IE to include with +stylesheet_link_tag+
def active_scaffold_ie_stylesheets(frontend = :default)
[ActiveScaffold::Config::Core.asset_path("stylesheet-ie.css", frontend)]
end
# easy way to include ActiveScaffold assets
def active_scaffold_includes(*args)
frontend = args.first.is_a?(Symbol) ? args.shift : :default
options = args.first.is_a?(Hash) ? args.shift : {}
js = javascript_include_tag(*active_scaffold_javascripts(frontend).push(options))
css = stylesheet_link_tag(*active_scaffold_stylesheets(frontend).push(options))
options[:cache] += '_ie' if options[:cache].is_a? String
options[:concat] += '_ie' if options[:concat].is_a? String
ie_css = stylesheet_link_tag(*active_scaffold_ie_stylesheets(frontend).push(options))
js + "\n" + css + "\n<!--[if IE]>".html_safe + ie_css + "<![endif]-->\n".html_safe
end
# a general-use loading indicator (the "stuff is happening, please wait" feedback)
def loading_indicator_tag(options)
image_tag "indicator.gif", :style => "visibility:hidden;", :id => loading_indicator_id(options), :alt => "loading indicator", :class => "loading-indicator"
end
# Creates a javascript-based link that toggles the visibility of some element on the page.
# By default, it toggles the visibility of the sibling after the one it's nested in. You may pass custom javascript logic in options[:of] to change that, though. For example, you could say :of => '$("my_div_id")'.
# You may also flag whether the other element is visible by default or not, and the initial text will adjust accordingly.
def link_to_visibility_toggle(id, options = {})
options[:default_visible] = true if options[:default_visible].nil?
options[:hide_label] = as_(:hide)
options[:show_label] = as_(:show)
javascript_tag("ActiveScaffold.create_visibility_toggle('#{id}', #{options.to_json});")
end
# Whether an action link should be omitted entirely.
# Skips when the link's ignore_method (if set) returns truthy — invoked
# via controller.try, so a missing method yields nil (no skip) — or when
# a security method is configured (or defined on the controller) and it
# denies the action for *args.
def skip_action_link(link, *args)
(!link.ignore_method.nil? and controller.try(link.ignore_method, *args)) || ((link.security_method_set? or controller.respond_to? link.security_method) and !controller.send(link.security_method, *args))
end
def render_action_link(link, url_options, record = nil, html_options = {})
url_options = action_link_url_options(link, url_options, record)
html_options = action_link_html_options(link, url_options, record, html_options)
action_link_html(link, url_options, html_options, record)
end
def render_group_action_link(link, url_options, options, record = nil)
if link.type == :member && !options[:authorized]
action_link_html(link, nil, {:class => "disabled #{link.action}#{link.html_options[:class].blank? ? '' : (' ' + link.html_options[:class])}"}, record)
else
render_action_link(link, url_options, record)
end
end
def action_link_url_options(link, url_options, record, options = {})
url_options = url_options.clone
url_options[:action] = link.action
url_options[:controller] = link.controller.to_s if link.controller
url_options.delete(:search) if link.controller and link.controller.to_s != params[:controller]
url_options.merge! link.parameters if link.parameters
@link_record = record
url_options.merge! self.instance_eval(&(link.dynamic_parameters)) if link.dynamic_parameters.is_a?(Proc)
@link_record = nil
url_options_for_nested_link(link.column, record, link, url_options, options) if link.nested_link?
url_options_for_sti_link(link.column, record, link, url_options, options) unless record.nil? || active_scaffold_config.sti_children.nil?
url_options[:_method] = link.method if !link.confirm? && link.inline? && link.method != :get
url_options
end
# Builds the HTML attribute hash for an action link anchor. Caller-supplied
# +html_options+ win over the link's own html_options (reverse_merge!), with
# the link action as the base CSS class.
def action_link_html_options(link, url_options, record, html_options)
  link_id = get_action_link_id(url_options, record, link.column)
  html_options.reverse_merge! link.html_options.merge(:class => link.action)
  # Needs to be in html_options to as the adding _method to the url is no longer supported by Rails
  html_options[:method] = link.method if link.method != :get
  html_options['data-confirm'] = link.confirm(record.try(:to_label)) if link.confirm?
  html_options['data-position'] = link.position if link.position and link.inline?
  html_options[:class] += ' as_action' if link.inline?
  html_options['data-action'] = link.action if link.inline?
  if link.popup?
    html_options['data-popup'] = true
    html_options[:target] = '_blank'
  end
  html_options[:id] = link_id
  # Page and popup links do full requests; everything else goes over AJAX.
  html_options[:remote] = true unless link.page? || link.popup?
  if link.dhtml_confirm?
    # Non-inline links still get the as_action class / page_link marker so
    # the JS confirm machinery can find them.
    html_options[:class] += ' as_action' if !link.inline?
    html_options[:page_link] = 'true' if !link.inline?
    html_options[:dhtml_confirm] = link.dhtml_confirm.value
    html_options[:onclick] = link.dhtml_confirm.onclick_function(controller, link_id)
  end
  # Append any user-configured classes last.
  html_options[:class] += " #{link.html_options[:class]}" unless link.html_options[:class].blank?
  html_options
end
# Derives a DOM id for an action link from the url options plus, for
# association columns, the association name and the record ids involved.
# NOTE: the if/else below uses a trailing `end if` modifier — the whole
# singular-association branch only runs when column is a singular association.
def get_action_link_id(url_options, record = nil, column = nil)
  id = url_options[:id] || url_options[:parent_id]
  id = "#{column.association.name}-#{record.id}" if column && column.plural_association?
  if record.try(column.association.name.to_sym).present?
    id = "#{column.association.name}-#{record.send(column.association.name).id}-#{record.id}"
  else
    id = "#{column.association.name}-#{record.id}" unless record.nil?
  end if column && column.singular_association?
  # Batch-scoped links get a distinct id per scope.
  id = "#{id}-#{url_options[:batch_scope].downcase}" if url_options[:batch_scope]
  action_id = "#{id_from_controller(url_options[:controller]) + '-' if url_options[:parent_controller]}#{url_options[:action].to_s}"
  action_link_id(action_id, id)
end
# Builds the anchor tag for an action link. If the url hash carries :link it
# is used as the label (issue 260 — prevents DB data from being localized),
# otherwise the link's own label.
# Rails 3.1 raises ActionController::RoutingError when linking from a
# namespaced controller to a non-namespaced one; we log and degrade to a
# plain "Routing Error" string instead of crashing the view (this mirrors
# the rescue already present in the later revision of this helper).
def action_link_html(link, url, html_options, record)
  # issue 260, use url_options[:link] if it exists. This prevents DB data from being localized.
  label = url.delete(:link) if url.is_a?(Hash)
  label ||= link.label
  begin
    if link.image.nil?
      html = link_to(label, url, html_options)
    else
      html = link_to(image_tag(link.image[:name] , :size => link.image[:size], :alt => label), url, html_options)
    end
    # if url is nil we would like to generate an anchor without href attribute
    url.nil? ? html.sub(/href=".*?"/, '').html_safe : html.html_safe
  rescue ActionController::RoutingError => e
    Rails.logger.error("ActiveScaffold link_to routing Error: #{e.inspect}")
    "Routing Error"
  end
end
# Adjusts url options for a link nested under an association or named scope:
# the current :id becomes :assoc_id and an element id (:eid) is generated so
# the nested scaffold can be addressed uniquely on the page.
def url_options_for_nested_link(column, record, link, url_options, options = {})
  if column && column.association
    url_options[:assoc_id] = url_options.delete(:id)
    url_options[:id] = record.send(column.association.name).id if column.singular_association? && record.send(column.association.name).present?
    # controller_id.from(3) presumably strips a fixed prefix from the DOM
    # controller id — TODO confirm against IdHelpers#controller_id.
    link.eid = "#{controller_id.from(3)}_#{record.id}_#{column.association.name}" unless options.has_key?(:reuse_eid)
    url_options[:eid] = link.eid
  elsif link.parameters && link.parameters[:named_scope]
    url_options[:assoc_id] = url_options.delete(:id)
    link.eid = "#{controller_id.from(3)}_#{record.id}_#{link.parameters[:named_scope]}" unless options.has_key?(:reuse_eid)
    url_options[:eid] = link.eid
  end
end
# Points the url options of an STI child record's link at the child's own
# controller, remembering the current controller as :parent_sti.
# Detecting an STI link is tricky: link.column.nil? tells us this is not a
# singular-association inline autolink. NOTE: that heuristic fails when an
# STI parent is itself a singular-association inline autolink.
def url_options_for_sti_link(column, record, link, url_options, options = {})
  if link.column.nil?
    sti_controller_path = controller_path_for_activerecord(record.class)
    if sti_controller_path
      url_options[:controller] = sti_controller_path
      url_options[:parent_sti] = controller_path
    end
  end
end
# Assembles the CSS classes for a list cell: the column-name class, an
# optional configured css_class (static value or a Proc of value/record),
# plus empty/sorted/numeric marker classes.
def column_class(column, column_value, record)
  classes = ["#{column.name}-column"]
  unless column.css_class.nil?
    if column.css_class.is_a?(Proc)
      computed = column.css_class.call(column_value, record)
      classes << computed unless computed.nil?
    else
      classes << column.css_class
    end
  end
  classes << 'empty' if column_empty? column_value
  classes << 'sorted' if active_scaffold_config.list.user.sorting.sorts_on?(column)
  classes << 'numeric' if column.column and [:decimal, :float, :integer].include?(column.column.type)
  classes.join(' ').rstrip
end
# CSS classes for a column header cell: the heading class, a sorted marker
# with direction when applicable, and any static (non-Proc) css_class.
def column_heading_class(column, sorting)
  classes = ["#{column.name}-column_heading"]
  if sorting.sorts_on? column
    classes << "sorted #{sorting.direction_of(column).downcase}"
  end
  static_css = column.css_class
  classes << static_css unless static_css.nil? || static_css.is_a?(Proc)
  classes.join(' ')
end
# CSS classes for the scaffold's main container div, including a touch-device
# marker when applicable.
def as_main_div_class
  classes = [
    "active-scaffold",
    "active-scaffold-#{controller_id}",
    "#{params[:controller]}-view",
    "#{active_scaffold_config.theme}-theme"
  ]
  classes << "as_touch" if touch_device?
  classes.join(' ')
end
# True when the value should render as an empty cell: nil, anything whose
# #empty? is true, or a String equal to a single space or the configured
# empty_field_text.
def column_empty?(column_value)
  return true if column_value.nil?
  return true if column_value.respond_to?(:empty?) && column_value.empty?
  column_value.is_a?(String) && [' ', active_scaffold_config.list.empty_field_text].include?(column_value)
end
# Computes the value for a column's footer calculation. A Proc calculate is
# called with the current records; otherwise the calculation symbol (:sum,
# :count, ...) is delegated to the database via #calculate with the
# controller's current conditions/joins/includes.
# Cleanup: inverted the `unless ... else` into a positive `if` and dropped
# the dead `calculation =` assignment (the branch value was already the
# method's return value).
def column_calculation(column)
  if column.calculate.instance_of?(Proc)
    column.calculate.call(@records)
  else
    conditions = controller.send(:all_conditions)
    includes = active_scaffold_config.list.count_includes
    includes ||= controller.send(:active_scaffold_includes) unless conditions.nil?
    beginning_of_chain.calculate(column.calculate, column.name, :conditions => conditions,
                                 :joins => controller.send(:joins_for_collection), :include => includes)
  end
end
# Renders a column's footer calculation: computes the value, applies an
# optional per-column override formatter (render_<name>_<calculate>), and
# prefixes the localized calculation label unless the calculation is a Proc.
def render_column_calculation(column)
  calculation = column_calculation(column)
  override_formatter = "render_#{column.name}_#{column.calculate}"
  calculation = send(override_formatter, calculation) if respond_to? override_formatter
  "#{"#{as_(column.calculate)}: " unless column.calculate.is_a? Proc}#{format_column_value nil, column, calculation}"
end
# Show the "add existing" option only when the column allows it and there is
# at least one existing associated record to pick from.
# Replaced the low-precedence `and` keyword with `&&`.
def column_show_add_existing(column)
  column.allow_add_existing && options_for_association_count(column.association) > 0
end
# Show the "create new" option for an association column: always for
# writable plural associations, for singular ones only when there are
# candidates; never without :create authorization on the record's class.
# Normalized the original mix of `&&` with `and`/`not` (different
# precedence) to plain `&&` / `!`.
def column_show_add_new(column, associated, record)
  value = (column.plural_association? && !column.readonly_association?) || (column.singular_association? && !associated.empty?)
  value = false unless record.class.authorized_for?(:crud_type => :create)
  value
end
# Renders a Rails-style error-explanation box for one or more objects.
# Accepts model instances, instance-variable names (symbols), or
# :object => .... Options: :header_message, :message, :container_tag
# (default :div), :list_type (default :ul; :br joins messages with <br/>),
# :header_tag (default :h2), and :id / :class for the container.
# Returns '' when there are no errors.
def active_scaffold_error_messages_for(*params)
  options = params.extract_options!.symbolize_keys
  options.reverse_merge!(:container_tag => :div, :list_type => :ul)
  # Normalize everything passed in to model objects; remember the first
  # model name for the header message.
  objects = Array.wrap(options.delete(:object) || params).map do |object|
    object = instance_variable_get("@#{object}") unless object.respond_to?(:to_model)
    object = convert_to_model(object)
    if object.class.respond_to?(:model_name)
      options[:object_name] ||= object.class.model_name.human.downcase
    end
    object
  end
  objects.compact!
  count = objects.inject(0) {|sum, object| sum + object.errors.count }
  unless count.zero?
    html = {}
    # Container id/class: caller-supplied values win; a blank value
    # suppresses the attribute entirely; default is 'errorExplanation'.
    [:id, :class].each do |key|
      if options.include?(key)
        value = options[key]
        html[key] = value unless value.blank?
      else
        html[key] = 'errorExplanation'
      end
    end
    options[:object_name] ||= params.first
    header_message = if options.include?(:header_message)
      options[:header_message]
    else
      as_('errors.template.header', :count => count, :model => options[:object_name].to_s.gsub('_', ' '))
    end
    message = options.include?(:message) ? options[:message] : as_('errors.template.body')
    # Collect the full messages, wrapping each in <li> unless rendering as
    # <br/>-separated lines.
    error_messages = objects.sum do |object|
      object.errors.full_messages.map do |msg|
        options[:list_type] != :br ? content_tag(:li, msg) : msg
      end
    end
    error_messages = if options[:list_type] == :br
      error_messages.join('<br/>').html_safe
    else
      content_tag(options[:list_type], error_messages.join.html_safe)
    end
    contents = []
    contents << content_tag(options[:header_tag] || :h2, header_message) unless header_message.blank?
    contents << content_tag(:p, message) unless message.blank?
    contents << error_messages
    contents = contents.join.html_safe
    options[:container_tag] ? content_tag(options[:container_tag], contents, html) : contents
  else
    ''
  end
end
end
end
end
catch routing error in case link_to throws an exception... Rails 3.1 does
not allow linking to a controller without a namespace if the current
controller is in a namespace
module ActiveScaffold
module Helpers
# All extra helpers that should be included in the View.
# Also a dumping ground for uncategorized helpers.
module ViewHelpers
include ActiveScaffold::Helpers::IdHelpers
include ActiveScaffold::Helpers::AssociationHelpers
include ActiveScaffold::Helpers::PaginationHelpers
include ActiveScaffold::Helpers::ListColumnHelpers
include ActiveScaffold::Helpers::ShowColumnHelpers
include ActiveScaffold::Helpers::FormColumnHelpers
include ActiveScaffold::Helpers::SearchColumnHelpers
include ActiveScaffold::Helpers::HumanConditionHelpers
##
## Delegates
##
# access to the configuration variable
def active_scaffold_config
controller.class.active_scaffold_config
end
def active_scaffold_config_for(*args)
controller.class.active_scaffold_config_for(*args)
end
def active_scaffold_controller_for(*args)
controller.class.active_scaffold_controller_for(*args)
end
##
## Uncategorized
##
# Returns the controller_path for the scaffold controller that manages
# +klass+, or nil when no such controller exists.
# Cleanup: the original assigned to a local named `controller`, shadowing
# the view's `controller` helper method; also dropped the redundant
# begin/end around a whole-method rescue.
def controller_path_for_activerecord(klass)
  active_scaffold_controller_for(klass).controller_path
rescue ActiveScaffold::ControllerNotFound
  nil
end
# Splits a partial path into [directory, basename]; a path without a slash
# is resolved against the current controller's path.
def partial_pieces(partial_path)
  if partial_path.include?('/')
    [File.dirname(partial_path), File.basename(partial_path)]
  else
    [controller.class.controller_path, partial_path]
  end
end
# This is the template finder logic, keep it updated with however we find stuff in rails
# currently this very similar to the logic in ActionBase::Base.render for options file
# TODO: Work with rails core team to find a better way to check for this.
# Not working so far for rais 3.1
# This is the template finder logic, keep it updated with however we find stuff in rails
# currently this very similar to the logic in ActionBase::Base.render for options file
# TODO: Work with rails core team to find a better way to check for this.
# Stubbed out: the view_paths-based lookup does not work under Rails 3.1,
# so this always reports the template as missing until a reliable check is
# implemented. (The original kept a dead `method` local, a commented-out
# lookup call, and an unreachable rescue — all removed; behavior unchanged.)
def template_exists?(template_name, path)
  false
end
# Millisecond-resolution timestamp rendered as a string, used as a
# unique-enough temporary DOM id.
def generate_temporary_id
  millis = (Time.now.to_f * 1000).to_i
  millis.to_s
end
# Turns [[label, value]] into <option> tags
# Takes optional parameter of :include_blank
# Turns [[label, value]] pairs into <option> tag strings.
# Takes optional parameter of :include_blank to prepend a blank option
# labelled with the localized :_select_ text.
# Bug fix: the nil-value branch used "<option value="">..." — Ruby parses
# that as THREE adjacent string literals concatenated, producing the
# malformed '<option value=">label</option>' (no closing quote on the empty
# value attribute). The quotes are now properly escaped.
def option_tags_for(select_options, options = {})
  select_options.insert(0,[as_(:_select_),nil]) if options[:include_blank]
  select_options.collect do |option|
    label, value = option[0], option[1]
    if value.nil?
      "<option value=\"\">#{label}</option>"
    else
      "<option value=\"#{value}\">#{label}</option>"
    end
  end
end
# Should this column be displayed in the subform?
# Should this column be displayed in the subform?
# Non-association columns always are. Polymorphic associations never are
# (they *might* be the reverse association, and one generally doesn't assign
# from the polymorphic side), nor is the reverse association back to the
# parent record's class.
def in_subform?(column, parent_record)
  assoc = column.association
  return true unless assoc
  !(column.polymorphic_association? || assoc.reverse_for?(parent_record.class))
end
def form_remote_upload_tag(url_for_options = {}, options = {})
options[:target] = action_iframe_id(url_for_options)
options[:multipart] ||= true
options[:class] = "#{options[:class]} as_remote_upload".strip
output=""
output << form_tag(url_for_options, options)
(output << "<iframe id='#{action_iframe_id(url_for_options)}' name='#{action_iframe_id(url_for_options)}' style='display:none'></iframe>").html_safe
end
# Provides list of javascripts to include with +javascript_include_tag+
# You can use this with your javascripts like
# <%= javascript_include_tag :defaults, 'your_own_cool_script', active_scaffold_javascripts, :cache => true %>
def active_scaffold_javascripts(frontend = :default)
ActiveScaffold::Config::Core.javascripts(frontend).collect do |name|
ActiveScaffold::Config::Core.asset_path(name, frontend)
end
end
# Provides stylesheets to include with +stylesheet_link_tag+
def active_scaffold_stylesheets(frontend = :default)
[ActiveScaffold::Config::Core.asset_path("stylesheet.css", frontend)]
end
# Provides stylesheets for IE to include with +stylesheet_link_tag+
def active_scaffold_ie_stylesheets(frontend = :default)
[ActiveScaffold::Config::Core.asset_path("stylesheet-ie.css", frontend)]
end
# easy way to include ActiveScaffold assets
def active_scaffold_includes(*args)
frontend = args.first.is_a?(Symbol) ? args.shift : :default
options = args.first.is_a?(Hash) ? args.shift : {}
js = javascript_include_tag(*active_scaffold_javascripts(frontend).push(options))
css = stylesheet_link_tag(*active_scaffold_stylesheets(frontend).push(options))
options[:cache] += '_ie' if options[:cache].is_a? String
options[:concat] += '_ie' if options[:concat].is_a? String
ie_css = stylesheet_link_tag(*active_scaffold_ie_stylesheets(frontend).push(options))
js + "\n" + css + "\n<!--[if IE]>".html_safe + ie_css + "<![endif]-->\n".html_safe
end
# a general-use loading indicator (the "stuff is happening, please wait" feedback)
def loading_indicator_tag(options)
image_tag "indicator.gif", :style => "visibility:hidden;", :id => loading_indicator_id(options), :alt => "loading indicator", :class => "loading-indicator"
end
# Creates a javascript-based link that toggles the visibility of some element on the page.
# By default, it toggles the visibility of the sibling after the one it's nested in. You may pass custom javascript logic in options[:of] to change that, though. For example, you could say :of => '$("my_div_id")'.
# You may also flag whether the other element is visible by default or not, and the initial text will adjust accordingly.
def link_to_visibility_toggle(id, options = {})
options[:default_visible] = true if options[:default_visible].nil?
options[:hide_label] = as_(:hide)
options[:show_label] = as_(:show)
javascript_tag("ActiveScaffold.create_visibility_toggle('#{id}', #{options.to_json});")
end
def skip_action_link(link, *args)
(!link.ignore_method.nil? and controller.try(link.ignore_method, *args)) || ((link.security_method_set? or controller.respond_to? link.security_method) and !controller.send(link.security_method, *args))
end
def render_action_link(link, url_options, record = nil, html_options = {})
url_options = action_link_url_options(link, url_options, record)
html_options = action_link_html_options(link, url_options, record, html_options)
action_link_html(link, url_options, html_options, record)
end
def render_group_action_link(link, url_options, options, record = nil)
if link.type == :member && !options[:authorized]
action_link_html(link, nil, {:class => "disabled #{link.action}#{link.html_options[:class].blank? ? '' : (' ' + link.html_options[:class])}"}, record)
else
render_action_link(link, url_options, record)
end
end
def action_link_url_options(link, url_options, record, options = {})
url_options = url_options.clone
url_options[:action] = link.action
url_options[:controller] = link.controller.to_s if link.controller
url_options.delete(:search) if link.controller and link.controller.to_s != params[:controller]
url_options.merge! link.parameters if link.parameters
@link_record = record
url_options.merge! self.instance_eval(&(link.dynamic_parameters)) if link.dynamic_parameters.is_a?(Proc)
@link_record = nil
url_options_for_nested_link(link.column, record, link, url_options, options) if link.nested_link?
url_options_for_sti_link(link.column, record, link, url_options, options) unless record.nil? || active_scaffold_config.sti_children.nil?
url_options[:_method] = link.method if !link.confirm? && link.inline? && link.method != :get
url_options
end
def action_link_html_options(link, url_options, record, html_options)
link_id = get_action_link_id(url_options, record, link.column)
html_options.reverse_merge! link.html_options.merge(:class => link.action)
# Needs to be in html_options to as the adding _method to the url is no longer supported by Rails
html_options[:method] = link.method if link.method != :get
html_options['data-confirm'] = link.confirm(record.try(:to_label)) if link.confirm?
html_options['data-position'] = link.position if link.position and link.inline?
html_options[:class] += ' as_action' if link.inline?
html_options['data-action'] = link.action if link.inline?
if link.popup?
html_options['data-popup'] = true
html_options[:target] = '_blank'
end
html_options[:id] = link_id
html_options[:remote] = true unless link.page? || link.popup?
if link.dhtml_confirm?
html_options[:class] += ' as_action' if !link.inline?
html_options[:page_link] = 'true' if !link.inline?
html_options[:dhtml_confirm] = link.dhtml_confirm.value
html_options[:onclick] = link.dhtml_confirm.onclick_function(controller, link_id)
end
html_options[:class] += " #{link.html_options[:class]}" unless link.html_options[:class].blank?
html_options
end
def get_action_link_id(url_options, record = nil, column = nil)
id = url_options[:id] || url_options[:parent_id]
id = "#{column.association.name}-#{record.id}" if column && column.plural_association?
if record.try(column.association.name.to_sym).present?
id = "#{column.association.name}-#{record.send(column.association.name).id}-#{record.id}"
else
id = "#{column.association.name}-#{record.id}" unless record.nil?
end if column && column.singular_association?
id = "#{id}-#{url_options[:batch_scope].downcase}" if url_options[:batch_scope]
action_id = "#{id_from_controller(url_options[:controller]) + '-' if url_options[:parent_controller]}#{url_options[:action].to_s}"
action_link_id(action_id, id)
end
def action_link_html(link, url, html_options, record)
# issue 260, use url_options[:link] if it exists. This prevents DB data from being localized.
label = url.delete(:link) if url.is_a?(Hash)
label ||= link.label
begin
if link.image.nil?
#http://www.continuousthinking.com/2011/09/22/rails-3-1-engine-namespaces-can-creep-into-urls-in-application-layout.html
#its not possible to link from a namespacedcontroller back to a non namespaced-controller anymore
# seems to be only working with named_routes...
html = link_to(label, url, html_options)
else
html = link_to(image_tag(link.image[:name] , :size => link.image[:size], :alt => label), url, html_options)
end
# if url is nil we would like to generate an anchor without href attribute
url.nil? ? html.sub(/href=".*?"/, '').html_safe : html.html_safe
rescue ActionController::RoutingError => e
Rails.logger.error("ActiveScaffold link_to routing Error: #{e.inspect}")
"Routing Error"
end
end
def url_options_for_nested_link(column, record, link, url_options, options = {})
if column && column.association
url_options[:assoc_id] = url_options.delete(:id)
url_options[:id] = record.send(column.association.name).id if column.singular_association? && record.send(column.association.name).present?
link.eid = "#{controller_id.from(3)}_#{record.id}_#{column.association.name}" unless options.has_key?(:reuse_eid)
url_options[:eid] = link.eid
elsif link.parameters && link.parameters[:named_scope]
url_options[:assoc_id] = url_options.delete(:id)
link.eid = "#{controller_id.from(3)}_#{record.id}_#{link.parameters[:named_scope]}" unless options.has_key?(:reuse_eid)
url_options[:eid] = link.eid
end
end
def url_options_for_sti_link(column, record, link, url_options, options = {})
#need to find out controller of current record type
#and set parameters
# its quite difficult to detect an sti link
# if link.column.nil? we are sure that it is nt an singular association inline autolink
# howver that will not work if a sti parent is an singular association inline autolink
if link.column.nil?
sti_controller_path = controller_path_for_activerecord(record.class)
if sti_controller_path
url_options[:controller] = sti_controller_path
url_options[:parent_sti] = controller_path
end
end
end
def column_class(column, column_value, record)
classes = []
classes << "#{column.name}-column"
if column.css_class.is_a?(Proc)
css_class = column.css_class.call(column_value, record)
classes << css_class unless css_class.nil?
else
classes << column.css_class
end unless column.css_class.nil?
classes << 'empty' if column_empty? column_value
classes << 'sorted' if active_scaffold_config.list.user.sorting.sorts_on?(column)
classes << 'numeric' if column.column and [:decimal, :float, :integer].include?(column.column.type)
classes.join(' ').rstrip
end
def column_heading_class(column, sorting)
classes = []
classes << "#{column.name}-column_heading"
classes << "sorted #{sorting.direction_of(column).downcase}" if sorting.sorts_on? column
classes << column.css_class unless column.css_class.nil? || column.css_class.is_a?(Proc)
classes.join(' ')
end
def as_main_div_class
classes = ["active-scaffold", "active-scaffold-#{controller_id}", "#{params[:controller]}-view", "#{active_scaffold_config.theme}-theme"]
classes << "as_touch" if touch_device?
classes.join(' ')
end
def column_empty?(column_value)
empty = column_value.nil?
empty ||= column_value.empty? if column_value.respond_to? :empty?
empty ||= [' ', active_scaffold_config.list.empty_field_text].include? column_value if String === column_value
return empty
end
def column_calculation(column)
unless column.calculate.instance_of? Proc
conditions = controller.send(:all_conditions)
includes = active_scaffold_config.list.count_includes
includes ||= controller.send(:active_scaffold_includes) unless conditions.nil?
calculation = beginning_of_chain.calculate(column.calculate, column.name, :conditions => conditions,
:joins => controller.send(:joins_for_collection), :include => includes)
else
column.calculate.call(@records)
end
end
def render_column_calculation(column)
calculation = column_calculation(column)
override_formatter = "render_#{column.name}_#{column.calculate}"
calculation = send(override_formatter, calculation) if respond_to? override_formatter
"#{"#{as_(column.calculate)}: " unless column.calculate.is_a? Proc}#{format_column_value nil, column, calculation}"
end
def column_show_add_existing(column)
(column.allow_add_existing and options_for_association_count(column.association) > 0)
end
def column_show_add_new(column, associated, record)
value = (column.plural_association? && !column.readonly_association?) || (column.singular_association? and not associated.empty?)
value = false unless record.class.authorized_for?(:crud_type => :create)
value
end
def active_scaffold_error_messages_for(*params)
options = params.extract_options!.symbolize_keys
options.reverse_merge!(:container_tag => :div, :list_type => :ul)
objects = Array.wrap(options.delete(:object) || params).map do |object|
object = instance_variable_get("@#{object}") unless object.respond_to?(:to_model)
object = convert_to_model(object)
if object.class.respond_to?(:model_name)
options[:object_name] ||= object.class.model_name.human.downcase
end
object
end
objects.compact!
count = objects.inject(0) {|sum, object| sum + object.errors.count }
unless count.zero?
html = {}
[:id, :class].each do |key|
if options.include?(key)
value = options[key]
html[key] = value unless value.blank?
else
html[key] = 'errorExplanation'
end
end
options[:object_name] ||= params.first
header_message = if options.include?(:header_message)
options[:header_message]
else
as_('errors.template.header', :count => count, :model => options[:object_name].to_s.gsub('_', ' '))
end
message = options.include?(:message) ? options[:message] : as_('errors.template.body')
error_messages = objects.sum do |object|
object.errors.full_messages.map do |msg|
options[:list_type] != :br ? content_tag(:li, msg) : msg
end
end
error_messages = if options[:list_type] == :br
error_messages.join('<br/>').html_safe
else
content_tag(options[:list_type], error_messages.join.html_safe)
end
contents = []
contents << content_tag(options[:header_tag] || :h2, header_message) unless header_message.blank?
contents << content_tag(:p, message) unless message.blank?
contents << error_messages
contents = contents.join.html_safe
options[:container_tag] ? content_tag(options[:container_tag], contents, html) : contents
else
''
end
end
end
end
end
|
require_relative "configuration"
require_relative "call"
require_relative "twiml_error"
require_relative "rest_api/phone_call"
require_relative "util/request_validator"
require_relative "util/url"
module Adhearsion::Twilio::ControllerMethods
extend ActiveSupport::Concern
TWILIO_CALL_STATUSES = {
:no_answer => "no-answer",
:answer => "completed",
:timeout => "no-answer",
:error => "failed",
:in_progress => "in-progress",
:ringing => "ringing"
}
TWILIO_CALL_DIRECTIONS = {
:inbound => "inbound",
:outbound_api => "outbound-api"
}
INFINITY = 100
SLEEP_BETWEEN_REDIRECTS = 1
included do
after :twilio_hangup
end
private
# Whether this call has been answered yet, coerced to a strict boolean.
def answered?
  @answered ? true : false
end
def answer!
answer unless answered?
@answered = true
end
def notify_voice_request_url
execute_twiml(
notify_http(
voice_request_url, voice_request_method, :ringing
)
)
end
def redirect(url = nil, options = {})
execute_twiml(
notify_http(
URI.join(@last_request_url, url.to_s).to_s,
options.delete("method") || "post",
:in_progress, options
)
)
end
def notify_status_callback_url
notify_http(
status_callback_url,
status_callback_method,
answered? ? :answer : :no_answer,
"CallDuration" => call.duration.to_i,
) if status_callback_url.present?
end
def notify_http(url, method, status, options = {})
basic_auth, sanitized_url = Adhearsion::Twilio::Util::Url.new(url).extract_auth
@last_request_url = sanitized_url
request_body = {
"CallStatus" => TWILIO_CALL_STATUSES[status],
}.merge(build_request_body).merge(options)
headers = build_twilio_signature_header(sanitized_url, request_body)
request_options = {
:body => request_body,
:headers => headers
}
request_options.merge!(:basic_auth => basic_auth) if basic_auth.any?
logger.info("Notifying HTTP with method: #{method}, URL: #{sanitized_url} and options: #{request_options}")
HTTParty.send(
method.downcase,
sanitized_url,
request_options
).body
end
def build_request_body
{
"From" => twilio_call.from,
"To" => twilio_call.to,
"CallSid" => call_sid,
"Direction" => call_direction,
"ApiVersion" => api_version
}
end
def build_twilio_signature_header(url, params)
{"X-Twilio-Signature" => twilio_request_validator.build_signature_for(url, params)}
end
def api_version
"adhearsion-twilio-#{Adhearsion::Twilio::VERSION}"
end
def twilio_request_validator
@twilio_request_validator ||= Adhearsion::Twilio::Util::RequestValidator.new(auth_token)
end
def execute_twiml(response)
redirection = nil
with_twiml(response) do |node|
content = node.content
options = twilio_options(node)
case node.name
when 'Reject'
execute_twiml_verb(:reject, false, options)
break
when 'Play'
execute_twiml_verb(:play, true, content, options)
when 'Gather'
break if redirection = execute_twiml_verb(:gather, true, node, options)
when 'Redirect'
redirection = execute_twiml_verb(:redirect, false, content, options)
break
when 'Hangup'
break
when 'Say'
execute_twiml_verb(:say, true, content, options)
when 'Pause'
not_yet_supported!
when 'Bridge'
not_yet_supported!
when 'Dial'
break if redirection = execute_twiml_verb(:dial, true, node, options)
else
raise(ArgumentError, "Invalid element '#{node.name}'")
end
end
redirection ? redirect(*redirection) : hangup
end
def execute_twiml_verb(verb, answer_call, *args)
answer! if answer_call
send("twilio_#{verb}", *args)
end
def twilio_reject(options = {})
reject(options["reason"] == "busy" ? :busy : :decline)
end
def twilio_hangup
notify_status_callback_url
end
def twilio_redirect(url, options = {})
raise(Adhearsion::Twilio::TwimlError, "invalid redirect url") if url && url.empty?
sleep(SLEEP_BETWEEN_REDIRECTS)
[url, options]
end
def twilio_gather(node, options = {})
ask_params = []
ask_options = {}
node.children.each do |nested_verb_node|
verb = nested_verb_node.name
raise(
Adhearsion::Twilio::TwimlError,
"Nested verb '<#{verb}>' not allowed within '<#{node.name}>'"
) unless ["Say", "Play", "Pause"].include?(verb)
nested_verb_options = twilio_options(nested_verb_node)
output_count = twilio_loop(nested_verb_options, :finite => true).count
ask_options.merge!(send("options_for_twilio_#{verb.downcase}", nested_verb_options))
ask_params << Array.new(output_count, nested_verb_node.content)
end
ask_options.merge!(:timeout => (options["timeout"] || 5).to_i.seconds)
if options["finishOnKey"]
ask_options.merge!(
:terminator => options["finishOnKey"]
) if options["finishOnKey"] =~ /^(?:\d|\*|\#)$/
else
ask_options.merge!(:terminator => "#")
end
ask_options.merge!(:limit => options["numDigits"].to_i) if options["numDigits"]
ask_params << nil if ask_params.blank?
ask_params.flatten!
logger.info("Executing ask with params: #{ask_params} and options: #{ask_options}")
result = ask(*ask_params, ask_options)
digits = result.utterance if [:match, :nomatch].include?(result.status)
[
options["action"],
{
"Digits" => digits, "method" => options["method"]
}
] if digits.present?
end
def twilio_say(words, options = {})
params = options_for_twilio_say(options)
twilio_loop(options).each do
say(words, params)
end
end
def options_for_twilio_say(options = {})
params = {}
voice = options["voice"].to_s.downcase == "woman" ? configuration.default_female_voice : configuration.default_male_voice
params[:voice] = voice if voice
params
end
def options_for_twilio_play(options = {})
{}
end
def options_for_twilio_dial(options = {})
global = options.delete(:global)
global = true unless global == false
params = {}
params[:from] = options["callerId"] if options["callerId"]
params[:ringback] = options["ringback"] if options["ringback"]
params[:for] = (options["timeout"] ? options["timeout"].to_i.seconds : 30.seconds) if global
params
end
def twilio_dial(node, options = {})
params = options_for_twilio_dial(options)
to = {}
node.children.each do |nested_noun_node|
break if nested_noun_node.text?
noun = nested_noun_node.name
raise(
Adhearsion::Twilio::TwimlError,
"Nested noun '<#{noun}>' not allowed within '<#{node.name}>'"
) unless ["Number"].include?(noun)
nested_noun_options = twilio_options(nested_noun_node)
specific_dial_options = options_for_twilio_dial(nested_noun_options.merge(:global => false))
to[nested_noun_node.content.strip] = specific_dial_options
end
to = node.content if to.empty?
dial_status = dial(to, params)
dial_call_status_options = {
"DialCallStatus" => TWILIO_CALL_STATUSES[dial_status.result]
}
# try to find the joined call
outbound_call = dial_status.joins.select do |outbound_leg, join_status|
join_status.result == :joined
end.keys.first
dial_call_status_options.merge!(
"DialCallSid" => outbound_call.id,
"DialCallDuration" => dial_status.joins[outbound_call].duration.to_i
) if outbound_call
[
options["action"],
{
"method" => options["method"],
}.merge(dial_call_status_options)
] if options["action"]
end
def twilio_play(path, options = {})
twilio_loop(options).each do
play_audio(path, options_for_twilio_play)
end
end
def parse_twiml(xml)
logger.info("Parsing TwiML: #{xml}")
begin
doc = ::Nokogiri::XML(xml) do |config|
config.options = Nokogiri::XML::ParseOptions::NOBLANKS
end
rescue Nokogiri::XML::SyntaxError => e
raise(Adhearsion::Twilio::TwimlError, "Error while parsing XML: #{e.message}. XML Document: #{xml}")
end
raise(Adhearsion::Twilio::TwimlError, "The root element must be the '<Response>' element") if doc.root.name != "Response"
doc.root.children
end
# Parses +raw_response+ as TwiML and yields each top-level node to the block.
# Cleanup: the original declared an explicit &block parameter but ignored it
# and re-yielded each node manually; forwarding the block to #each is
# equivalent (break inside the block behaves the same) and shorter.
def with_twiml(raw_response, &block)
  parse_twiml(raw_response).each(&block)
end
# Returns an enumerator for how many times a verb should execute.
# TwiML loop="0" means "repeat forever": that is a bare Kernel#loop
# enumerator, or INFINITY.times when :finite is requested (e.g. so <Gather>
# output cannot spin endlessly). A missing loop attribute defaults to once.
# Note: the first line runs unconditionally because options.delete(:finite)
# must consume the flag either way.
def twilio_loop(twilio_options, options = {})
  infinite_loop = options.delete(:finite) ? INFINITY.times : loop
  twilio_options["loop"].to_s == "0" ? infinite_loop : (twilio_options["loop"] || 1).to_i.times
end
# Flattens an XML node's attributes into a plain {name => value} hash.
def twilio_options(node)
  node.attributes.each_with_object({}) do |(key, attribute), options|
    options[key] = attribute.value
  end
end
def twilio_call
@twilio_call ||= Adhearsion::Twilio::Call.new(call)
end
def configuration
@configuration ||= Adhearsion::Twilio::Configuration.new
end
def rest_api_phone_call
@rest_api_phone_call ||= Adhearsion::Twilio::RestApi::PhoneCall.new(twilio_call, :logger => logger)
end
# URL notified when a call comes in (TwiML voice request).
def voice_request_url
  resolve_configuration(:voice_request_url)
end

# HTTP method used for the voice request.
def voice_request_method
  resolve_configuration(:voice_request_method)
end

# URL notified when the call ends, if configured.
def status_callback_url
  resolve_configuration(:status_callback_url)
end

# HTTP method used for the status callback.
def status_callback_method
  resolve_configuration(:status_callback_method)
end

# Token used to sign outgoing requests (X-Twilio-Signature).
def auth_token
  resolve_configuration(:auth_token)
end

# Call SID reported to the remote application; falls back to the
# Adhearsion call id (no global configuration fallback).
def call_sid
  resolve_configuration(:call_sid, false) || twilio_call.id
end

# Twilio-style direction string; metadata may override the :inbound default.
def call_direction
  TWILIO_CALL_DIRECTIONS[(metadata[:call_direction] || :inbound).to_sym]
end

# Resolves a setting by precedence: controller metadata, then the REST API
# (when enabled), then global configuration (skipped when
# has_global_configuration is false). Blank values become nil via #presence.
def resolve_configuration(name, has_global_configuration = true)
  (metadata[name] || (configuration.rest_api_enabled? && rest_api_phone_call.public_send(name)) || has_global_configuration && configuration.public_send(name)).presence
end
# Raised for TwiML verbs that are recognized but not implemented yet.
def not_yet_supported!
  raise ArgumentError, "Not yet supported"
end
end
Add a switch for explicitly turning off the REST API
require_relative "configuration"
require_relative "call"
require_relative "twiml_error"
require_relative "rest_api/phone_call"
require_relative "util/request_validator"
require_relative "util/url"
module Adhearsion::Twilio::ControllerMethods
extend ActiveSupport::Concern
# Maps Adhearsion dial/ask result symbols to Twilio CallStatus strings.
TWILIO_CALL_STATUSES = {
  :no_answer => "no-answer",
  :answer => "completed",
  :timeout => "no-answer",
  :error => "failed",
  :in_progress => "in-progress",
  :ringing => "ringing"
}

# Maps internal direction symbols to Twilio Direction strings.
TWILIO_CALL_DIRECTIONS = {
  :inbound => "inbound",
  :outbound_api => "outbound-api"
}

# Iteration cap used for "infinite" TwiML loops when run in finite mode.
INFINITY = 100
# Seconds to pause before following a <Redirect>.
SLEEP_BETWEEN_REDIRECTS = 1

included do
  # Report the final call status after every controller run.
  after :twilio_hangup
end
private
# Whether the call has been answered by this controller.
def answered?
  @answered ? true : false
end

# Answers the call exactly once and records that it was answered.
def answer!
  unless answered?
    answer
  end
  @answered = true
end
# Fetches the initial TwiML document from the voice request URL
# (CallStatus "ringing") and executes it.
def notify_voice_request_url
  execute_twiml(
    notify_http(
      voice_request_url, voice_request_method, :ringing
    )
  )
end

# Follows a TwiML <Redirect>: resolves +url+ against the last requested URL,
# uses POST by default, and executes the returned TwiML. Remaining options
# are forwarded as extra request parameters.
def redirect(url = nil, options = {})
  execute_twiml(
    notify_http(
      URI.join(@last_request_url, url.to_s).to_s,
      options.delete("method") || "post",
      :in_progress, options
    )
  )
end

# Reports the final call status (answer/no-answer) and duration to the
# configured status callback URL, if any.
def notify_status_callback_url
  notify_http(
    status_callback_url,
    status_callback_method,
    answered? ? :answer : :no_answer,
    "CallDuration" => call.duration.to_i,
  ) if status_callback_url.present?
end
# Performs a signed HTTP request to the remote TwiML application and returns
# the raw response body. Credentials embedded in the URL are stripped out and
# sent as HTTP basic auth; the sanitized URL is remembered for resolving
# relative <Redirect> targets.
def notify_http(url, method, status, options = {})
  basic_auth, sanitized_url = Adhearsion::Twilio::Util::Url.new(url).extract_auth
  @last_request_url = sanitized_url
  request_body = {
    "CallStatus" => TWILIO_CALL_STATUSES[status],
  }.merge(build_request_body).merge(options)
  # Sign the exact body being sent so the receiver can verify it.
  headers = build_twilio_signature_header(sanitized_url, request_body)
  request_options = {
    :body => request_body,
    :headers => headers
  }
  request_options.merge!(:basic_auth => basic_auth) if basic_auth.any?
  logger.info("Notifying HTTP with method: #{method}, URL: #{sanitized_url} and options: #{request_options}")
  HTTParty.send(
    method.downcase,
    sanitized_url,
    request_options
  ).body
end
# Standard parameters included with every request to the application.
def build_request_body
  {
    "From" => twilio_call.from,
    "To" => twilio_call.to,
    "CallSid" => call_sid,
    "Direction" => call_direction,
    "ApiVersion" => api_version
  }
end

# Builds the X-Twilio-Signature header for the given URL and params.
def build_twilio_signature_header(url, params)
  {"X-Twilio-Signature" => twilio_request_validator.build_signature_for(url, params)}
end

# ApiVersion string reported to the remote application.
def api_version
  "adhearsion-twilio-#{Adhearsion::Twilio::VERSION}"
end

# Memoized validator used to sign outgoing requests.
def twilio_request_validator
  @twilio_request_validator ||= Adhearsion::Twilio::Util::RequestValidator.new(auth_token)
end
# Executes a TwiML document node by node. Verbs that return a redirection
# ([url, options]) stop processing and trigger a follow-up request;
# <Hangup> and <Reject> stop processing outright. When the document finishes
# with no redirection, the call is hung up.
def execute_twiml(response)
  redirection = nil
  with_twiml(response) do |node|
    content = node.content
    options = twilio_options(node)
    case node.name
    when 'Reject'
      # Never answers the call.
      execute_twiml_verb(:reject, false, options)
      break
    when 'Play'
      execute_twiml_verb(:play, true, content, options)
    when 'Gather'
      break if redirection = execute_twiml_verb(:gather, true, node, options)
    when 'Redirect'
      redirection = execute_twiml_verb(:redirect, false, content, options)
      break
    when 'Hangup'
      break
    when 'Say'
      execute_twiml_verb(:say, true, content, options)
    when 'Pause'
      not_yet_supported!
    when 'Bridge'
      not_yet_supported!
    when 'Dial'
      break if redirection = execute_twiml_verb(:dial, true, node, options)
    else
      raise(ArgumentError, "Invalid element '#{node.name}'")
    end
  end
  redirection ? redirect(*redirection) : hangup
end
# Answers the call first when the verb requires live media, then dispatches
# to the matching twilio_<verb> implementation.
def execute_twiml_verb(verb, answer_call, *args)
  answer! if answer_call
  send("twilio_#{verb}", *args)
end

# <Reject reason="busy"> rejects busy; any other reason declines.
def twilio_reject(options = {})
  reject(options["reason"] == "busy" ? :busy : :decline)
end

# after-hook: report the final call status once the controller finishes.
def twilio_hangup
  notify_status_callback_url
end

# <Redirect> - returns the [url, options] pair consumed by #redirect.
def twilio_redirect(url, options = {})
  raise(Adhearsion::Twilio::TwimlError, "invalid redirect url") if url && url.empty?
  sleep(SLEEP_BETWEEN_REDIRECTS)
  [url, options]
end
# <Gather> - collects DTMF digits. Nested <Say>/<Play>/<Pause> verbs become
# ask prompts (repeated per their "loop" attributes). Returns a redirection
# [action, params] when digits were collected and an "action" is set.
def twilio_gather(node, options = {})
  ask_params = []
  ask_options = {}
  node.children.each do |nested_verb_node|
    verb = nested_verb_node.name
    raise(
      Adhearsion::Twilio::TwimlError,
      "Nested verb '<#{verb}>' not allowed within '<#{node.name}>'"
    ) unless ["Say", "Play", "Pause"].include?(verb)
    nested_verb_options = twilio_options(nested_verb_node)
    output_count = twilio_loop(nested_verb_options, :finite => true).count
    ask_options.merge!(send("options_for_twilio_#{verb.downcase}", nested_verb_options))
    ask_params << Array.new(output_count, nested_verb_node.content)
  end
  # Twilio defaults: 5 second timeout, '#' terminator.
  ask_options.merge!(:timeout => (options["timeout"] || 5).to_i.seconds)
  if options["finishOnKey"]
    # Only a single digit, '*' or '#' is accepted as terminator.
    ask_options.merge!(
      :terminator => options["finishOnKey"]
    ) if options["finishOnKey"] =~ /^(?:\d|\*|\#)$/
  else
    ask_options.merge!(:terminator => "#")
  end
  ask_options.merge!(:limit => options["numDigits"].to_i) if options["numDigits"]
  # ask requires at least one (possibly nil) prompt argument.
  ask_params << nil if ask_params.blank?
  ask_params.flatten!
  logger.info("Executing ask with params: #{ask_params} and options: #{ask_options}")
  result = ask(*ask_params, ask_options)
  digits = result.utterance if [:match, :nomatch].include?(result.status)
  [
    options["action"],
    {
      "Digits" => digits, "method" => options["method"]
    }
  ] if digits.present?
end
# <Say> - speaks +words+, repeated per the "loop" attribute.
def twilio_say(words, options = {})
  params = options_for_twilio_say(options)
  twilio_loop(options).each do
    say(words, params)
  end
end

# Maps the TwiML "voice" attribute onto the configured TTS voice.
def options_for_twilio_say(options = {})
  params = {}
  voice = options["voice"].to_s.downcase == "woman" ? configuration.default_female_voice : configuration.default_male_voice
  params[:voice] = voice if voice
  params
end

# <Play> currently accepts no playback options.
def options_for_twilio_play(options = {})
  {}
end

# Maps <Dial> attributes onto Adhearsion dial options. The timeout (:for,
# default 30s) only applies at the global <Dial> level, not per <Number>.
def options_for_twilio_dial(options = {})
  global = options.delete(:global)
  global = true unless global == false
  params = {}
  params[:from] = options["callerId"] if options["callerId"]
  params[:ringback] = options["ringback"] if options["ringback"]
  params[:for] = (options["timeout"] ? options["timeout"].to_i.seconds : 30.seconds) if global
  params
end
# <Dial> - dials one or more destinations. Nested <Number> nouns may carry
# per-destination options; otherwise the element's text content is the
# destination. Returns a redirection [action, params] reporting
# DialCallStatus (plus SID/duration of the joined leg) when "action" is set.
def twilio_dial(node, options = {})
  params = options_for_twilio_dial(options)
  to = {}
  node.children.each do |nested_noun_node|
    break if nested_noun_node.text?
    noun = nested_noun_node.name
    raise(
      Adhearsion::Twilio::TwimlError,
      "Nested noun '<#{noun}>' not allowed within '<#{node.name}>'"
    ) unless ["Number"].include?(noun)
    nested_noun_options = twilio_options(nested_noun_node)
    specific_dial_options = options_for_twilio_dial(nested_noun_options.merge(:global => false))
    to[nested_noun_node.content.strip] = specific_dial_options
  end
  to = node.content if to.empty?
  dial_status = dial(to, params)
  dial_call_status_options = {
    "DialCallStatus" => TWILIO_CALL_STATUSES[dial_status.result]
  }
  # try to find the joined call
  outbound_call = dial_status.joins.select do |outbound_leg, join_status|
    join_status.result == :joined
  end.keys.first
  dial_call_status_options.merge!(
    "DialCallSid" => outbound_call.id,
    "DialCallDuration" => dial_status.joins[outbound_call].duration.to_i
  ) if outbound_call
  [
    options["action"],
    {
      "method" => options["method"],
    }.merge(dial_call_status_options)
  ] if options["action"]
end
# <Play> - plays the audio file at +path+, repeated according to the
# "loop" attribute in +options+.
def twilio_play(path, options = {})
  repetitions = twilio_loop(options)
  repetitions.each { play_audio(path, options_for_twilio_play) }
end
# Parses a raw TwiML document. Raises Adhearsion::Twilio::TwimlError when
# the XML is malformed or the root element is not <Response>.
# Returns the children of the root element.
def parse_twiml(xml)
  logger.info("Parsing TwiML: #{xml}")
  doc = begin
    ::Nokogiri::XML(xml) { |config| config.options = Nokogiri::XML::ParseOptions::NOBLANKS }
  rescue Nokogiri::XML::SyntaxError => e
    raise(Adhearsion::Twilio::TwimlError, "Error while parsing XML: #{e.message}. XML Document: #{xml}")
  end
  raise(Adhearsion::Twilio::TwimlError, "The root element must be the '<Response>' element") unless doc.root.name == "Response"
  doc.root.children
end
# Parses +raw_response+ as TwiML and yields each top-level node (child of
# <Response>) to the given block.
#
# Fix: the original declared +&block+ but ignored it, re-yielding through a
# manual each loop; forward the captured block directly instead.
def with_twiml(raw_response, &block)
  parse_twiml(raw_response).each(&block)
end
# Builds an enumerator for the number of times a TwiML verb should repeat.
# A "loop" attribute of "0" means repeat forever - capped at INFINITY
# iterations when :finite => true; otherwise repeat "loop" times (default 1).
# NOTE: Kernel#loop without a block returns an (infinite) Enumerator.
def twilio_loop(twilio_options, options = {})
  infinite_loop = options.delete(:finite) ? INFINITY.times : loop
  twilio_options["loop"].to_s == "0" ? infinite_loop : (twilio_options["loop"] || 1).to_i.times
end
# Converts an XML node's attributes into a plain Hash of
# attribute name => string value.
def twilio_options(node)
  node.attributes.each_with_object({}) do |(name, attribute), opts|
    opts[name] = attribute.value
  end
end
# Memoized wrapper around the underlying Adhearsion call object.
def twilio_call
  @twilio_call ||= Adhearsion::Twilio::Call.new(call)
end

# Memoized global plugin configuration.
def configuration
  @configuration ||= Adhearsion::Twilio::Configuration.new
end

# Memoized REST API representation of this phone call.
def rest_api_phone_call
  @rest_api_phone_call ||= Adhearsion::Twilio::RestApi::PhoneCall.new(twilio_call, :logger => logger)
end

# URL notified when a call comes in (TwiML voice request).
def voice_request_url
  resolve_configuration(:voice_request_url)
end

# HTTP method used for the voice request.
def voice_request_method
  resolve_configuration(:voice_request_method)
end

# URL notified when the call ends, if configured.
def status_callback_url
  resolve_configuration(:status_callback_url)
end

# HTTP method used for the status callback.
def status_callback_method
  resolve_configuration(:status_callback_method)
end

# Token used to sign outgoing requests (X-Twilio-Signature).
def auth_token
  resolve_configuration(:auth_token)
end

# Call SID reported to the remote application; falls back to the
# Adhearsion call id (no global configuration fallback).
def call_sid
  resolve_configuration(:call_sid, false) || twilio_call.id
end

# Twilio-style direction string; metadata may override the :inbound default.
def call_direction
  TWILIO_CALL_DIRECTIONS[(metadata[:call_direction] || :inbound).to_sym]
end
# Resolves a setting by precedence: controller metadata, then the REST API
# (when globally enabled AND not explicitly disabled for this call via
# metadata[:rest_api_enabled?] == false), then global configuration (skipped
# when has_global_configuration is false). Blank values become nil.
def resolve_configuration(name, has_global_configuration = true)
  (metadata[name] || (configuration.rest_api_enabled? && metadata[:rest_api_enabled?] != false && rest_api_phone_call.public_send(name)) || has_global_configuration && configuration.public_send(name)).presence
end
# Raised for TwiML verbs that are recognized but not implemented yet.
def not_yet_supported!
  raise ArgumentError, "Not yet supported"
end
end
|
# Homebrew formula for OkSocial 1.38.0.
class Oksocial < Formula
  desc "OkSocial"
  homepage "https://github.com/yschimke/oksocial"
  version "1.38.0"
  url "https://github.com/yschimke/oksocial/releases/download/#{version}/oksocial-#{version}.tgz"
  sha256 "fb5af5b3747e61f74f1b8f3b4c53caf893bd57d66b797407bef4342bf5c9b69a"

  depends_on :java => :optional
  depends_on "bash-completion" => :recommended
  depends_on "jq" => :recommended

  def install
    libexec.install Dir["*"]
    # Point both launcher scripts at their installed location and plugin dir.
    %w[okscript oksocial].each do |script|
      inreplace "#{libexec}/bin/#{script}", /^export INSTALLDIR.*/, "export INSTALLDIR=#{libexec}"
      inreplace "#{libexec}/bin/#{script}", /^PLUGINDIR.*/, "PLUGINDIR=" + var/"oksocial/plugins"
    end
    bin.install_symlink "#{libexec}/bin/oksocial" => "okapi"
    bin.install_symlink "#{libexec}/bin/oksocial" => "okws"
    bin.install_symlink "#{libexec}/bin/4sqapi"
    bin.install_symlink "#{libexec}/bin/fbapi"
    bin.install_symlink "#{libexec}/bin/githubapi"
    bin.install_symlink "#{libexec}/bin/mapboxapi"
    bin.install_symlink "#{libexec}/bin/okscript"
    bin.install_symlink "#{libexec}/bin/oksocial"
    bin.install_symlink "#{libexec}/bin/sorecent.kts" => "sorecent"
    bin.install_symlink "#{libexec}/bin/twitterapi"
    bin.install_symlink "#{libexec}/bin/tweetsearch.kts" => "tweetsearch"
    bin.install_symlink "#{libexec}/bin/uberprices.kts" => "uberprices"
    bash_completion.install "#{libexec}/bash/completion.bash" => "oksocial"
    # mkpath is a no-op when the directory already exists.
    (var/"oksocial/plugins").mkpath
  end

  test do
    # Fix: the original `system "#{bin}/oksocial" "https://..."` relied on
    # adjacent string-literal concatenation, producing a single bogus path.
    # The URL must be a separate argument.
    system "#{bin}/oksocial", "https://api.twitter.com/robots.txt"
  end
end
release
# Homebrew formula for OkSocial 1.39.0.
class Oksocial < Formula
  desc "OkSocial"
  homepage "https://github.com/yschimke/oksocial"
  version "1.39.0"
  url "https://github.com/yschimke/oksocial/releases/download/#{version}/oksocial-#{version}.tgz"
  sha256 "b1b0a27d3fcdadc21e7bd064c5b3e20c01429308372e19b035729f018fb73308"

  depends_on :java => :optional
  depends_on "bash-completion" => :recommended
  depends_on "jq" => :recommended

  def install
    libexec.install Dir["*"]
    # Point both launcher scripts at their installed location and plugin dir.
    %w[okscript oksocial].each do |script|
      inreplace "#{libexec}/bin/#{script}", /^export INSTALLDIR.*/, "export INSTALLDIR=#{libexec}"
      inreplace "#{libexec}/bin/#{script}", /^PLUGINDIR.*/, "PLUGINDIR=" + var/"oksocial/plugins"
    end
    bin.install_symlink "#{libexec}/bin/oksocial" => "okapi"
    bin.install_symlink "#{libexec}/bin/oksocial" => "okws"
    bin.install_symlink "#{libexec}/bin/4sqapi"
    bin.install_symlink "#{libexec}/bin/fbapi"
    bin.install_symlink "#{libexec}/bin/githubapi"
    bin.install_symlink "#{libexec}/bin/mapboxapi"
    bin.install_symlink "#{libexec}/bin/okscript"
    bin.install_symlink "#{libexec}/bin/oksocial"
    bin.install_symlink "#{libexec}/bin/sorecent.kts" => "sorecent"
    bin.install_symlink "#{libexec}/bin/twitterapi"
    bin.install_symlink "#{libexec}/bin/tweetsearch.kts" => "tweetsearch"
    bin.install_symlink "#{libexec}/bin/uberprices.kts" => "uberprices"
    bash_completion.install "#{libexec}/bash/completion.bash" => "oksocial"
    # mkpath is a no-op when the directory already exists.
    (var/"oksocial/plugins").mkpath
  end

  test do
    # Fix: the original `system "#{bin}/oksocial" "https://..."` relied on
    # adjacent string-literal concatenation, producing a single bogus path.
    # The URL must be a separate argument.
    system "#{bin}/oksocial", "https://api.twitter.com/robots.txt"
  end
end
|
# frozen_string_literal: true
# Suppress Prawn's built-in AFM (non-UTF-8 font) warning.
Prawn::Font::AFM.instance_variable_set :@hide_m17n_warning, true
require 'prawn/icon'
# Silence prawn-icon's legacy-name deprecation output by prepending a no-op
# #warning onto its Compatibility module.
Prawn::Icon::Compatibility.send :prepend, (::Module.new { def warning *_args; end })
module Asciidoctor
module Prawn
module Extensions
include ::Asciidoctor::PDF::Measurements
include ::Asciidoctor::PDF::Sanitizer
include ::Asciidoctor::PDF::TextTransformer
# Icon sets provided by FontAwesome (brands, regular, solid).
FontAwesomeIconSets = %w(fab far fas)
# All icon font sets recognized by the font method.
IconSets = %w(fab far fas fi pf).to_set
IconSetPrefixes = IconSets.map {|it| it + '-' }
# Content stream of a freshly-created, untouched page.
InitialPageContent = %(q\n)
# Maps a font style key to the set of styles it implies; unknown keys
# resolve to an empty set via the hash default.
(FontStyleToSet = {
  bold: [:bold].to_set,
  italic: [:italic].to_set,
  bold_italic: [:bold, :italic].to_set,
}).default = ::Set.new
# NOTE: must use a visible char for placeholder or else Prawn won't reserve space for the fragment
PlaceholderChar = ?\u2063
# - :height is the height of a line
# - :leading is spacing between adjacent lines
# - :padding_top is half line spacing, plus any line_gap in the font
# - :padding_bottom is half line spacing
# - :final_gap determines whether a gap is added below the last line
LineMetrics = ::Struct.new :height, :leading, :padding_top, :padding_bottom, :final_gap
# Core

# Retrieves the catalog reference data for the PDF.
#
def catalog
  state.store.root
end

# Retrieves the compatibility (minimum) PDF version of the document.
#
def min_version
  state.version
end
# Measurements

# Returns the width of the current page from edge-to-edge
#
def page_width
  page.dimensions[2]
end

# Returns the height of the current page from edge-to-edge
#
def page_height
  page.dimensions[3]
end

# Returns the effective (writable) height of the page
#
# If inside a fixed-height bounding box, returns height of box.
#
def effective_page_height
  reference_bounds.height
end

# Returns the height of the content area (inside the margins) of the page
#
def page_content_height
  page_height - page_margin_top - page_margin_bottom
end
# remove once fixed upstream; see https://github.com/prawnpdf/prawn/pull/1122
# Rebuilds the margin box from the current page's dimensions and margins,
# preserving indentation and any active flowing bounding box.
def generate_margin_box
  page_w, page_h = (page = state.page).dimensions.slice 2, 2
  page_m = page.margins
  prev_margin_box, @margin_box = @margin_box, (::Prawn::Document::BoundingBox.new self, nil, [page_m[:left], page_h - page_m[:top]], width: page_w - page_m[:left] - page_m[:right], height: page_h - page_m[:top] - page_m[:bottom])
  # update bounding box if not flowing from the previous page
  unless @bounding_box&.parent
    prev_margin_box = @bounding_box
    @bounding_box = @margin_box
  end
  # maintains indentation settings across page breaks
  if prev_margin_box
    @margin_box.add_left_padding prev_margin_box.total_left_padding
    @margin_box.add_right_padding prev_margin_box.total_right_padding
  end
  nil
end
# Set the margins for the current page.
#
def set_page_margin margin
  # FIXME: is there a cleaner way to set margins? does it make sense to override create_new_page?
  apply_margin_options margin: margin
  generate_margin_box
end

# Returns the margins for the current page as a 4 element array (top, right, bottom, left)
#
def page_margin
  [page_margin_top, page_margin_right, page_margin_bottom, page_margin_left]
end

# Returns the width of the left margin for the current page
#
def page_margin_left
  page.margins[:left]
end

# Returns the width of the right margin for the current page
#
def page_margin_right
  page.margins[:right]
end

# Returns the height of the top margin for the current page
#
def page_margin_top
  page.margins[:top]
end

# Returns the height of the bottom margin for the current page
#
def page_margin_bottom
  page.margins[:bottom]
end

# Returns the total left margin (to the page edge) for the current bounds.
#
def bounds_margin_left
  bounds.absolute_left
end

# Returns the total right margin (to the page edge) for the current bounds.
#
def bounds_margin_right
  page.dimensions[2] - bounds.absolute_right
end
# Returns the side the current page is facing, :recto or :verso.
# Pass invert to get the opposite side.
#
def page_side pgnum = nil, invert = nil
  side = (verso_page? pgnum) ? :verso : :recto
  return side unless invert
  side == :verso ? :recto : :verso
end

# Returns whether the page is a recto (odd-numbered) page.
#
def recto_page? pgnum = nil
  !(pgnum || page_number).even?
end

# Returns whether the page is a verso (even-numbered) page.
#
def verso_page? pgnum = nil
  !(pgnum || page_number).odd?
end

# Returns whether the cursor is at the top of the page (i.e., margin box).
#
def at_page_top?
  @margin_box.absolute_top == @y
end

# Returns whether the current page is the last page in the document.
#
def last_page?
  page_count == page_number
end
# Destinations

# Generates a destination object that resolves to the top of the page
# specified by the page_num parameter or the current page if no page number
# is provided. The destination preserves the user's zoom level unlike
# the destinations generated by the outline builder.
#
def dest_top page_num = nil
  dest_xyz 0, page_height, nil, (page_num ? state.pages[page_num - 1] : page)
end
# Fonts

# Registers a new custom font described in the data parameter
# after converting the font name to a String.
#
# Example:
#
#  register_font Roboto: {
#    normal: 'fonts/roboto-normal.ttf',
#    italic: 'fonts/roboto-italic.ttf',
#    bold: 'fonts/roboto-bold.ttf',
#    bold_italic: 'fonts/roboto-bold_italic.ttf'
#  }
#
def register_font data
  font_families.update data.transform_keys(&:to_s)
end
# Enhances the built-in font method to allow the font
# size to be specified as the second option and to
# lazily load font-based icons.
#
def font name = nil, options = {}
  if name
    # Treat a bare numeric second argument as the size.
    options = { size: options } if ::Numeric === options
    if IconSets.include? name
      ::Prawn::Icon::FontData.load self, name
      # Icon fonts have no style variants; drop any style request.
      options = options.reject {|k| k == :style } if options.key? :style
    end
  end
  super
end
# Retrieves the current font name (i.e., family).
#
def font_family
  font.options[:family]
end
alias font_name font_family

# Retrieves the current font info (family, style, size) as a Hash
#
def font_info
  { family: font.options[:family], style: (font.options[:style] || :normal), size: @font_size }
end

# Set the font style on the document, if a style is given, otherwise return the current font style.
#
def font_style style = nil
  if style
    font font.options[:family], style: style
  else
    font.options[:style] || :normal
  end
end
# Applies points as a scale factor of the current font if the value provided
# is less than or equal to 1 or it's a string (e.g., 1.1em), then delegates to the super
# implementation to carry out the built-in functionality.
#
# Supported string units: rem (relative to the root font size), em and %
# (both relative to the current font size); any other string is parsed as
# an absolute point size.
#--
# QUESTION: should we round the result?
def font_size points = nil
  return @font_size unless points
  if ::String === points
    if points.end_with? 'rem'
      super @root_font_size * points.to_f
    elsif points.end_with? 'em'
      super @font_size * points.to_f
    elsif points.end_with? '%'
      super @font_size * (points.to_f / 100)
    else
      super points.to_f
    end
  # NOTE: assume em value (since a font size of 1 is extremely unlikely)
  elsif points <= 1
    super @font_size * points
  else
    super points
  end
end
# Reduces a collection of font styles (:bold and/or :italic) to the single
# font style key used by the font families table.
def resolve_font_style styles
  if styles.include? :italic
    (styles.include? :bold) ? :bold_italic : :italic
  else
    (styles.include? :bold) ? :bold : :normal
  end
end
# Retrieves the collection of font styles from the given font style key,
# which defaults to the current font style. Returns a copy, so callers may
# mutate it freely.
#
def font_styles style = font_style
  FontStyleToSet[style].dup
end
# Apply the font settings (family, size, styles and character spacing) from
# the fragment to the document, then yield to the block.
#
# Used to arrange an inline image
def fragment_font fragment
  f_info = font_info
  # Fall back to the document's current font where the fragment is silent.
  f_family = fragment[:font] || f_info[:family]
  f_size = fragment[:size] || f_info[:size]
  if (f_styles = fragment[:styles])
    f_style = resolve_font_style f_styles
  else
    f_style = :normal
  end
  # character_spacing logic not currently used
  #if (c_spacing = fragment[:character_spacing])
  #  character_spacing c_spacing do
  #    font f_family, size: f_size, style: f_style do
  #      yield
  #    end
  #  end
  #else
  #  font f_family, size: f_size, style: f_style do
  #    yield
  #  end
  #end
  font f_family, size: f_size, style: f_style do
    yield
  end
end
# Override width of string to check for placeholder char, which uses character spacing to control width
#
def width_of_string string, options = {}
  string == PlaceholderChar ? @character_spacing : super
end

# Loads (and caches) the prawn-icon font data for the given icon family.
def icon_font_data family
  ::Prawn::Icon::FontData.load self, family
end

# Maps a legacy (FontAwesome 4) icon name to its current name, or nil.
def resolve_legacy_icon_name name
  ::Prawn::Icon::Compatibility::SHIMS[%(fa-#{name})]
end
# Computes the line metrics (height, leading, top/bottom padding) for the
# given line height, font and font size; :final_gap is always false here.
def calc_line_metrics line_height = 1, font = self.font, font_size = self.font_size
  line_height_length = line_height * font_size
  leading = line_height_length - font_size
  half_leading = leading / 2
  padding_top = half_leading + font.line_gap
  padding_bottom = half_leading
  LineMetrics.new line_height_length, leading, padding_top, padding_bottom, false
end
=begin
# these line metrics attempted to figure out a correction based on the reported height and the font_size
# however, it only works for some fonts, and breaks down for fonts like Noto Serif
def calc_line_metrics line_height = 1, font = self.font, font_size = self.font_size
  line_height_length = font_size * line_height
  line_gap = line_height_length - font_size
  correction = font.height - font_size
  leading = line_gap - correction
  shift = (font.line_gap + correction + line_gap) / 2
  final_gap = font.line_gap != 0
  LineMetrics.new line_height_length, leading, shift, shift, final_gap
end
=end
# Parse the text into an array of fragments using the text formatter.
# Returns [] for nil input. When :inline_format is set, the string is run
# through the text formatter (an Array value is splatted as formatter
# arguments); a :color option is applied to fragments that don't already
# specify one.
def parse_text string, options = {}
  return [] if string.nil?
  options = options.dup
  if (format_option = options.delete :inline_format)
    format_option = [] unless ::Array === format_option
    fragments = text_formatter.format string, *format_option
  else
    fragments = [text: string]
  end
  if (color = options.delete :color)
    fragments.map do |fragment|
      fragment[:color] ? fragment : (fragment.merge color: color)
    end
  else
    fragments
  end
end
# NOTE: override built-in fill_formatted_text_box to insert leading before second line when :first_line is true
def fill_formatted_text_box text, options
  merge_text_box_positioning_options options
  box = ::Prawn::Text::Formatted::Box.new text, options
  remaining_text = box.render
  @no_text_printed = box.nothing_printed?
  @all_text_printed = box.everything_printed?
  # Advance past the box; include line gap and leading when text flows past
  # a first-line-styled line.
  if ((defined? @final_gap) && @final_gap) || (options[:first_line] && !(@no_text_printed || @all_text_printed))
    self.y -= box.height + box.line_gap + box.leading
  else
    self.y -= box.height
  end
  remaining_text
end

# NOTE: override built-in draw_indented_formatted_line to set first_line flag
def draw_indented_formatted_line string, options
  super string, (options.merge first_line: true)
end
# Performs the same work as Prawn::Text.text except that the first_line_options are applied to the first line of text
# renderered. It's necessary to use low-level APIs in this method so we only style the first line and not the
# remaining lines (which is the default behavior in Prawn).
def text_with_formatted_first_line string, first_line_options, options
  color = options.delete :color
  fragments = parse_text string, options
  # NOTE: the low-level APIs we're using don't recognize the :styles option, so we must resolve
  # NOTE: disabled until we have a need for it
  #if (styles = options.delete :styles)
  #  options[:style] = resolve_font_style styles
  #end
  if (first_line_styles = first_line_options.delete :styles)
    first_line_options[:style] = resolve_font_style first_line_styles
  end
  first_line_color = (first_line_options.delete :color) || color
  options = options.merge document: self
  # QUESTION: should we merge more carefully here? (hand-select keys?)
  first_line_options = (options.merge first_line_options).merge single_line: true, first_line: true
  box = ::Prawn::Text::Formatted::Box.new fragments, first_line_options
  # NOTE: get remaining_fragments before we add color to fragments on first line
  if (text_indent = options.delete :indent_paragraphs)
    remaining_fragments = indent text_indent do
      box.render dry_run: true
    end
  else
    remaining_fragments = box.render dry_run: true
  end
  # NOTE: color must be applied per-fragment
  fragments.each {|fragment| fragment[:color] ||= first_line_color }
  if text_indent
    indent text_indent do
      fill_formatted_text_box fragments, first_line_options
    end
  else
    fill_formatted_text_box fragments, first_line_options
  end
  unless remaining_fragments.empty?
    # NOTE: color must be applied per-fragment
    remaining_fragments.each {|fragment| fragment[:color] ||= color }
    remaining_fragments = fill_formatted_text_box remaining_fragments, options
    draw_remaining_formatted_text_on_new_pages remaining_fragments, options
  end
end
# Apply the text transform to the specified text.
#
# Supported transform values are "uppercase", "lowercase", or "capitalize"
# (passed as either a String or a Symbol). The transforms correctly change
# visible text while leaving markup and named character entities unchanged.
# Any other value returns the text unmodified.
#
def transform_text text, transform
  case transform
  when :uppercase, 'uppercase'
    uppercase_pcdata text
  when :lowercase, 'lowercase'
    lowercase_pcdata text
  when :capitalize, 'capitalize'
    capitalize_words_pcdata text
  else
    text
  end
end

# Hyphenates visible words in the text using the given hyphenator.
def hyphenate_text text, hyphenator
  hyphenate_words_pcdata text, hyphenator
end
# Cursor

# Short-circuits the call to the built-in move_up operation
# when n is 0.
#
def move_up n
  super unless n == 0
end

# Override built-in move_text_position method to prevent Prawn from advancing
# to next page if image doesn't fit before rendering image.
#--
# NOTE: could use :at option when calling image/embed_image instead
def move_text_position h; end

# Short-circuits the call to the built-in move_down operation
# when n is 0.
#
def move_down n
  super unless n == 0
end
# Bounds

# Augments the built-in pad method by adding support for specifying padding on all four sizes.
#
# Padding may be specified as an array of four values, or as a single value.
# The single value is used as the padding around all four sides of the box.
#
# If padding is nil, this method simply yields to the block and returns.
#
# Example:
#
#  pad_box 20 do
#    text 'A paragraph inside a blox with even padding on all sides.'
#  end
#
#  pad_box [10, 10, 10, 20] do
#    text 'An indented paragraph inside a box with equal padding on all sides.'
#  end
#
def pad_box padding
  if padding
    # TODO: implement shorthand combinations like in CSS
    p_top, p_right, p_bottom, p_left = ::Array === padding ? padding : (::Array.new 4, padding)
    begin
      # logic is intentionally inlined
      move_down p_top
      bounds.add_left_padding p_left
      bounds.add_right_padding p_right
      yield
      # NOTE: support negative bottom padding to shave bottom margin of last child
      # NOTE: this doesn't work well at a page boundary since not all of the bottom margin may have been applied
      if p_bottom < 0
        p_bottom < cursor - reference_bounds.top ? (move_cursor_to reference_bounds.top) : (move_down p_bottom)
      else
        p_bottom < cursor ? (move_down p_bottom) : reference_bounds.move_past_bottom
      end
    ensure
      # Always restore the horizontal bounds, even if the block raises.
      bounds.subtract_left_padding p_left
      bounds.subtract_right_padding p_right
    end
  else
    yield
  end
end
# Expands an indent value into a [left, right] pair of Floats; a scalar
# is used for both sides, an Array is truncated to its first two entries.
def expand_indent_value value
  pair = ::Array === value ? value.slice(0, 2) : ::Array.new(2, value)
  pair.map(&:to_f)
end
# Expands a CSS-like padding shorthand (scalar, nil or a 1-4 element Array)
# into a [top, right, bottom, left] array. Results are cached per shorthand;
# a fresh copy is returned to each caller.
def expand_padding_value shorthand
  cache = (@side_area_shorthand_cache ||= {})
  unless (padding = cache[shorthand])
    padding =
      if ::Array === shorthand
        case shorthand.size
        when 1 then [shorthand[0]] * 4
        when 2 then [shorthand[0], shorthand[1]] * 2
        when 3 then [shorthand[0], shorthand[1], shorthand[2], shorthand[1]]
        when 4 then shorthand
        else shorthand.slice 0, 4
        end
      else
        ::Array.new 4, (shorthand || 0)
      end
    cache[shorthand] = padding
  end
  padding.dup
end
alias expand_margin_value expand_padding_value
# Stretch the current bounds to the left and right edges of the current page
# while yielding the specified block if the verdict argument is true.
# Otherwise, simply yield the specified block.
#
def span_page_width_if verdict
  if verdict
    # Negative indentation widens the bounds out to the page edges.
    indent(-bounds_margin_left, -bounds_margin_right) do
      yield
    end
  else
    yield
  end
end
# A flowing version of the bounding_box. If the content runs to another page, the cursor starts
# at the top of the page instead of the original cursor position. Similar to span, except
# you can specify an absolute left position and pass additional options through to bounding_box.
#
def flow_bounding_box left = 0, options = {}
  original_y = y
  # QUESTION: should preserving original_x be an option?
  original_x = bounds.absolute_left - margin_box.absolute_left
  canvas do
    bounding_box [margin_box.absolute_left + original_x + left, margin_box.absolute_top], options do
      # Resume at the pre-canvas cursor position.
      self.y = original_y
      yield
    end
  end
end
# Graphics

# Fills the current bounding box with the specified fill color. Before
# returning from this method, the original fill color on the document is
# restored. A nil or 'transparent' color is a no-op.
def fill_bounds f_color = fill_color
  if f_color && f_color != 'transparent'
    prev_fill_color = fill_color
    fill_color f_color
    fill_rectangle bounds.top_left, bounds.width, bounds.height
    fill_color prev_fill_color
  end
end

# Fills the absolute bounding box with the specified fill color. Before
# returning from this method, the original fill color on the document is
# restored.
def fill_absolute_bounds f_color = fill_color
  canvas { fill_bounds f_color }
end
# Fills the current bounds using the specified fill color and strokes the
# bounds using the specified stroke color. Sets the line width if specified
# in the options. Before returning from this method, the original fill
# color, stroke color and line width on the document are restored.
#
def fill_and_stroke_bounds f_color = fill_color, s_color = stroke_color, options = {}
  no_fill = !f_color || f_color == 'transparent'
  # An Array line width means separate [ends, sides] border widths; a
  # scalar draws a uniform border with an optional corner radius.
  if ::Array === (s_width = options[:line_width] || 0.5)
    s_width_max = s_width.map(&:to_i).max
    radius = 0
  else
    radius = options[:radius] || 0
  end
  no_stroke = !s_color || s_color == 'transparent' || (s_width_max || s_width) == 0
  return if no_fill && no_stroke
  save_graphics_state do
    # fill
    unless no_fill
      fill_color f_color
      fill_rounded_rectangle bounds.top_left, bounds.width, bounds.height, radius
    end
    next if no_stroke
    # stroke
    if s_width_max
      # Per-side widths: draw each border edge as an individual rule.
      if (s_width_end = s_width[0] || 0) > 0
        stroke_horizontal_rule s_color, line_width: s_width_end, line_style: options[:line_style]
        stroke_horizontal_rule s_color, line_width: s_width_end, line_style: options[:line_style], at: bounds.height
      end
      if (s_width_side = s_width[1] || 0) > 0
        stroke_vertical_rule s_color, line_width: s_width_side, line_style: options[:line_style]
        stroke_vertical_rule s_color, line_width: s_width_side, line_style: options[:line_style], at: bounds.width
      end
    else
      stroke_color s_color
      case options[:line_style]
      when :dashed
        line_width s_width
        dash s_width * 4
      when :dotted
        line_width s_width
        dash s_width
      when :double
        # A double border is two concentric rectangles of 1/3 width each.
        single_line_width = s_width / 3.0
        line_width single_line_width
        inner_line_offset = single_line_width * 2
        inner_top_left = [bounds.left + inner_line_offset, bounds.top - inner_line_offset]
        stroke_rounded_rectangle bounds.top_left, bounds.width, bounds.height, radius
        stroke_rounded_rectangle inner_top_left, bounds.width - (inner_line_offset * 2), bounds.height - (inner_line_offset * 2), radius
        next
      else # :solid
        line_width s_width
      end
      stroke_rounded_rectangle bounds.top_left, bounds.width, bounds.height, radius
    end
  end
end
# Strokes a horizontal line using the current bounds. The width of the line
# can be specified using the line_width option. The offset from the cursor
# can be set using the at option. Supports :dashed, :dotted, :double and
# (default) solid line styles.
#
def stroke_horizontal_rule rule_color = stroke_color, options = {}
  rule_y = cursor - (options[:at] || 0)
  rule_style = options[:line_style]
  rule_width = options[:line_width] || 0.5
  rule_x_start = bounds.left
  rule_x_end = bounds.right
  save_graphics_state do
    stroke_color rule_color
    case rule_style
    when :dashed
      line_width rule_width
      dash rule_width * 4
    when :dotted
      line_width rule_width
      dash rule_width
    when :double
      # Two parallel lines of 1/3 width, offset above and below rule_y.
      single_rule_width = rule_width / 3.0
      line_width single_rule_width
      stroke_horizontal_line rule_x_start, rule_x_end, at: (rule_y + single_rule_width)
      stroke_horizontal_line rule_x_start, rule_x_end, at: (rule_y - single_rule_width)
      next
    else # :solid
      line_width rule_width
    end
    stroke_horizontal_line rule_x_start, rule_x_end, at: rule_y
  end
end
# A complement to the stroke_horizontal_rule method, strokes a
# vertical line using the current bounds. The width of the line
# can be specified using the line_width option. The horizontal (x)
# position can be specified using the at option.
#
def stroke_vertical_rule rule_color = stroke_color, options = {}
  rule_x = options[:at] || 0
  rule_y_from = bounds.top
  rule_y_to = bounds.bottom
  rule_style = options[:line_style]
  rule_width = options[:line_width] || 0.5
  save_graphics_state do
    line_width rule_width
    stroke_color rule_color
    case rule_style
    when :dashed
      dash rule_width * 4
    when :dotted
      dash rule_width
    when :double
      # stroke a second line to the left, then shift the primary line right
      # NOTE(review): unlike the horizontal variant, the full rule_width is used here, not a third — confirm intended
      stroke_vertical_line rule_y_from, rule_y_to, at: (rule_x - rule_width)
      rule_x += rule_width
    end if rule_style
    stroke_vertical_line rule_y_from, rule_y_to, at: rule_x
  end
end
# Pages
# Deletes the current page and moves the cursor
# to the previous page.
def delete_page
  pg = page_number
  pdf_store = state.store
  content_id = page.content.identifier
  # NOTE: cannot delete objects and IDs, otherwise references get corrupted; so just reset the value
  (pdf_store.instance_variable_get :@objects)[content_id] = ::PDF::Core::Reference.new content_id, {}
  # remove the page from the page tree and keep the page count in sync
  pdf_store.pages.data[:Kids].pop
  pdf_store.pages.data[:Count] -= 1
  state.pages.pop
  if pg > 1
    go_to_page pg - 1
  else
    # no pages remain; reset to a pristine (page-less) document state
    @page_number = 0
    state.page = nil
  end
end
# Import the specified page into the current document.
#
# By default, advance to the next page afterwards, creating it if necessary.
# This behavior can be disabled by passing the option `advance: false`.
# However, due to how page creation works in Prawn, understand that advancing
# to the next page is necessary to prevent the size & layout of the imported
# page from affecting a newly created page.
def import_page file, options = {}
  prev_page_layout = page.layout
  prev_page_size = page.size
  state.compress = false if state.compress # can't use compression if using template
  prev_text_rendering_mode = (defined? @text_rendering_mode) ? @text_rendering_mode : nil
  delete_page if options[:replace]
  # NOTE: use functionality provided by prawn-templates
  start_new_page_discretely template: file, template_page: options[:page]
  # prawn-templates sets text_rendering_mode to :unknown, which breaks running content; revert
  @text_rendering_mode = prev_text_rendering_mode
  if page.imported_page?
    yield if block_given?
    # NOTE: set page size & layout explicitly in case imported page differs
    # I'm not sure it's right to start a new page here, but unfortunately there's no other
    # way atm to prevent the size & layout of the imported page from affecting subsequent pages
    advance_page size: prev_page_size, layout: prev_page_layout if options.fetch :advance, true
  elsif options.fetch :advance_if_missing, true
    # import failed; discard the blank page that was created, then advance
    delete_page
    # NOTE: see previous comment
    advance_page size: prev_page_size, layout: prev_page_layout
  else
    # import failed; just discard the blank page
    delete_page
  end
  nil
end
# Create a new page for the specified image.
#
# The image is positioned relative to the boundaries of the page.
def image_page file, options = {}
  start_new_page_discretely
  ex = nil
  # float preserves the cursor; canvas expands the bounds to the physical page
  float do
    canvas do
      image file, ({ position: :center, vposition: :center }.merge options)
    rescue
      # capture the error so it can be re-raised outside the float/canvas blocks
      ex = $!
    end
  end
  raise ex if ex
  nil
end
# Runs the given block with the on_page_create callback disabled, so page
# operations performed inside the block (such as start_new_page) do not
# trigger it. The previous callback is restored even if the block raises.
def perform_discretely
  prev_callback = state.on_page_create_callback
  # equivalent to calling `on_page_create` with no arguments
  state.on_page_create_callback = nil
  yield
ensure
  # equivalent to calling `on_page_create &prev_callback`
  state.on_page_create_callback = prev_callback
end
# A smarter version of start_new_page: creates a new page only when the
# current page is the last one; otherwise simply moves to the next
# existing page.
def advance_page options = {}
  if last_page?
    start_new_page options
  else
    go_to_page page_number + 1
  end
end
# Starts a new page without triggering the on_page_create callback.
#
def start_new_page_discretely options = {}
  perform_discretely do
    start_new_page options
  end
end
# Grouping
# Returns the lazily-created scratch document used for dry runs.
#
# When a prototype document is available, the scratch document is created by
# deep-copying the prototype (via a Marshal round-trip) so it carries over the
# prototype's settings, and it shares the prototype and temp file registry.
# Otherwise a plain Prawn::Document is instantiated (with a warning).
def scratch
  @scratch ||= if defined? @prototype
    instance = Marshal.load Marshal.dump @prototype
    instance.instance_variable_set :@prototype, @prototype
    instance.instance_variable_set :@tmp_files, @tmp_files
    instance
  else
    logger.warn 'no scratch prototype available; instantiating fresh scratch document'
    ::Prawn::Document.new
  end
end
# Returns whether this document is the scratch document, as indicated by the
# :Scratch key in the PDF info dictionary. The answer is memoized in @_label.
def scratch?
  (@_label ||= (state.store.info.data[:Scratch] ? :scratch : :primary)) == :scratch
rescue
  false # NOTE: this method may get called before the state is initialized
end
alias is_scratch? scratch?
# Executes the specified block in the scratch document to measure the extent
# of the content, then returns a 3-element array of total height, number of
# whole pages consumed, and the content height on the final (partial) page.
def dry_run &block
  scratch_pdf = scratch
  # QUESTION: should we use scratch_pdf.advance_page instead?
  scratch_pdf.start_new_page
  start_page_number = scratch_pdf.page_number
  start_y = scratch_pdf.y
  # temporarily match the scratch bounds to the current bounds so line wrapping is identical
  scratch_bounds = scratch_pdf.bounds
  original_x = scratch_bounds.absolute_left
  original_width = scratch_bounds.width
  scratch_bounds.instance_variable_set :@x, bounds.absolute_left
  scratch_bounds.instance_variable_set :@width, bounds.width
  # mirror the current font settings while executing the block
  prev_font_scale, scratch_pdf.font_scale = scratch_pdf.font_scale, font_scale
  scratch_pdf.font font_family, style: font_style, size: font_size do
    scratch_pdf.instance_exec(&block)
  end
  scratch_pdf.font_scale = prev_font_scale
  # NOTE: don't count excess if cursor exceeds writable area (due to padding)
  full_page_height = scratch_pdf.effective_page_height
  partial_page_height = [full_page_height, start_y - scratch_pdf.y].min
  # restore the scratch bounds
  scratch_bounds.instance_variable_set :@x, original_x
  scratch_bounds.instance_variable_set :@width, original_width
  whole_pages = scratch_pdf.page_number - start_page_number
  [(whole_pages * full_page_height + partial_page_height), whole_pages, partial_page_height]
end
# Measures the height the block will occupy using a dry run, then executes
# the block for real, passing the measured total height as an argument.
def with_dry_run &block
  measured_height = (dry_run(&block))[0]
  instance_exec measured_height, &block
end
# Attempt to keep the objects generated in the block on the same page
#
# TODO: short-circuit nested usage
def keep_together &block
  available_space = cursor
  total_height, = dry_run(&block)
  # NOTE: technically, if we're at the page top, we don't even need to do the
  # dry run, except several uses of this method rely on the calculated height
  # advance when the content doesn't fit here but would fit on a fresh page
  if total_height > available_space && !at_page_top? && total_height <= effective_page_height
    advance_page
    started_new_page = true
  else
    started_new_page = false
  end
  # HACK: yield doesn't work here on JRuby (at least not when called from AsciidoctorJ)
  #yield remainder, started_new_page
  instance_exec total_height, started_new_page, &block
end
# Delegates to keep_together when verdict is truthy; otherwise simply
# executes the block without any page-fit logic.
#
def keep_together_if verdict, &block
  if verdict
    keep_together(&block)
  else
    yield
  end
end
end
end
end
remove unnecessary check for missing @prototype when instantiating scratch PDF
# frozen_string_literal: true
# silence Prawn's warning about using the built-in (AFM) fonts with non-ASCII text
Prawn::Font::AFM.instance_variable_set :@hide_m17n_warning, true
require 'prawn/icon'
# silence prawn-icon's legacy icon name warnings by prepending a no-op warning method
Prawn::Icon::Compatibility.send :prepend, (::Module.new { def warning *_args; end })
module Asciidoctor
module Prawn
module Extensions
include ::Asciidoctor::PDF::Measurements
include ::Asciidoctor::PDF::Sanitizer
include ::Asciidoctor::PDF::TextTransformer
# the icon sets provided by Font Awesome (brands, regular, solid)
FontAwesomeIconSets = %w(fab far fas)
# all icon sets recognized by this converter
IconSets = %w(fab far fas fi pf).to_set
# icon set names as prefixes (e.g., 'fas-') for matching prefixed icon names
IconSetPrefixes = IconSets.map {|it| it + '-' }
# the content stream of a pristine page (a single save-graphics-state op)
InitialPageContent = %(q\n)
# maps a font style key to the set of styles it comprises; unknown keys map to an empty set
(FontStyleToSet = {
  bold: [:bold].to_set,
  italic: [:italic].to_set,
  bold_italic: [:bold, :italic].to_set,
}).default = ::Set.new
# NOTE: must use a visible char for placeholder or else Prawn won't reserve space for the fragment
PlaceholderChar = ?\u2063
# - :height is the height of a line
# - :leading is spacing between adjacent lines
# - :padding_top is half line spacing, plus any line_gap in the font
# - :padding_bottom is half line spacing
# - :final_gap determines whether a gap is added below the last line
LineMetrics = ::Struct.new :height, :leading, :padding_top, :padding_bottom, :final_gap
# Core
# Retrieves the catalog reference data (the document root) for the PDF.
#
def catalog
  state.store.root
end
# Retrieves the compatibility version of the PDF.
#
def min_version
  state.version
end
# Measurements
# Returns the width of the current page from edge-to-edge
#
def page_width
  # dimensions is [llx, lly, urx, ury]; index 2 is the upper-right x
  page.dimensions[2]
end
# Returns the height of the current page from edge-to-edge
#
def page_height
  # index 3 is the upper-right y
  page.dimensions[3]
end
# Returns the effective (writable) height of the page
#
# If inside a fixed-height bounding box, returns height of box.
#
def effective_page_height
  reference_bounds.height
end
# Returns the height of the content area (page height minus vertical margins)
#
def page_content_height
  page_height - page_margin_top - page_margin_bottom
end
# remove once fixed upstream; see https://github.com/prawnpdf/prawn/pull/1122
def generate_margin_box
  page_w, page_h = (page = state.page).dimensions.slice 2, 2
  page_m = page.margins
  # rebuild the margin box from the current page dimensions and margins
  prev_margin_box, @margin_box = @margin_box, (::Prawn::Document::BoundingBox.new self, nil, [page_m[:left], page_h - page_m[:top]], width: page_w - page_m[:left] - page_m[:right], height: page_h - page_m[:top] - page_m[:bottom])
  # update bounding box if not flowing from the previous page
  unless @bounding_box&.parent
    prev_margin_box = @bounding_box
    @bounding_box = @margin_box
  end
  # maintains indentation settings across page breaks
  if prev_margin_box
    @margin_box.add_left_padding prev_margin_box.total_left_padding
    @margin_box.add_right_padding prev_margin_box.total_right_padding
  end
  nil
end
# Set the margins for the current page, then regenerate the margin box
# so the new margins take effect.
#
def set_page_margin margin
  # FIXME: is there a cleaner way to set margins? does it make sense to override create_new_page?
  apply_margin_options margin: margin
  generate_margin_box
end
# Returns the margins for the current page as a 4 element array (top, right, bottom, left)
#
def page_margin
  [page_margin_top, page_margin_right, page_margin_bottom, page_margin_left]
end
# Returns the width of the left margin for the current page
#
def page_margin_left
  page.margins[:left]
end
# Returns the width of the right margin for the current page
#
def page_margin_right
  page.margins[:right]
end
# Returns the width of the top margin for the current page
#
def page_margin_top
  page.margins[:top]
end
# Returns the width of the bottom margin for the current page
#
def page_margin_bottom
  page.margins[:bottom]
end
# Returns the total left margin (to the page edge) for the current bounds.
#
def bounds_margin_left
  bounds.absolute_left
end
# Returns the total right margin (to the page edge) for the current bounds.
#
def bounds_margin_right
  # page.dimensions[2] is the page width (upper-right x)
  page.dimensions[2] - bounds.absolute_right
end
# Returns the side (:recto or :verso) of the page given by pgnum, or of the
# current page when pgnum is nil. When invert is truthy, the opposite side
# is reported.
#
def page_side pgnum = nil, invert = nil
  side = (verso_page? pgnum) ? :verso : :recto
  return side unless invert
  side == :verso ? :recto : :verso
end
# Returns whether the specified page number (or the current page number when
# not specified) falls on a recto (odd-numbered) page.
#
def recto_page? pgnum = nil
  (pgnum || page_number) % 2 == 1
end
# Returns whether the specified page number (or the current page number when
# not specified) falls on a verso (even-numbered) page.
#
def verso_page? pgnum = nil
  (pgnum || page_number) % 2 == 0
end
# Returns whether the cursor is at the top of the page (i.e., margin box).
#
def at_page_top?
  @y == @margin_box.absolute_top
end
# Returns whether the current page is the last page in the document.
#
def last_page?
  page_number == page_count
end
# Destinations
# Generates a destination object that resolves to the top of the page
# specified by the page_num parameter or the current page if no page number
# is provided. The destination preserves the user's zoom level unlike
# the destinations generated by the outline builder.
#
def dest_top page_num = nil
  # state.pages is 0-based, while page numbers are 1-based
  dest_xyz 0, page_height, nil, (page_num ? state.pages[page_num - 1] : page)
end
# Fonts
# Registers a new custom font described in the data parameter
# after converting the font name to a String.
#
# Example:
#
#  register_font Roboto: {
#    normal: 'fonts/roboto-normal.ttf',
#    italic: 'fonts/roboto-italic.ttf',
#    bold: 'fonts/roboto-bold.ttf',
#    bold_italic: 'fonts/roboto-bold_italic.ttf'
#  }
#
def register_font data
  normalized = data.each_with_object({}) {|(family, styles), accum| accum[family.to_s] = styles }
  font_families.update normalized
end
# Enhances the built-in font method to allow the font
# size to be specified as the second option and to
# lazily load font-based icons.
#
def font name = nil, options = {}
  if name
    # a bare Numeric second argument is shorthand for size: n
    options = { size: options } if ::Numeric === options
    if IconSets.include? name
      # load the icon font on first use; icon fonts don't support the :style option
      ::Prawn::Icon::FontData.load self, name
      options = options.reject {|k| k == :style } if options.key? :style
    end
  end
  super
end
# Retrieves the current font name (i.e., family).
#
def font_family
  font.options[:family]
end
alias font_name font_family
# Retrieves the current font info (family, style, size) as a Hash;
# a missing style defaults to :normal.
#
def font_info
  { family: font.options[:family], style: (font.options[:style] || :normal), size: @font_size }
end
# Set the font style on the document, if a style is given, otherwise return the current font style.
#
def font_style style = nil
  if style
    # keep the current family, change only the style
    font font.options[:family], style: style
  else
    font.options[:style] || :normal
  end
end
# Applies points as a scale factor of the current font if the value provided
# is less than or equal to 1 or it's a string (e.g., 1.1em), then delegates to the super
# implementation to carry out the built-in functionality.
#
#--
# QUESTION: should we round the result?
def font_size points = nil
  # acts as a reader when no argument is given
  return @font_size unless points
  if ::String === points
    if points.end_with? 'rem'
      # rem is relative to the document's root font size
      super @root_font_size * points.to_f
    elsif points.end_with? 'em'
      # em and % are relative to the current font size
      super @font_size * points.to_f
    elsif points.end_with? '%'
      super @font_size * (points.to_f / 100)
    else
      super points.to_f
    end
  # NOTE: assume em value (since a font size of 1 is extremely unlikely)
  elsif points <= 1
    super @font_size * points
  else
    super points
  end
end
# Collapses a collection of font styles (:bold, :italic) into the single
# font style key that represents the combination: :bold_italic, :bold,
# :italic, or :normal.
def resolve_font_style styles
  has_bold = styles.include? :bold
  has_italic = styles.include? :italic
  if has_bold
    has_italic ? :bold_italic : :bold
  elsif has_italic
    :italic
  else
    :normal
  end
end
# Retrieves the collection of font styles from the given font style key,
# which defaults to the current font style.
#
def font_styles style = font_style
  # dup so the caller can mutate the set without affecting the shared constant
  FontStyleToSet[style].dup
end
# Apply the font settings (family, size, styles and character spacing) from
# the fragment to the document, then yield to the block.
#
# Used to arrange an inline image
def fragment_font fragment
  f_info = font_info
  # fall back to the current font for any property the fragment doesn't specify
  f_family = fragment[:font] || f_info[:family]
  f_size = fragment[:size] || f_info[:size]
  if (f_styles = fragment[:styles])
    f_style = resolve_font_style f_styles
  else
    f_style = :normal
  end
  # character_spacing logic not currently used
  #if (c_spacing = fragment[:character_spacing])
  # character_spacing c_spacing do
  # font f_family, size: f_size, style: f_style do
  # yield
  # end
  # end
  #else
  # font f_family, size: f_size, style: f_style do
  # yield
  # end
  #end
  font f_family, size: f_size, style: f_style do
    yield
  end
end
# Override width of string to check for placeholder char, which uses character spacing to control width
#
def width_of_string string, options = {}
  string == PlaceholderChar ? @character_spacing : super
end
# Loads (and caches, via prawn-icon) the icon font data for the specified icon font family.
def icon_font_data family
  ::Prawn::Icon::FontData.load self, family
end
# Maps a legacy (fa-prefixed) icon name to its replacement using the prawn-icon
# compatibility shims; returns nil for an unknown name.
def resolve_legacy_icon_name name
  ::Prawn::Icon::Compatibility::SHIMS[%(fa-#{name})]
end
# Computes the LineMetrics for the given line height multiplier, font, and
# font size. The leading is the extra space beyond the font size; half of it
# pads the bottom of the line, and the other half (plus the font's line gap)
# pads the top. The final gap below the last line is disabled.
def calc_line_metrics line_height = 1, font = self.font, font_size = self.font_size
  height = line_height * font_size
  leading = height - font_size
  half_leading = leading / 2
  LineMetrics.new height, leading, (half_leading + font.line_gap), half_leading, false
end
=begin
# these line metrics attempted to figure out a correction based on the reported height and the font_size
# however, it only works for some fonts, and breaks down for fonts like Noto Serif
def calc_line_metrics line_height = 1, font = self.font, font_size = self.font_size
line_height_length = font_size * line_height
line_gap = line_height_length - font_size
correction = font.height - font_size
leading = line_gap - correction
shift = (font.line_gap + correction + line_gap) / 2
final_gap = font.line_gap != 0
LineMetrics.new line_height_length, leading, shift, shift, final_gap
end
=end
# Parse the text into an array of fragments using the text formatter.
#
# When the :inline_format option is set, the string is run through the text
# formatter (using the option value as extra arguments when it's an Array).
# The :color option, if specified, is applied to any fragment that doesn't
# already declare a color. Returns an empty array for a nil string.
def parse_text string, options = {}
  return [] if string.nil?
  opts = options.dup
  format_option = opts.delete :inline_format
  if format_option
    format_args = ::Array === format_option ? format_option : []
    fragments = text_formatter.format string, *format_args
  else
    fragments = [text: string]
  end
  color = opts.delete :color
  if color
    fragments.map {|fragment| fragment[:color] ? fragment : (fragment.merge color: color) }
  else
    fragments
  end
end
# NOTE: override built-in fill_formatted_text_box to insert leading before second line when :first_line is true
def fill_formatted_text_box text, options
  merge_text_box_positioning_options options
  box = ::Prawn::Text::Formatted::Box.new text, options
  remaining_text = box.render
  # record whether any/all text was printed for callers that need to react to overflow
  @no_text_printed = box.nothing_printed?
  @all_text_printed = box.everything_printed?
  # advance past the box; add line gap and leading when a final gap is requested,
  # or when this is the first line of a paragraph that continues beyond the box
  if ((defined? @final_gap) && @final_gap) || (options[:first_line] && !(@no_text_printed || @all_text_printed))
    self.y -= box.height + box.line_gap + box.leading
  else
    self.y -= box.height
  end
  remaining_text
end
# NOTE: override built-in draw_indented_formatted_line to set first_line flag
def draw_indented_formatted_line string, options
  # the first_line flag lets fill_formatted_text_box insert leading after the first line
  super string, (options.merge first_line: true)
end
# Performs the same work as Prawn::Text.text except that the first_line_options are applied to the first line of text
# renderered. It's necessary to use low-level APIs in this method so we only style the first line and not the
# remaining lines (which is the default behavior in Prawn).
def text_with_formatted_first_line string, first_line_options, options
  color = options.delete :color
  fragments = parse_text string, options
  # NOTE: the low-level APIs we're using don't recognize the :styles option, so we must resolve
  # NOTE: disabled until we have a need for it
  #if (styles = options.delete :styles)
  # options[:style] = resolve_font_style styles
  #end
  if (first_line_styles = first_line_options.delete :styles)
    first_line_options[:style] = resolve_font_style first_line_styles
  end
  first_line_color = (first_line_options.delete :color) || color
  options = options.merge document: self
  # QUESTION: should we merge more carefully here? (hand-select keys?)
  first_line_options = (options.merge first_line_options).merge single_line: true, first_line: true
  box = ::Prawn::Text::Formatted::Box.new fragments, first_line_options
  # NOTE: get remaining_fragments before we add color to fragments on first line
  if (text_indent = options.delete :indent_paragraphs)
    remaining_fragments = indent text_indent do
      box.render dry_run: true
    end
  else
    remaining_fragments = box.render dry_run: true
  end
  # NOTE: color must be applied per-fragment
  fragments.each {|fragment| fragment[:color] ||= first_line_color }
  # render the first line for real, honoring any paragraph indent
  if text_indent
    indent text_indent do
      fill_formatted_text_box fragments, first_line_options
    end
  else
    fill_formatted_text_box fragments, first_line_options
  end
  # render the rest of the text with the base options
  unless remaining_fragments.empty?
    # NOTE: color must be applied per-fragment
    remaining_fragments.each {|fragment| fragment[:color] ||= color }
    remaining_fragments = fill_formatted_text_box remaining_fragments, options
    draw_remaining_formatted_text_on_new_pages remaining_fragments, options
  end
end
# Apply the text transform to the specified text.
#
# Supported transform values are "uppercase", "lowercase", "capitalize", or
# "none" (passed as either a String or a Symbol). When the uppercase transform
# is applied to the text, it correctly uppercases visible text while leaving
# markup and named character entities unchanged. Any other value (including
# none) returns the text unmodified.
#
def transform_text text, transform
  case transform
  when :uppercase, 'uppercase'
    uppercase_pcdata text
  when :lowercase, 'lowercase'
    lowercase_pcdata text
  when :capitalize, 'capitalize'
    capitalize_words_pcdata text
  else
    text
  end
end
# Hyphenates the visible (pcdata) text using the given hyphenator.
def hyphenate_text text, hyphenator
  hyphenate_words_pcdata text, hyphenator
end
# Cursor
# Short-circuits the call to the built-in move_up operation when n is 0.
#
def move_up n
  return if n == 0
  super
end
# Override built-in move_text_position method to prevent Prawn from advancing
# to next page if image doesn't fit before rendering image.
#--
# NOTE: could use :at option when calling image/embed_image instead
def move_text_position h; end
# Short-circuits the call to the built-in move_down operation when n is 0.
#
def move_down n
  return if n == 0
  super
end
# Bounds
# Augments the built-in pad method by adding support for specifying padding on all four sizes.
#
# Padding may be specified as an array of four values, or as a single value.
# The single value is used as the padding around all four sides of the box.
#
# If padding is nil, this method simply yields to the block and returns.
#
# Example:
#
#  pad_box 20 do
#    text 'A paragraph inside a blox with even padding on all sides.'
#  end
#
#  pad_box [10, 10, 10, 20] do
#    text 'An indented paragraph inside a box with equal padding on all sides.'
#  end
#
def pad_box padding
  if padding
    # TODO: implement shorthand combinations like in CSS
    p_top, p_right, p_bottom, p_left = ::Array === padding ? padding : (::Array.new 4, padding)
    begin
      # logic is intentionally inlined
      move_down p_top
      bounds.add_left_padding p_left
      bounds.add_right_padding p_right
      yield
      # NOTE: support negative bottom padding to shave bottom margin of last child
      # NOTE: this doesn't work well at a page boundary since not all of the bottom margin may have been applied
      if p_bottom < 0
        # move back up, but never above the top of the reference bounds
        p_bottom < cursor - reference_bounds.top ? (move_cursor_to reference_bounds.top) : (move_down p_bottom)
      else
        # apply bottom padding, or advance to the next page if it doesn't fit
        p_bottom < cursor ? (move_down p_bottom) : reference_bounds.move_past_bottom
      end
    ensure
      # always restore the original horizontal bounds
      bounds.subtract_left_padding p_left
      bounds.subtract_right_padding p_right
    end
  else
    yield
  end
end
# Expands an indent value into a [left, right] pair of floats. An Array is
# truncated to its first two elements; a single value is used for both sides.
def expand_indent_value value
  pair = ::Array === value ? value[0, 2] : [value, value]
  pair.map {|it| it.to_f }
end
# Expands a CSS-like padding shorthand into a [top, right, bottom, left]
# array. Accepts a single value, or an Array of 1-4 (or more, truncated to 4)
# values; nil becomes 0. Results are memoized per shorthand, and a defensive
# copy is returned so callers may mutate it.
def expand_padding_value shorthand
  cache = @side_area_shorthand_cache ||= {}
  unless (padding = cache[shorthand])
    padding = if ::Array === shorthand
      case shorthand.size
      when 1 then shorthand * 4
      when 2 then shorthand + shorthand
      when 3 then shorthand + [shorthand[1]]
      when 4 then shorthand
      else shorthand[0, 4]
      end
    else
      ::Array.new 4, (shorthand || 0)
    end
    cache[shorthand] = padding
  end
  padding.dup
end
alias expand_margin_value expand_padding_value
# Stretch the current bounds to the left and right edges of the current page
# while yielding the specified block if the verdict argument is true.
# Otherwise, simply yield the specified block.
#
def span_page_width_if verdict
  return yield unless verdict
  indent(-bounds_margin_left, -bounds_margin_right) { yield }
end
# A flowing version of the bounding_box. If the content runs to another page, the cursor starts
# at the top of the page instead of the original cursor position. Similar to span, except
# you can specify an absolute left position and pass additional options through to bounding_box.
#
def flow_bounding_box left = 0, options = {}
  original_y = y
  # QUESTION: should preserving original_x be an option?
  original_x = bounds.absolute_left - margin_box.absolute_left
  canvas do
    bounding_box [margin_box.absolute_left + original_x + left, margin_box.absolute_top], options do
      # restore the cursor to where it was before entering the canvas
      self.y = original_y
      yield
    end
  end
end
# Graphics
# Fills the current bounding box with the specified fill color. Before
# returning from this method, the original fill color on the document is
# restored.
def fill_bounds f_color = fill_color
  # a nil or 'transparent' color is a no-op
  if f_color && f_color != 'transparent'
    prev_fill_color = fill_color
    fill_color f_color
    fill_rectangle bounds.top_left, bounds.width, bounds.height
    fill_color prev_fill_color
  end
end
# Fills the absolute bounding box with the specified fill color. Before
# returning from this method, the original fill color on the document is
# restored.
def fill_absolute_bounds f_color = fill_color
  # canvas expands the bounds to the edges of the physical page
  canvas { fill_bounds f_color }
end
# Fills the current bounds using the specified fill color and strokes the
# bounds using the specified stroke color. Sets the line width if specified
# in the options. Before returning from this method, the original fill
# color, stroke color and line width on the document are restored.
#
def fill_and_stroke_bounds f_color = fill_color, s_color = stroke_color, options = {}
  no_fill = !f_color || f_color == 'transparent'
  # an Array :line_width means [end (top/bottom) width, side (left/right) width];
  # in that mode the :radius option is ignored (edges are stroked as rules)
  if ::Array === (s_width = options[:line_width] || 0.5)
    # NOTE(review): to_i truncates fractional widths when computing the max (e.g., 0.5 -> 0) — confirm intended
    s_width_max = s_width.map(&:to_i).max
    radius = 0
  else
    radius = options[:radius] || 0
  end
  # skip stroking for a nil/'transparent' color or a zero-width line
  no_stroke = !s_color || s_color == 'transparent' || (s_width_max || s_width) == 0
  return if no_fill && no_stroke
  save_graphics_state do
    # fill
    unless no_fill
      fill_color f_color
      fill_rounded_rectangle bounds.top_left, bounds.width, bounds.height, radius
    end
    next if no_stroke
    # stroke
    if s_width_max
      # per-edge stroking: top/bottom rules, then left/right rules, when width > 0
      if (s_width_end = s_width[0] || 0) > 0
        stroke_horizontal_rule s_color, line_width: s_width_end, line_style: options[:line_style]
        stroke_horizontal_rule s_color, line_width: s_width_end, line_style: options[:line_style], at: bounds.height
      end
      if (s_width_side = s_width[1] || 0) > 0
        stroke_vertical_rule s_color, line_width: s_width_side, line_style: options[:line_style]
        stroke_vertical_rule s_color, line_width: s_width_side, line_style: options[:line_style], at: bounds.width
      end
    else
      stroke_color s_color
      case options[:line_style]
      when :dashed
        line_width s_width
        dash s_width * 4
      when :dotted
        line_width s_width
        dash s_width
      when :double
        # draw two concentric rectangles, each one third of the total line width
        single_line_width = s_width / 3.0
        line_width single_line_width
        inner_line_offset = single_line_width * 2
        inner_top_left = [bounds.left + inner_line_offset, bounds.top - inner_line_offset]
        stroke_rounded_rectangle bounds.top_left, bounds.width, bounds.height, radius
        stroke_rounded_rectangle inner_top_left, bounds.width - (inner_line_offset * 2), bounds.height - (inner_line_offset * 2), radius
        next
      else # :solid
        line_width s_width
      end
      stroke_rounded_rectangle bounds.top_left, bounds.width, bounds.height, radius
    end
  end
end
# Strokes a horizontal line using the current bounds. The width of the line
# can be specified using the line_width option. The offset from the cursor
# can be set using the at option.
#
def stroke_horizontal_rule rule_color = stroke_color, options = {}
  # :at measures downward from the current cursor position
  rule_y = cursor - (options[:at] || 0)
  rule_style = options[:line_style]
  rule_width = options[:line_width] || 0.5
  rule_x_start = bounds.left
  rule_x_end = bounds.right
  save_graphics_state do
    stroke_color rule_color
    case rule_style
    when :dashed
      line_width rule_width
      dash rule_width * 4
    when :dotted
      line_width rule_width
      dash rule_width
    when :double
      # two parallel lines, each a third of the requested width, offset above and below rule_y
      single_rule_width = rule_width / 3.0
      line_width single_rule_width
      stroke_horizontal_line rule_x_start, rule_x_end, at: (rule_y + single_rule_width)
      stroke_horizontal_line rule_x_start, rule_x_end, at: (rule_y - single_rule_width)
      next
    else # :solid
      line_width rule_width
    end
    stroke_horizontal_line rule_x_start, rule_x_end, at: rule_y
  end
end
# A complement to the stroke_horizontal_rule method, strokes a
# vertical line using the current bounds. The width of the line
# can be specified using the line_width option. The horizontal (x)
# position can be specified using the at option.
#
def stroke_vertical_rule rule_color = stroke_color, options = {}
  rule_x = options[:at] || 0
  rule_y_from = bounds.top
  rule_y_to = bounds.bottom
  rule_style = options[:line_style]
  rule_width = options[:line_width] || 0.5
  save_graphics_state do
    line_width rule_width
    stroke_color rule_color
    case rule_style
    when :dashed
      dash rule_width * 4
    when :dotted
      dash rule_width
    when :double
      # stroke a second line to the left, then shift the primary line right
      # NOTE(review): unlike the horizontal variant, the full rule_width is used here, not a third — confirm intended
      stroke_vertical_line rule_y_from, rule_y_to, at: (rule_x - rule_width)
      rule_x += rule_width
    end if rule_style
    stroke_vertical_line rule_y_from, rule_y_to, at: rule_x
  end
end
# Pages
# Deletes the current page and moves the cursor
# to the previous page.
def delete_page
  pg = page_number
  pdf_store = state.store
  content_id = page.content.identifier
  # NOTE: cannot delete objects and IDs, otherwise references get corrupted; so just reset the value
  (pdf_store.instance_variable_get :@objects)[content_id] = ::PDF::Core::Reference.new content_id, {}
  # remove the page from the page tree and keep the page count in sync
  pdf_store.pages.data[:Kids].pop
  pdf_store.pages.data[:Count] -= 1
  state.pages.pop
  if pg > 1
    go_to_page pg - 1
  else
    # no pages remain; reset to a pristine (page-less) document state
    @page_number = 0
    state.page = nil
  end
end
# Import the specified page into the current document.
#
# By default, advance to the next page afterwards, creating it if necessary.
# This behavior can be disabled by passing the option `advance: false`.
# However, due to how page creation works in Prawn, understand that advancing
# to the next page is necessary to prevent the size & layout of the imported
# page from affecting a newly created page.
def import_page file, options = {}
  prev_page_layout = page.layout
  prev_page_size = page.size
  state.compress = false if state.compress # can't use compression if using template
  prev_text_rendering_mode = (defined? @text_rendering_mode) ? @text_rendering_mode : nil
  delete_page if options[:replace]
  # NOTE: use functionality provided by prawn-templates
  start_new_page_discretely template: file, template_page: options[:page]
  # prawn-templates sets text_rendering_mode to :unknown, which breaks running content; revert
  @text_rendering_mode = prev_text_rendering_mode
  if page.imported_page?
    yield if block_given?
    # NOTE: set page size & layout explicitly in case imported page differs
    # I'm not sure it's right to start a new page here, but unfortunately there's no other
    # way atm to prevent the size & layout of the imported page from affecting subsequent pages
    advance_page size: prev_page_size, layout: prev_page_layout if options.fetch :advance, true
  elsif options.fetch :advance_if_missing, true
    # import failed; discard the blank page that was created, then advance
    delete_page
    # NOTE: see previous comment
    advance_page size: prev_page_size, layout: prev_page_layout
  else
    # import failed; just discard the blank page
    delete_page
  end
  nil
end
# Create a new page for the specified image.
#
# The image is positioned relative to the boundaries of the page.
def image_page file, options = {}
  start_new_page_discretely
  ex = nil
  # float preserves the cursor; canvas expands the bounds to the physical page
  float do
    canvas do
      image file, ({ position: :center, vposition: :center }.merge options)
    rescue
      # capture the error so it can be re-raised outside the float/canvas blocks
      ex = $!
    end
  end
  raise ex if ex
  nil
end
# Runs the given block with the on_page_create callback disabled, so page
# operations performed inside the block (such as start_new_page) do not
# trigger it. The previous callback is restored even if the block raises.
def perform_discretely
  prev_callback = state.on_page_create_callback
  # equivalent to calling `on_page_create` with no arguments
  state.on_page_create_callback = nil
  yield
ensure
  # equivalent to calling `on_page_create &prev_callback`
  state.on_page_create_callback = prev_callback
end
# A smarter version of start_new_page: creates a new page only when the
# current page is the last one; otherwise simply moves to the next
# existing page.
def advance_page options = {}
  if last_page?
    start_new_page options
  else
    go_to_page page_number + 1
  end
end
# Starts a new page without triggering the on_page_create callback.
#
def start_new_page_discretely options = {}
  perform_discretely do
    start_new_page options
  end
end
# Grouping
# Returns the lazily-created scratch document used for dry runs.
#
# The scratch document is created by deep-copying the prototype document
# (via a Marshal round-trip) so it carries over the prototype's settings,
# and it shares the prototype and temp file registry with this document.
def scratch
  @scratch ||= begin
    instance = Marshal.load Marshal.dump @prototype
    instance.instance_variable_set :@prototype, @prototype
    instance.instance_variable_set :@tmp_files, @tmp_files
    instance
  end
end
# Whether this document is the scratch (measurement) copy, as recorded in
# the PDF info dictionary. The verdict is memoized in @_label.
def scratch?
  @_label ||= (state.store.info.data[:Scratch] ? :scratch : :primary)
  @_label == :scratch
rescue
  # NOTE: this method may get called before the state is initialized
  false
end
alias is_scratch? scratch?
# Execute the block in the scratch document to measure the vertical space it
# would consume, without touching the primary document.
#
# Returns [total_height, whole_pages, partial_page_height].
def dry_run &block
  scratch_pdf = scratch
  # QUESTION: should we use scratch_pdf.advance_page instead?
  scratch_pdf.start_new_page
  start_page_number = scratch_pdf.page_number
  start_y = scratch_pdf.y
  # Temporarily force the scratch bounds to mirror the live bounds so line
  # wrapping during measurement matches the primary document.
  scratch_bounds = scratch_pdf.bounds
  original_x = scratch_bounds.absolute_left
  original_width = scratch_bounds.width
  scratch_bounds.instance_variable_set :@x, bounds.absolute_left
  scratch_bounds.instance_variable_set :@width, bounds.width
  # carry the current font state into the scratch document for accuracy
  prev_font_scale, scratch_pdf.font_scale = scratch_pdf.font_scale, font_scale
  scratch_pdf.font font_family, style: font_style, size: font_size do
    scratch_pdf.instance_exec(&block)
  end
  scratch_pdf.font_scale = prev_font_scale
  # NOTE: don't count excess if cursor exceeds writable area (due to padding)
  full_page_height = scratch_pdf.effective_page_height
  partial_page_height = [full_page_height, start_y - scratch_pdf.y].min
  # restore the scratch bounds clobbered above
  scratch_bounds.instance_variable_set :@x, original_x
  scratch_bounds.instance_variable_set :@width, original_width
  whole_pages = scratch_pdf.page_number - start_page_number
  [(whole_pages * full_page_height + partial_page_height), whole_pages, partial_page_height]
end
# Measure the block via a dry run, then execute it for real, passing the
# measured total height as the block argument.
def with_dry_run &block
  height, _whole_pages, _partial = dry_run(&block)
  instance_exec height, &block
end
# Attempt to keep the objects generated in the block on the same page
#
# TODO: short-circuit nested usage
def keep_together &block
  available_space = cursor
  # NOTE: technically, if we're at the page top, we don't even need to do the
  # dry run, except several uses of this method rely on the calculated height
  block_height, = dry_run(&block)
  started_new_page = false
  # advance only when the content would overflow this page but still fits on
  # a fresh one; otherwise it either fits here or can't fit anywhere
  if block_height > available_space && !at_page_top? && block_height <= effective_page_height
    advance_page
    started_new_page = true
  end
  # HACK: yield doesn't work here on JRuby (at least not when called from AsciidoctorJ)
  #yield remainder, started_new_page
  instance_exec block_height, started_new_page, &block
end
# Attempt to keep the objects generated in the block on the same page,
# but only when the verdict parameter is true; otherwise just run the block.
def keep_together_if verdict, &block
  if verdict
    keep_together(&block)
  else
    yield
  end
end
end
end
end
|
require "bosh/workspace"
module Bosh::Cli::Command
  # Replaces the stock bosh `deployment` and `deploy` commands so that
  # bosh-workspace project deployment manifests are merged and validated
  # before being handed to the original implementation.
  class ProjectDeployment < Base
    include Bosh::Cli::Validation
    include Bosh::Workspace
    include ProjectDeploymentHelper

    # Hack to unregister original deployment command
    Bosh::Cli::Config.instance_eval("@commands.delete('deployment')")

    usage "deployment"
    desc "Get/set current deployment"
    # Get or set the current deployment. When the given file is a project
    # deployment, merge and validate it first and point the original command
    # at the merged manifest instead.
    def set_current(filename = nil)
      unless filename.nil?
        deployment = find_deployment(filename)
        if project_deployment_file?(deployment)
          self.project_deployment = deployment
          validate_project_deployment
          filename = project_deployment.merged_file
          # FIX: File.exists? is deprecated (removed in Ruby 3.2); use File.exist?
          create_placeholder_deployment unless File.exist?(filename)
        end
      end
      deployment_cmd(options).set_current(filename)
    end

    # Hack to unregister original deploy command
    Bosh::Cli::Config.instance_eval("@commands.delete('deploy')")

    usage "deploy"
    desc "Deploy according to the currently selected deployment manifest"
    option "--recreate", "recreate all VMs in deployment"
    # Build the project deployment (when one is selected) and delegate the
    # actual deploy to the original command, propagating its exit code.
    def deploy
      if project_deployment?
        require_project_deployment
        build_project_deployment
      end
      command = deployment_cmd(options)
      command.perform
      @exit_code = command.exit_code
    end

    private

    # Instantiate the original Deployment command, carrying over our CLI options.
    def deployment_cmd(options = {})
      Bosh::Cli::Command::Deployment.new.tap do |cmd|
        options.each { |k, v| cmd.add_option k.to_sym, v }
      end
    end
  end
end
Allows use of `--no-redact` flag
Signed-off-by: Gary Liu <e2834181ebd96d7544bc16fb46591d272d6ed785@pivotal.io>
require "bosh/workspace"
module Bosh::Cli::Command
  # Replaces the stock bosh `deployment` and `deploy` commands so that
  # bosh-workspace project deployment manifests are merged and validated
  # before being handed to the original implementation.
  class ProjectDeployment < Base
    include Bosh::Cli::Validation
    include Bosh::Workspace
    include ProjectDeploymentHelper

    # Hack to unregister original deployment command
    Bosh::Cli::Config.instance_eval("@commands.delete('deployment')")

    usage "deployment"
    desc "Get/set current deployment"
    # Get or set the current deployment. When the given file is a project
    # deployment, merge and validate it first and point the original command
    # at the merged manifest instead.
    def set_current(filename = nil)
      unless filename.nil?
        deployment = find_deployment(filename)
        if project_deployment_file?(deployment)
          self.project_deployment = deployment
          validate_project_deployment
          filename = project_deployment.merged_file
          # FIX: File.exists? is deprecated (removed in Ruby 3.2); use File.exist?
          create_placeholder_deployment unless File.exist?(filename)
        end
      end
      deployment_cmd(options).set_current(filename)
    end

    # Hack to unregister original deploy command
    Bosh::Cli::Config.instance_eval("@commands.delete('deploy')")

    usage "deploy"
    desc "Deploy according to the currently selected deployment manifest"
    option "--recreate", "recreate all VMs in deployment"
    # FIX: help text had a typo ("chanes") and described the opposite of what
    # the flag does — --no-redact disables redaction of manifest value changes.
    option "--no-redact", "do not redact manifest value changes in deployment"
    # Build the project deployment (when one is selected) and delegate the
    # actual deploy to the original command, propagating its exit code.
    def deploy
      if project_deployment?
        require_project_deployment
        build_project_deployment
      end
      command = deployment_cmd(options)
      command.perform
      @exit_code = command.exit_code
    end

    private

    # Instantiate the original Deployment command, carrying over our CLI options.
    def deployment_cmd(options = {})
      Bosh::Cli::Command::Deployment.new.tap do |cmd|
        options.each { |k, v| cmd.add_option k.to_sym, v }
      end
    end
  end
end
|
# Copyright (C) 2017 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
module Canvas
  class DynamicSettings
    # A class for reading values from Consul
    #
    # @attr prefix [String] The prefix to be prepended to keys for querying.
    class PrefixProxy
      # The TTL for cached values if none is specified in the constructor
      DEFAULT_TTL = 5.minutes

      attr_reader :prefix, :tree, :service, :environment, :cluster

      # Build a new prefix proxy
      #
      # @param prefix [String] The prefix to be prepended to keys for querying.
      # @param tree [String] Which tree to use (config, private, store)
      # @param service [String] The service name to use (i.e. who owns the configuration). Defaults to canvas
      # @param environment [String] An optional environment to look for so that multiple Canvas environments can share Consul
      # @param cluster [String] An optional cluster to override region or global settings
      # @param default_ttl [ActiveSupport::Duration] The TTL to use for cached
      #   values when not specified to the fetch methods.
      # @param kv_client [Imperium::KV] The client to use for connecting to
      #   Consul, defaults to Imperium::KV.default_client
      # @param data_center [String] Optional data center used for global writes in #set_keys
      def initialize(prefix = nil,
                     tree: :config,
                     service: :canvas,
                     environment: nil,
                     cluster: nil,
                     default_ttl: DEFAULT_TTL,
                     kv_client: Imperium::KV.default_client,
                     data_center: nil)
        @prefix = prefix
        @tree = tree
        @service = service
        @environment = environment
        @cluster = cluster
        @default_ttl = default_ttl
        @kv_client = kv_client
        @data_center = data_center
      end

      # Fetch the value at the requested key using the prefix passed to the
      # initializer.
      #
      # This method is intended to retrieve a single key from the keyspace and
      # will not work for getting multiple values in a hash from the store. If
      # you need to access values nested deeper in the keyspace use #for_prefix
      # to move deeper in the nesting.
      #
      # @param key [String, Symbol] The key to fetch
      # @param ttl [ActiveSupport::Duration] The TTL for the value in the cache,
      #   defaults to value supplied to the constructor.
      # @return [String]
      # @return [nil] When no value was found
      def fetch(key, ttl: @default_ttl)
        # candidate keys, most specific first: cluster-scoped, then environment-scoped
        keys = [
          full_key(key),
          [tree, service, environment, prefix, key].compact.join("/"),
        ].uniq
        # fallbacks consulted only after the tree fetch misses: no environment,
        # then the two global forms
        fallback_keys = [
          [tree, service, prefix, key].compact.join("/"),
          full_key(key, global: true),
          ["global", tree, service, prefix, key].compact.join("/"),
        ].uniq - keys
        # try to get the local cache first right away
        keys.each do |full_key|
          result = LocalCache.fetch(CACHE_KEY_PREFIX + full_key)
          return result if result
        end
        # okay now pre-cache an entire tree
        tree_key = [tree, service, environment].compact.join("/")
        LocalCache.fetch(CACHE_KEY_PREFIX + tree_key + '/', expires_in: ttl) do
          result = @kv_client.get(tree_key, :recurse, :stale)
          if result&.status == 200
            populate_cache(tree_key, result.values, ttl) # only populate recursively when we missed
            result.values
          end
        end
        keys.each do |full_key|
          # these keys will have been populated (or not!) above; don't
          # actually try to fetch them
          result = LocalCache.fetch(CACHE_KEY_PREFIX + full_key)
          return result if result
        end
        # fallback keys live outside the pre-cached tree, so fetch them
        # individually (each cached under its own TTL)
        fallback_keys.each do |full_key|
          result = LocalCache.fetch(CACHE_KEY_PREFIX + full_key, expires_in: ttl) do
            result = @kv_client.get(full_key, :stale)
            result.values if result&.status == 200
          end
          return result if result
        end
        nil
      rescue Imperium::TimeoutError => exception
        # on Consul timeout, serve the most specific key from the stale local
        # cache when possible (reporting the error); otherwise re-raise
        LocalCache.fetch_without_expiration(CACHE_KEY_PREFIX + keys.first).tap do |val|
          if val
            Canvas::Errors.capture_exception(:consul, exception)
            val
          else
            raise
          end
        end
      end
      alias [] fetch

      # Extend the prefix from this instance returning a new one.
      #
      # @param prefix_extension [String]
      # @param default_ttl [ActiveSupport::Duration] The default TTL to use when
      #   fetching keys from the extended keyspace, defaults to the same value as
      #   the receiver
      # @return [ProxyPrefix]
      def for_prefix(prefix_extension, default_ttl: @default_ttl)
        self.class.new(
          "#{@prefix}/#{prefix_extension}",
          tree: tree,
          service: service,
          environment: environment,
          cluster: cluster,
          default_ttl: default_ttl,
          kv_client: @kv_client,
          data_center: @data_center
        )
      end

      # Set multiple key value pairs
      #
      # @param kvs [Hash] Key value pairs where the hash key is the key
      #   and the hash value is the value
      # @param global [boolean] Is it a global key?
      # @return [Imperium::TransactionResponse]
      def set_keys(kvs, global: false)
        # global writes are routed to the configured data center when one is set
        opts = @data_center.present? && global ? { dc: @data_center } : {}
        @kv_client.transaction(opts) do |tx|
          kvs.each { |k, v| tx.set(full_key(k, global: global), v) }
        end
      end

      private

      # Returns the full key
      #
      # @param key [String, Symbol] The key
      # @param global [boolean] Is it a global key?
      # @return [String] Full key
      def full_key(key, global: false)
        key_array = [tree, service, environment]
        if global
          key_array.prepend('global')
        else
          key_array << cluster
        end
        key_array.concat([prefix, key]).compact.join("/")
      end

      # Recursively write a fetched Consul subtree into the local cache, one
      # entry per leaf key.
      def populate_cache(prefix, subtree, ttl)
        if subtree.is_a?(Hash)
          subtree.each do |(k, v)|
            populate_cache("#{prefix}/#{k}", v, ttl)
          end
        else
          LocalCache.write(CACHE_KEY_PREFIX + prefix, subtree, expires_in: ttl)
        end
      end
    end
  end
end
skip saving the full setting tree since we're not using it
Change-Id: I6ae8ce273680fe6695b48bf42108abb02383a404
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/233286
Tested-by: Service Cloud Jenkins <9144042a601061f88f1e1d7a1753ea3e2972119d@instructure.com>
Reviewed-by: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
QA-Review: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
Product-Review: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
# Copyright (C) 2017 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
module Canvas
  class DynamicSettings
    # A class for reading values from Consul
    #
    # @attr prefix [String] The prefix to be prepended to keys for querying.
    class PrefixProxy
      # The TTL for cached values if none is specified in the constructor
      DEFAULT_TTL = 5.minutes

      attr_reader :prefix, :tree, :service, :environment, :cluster

      # Build a new prefix proxy
      #
      # @param prefix [String] The prefix to be prepended to keys for querying.
      # @param tree [String] Which tree to use (config, private, store)
      # @param service [String] The service name to use (i.e. who owns the configuration). Defaults to canvas
      # @param environment [String] An optional environment to look for so that multiple Canvas environments can share Consul
      # @param cluster [String] An optional cluster to override region or global settings
      # @param default_ttl [ActiveSupport::Duration] The TTL to use for cached
      #   values when not specified to the fetch methods.
      # @param kv_client [Imperium::KV] The client to use for connecting to
      #   Consul, defaults to Imperium::KV.default_client
      # @param data_center [String] Optional data center used for global writes in #set_keys
      def initialize(prefix = nil,
                     tree: :config,
                     service: :canvas,
                     environment: nil,
                     cluster: nil,
                     default_ttl: DEFAULT_TTL,
                     kv_client: Imperium::KV.default_client,
                     data_center: nil)
        @prefix = prefix
        @tree = tree
        @service = service
        @environment = environment
        @cluster = cluster
        @default_ttl = default_ttl
        @kv_client = kv_client
        @data_center = data_center
      end

      # Fetch the value at the requested key using the prefix passed to the
      # initializer.
      #
      # This method is intended to retrieve a single key from the keyspace and
      # will not work for getting multiple values in a hash from the store. If
      # you need to access values nested deeper in the keyspace use #for_prefix
      # to move deeper in the nesting.
      #
      # @param key [String, Symbol] The key to fetch
      # @param ttl [ActiveSupport::Duration] The TTL for the value in the cache,
      #   defaults to value supplied to the constructor.
      # @return [String]
      # @return [nil] When no value was found
      def fetch(key, ttl: @default_ttl)
        # candidate keys, most specific first: cluster-scoped, then environment-scoped
        keys = [
          full_key(key),
          [tree, service, environment, prefix, key].compact.join("/"),
        ].uniq
        # fallbacks consulted only after the tree fetch misses: no environment,
        # then the two global forms
        fallback_keys = [
          [tree, service, prefix, key].compact.join("/"),
          full_key(key, global: true),
          ["global", tree, service, prefix, key].compact.join("/"),
        ].uniq - keys
        # try to get the local cache first right away
        keys.each do |full_key|
          result = LocalCache.fetch(CACHE_KEY_PREFIX + full_key)
          return result if result
        end
        # okay now pre-cache an entire tree
        tree_key = [tree, service, environment].compact.join("/")
        LocalCache.fetch(CACHE_KEY_PREFIX + tree_key + '/', expires_in: ttl) do
          result = @kv_client.get(tree_key, :recurse, :stale)
          if result&.status == 200
            populate_cache(tree_key, result.values, ttl) # only populate recursively when we missed
            true # we don't actually need to save the values in the cache anymore if we're not using them
          end
        end
        keys.each do |full_key|
          # these keys will have been populated (or not!) above; don't
          # actually try to fetch them
          result = LocalCache.fetch(CACHE_KEY_PREFIX + full_key)
          return result if result
        end
        # fallback keys live outside the pre-cached tree, so fetch them
        # individually (each cached under its own TTL)
        fallback_keys.each do |full_key|
          result = LocalCache.fetch(CACHE_KEY_PREFIX + full_key, expires_in: ttl) do
            result = @kv_client.get(full_key, :stale)
            result.values if result&.status == 200
          end
          return result if result
        end
        nil
      rescue Imperium::TimeoutError => exception
        # on Consul timeout, serve the most specific key from the stale local
        # cache when possible (reporting the error); otherwise re-raise
        LocalCache.fetch_without_expiration(CACHE_KEY_PREFIX + keys.first).tap do |val|
          if val
            Canvas::Errors.capture_exception(:consul, exception)
            val
          else
            raise
          end
        end
      end
      alias [] fetch

      # Extend the prefix from this instance returning a new one.
      #
      # @param prefix_extension [String]
      # @param default_ttl [ActiveSupport::Duration] The default TTL to use when
      #   fetching keys from the extended keyspace, defaults to the same value as
      #   the receiver
      # @return [ProxyPrefix]
      def for_prefix(prefix_extension, default_ttl: @default_ttl)
        self.class.new(
          "#{@prefix}/#{prefix_extension}",
          tree: tree,
          service: service,
          environment: environment,
          cluster: cluster,
          default_ttl: default_ttl,
          kv_client: @kv_client,
          data_center: @data_center
        )
      end

      # Set multiple key value pairs
      #
      # @param kvs [Hash] Key value pairs where the hash key is the key
      #   and the hash value is the value
      # @param global [boolean] Is it a global key?
      # @return [Imperium::TransactionResponse]
      def set_keys(kvs, global: false)
        # global writes are routed to the configured data center when one is set
        opts = @data_center.present? && global ? { dc: @data_center } : {}
        @kv_client.transaction(opts) do |tx|
          kvs.each { |k, v| tx.set(full_key(k, global: global), v) }
        end
      end

      private

      # Returns the full key
      #
      # @param key [String, Symbol] The key
      # @param global [boolean] Is it a global key?
      # @return [String] Full key
      def full_key(key, global: false)
        key_array = [tree, service, environment]
        if global
          key_array.prepend('global')
        else
          key_array << cluster
        end
        key_array.concat([prefix, key]).compact.join("/")
      end

      # Recursively write a fetched Consul subtree into the local cache, one
      # entry per leaf key.
      def populate_cache(prefix, subtree, ttl)
        if subtree.is_a?(Hash)
          subtree.each do |(k, v)|
            populate_cache("#{prefix}/#{k}", v, ttl)
          end
        else
          LocalCache.write(CACHE_KEY_PREFIX + prefix, subtree, expires_in: ttl)
        end
      end
    end
  end
end
|
#
# Author:: Xabier de Zuazo (<xabier@onddo.com>)
# Copyright:: Copyright (c) 2014 Onddo Labs, SL. (www.onddo.com)
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/mixin/params_validate'
require 'chef/encrypted_attribute/search_helper'
require 'chef/encrypted_attribute/cache_lru'
class Chef
  class EncryptedAttribute
    # Remote Node object to read and save its attributes
    class RemoteNode
      include ::Chef::Mixin::ParamsValidate
      include ::Chef::EncryptedAttribute::SearchHelper

      # @param name [String] the remote node name
      def initialize(name)
        name(name)
      end

      # Shared LRU cache for attribute reads; size 0 disables it.
      def self.cache
        # disabled by default
        @@cache ||= Chef::EncryptedAttribute::CacheLru.new(0)
      end

      # Get or set the node name.
      def name(arg = nil)
        set_or_return(
          :name,
          arg,
          kind_of: String
        )
      end

      # Read a nested attribute (path given as an array of keys) via search,
      # returning a cached value when available.
      def load_attribute(attr_ary, partial_search = true)
        assert_attribute_array(attr_ary, __method__)
        cache_key = cache_key(name, attr_ary)
        if self.class.cache.key?(cache_key)
          self.class.cache[cache_key]
        else
          keys = { 'value' => attr_ary }
          res = search(:node, "name:#{@name}", keys, 1, partial_search)
          self.class.cache[cache_key] =
            if res.is_a?(Array) && res[0].is_a?(Hash) && res[0].key?('value')
              res[0]['value']
            else
              nil
            end
        end
      end

      # Write a nested attribute on the node's normal attributes and save it.
      def save_attribute(attr_ary, value)
        assert_attribute_array(attr_ary, __method__)
        cache_key = cache_key(name, attr_ary)

        node = Chef::Node.load(name)
        last = attr_ary.pop
        # walk/create the intermediate Mashes down to the parent of `last`
        node_attr = attr_ary.reduce(node.normal) do |a, k|
          a[k] = Mash.new unless a.key?(k)
          a[k]
        end
        node_attr[last] = value

        node.save
        self.class.cache[cache_key] = value
      end

      # Delete a nested attribute; returns true when it existed.
      def delete_attribute(attr_ary)
        assert_attribute_array(attr_ary, __method__)
        cache_key = cache_key(name, attr_ary)

        node = Chef::Node.load(name)
        last = attr_ary.pop
        node_attr = attr_ary.reduce(node.normal) do |a, k|
          a.respond_to?(:key?) && a.key?(k) ? a[k] : nil
        end
        if node_attr.respond_to?(:key?) && node_attr.key?(last)
          node_attr.delete(last)
          node.save
          self.class.cache.delete(cache_key)
          true
        else
          false
        end
      end

      protected

      def cache_key(name, attr_ary)
        "#{name}:#{attr_ary.inspect}" # TODO: ok, this can be improved
      end

      # FIX: this validation was duplicated verbatim in load_attribute,
      # save_attribute and delete_attribute; extracted here. The `method`
      # parameter keeps the error message naming the public caller.
      def assert_attribute_array(attr_ary, method = __method__)
        return if attr_ary.is_a?(Array)
        fail ArgumentError,
             "#{self.class}##{method} attr_ary argument must be an "\
             "array of strings. You passed #{attr_ary.inspect}."
      end
    end
  end
end
RemoteNode class refactor
#
# Author:: Xabier de Zuazo (<xabier@onddo.com>)
# Copyright:: Copyright (c) 2014 Onddo Labs, SL. (www.onddo.com)
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/mixin/params_validate'
require 'chef/encrypted_attribute/search_helper'
require 'chef/encrypted_attribute/cache_lru'
class Chef
  class EncryptedAttribute
    # Remote Node object to read and save its attributes
    class RemoteNode
      include ::Chef::Mixin::ParamsValidate
      include ::Chef::EncryptedAttribute::SearchHelper

      # @param name [String] the remote node name
      def initialize(name)
        name(name)
      end

      # Shared LRU cache for attribute reads; size 0 disables it.
      def self.cache
        # disabled by default
        @@cache ||= Chef::EncryptedAttribute::CacheLru.new(0)
      end

      # Get or set the node name.
      def name(arg = nil)
        set_or_return(
          :name,
          arg,
          kind_of: String
        )
      end

      # Read a nested attribute (path given as an array of keys) via search,
      # returning a cached value when available.
      def load_attribute(attr_ary, partial_search = true)
        assert_attribute_array(attr_ary)
        cache_key = cache_key(name, attr_ary)
        return self.class.cache[cache_key] if self.class.cache.key?(cache_key)
        keys = { 'value' => attr_ary }
        res = search(:node, "name:#{@name}", keys, 1, partial_search)
        self.class.cache[cache_key] =
          if res.is_a?(Array) && res[0].is_a?(Hash) && res[0].key?('value')
            res[0]['value']
          else
            nil
          end
      end

      # Write a nested attribute on the node's normal attributes and save it.
      def save_attribute(attr_ary, value)
        assert_attribute_array(attr_ary)
        cache_key = cache_key(name, attr_ary)

        node = Chef::Node.load(name)
        last = attr_ary.pop
        # walk/create the intermediate Mashes down to the parent of `last`
        node_attr = attr_ary.reduce(node.normal) do |a, k|
          a[k] = Mash.new unless a.key?(k)
          a[k]
        end
        node_attr[last] = value

        node.save
        self.class.cache[cache_key] = value
      end

      # Delete a nested attribute; returns true when it existed.
      def delete_attribute(attr_ary)
        assert_attribute_array(attr_ary)
        cache_key = cache_key(name, attr_ary)

        node = Chef::Node.load(name)
        last = attr_ary.pop
        node_attr = attr_ary.reduce(node.normal) do |a, k|
          a.respond_to?(:key?) && a.key?(k) ? a[k] : nil
        end
        if node_attr.respond_to?(:key?) && node_attr.key?(last)
          node_attr.delete(last)
          node.save
          self.class.cache.delete(cache_key)
          true
        else
          false
        end
      end

      protected

      def cache_key(name, attr_ary)
        "#{name}:#{attr_ary.inspect}" # TODO: ok, this can be improved
      end

      # Raise ArgumentError unless attr_ary is an Array.
      # NOTE(review): since the refactor, `__method__` evaluates to
      # :assert_attribute_array, so the error no longer names the public
      # method (load/save/delete_attribute) that received the bad argument
      # — confirm this message change is intended.
      def assert_attribute_array(attr_ary)
        unless attr_ary.is_a?(Array)
          fail ArgumentError,
               "#{self.class}##{__method__} attr_ary argument must be an "\
               "array of strings. You passed #{attr_ary.inspect}."
        end
      end
    end
  end
end
|
from learn ruby the hard way
require 'open-uri'

# Remote word list used to generate random identifiers for the drill.
WORD_URL = "http://learncodethehardway.org/words.txt"
WORDS = []

# Code-snippet template => English description template. Placeholders:
# ### = class name, *** = object/function name, @@@ = parameter list.
# NOTE(review): the mixed ##/### placeholders below reproduce the original
# exercise text verbatim.
PHRASES = {
  "class ### < ###\nend" =>
    "Make a class named ## that is-a ###.",
  "class ###\n\tdef initialize(@@@)\n\tend\nend" =>
    "class ## has-a initialize that takes @@@ parameters.",
  "class ##\n\tdef ***(@@@)\n\tend\nend" =>
    "class ### has-a function named *** thats takes @@@ parameters.",
  "*** = ##.new()" =>
    "Set *** to an instance of class ###.",
  "***.***(@@@)" =>
    "From *** get the *** function, and call it with parameters @@@.",
  "***.*** = '***'" =>
    "From *** get the *** attribute and set it to '***'."
}

# With "english" as the first argument, show the description and quiz the code.
PHRASE_FIRST = ARGV[0] == "english"

# FIX: Kernel#open on a URL was deprecated in Ruby 2.7 and removed in 3.0;
# URI.open (provided by open-uri) is the supported spelling.
URI.open(WORD_URL) {|f|
  f.each_line {|word| WORDS.push(word.chomp)}
}
# Pull one random word (popped from rand_words) for every pattern match in
# the snippet, optionally capitalized. The list is doubled so the question
# and the answer consume identical names.
def craft_names(rand_words, snippet, pattern, caps=false)
  picked = snippet.scan(pattern).map do
    candidate = rand_words.pop()
    caps ? candidate.capitalize : candidate
  end
  picked + picked
end
# Build one comma-joined parameter list (2-4 random words) per pattern match
# plus one extra, doubling the result so question and answer line up.
def craft_params(rand_words, snippet, pattern)
  lists = (0..snippet.scan(pattern).length).map do
    how_many = rand(3) + 1
    (0..how_many).map { rand_words.pop() }.join(', ')
  end
  lists + lists
end
# Render a [code, english] pair from a snippet/phrase template pair, filling
# the ###, *** and @@@ placeholders in both strings with the same randomly
# chosen words.
def convert(snippet, phrase)
  rand_words = WORDS.sort_by {rand}
  class_names = craft_names(rand_words, snippet, /###/, true)
  other_names = craft_names(rand_words, snippet, /\*\*\*/)
  param_names = craft_params(rand_words, snippet, /@@@/)

  [snippet, phrase].map do |sentence|
    # gsub copies the sentence while substituting fake class names
    filled = sentence.gsub(/###/) { class_names.pop }
    # fake other names
    filled.gsub!(/\*\*\*/) { other_names.pop }
    # fake parameter lists
    filled.gsub!(/@@@/) { param_names.pop }
    filled
  end
end
# Quiz loop: cycle through the phrases in random order forever, printing each
# question and revealing the answer after the user presses Enter.
loop do
  snippets = PHRASES.keys().sort_by {rand}
  for snippet in snippets
    phrase = PHRASES[snippet]
    question, answer = convert(snippet, phrase)
    # in "english" mode, show the description and quiz the code form
    if PHRASE_FIRST
      question, answer = answer, question
    end
    print question, "\n\n"
    # gets returns nil on EOF (Ctrl-D), which ends the drill
    exit(0) unless $stdin.gets
    puts "\nAnswer: %s\n\n" % answer
  end
end
|
# Put ./lib at the front of the load path so the openreqs/* requires below
# resolve against this checkout.
lib = File.expand_path('../lib', __FILE__)
$:.unshift lib unless $:.include?(lib)

require 'sinatra'
require 'haml'
require 'mongo'
require 'time'
require 'qu-mongo'
require 'openreqs/jobs'
require 'openreqs/peers'
require 'openreqs/content'
require 'openreqs/diff'

configure do
  # main application database, plus a companion "<name>-qu" database used by
  # the Qu background job queue
  set :mongo, Mongo::Connection.new.db("openreqs")
  mime_type :pem, "application/x-pem-file"
  Qu.configure do |c|
    c.connection = Mongo::Connection.new.db(settings.mongo.name + "-qu")
  end
end
helpers do
  # shortcut to the configured Mongo database
  def mongo; settings.mongo end
end

set :views, Proc.new { File.join(root, "views", "default") }

# responses default to HTML in UTF-8 unless a route overrides content_type
before {content_type :html, :charset => 'utf-8'}
# --- Peer administration (/a/*) -------------------------------------------

# This server's own public key in PEM form.
get '/a/key.pem' do
  self_peer = SelfPeer.new(mongo, :host => request.host)
  content_type :pem
  self_peer.key
end

# This server's peer descriptor as JSON.
get '/a.json' do
  self_peer = SelfPeer.new(mongo, :host => request.host)
  content_type :json
  self_peer.to_json
end

# List known peers and pending registration requests.
get '/a/peers' do
  @peers = Peer.all(mongo)
  @requests = mongo["peers.register"].find
  haml :peers
end

# Discover a new peer in the background.
post '/a/peers/add' do
  Qu.enqueue Find, params[:server]
  redirect to("/a/peers")
end

# Approve the selected registration requests: move them from the
# "peers.register" collection into "peers".
post '/a/peers' do
  users = params[:users] || []
  peer_requests = mongo["peers.register"].find("_name" => {"$in" => users})
  peer_requests.each {|peer_request|
    peer = {
      "_name" => peer_request["_name"],
      "key" => peer_request["key"],
      "local_url" => peer_request["local_url"]
    }
    mongo["peers.register"].remove("_id" => peer_request["_id"])
    mongo["peers"].insert peer
  }
  redirect to("/a/peers")
end

# Incoming registration from a remote peer; stored for later approval.
# Replies with a plain-text OK/KO protocol.
post '/a/peers/:name/register' do
  content_type :txt
  name, local_url, key = params[:name], params[:local_url], params[:key]
  error 400, "KO No Local URL" if local_url.nil?
  # the key arrives as a multipart file upload (a Hash with a :tempfile)
  if key.nil? || !key.is_a?(Hash) || key[:tempfile].nil?
    error 400, "KO No key"
  end
  if Peer.new(mongo, :name => params[:name]).exist?
    error 500, "KO Peer already registered"
  end

  peer_request = {"date" => Time.now.utc,
    "ip" => request.ip, "user_agent" => request.user_agent,
    "_name" => name, "local_url" => local_url,
    "key" => key[:tempfile].read
  }
  mongo["peers.register"].insert peer_request
  "OK"
end

# Verify a signature made by a registered peer over the remaining params.
post '/a/peers/:name/authentication' do
  content_type :txt
  peer = Peer.new(mongo, :name => params[:name])
  error 404, "KO peer #{params[:name]} unknown" if !peer.exist?

  sig = params.delete("signature")
  if peer.verify(sig, params)
    "OK"
  else
    error 400, "KO Bad Signature"
  end
end

# Sign an authentication challenge on behalf of this server.
get '/a/peers/authenticate' do
  @name, @peer, @session, @return_to = params[:name], params[:peer], params[:session], params[:return_to]
  self_peer = SelfPeer.new(mongo, :host => request.host)
  @return_params = {:name => @name, :session => @session}
  @return_params["signature"] = self_peer.sign(@return_params)
  haml :peers_authenticate
end

# A registered peer's public key in PEM form.
get '/a/peers/:name.pem' do
  peer = Peer.new(mongo, :name => params[:name])
  not_found if !peer.exist?
  content_type :pem
  peer.key
end

# Peer detail page: keep only the document versions the peer has that are
# newer than our latest local revision of the same document.
get '/a/peers/:name' do
  @name = params[:name]
  @peer = Peer.new(mongo, :name => @name)
  not_found if !@peer.exist?

  @versions = @peer["docs"] || {}
  self_versions = Hash.new {|h,k| h[k] = []}
  mongo["docs"].find(
    {"_name" => {"$in" => @versions.keys}},
    {:fields => ["_name", "date"]}
  ).each {|doc|
    self_versions[doc["_name"]] << doc["date"]
  }
  @versions.each {|k,versions|
    self_max = self_versions[k].max
    @versions[k] = versions.select {|v| v > self_max}
  }

  haml :peer
end

# Pull new document versions from a peer in the background.
post '/a/peers/:name/sync' do
  Qu.enqueue Sync, params[:name]
  redirect to("/a/peers/#{params[:name]}")
end

# Minimal inline form asking which server to clone.
# NOTE(review): haml nesting below was reconstructed; the extraction that
# produced this excerpt stripped the template's significant indentation.
get '/a/clone' do
  haml %q{
%div
  Enter the Openreqs server address:
  %form(method="post")
    %input(type="text" name="url")
    %input#clone(type="submit" value="Clone")
}
end

# Clone a whole remote server in the background.
post '/a/clone' do
  Qu.enqueue Clone, params[:url]
  ""
end

get '' do
  redirect to('/')
end
# --- Documents (/d/*) ------------------------------------------------------

get '/d/index' do
  redirect to('/')
end

# Home page: render the index document.
get '/' do
  @doc = DocIndex.new(mongo, :context => self)
  @name = @doc.name
  haml :index
end

# All document names as JSON.
get '/d.json' do
  content_type :json
  mongo["docs"].find({}, {:fields => "_name"}).map {|d| d["_name"]}.uniq.to_json
end

# Plain-text rendering of the latest version of a document.
get '/d/:doc.txt' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?
  content_type :txt
  @doc.to_txt
end

# JSON rendering; ?with_history=1 includes all revisions.
get '/d/:doc.json' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?
  content_type :json
  params[:with_history] == "1" ? @doc.to_json_with_history : @doc.to_json
end

# HTML view of the latest version of a document.
get '/d/:doc' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?
  @name = @doc.name
  haml :doc
end

# New-document form.
get '/d/:doc/add' do
  @name = params[:doc]
  haml :doc_add
end

post '/d/:doc/add' do
  doc = {"_name" => params[:doc], "_content" => params[:content]}
  mongo["docs"].insert doc
  redirect to('/d/' + params[:doc])
end

# Edit form for an existing document.
get '/d/:doc/edit' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?
  @name = @doc.name
  @content = @doc.content
  cache_control :no_cache
  haml :doc_edit
end

# Saving an edit inserts a new revision: _id is dropped so Mongo allocates a
# fresh document for the new date/content.
post '/d/:doc/edit' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  doc_data = @doc.to_hash
  doc_data.delete "_id"
  doc_data["date"] = Time.now.utc
  doc_data["_content"] = params[:content]
  mongo["docs"].save doc_data
  redirect to('/d/' + params[:doc])
end

# Revision dates for a document — including later revisions of the
# requirements it references — newest first, as JSON.
get '/d/:doc/history.json' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?

  @dates = mongo["docs"].find({"_name" => params[:doc]}, {:fields => "date", :sort => ["date", :asc]}).map {|doc| doc["date"]}
  req_names = CreolaExtractURL.new(@doc["_content"]).to_a
  @dates.concat mongo["requirements"].find({
    "_name" => {"$in" => req_names},
    "date"=> {"$gt" => @dates[0]}
  }, {:fields => "date"}).map {|req| req["date"]}
  @dates = @dates.sort.reverse

  content_type :json
  @dates.to_json
end

# Same computation as history.json, rendered as HTML.
get '/d/:doc/history' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?

  @dates = mongo["docs"].find({"_name" => params[:doc]}, {:fields => "date", :sort => ["date", :asc]}).map {|doc| doc["date"]}
  req_names = CreolaExtractURL.new(@doc["_content"]).to_a
  @dates.concat mongo["requirements"].find({
    "_name" => {"$in" => req_names},
    "date"=> {"$gt" => @dates[0]}
  }, {:fields => "date"}).map {|req| req["date"]}
  @dates = @dates.sort.reverse

  @name = params[:doc]
  haml :doc_history
end

# Versioned renderings: the +1 second makes the given timestamp inclusive.
get '/d/:doc/:date.txt' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @doc = Doc.new(mongo, params[:doc], :date => @date, :context => self)
  not_found if !@doc.exist?
  content_type :txt
  @doc.to_txt
end

get '/d/:doc/:date.json' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @doc = Doc.new(mongo, params[:doc], :date => @date, :context => self)
  not_found if !@doc.exist?
  content_type :json
  @doc.to_json
end

get '/d/:doc/:date' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @doc = Doc.new(mongo, params[:doc], :date => @date, :context => self)
  not_found if !@doc.exist?
  @name = params[:doc]
  haml :doc_version
end

# Diff between a version and either ?compare=<date> or, by default, the
# state just before that version.
get '/d/:doc/:date/diff' do
  @date = @date_a = Time.xmlschema(params[:date]) + 1 rescue not_found
  @doc_a = Doc.new(mongo, params[:doc], :date => @date_a, :context => self)
  not_found if !@doc_a.exist?

  @date_param = Time.xmlschema(params[:compare]) + 1 rescue nil
  @date_b = @date_param || (@date_a - 1)
  @doc_b = Doc.new(mongo, params[:doc], :date => @date_b, :context => self)

  @name = params[:doc]
  @diff = DocDiff.new(@doc_b, @doc_a, :context => self)
  haml :doc_diff
end
# --- Requirements (/r/*) ---------------------------------------------------

# All requirement names as JSON.
get '/r.json' do
  content_type :json
  mongo["requirements"].find({}, {:fields => "_name"}).map {|d| d["_name"]}.uniq.to_json
end

# New-requirement form.
get '/r/:req/add' do
  haml :doc_req_add
end
# Create a new requirement revision.
# FIX: the handler read params[:doc], which this route never binds (the path
# captures :req), so the requirement was inserted with a nil _name and the
# redirect ('/r/' + nil) raised TypeError. Use params[:req], matching the
# route pattern — likely a copy-paste from the /d/:doc/add handler.
post '/r/:req/add' do
  req = {"_name" => params[:req], "_content" => params[:content], "date" => Time.now.utc}
  mongo["requirements"].insert req
  redirect to('/r/' + params[:req])
end
get '/r/:req.json' do
@req = Req.new(mongo, params[:req], :context => self)
not_found if !@req.exist?
content_type :json
params[:with_history] == "1" ? @req.to_json_with_history : @req.to_json
end
get '/r/:req.txt' do
@req = Req.new(mongo, params[:req], :context => self)
not_found if !@req.exist?
content_type :txt
@req.to_txt
end
get '/r/:req' do
@req = Req.new(mongo, params[:req], :context => self)
not_found if !@req.exist?
latest_doc = {}
mongo["docs"].find({}, {:fields => ["_name", "date"], :sort => ["date", :desc]}).each {|doc|
latest_doc[doc["_name"]] ||= doc
}
latest = latest_doc.map {|k,v| v["_id"]}
@origin = []
mongo["docs"].find({"_id" => {"$in" => latest}}, {:fields => ["_name", "_content"]}).each {|doc|
if CreolaExtractURL.new(doc["_content"]).to_a.include? params[:req]
@origin << doc["_name"]
end
}
ReqHTML.new(@req, :context => self).to_html
end
# Form to edit a requirement.
get '/r/:req/edit' do
  @req = Req.new(mongo, params[:req], :context => self)
  not_found if !@req.exist?
  cache_control :no_cache
  haml :doc_req_edit
end

# Revision dates of a requirement as JSON (newest first).
get '/r/:req/history.json' do
  @req = Req.new(mongo, params[:req], :context => self)
  not_found if !@req.exist?
  @dates = mongo["requirements"].find({"_name" => params[:req]}, {:fields => "date", :sort => ["date", :desc]}).map {|req| req["date"]}
  content_type :json
  @dates.to_json
end

# Revision-history page for a requirement.
get '/r/:req/history' do
  @req = Req.new(mongo, params[:req], :context => self)
  not_found if !@req.exist?
  @dates = mongo["requirements"].find({"_name" => params[:req]}, {:fields => "date", :sort => ["date", :desc]}).map {|req| req["date"]}
  @name = params[:req]
  haml :req_history
end

# JSON rendering of a requirement as of a given date. The "+ 1" pushes
# the cutoff just past the stored timestamp so the revision recorded at
# exactly params[:date] is included.
get '/r/:req/:date.json' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @req = Req.new(mongo, params[:req], :date => @date, :context => self)
  not_found if !@req.exist?
  content_type :json
  @req.to_json
end

# Plain-text rendering of a requirement as of a given date.
get '/r/:req/:date.txt' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @req = Req.new(mongo, params[:req], :date => @date, :context => self)
  not_found if !@req.exist?
  content_type :txt
  @req.to_txt
end

# HTML page for a requirement as of a given date.
get '/r/:req/:date' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @req = Req.new(mongo, params[:req], :date => @date, :context => self)
  not_found if !@req.exist?
  ReqHTML.new(@req, :context => self).to_html
end

# Diff of a requirement between params[:date] and an optional ?compare=
# date (defaults to just before params[:date]).
get '/r/:req/:date/diff' do
  @date = @date_a = Time.xmlschema(params[:date]) + 1 rescue not_found
  @doc_a = Req.new(mongo, params[:req], :date => @date_a, :context => self)
  not_found if !@doc_a.exist?
  @date_param = Time.xmlschema(params[:compare]) + 1 rescue nil
  @date_b = @date_param || (@date_a - 1)
  @doc_b = Req.new(mongo, params[:req], :date => @date_b, :context => self)
  @name = params[:req]
  @diff = ReqDiff.new(@doc_b, @doc_a, :context => self)
  haml :req_diff
end

# Save a requirement edit as a new revision (revisions are append-only:
# the _id is dropped so save inserts a fresh record). An optional
# key/value pair can add or (with an empty value) remove one attribute.
post '/r/:req/edit' do
  @req = Req.new(mongo, params[:req], :context => self)
  req_data = @req.to_hash
  req_data.delete "_id"
  req_data["date"] = Time.now.utc
  req_data["_content"] = params[:content]
  if !params[:key].empty?
    if !params[:value].empty?
      req_data[params[:key]] = params[:value]
    else
      req_data.delete params[:key]
    end
  end
  mongo["requirements"].save req_data
  redirect to('/r/' + params[:req])
end
Greater time precision in JSON history output
# openreqs: a Sinatra front-end for requirement documents stored in
# MongoDB. Background work (peer discovery, sync, clone) runs via Qu.
lib = File.expand_path('../lib', __FILE__)
$:.unshift lib unless $:.include?(lib)

require 'sinatra'
require 'haml'
require 'mongo'
require 'time'
require 'qu-mongo'
require 'openreqs/jobs'
require 'openreqs/peers'
require 'openreqs/content'
require 'openreqs/diff'

configure do
  # Main application database, plus a separate "<name>-qu" database for jobs.
  set :mongo, Mongo::Connection.new.db("openreqs")
  mime_type :pem, "application/x-pem-file"
  Qu.configure do |c|
    c.connection = Mongo::Connection.new.db(settings.mongo.name + "-qu")
  end
end

helpers do
  # Shortcut to the configured MongoDB database.
  def mongo; settings.mongo end
end

set :views, Proc.new { File.join(root, "views", "default") }
# Every response defaults to UTF-8 HTML unless a route overrides it.
before {content_type :html, :charset => 'utf-8'}
# Public key of this node in PEM form.
get '/a/key.pem' do
  self_peer = SelfPeer.new(mongo, :host => request.host)
  content_type :pem
  self_peer.key
end

# JSON description of this node.
get '/a.json' do
  self_peer = SelfPeer.new(mongo, :host => request.host)
  content_type :json
  self_peer.to_json
end

# Admin page listing known peers and pending registration requests.
get '/a/peers' do
  @peers = Peer.all(mongo)
  @requests = mongo["peers.register"].find
  haml :peers
end

# Queue a background job to discover a peer by server address.
post '/a/peers/add' do
  Qu.enqueue Find, params[:server]
  redirect to("/a/peers")
end

# Approve the selected registration requests: move them from the
# peers.register collection into the peers collection.
post '/a/peers' do
  users = params[:users] || []
  peer_requests = mongo["peers.register"].find("_name" => {"$in" => users})
  peer_requests.each {|peer_request|
    peer = {
      "_name" => peer_request["_name"],
      "key" => peer_request["key"],
      "local_url" => peer_request["local_url"]
    }
    mongo["peers.register"].remove("_id" => peer_request["_id"])
    mongo["peers"].insert peer
  }
  redirect to("/a/peers")
end

# Remote peer registration endpoint. Responses are plain text starting
# with "OK" or "KO" so the calling peer can parse them easily.
post '/a/peers/:name/register' do
  content_type :txt
  name, local_url, key = params[:name], params[:local_url], params[:key]
  error 400, "KO No Local URL" if local_url.nil?
  # The key must arrive as a multipart file upload (a Hash with :tempfile).
  if key.nil? || !key.is_a?(Hash) || key[:tempfile].nil?
    error 400, "KO No key"
  end
  if Peer.new(mongo, :name => params[:name]).exist?
    error 500, "KO Peer already registered"
  end
  peer_request = {"date" => Time.now.utc,
    "ip" => request.ip, "user_agent" => request.user_agent,
    "_name" => name, "local_url" => local_url,
    "key" => key[:tempfile].read
  }
  mongo["peers.register"].insert peer_request
  "OK"
end
# Verify a signature made by a registered peer over the remaining params.
post '/a/peers/:name/authentication' do
  content_type :txt
  peer = Peer.new(mongo, :name => params[:name])
  error 404, "KO peer #{params[:name]} unknown" if !peer.exist?
  sig = params.delete("signature")
  if peer.verify(sig, params)
    "OK"
  else
    error 400, "KO Bad Signature"
  end
end

# Cross-site authentication page: signs the (name, session) pair with
# this node's own key so the remote peer can verify it on return.
get '/a/peers/authenticate' do
  @name, @peer, @session, @return_to = params[:name], params[:peer], params[:session], params[:return_to]
  self_peer = SelfPeer.new(mongo, :host => request.host)
  @return_params = {:name => @name, :session => @session}
  @return_params["signature"] = self_peer.sign(@return_params)
  haml :peers_authenticate
end

# Public key of a registered peer in PEM form.
get '/a/peers/:name.pem' do
  peer = Peer.new(mongo, :name => params[:name])
  not_found if !peer.exist?
  content_type :pem
  peer.key
end

# Peer detail page: shows, per document, the revision dates the peer has
# that are newer than anything stored locally.
get '/a/peers/:name' do
  @name = params[:name]
  @peer = Peer.new(mongo, :name => @name)
  not_found if !@peer.exist?
  @versions = @peer["docs"] || {}
  # Collect the local revision dates for each document the peer knows.
  self_versions = Hash.new {|h,k| h[k] = []}
  mongo["docs"].find(
    {"_name" => {"$in" => @versions.keys}},
    {:fields => ["_name", "date"]}
  ).each {|doc|
    self_versions[doc["_name"]] << doc["date"]
  }
  # Keep only the peer revisions newer than our newest local one.
  @versions.each {|k,versions|
    self_max = self_versions[k].max
    @versions[k] = versions.select {|v| v > self_max}
  }
  haml :peer
end

# Queue a background synchronization with the given peer.
post '/a/peers/:name/sync' do
  Qu.enqueue Sync, params[:name]
  redirect to("/a/peers/#{params[:name]}")
end

# Minimal inline Haml form to clone a remote openreqs server.
# (The template string is kept byte-for-byte: Haml is whitespace-sensitive.)
get '/a/clone' do
  haml %q{
%div
Enter the Openreqs server address:
%form(method="post")
%input(type="text" name="url")
%input#clone(type="submit" value="Clone")
}
end

# Queue a background clone job; returns an empty body.
post '/a/clone' do
  Qu.enqueue Clone, params[:url]
  ""
end
# Redirect the empty path and the index-document URL to the root page.
get '' do
  redirect to('/')
end

get '/d/index' do
  redirect to('/')
end

# Root page: render the index document.
get '/' do
  @doc = DocIndex.new(mongo, :context => self)
  @name = @doc.name
  haml :index
end

# List all document names as JSON.
get '/d.json' do
  content_type :json
  mongo["docs"].find({}, {:fields => "_name"}).map {|d| d["_name"]}.uniq.to_json
end

# Plain-text rendering of the latest revision of a document.
get '/d/:doc.txt' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?
  content_type :txt
  @doc.to_txt
end

# JSON rendering of the latest revision
# (optionally with full history when ?with_history=1).
get '/d/:doc.json' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?
  content_type :json
  params[:with_history] == "1" ? @doc.to_json_with_history : @doc.to_json
end

# HTML page for the latest revision of a document.
get '/d/:doc' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?
  @name = @doc.name
  haml :doc
end

# Form to create a new document.
get '/d/:doc/add' do
  @name = params[:doc]
  haml :doc_add
end

# Create the first revision of a document.
post '/d/:doc/add' do
  doc = {"_name" => params[:doc], "_content" => params[:content]}
  mongo["docs"].insert doc
  redirect to('/d/' + params[:doc])
end

# Form to edit a document.
get '/d/:doc/edit' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?
  @name = @doc.name
  @content = @doc.content
  cache_control :no_cache
  haml :doc_edit
end

# Save a document edit as a new revision (revisions are append-only:
# the _id is dropped so save inserts a fresh record).
post '/d/:doc/edit' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  doc_data = @doc.to_hash
  doc_data.delete "_id"
  doc_data["date"] = Time.now.utc
  doc_data["_content"] = params[:content]
  mongo["docs"].save doc_data
  redirect to('/d/' + params[:doc])
end

# Revision dates of a document (and of the requirements it links to)
# as JSON, newest first.
get '/d/:doc/history.json' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?
  @dates = mongo["docs"].find({"_name" => params[:doc]}, {:fields => "date", :sort => ["date", :asc]}).map {|doc| doc["date"]}
  req_names = CreolaExtractURL.new(@doc["_content"]).to_a
  @dates.concat mongo["requirements"].find({
    "_name" => {"$in" => req_names},
    "date"=> {"$gt" => @dates[0]}
  }, {:fields => "date"}).map {|req| req["date"]}
  @dates = @dates.sort.reverse
  content_type :json
  # xmlschema(2) keeps two fractional digits so close revisions stay distinct.
  @dates.map {|d| d.xmlschema(2)}.to_json
end
# Render the revision-history page for a document. The history includes
# the document's own revision dates plus the revision dates of every
# requirement it links to (only those newer than the first doc revision).
get '/d/:doc/history' do
  @doc = Doc.new(mongo, params[:doc], :context => self)
  not_found if !@doc.exist?
  @dates = mongo["docs"].find({"_name" => params[:doc]}, {:fields => "date", :sort => ["date", :asc]}).map {|doc| doc["date"]}
  req_names = CreolaExtractURL.new(@doc["_content"]).to_a
  @dates.concat mongo["requirements"].find({
    "_name" => {"$in" => req_names},
    "date"=> {"$gt" => @dates[0]}
  }, {:fields => "date"}).map {|req| req["date"]}
  @dates = @dates.sort.reverse
  @name = params[:doc]
  haml :doc_history
end

# Plain-text rendering of a document as of a given date.
# The "+ 1" pushes the cutoff just past the stored timestamp so the
# revision recorded at exactly params[:date] is included.
get '/d/:doc/:date.txt' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @doc = Doc.new(mongo, params[:doc], :date => @date, :context => self)
  not_found if !@doc.exist?
  content_type :txt
  @doc.to_txt
end

# JSON rendering of a document as of a given date.
get '/d/:doc/:date.json' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @doc = Doc.new(mongo, params[:doc], :date => @date, :context => self)
  not_found if !@doc.exist?
  content_type :json
  @doc.to_json
end

# HTML page for a document as of a given date.
get '/d/:doc/:date' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @doc = Doc.new(mongo, params[:doc], :date => @date, :context => self)
  not_found if !@doc.exist?
  @name = params[:doc]
  haml :doc_version
end

# Diff of a document between params[:date] and an optional ?compare=
# date (defaults to just before params[:date]).
get '/d/:doc/:date/diff' do
  @date = @date_a = Time.xmlschema(params[:date]) + 1 rescue not_found
  @doc_a = Doc.new(mongo, params[:doc], :date => @date_a, :context => self)
  not_found if !@doc_a.exist?
  @date_param = Time.xmlschema(params[:compare]) + 1 rescue nil
  @date_b = @date_param || (@date_a - 1)
  @doc_b = Doc.new(mongo, params[:doc], :date => @date_b, :context => self)
  @name = params[:doc]
  @diff = DocDiff.new(@doc_b, @doc_a, :context => self)
  haml :doc_diff
end

# List all requirement names as JSON.
get '/r.json' do
  content_type :json
  mongo["requirements"].find({}, {:fields => "_name"}).map {|d| d["_name"]}.uniq.to_json
end

# Form to create a new requirement.
get '/r/:req/add' do
  haml :doc_req_add
end

# Create the first revision of a requirement.
# NOTE(review): the route parameter is :req, yet params[:doc] is used for
# the name and the redirect — this only works if the :doc_req_add form
# posts a "doc" field; verify against the view, otherwise _name is nil.
post '/r/:req/add' do
  req = {"_name" => params[:doc], "_content" => params[:content], "date" => Time.now.utc}
  mongo["requirements"].insert req
  redirect to('/r/' + params[:doc])
end

# JSON rendering of the latest revision of a requirement
# (optionally with full history when ?with_history=1).
get '/r/:req.json' do
  @req = Req.new(mongo, params[:req], :context => self)
  not_found if !@req.exist?
  content_type :json
  params[:with_history] == "1" ? @req.to_json_with_history : @req.to_json
end

# Plain-text rendering of the latest revision of a requirement.
get '/r/:req.txt' do
  @req = Req.new(mongo, params[:req], :context => self)
  not_found if !@req.exist?
  content_type :txt
  @req.to_txt
end

# HTML page for a requirement. Also computes @origin: names of the
# latest revision of every document whose content links to this requirement.
get '/r/:req' do
  @req = Req.new(mongo, params[:req], :context => self)
  not_found if !@req.exist?
  # Keep only the newest revision of each document name.
  latest_doc = {}
  mongo["docs"].find({}, {:fields => ["_name", "date"], :sort => ["date", :desc]}).each {|doc|
    latest_doc[doc["_name"]] ||= doc
  }
  latest = latest_doc.map {|k,v| v["_id"]}
  @origin = []
  mongo["docs"].find({"_id" => {"$in" => latest}}, {:fields => ["_name", "_content"]}).each {|doc|
    if CreolaExtractURL.new(doc["_content"]).to_a.include? params[:req]
      @origin << doc["_name"]
    end
  }
  ReqHTML.new(@req, :context => self).to_html
end
# Form to edit a requirement.
get '/r/:req/edit' do
  @req = Req.new(mongo, params[:req], :context => self)
  not_found if !@req.exist?
  cache_control :no_cache
  haml :doc_req_edit
end

# Revision dates of a requirement as JSON (newest first).
get '/r/:req/history.json' do
  @req = Req.new(mongo, params[:req], :context => self)
  not_found if !@req.exist?
  @dates = mongo["requirements"].find({"_name" => params[:req]}, {:fields => "date", :sort => ["date", :desc]}).map {|req| req["date"]}
  content_type :json
  # xmlschema(2) keeps two fractional digits so close revisions stay distinct.
  @dates.map {|d| d.xmlschema(2)}.to_json
end

# Revision-history page for a requirement.
get '/r/:req/history' do
  @req = Req.new(mongo, params[:req], :context => self)
  not_found if !@req.exist?
  @dates = mongo["requirements"].find({"_name" => params[:req]}, {:fields => "date", :sort => ["date", :desc]}).map {|req| req["date"]}
  @name = params[:req]
  haml :req_history
end

# JSON rendering of a requirement as of a given date. The "+ 1" pushes
# the cutoff just past the stored timestamp so the revision recorded at
# exactly params[:date] is included.
get '/r/:req/:date.json' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @req = Req.new(mongo, params[:req], :date => @date, :context => self)
  not_found if !@req.exist?
  content_type :json
  @req.to_json
end

# Plain-text rendering of a requirement as of a given date.
get '/r/:req/:date.txt' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @req = Req.new(mongo, params[:req], :date => @date, :context => self)
  not_found if !@req.exist?
  content_type :txt
  @req.to_txt
end

# HTML page for a requirement as of a given date.
get '/r/:req/:date' do
  @date = Time.xmlschema(params[:date]) + 1 rescue not_found
  @req = Req.new(mongo, params[:req], :date => @date, :context => self)
  not_found if !@req.exist?
  ReqHTML.new(@req, :context => self).to_html
end

# Diff of a requirement between params[:date] and an optional ?compare=
# date (defaults to just before params[:date]).
get '/r/:req/:date/diff' do
  @date = @date_a = Time.xmlschema(params[:date]) + 1 rescue not_found
  @doc_a = Req.new(mongo, params[:req], :date => @date_a, :context => self)
  not_found if !@doc_a.exist?
  @date_param = Time.xmlschema(params[:compare]) + 1 rescue nil
  @date_b = @date_param || (@date_a - 1)
  @doc_b = Req.new(mongo, params[:req], :date => @date_b, :context => self)
  @name = params[:req]
  @diff = ReqDiff.new(@doc_b, @doc_a, :context => self)
  haml :req_diff
end

# Save a requirement edit as a new revision (revisions are append-only:
# the _id is dropped so save inserts a fresh record). An optional
# key/value pair can add or (with an empty value) remove one attribute.
post '/r/:req/edit' do
  @req = Req.new(mongo, params[:req], :context => self)
  req_data = @req.to_hash
  req_data.delete "_id"
  req_data["date"] = Time.now.utc
  req_data["_content"] = params[:content]
  if !params[:key].empty?
    if !params[:value].empty?
      req_data[params[:key]] = params[:value]
    else
      req_data.delete params[:key]
    end
  end
  mongo["requirements"].save req_data
  redirect to('/r/' + params[:req])
end
|
# Homebrew formula for proxychains-ng: a preloader that hooks network
# calls so they are routed through a chain of proxies.
class ProxychainsNg < Formula
  desc "Hook preloader"
  homepage "https://sourceforge.net/projects/proxychains-ng/"
  url "https://downloads.sourceforge.net/project/proxychains-ng/proxychains-ng-4.11.tar.bz2"
  sha256 "dcc4149808cd1fb5d9663cc09791f478805816b1f017381f424414c47f6376b6"
  head "https://github.com/rofl0r/proxychains-ng.git"

  # Pre-built binary checksums, one per OS X release.
  bottle do
    sha256 "dc6af4fa724352b42370b04e9c15cb2071436a0381d36c90a82173f930309112" => :el_capitan
    sha256 "2bfb53e389b9a3222fb50632e72b7da85262ff9abf69bb477355fff55661c51b" => :yosemite
    sha256 "5f88b3f9d16fd08c182b0c8c9490dc6328e84e597c9ccc2d80fe44fc252512ae" => :mavericks
    sha256 "ec8067e606f210e88b501a8e3e8951a1f7082fb8f86356d1179ec791c02aa621" => :mountain_lion
  end

  option :universal

  def install
    args = ["--prefix=#{prefix}", "--sysconfdir=#{prefix}/etc"]
    # Build a fat (multi-architecture) binary when --universal is requested.
    if build.universal?
      ENV.universal_binary
      args << "--fat-binary"
    end
    system "./configure", *args
    system "make"
    system "make", "install"
    system "make", "install-config"
  end

  test do
    # Exit status 1 is expected ("test" is not a real command); the banner
    # on the combined output proves the binary starts and finds its config.
    assert_match "config file found", shell_output("#{bin}/proxychains4 test 2>&1", 1)
  end
end
proxychains-ng: update 4.11 bottle.
# Homebrew formula for proxychains-ng: a preloader that hooks network
# calls so they are routed through a chain of proxies.
class ProxychainsNg < Formula
  desc "Hook preloader"
  homepage "https://sourceforge.net/projects/proxychains-ng/"
  url "https://downloads.sourceforge.net/project/proxychains-ng/proxychains-ng-4.11.tar.bz2"
  sha256 "dcc4149808cd1fb5d9663cc09791f478805816b1f017381f424414c47f6376b6"
  head "https://github.com/rofl0r/proxychains-ng.git"

  # Pre-built binary checksums, one per OS X release.
  bottle do
    sha256 "3a54f2ae04b107b97db3a0522f06cc77c0420bf7a562a07d4938c209e34d53ca" => :el_capitan
    sha256 "336d042fcdef471d60bca6233c834db94b85c911425efba8bf442b6affc0db00" => :yosemite
    sha256 "2707450f3238082aeef0884770eabae0167d17c1029840a5ab48db0af320b254" => :mavericks
  end

  option :universal

  def install
    args = ["--prefix=#{prefix}", "--sysconfdir=#{prefix}/etc"]
    # Build a fat (multi-architecture) binary when --universal is requested.
    if build.universal?
      ENV.universal_binary
      args << "--fat-binary"
    end
    system "./configure", *args
    system "make"
    system "make", "install"
    system "make", "install-config"
  end

  test do
    # Exit status 1 is expected ("test" is not a real command); the banner
    # on the combined output proves the binary starts and finds its config.
    assert_match "config file found", shell_output("#{bin}/proxychains4 test 2>&1", 1)
  end
end
|
# This is used in the Formula class when the user calls
# `python`, `python2` or `python3`.
# This method has a dual nature. For one, it takes a &block and sets up
# the ENV such that a Python, as defined in the requirements, is the default.
# If there are multiple `PythonInstalled` requirements, the block is evaluated
# once for each Python. This makes it possible to easily support 2.x and
# 3.x Python bindings without code duplication in formulae.
# If you need to special case stuff, set :allowed_major_versions.
# Second, inside the block, a formula author may call this method to access
# certain convenience methods for the currently selected Python, e.g.
# `python.site_packages`.
# This method should be executed in the context of the formula, so that
# prefix is defined. Note, that this method will set @current_python to be
# able to refer to the current python if a block is executed for 2.x and 3.x.
def python_helper(options={:allowed_major_versions => [2, 3]}, &block)
  if !block_given? and !@current_python.nil?
    # We are already inside of a `python do ... end` block, so just return
    # the current_python or false if the version.major is not allowed.
    if options[:allowed_major_versions].include?(@current_python.version.major)
      return @current_python
    else
      return false
    end
  end

  # Look for PythonInstalled requirements for this formula
  python_reqs = requirements.select{ |r| r.kind_of?(PythonInstalled) }
  if python_reqs.empty?
    raise "If you use python in the formula, you have to add `depends_on :python` (or :python3)!"
  end

  # Now select those that are satisfied and matching the version.major and
  # check that no two python binaries are the same (which could be the case
  # because more than one `depends_on :python => 'module_name' may be present).
  filtered_python_reqs = []
  while !python_reqs.empty?
    py = python_reqs.shift
    # this is ugly but Ruby 1.8 has no `uniq! { }`
    # NOTE(review): `&&` binds tighter than `||`, so this parses as
    # (unique && satisfied && allowed-major && with?) || mandatory — a
    # mandatory (non-optional, non-recommended) requirement is kept even
    # if unsatisfied, duplicated or of a disallowed major version.
    # Confirm whether that is intended.
    if !filtered_python_reqs.map{ |fpr| fpr.binary }.include?(py.binary) &&
       py.satisfied? &&
       options[:allowed_major_versions].include?(py.version.major) &&
       self.build.with?(py.name) || !(py.optional? || py.recommended?)
    then
      filtered_python_reqs << py
    end
  end

  # Allow to use an else-branch like so: `if python do ... end; else ... end`
  return false if filtered_python_reqs.empty?

  # Sort by version, so the older 2.x will be used first and if no
  # block_given? then 2.x is preferred because it is returned.
  # Further note, having 3.x last allows us to run `2to3 --write .`
  # which modifies the sources in-place (for some packages that need this).
  filtered_python_reqs.sort_by{ |py| py.version }.map do |py|
    # Now is the time to set the site_packages to the correct value
    py.site_packages = lib/py.xy/'site-packages'
    if !block_given?
      return py
    else
      puts "brew: Python block (#{py.binary})..." if ARGV.verbose? && ARGV.debug?
      # Ensure env changes are only temporary
      begin
        old_env = ENV.to_hash
        # In order to install into the Cellar, the dir must exist and be in the
        # PYTHONPATH. This will be executed in the context of the formula
        # so that lib points to the HOMEBREW_PREFIX/Cellar/<formula>/<version>/lib
        puts "brew: Appending to PYTHONPATH: #{py.site_packages}" if ARGV.verbose?
        mkdir_p py.site_packages
        ENV.append 'PYTHONPATH', py.site_packages, ':'
        ENV['PYTHON'] = py.binary
        ENV.prepend 'CMAKE_INCLUDE_PATH', py.incdir, ':'
        ENV.prepend 'PKG_CONFIG_PATH', py.pkg_config_path, ':' if py.pkg_config_path
        ENV.prepend 'PATH', py.binary.dirname, ':' unless py.from_osx?
        # Note: Don't set LDFLAGS to point to the Python.framework, because
        # it breaks builds (for example scipy.)
        # Track the state of the currently selected python for this block,
        # so if this python_helper is called again _inside_ the block,
        # we can just return the right python (see `else`-branch a few lines down):
        @current_python = py
        res = instance_eval(&block)
        @current_python = nil
        res
      ensure
        ENV.replace(old_env)
      end
    end
  end
end
Remove more nesting in python_helper
# This is used in the Formula class when the user calls
# `python`, `python2` or `python3`.
# This method has a dual nature. For one, it takes a &block and sets up
# the ENV such that a Python, as defined in the requirements, is the default.
# If there are multiple `PythonInstalled` requirements, the block is evaluated
# once for each Python. This makes it possible to easily support 2.x and
# 3.x Python bindings without code duplication in formulae.
# If you need to special case stuff, set :allowed_major_versions.
# Second, inside the block, a formula author may call this method to access
# certain convenience methods for the currently selected Python, e.g.
# `python.site_packages`.
# This method should be executed in the context of the formula, so that
# prefix is defined. Note, that this method will set @current_python to be
# able to refer to the current python if a block is executed for 2.x and 3.x.
#
# options - a Hash; :allowed_major_versions restricts which Python major
#           versions this call will select (defaults to both 2 and 3).
# block   - optional; evaluated once per selected Python requirement.
#
# Returns the selected requirement (no block), false (nothing matched),
# or the result of the block evaluation(s).
def python_helper(options={:allowed_major_versions => [2, 3]}, &block)
  if !block_given? and !@current_python.nil?
    # We are already inside of a `python do ... end` block, so just return
    # the current_python or false if the version.major is not allowed.
    if options[:allowed_major_versions].include?(@current_python.version.major)
      return @current_python
    else
      return false
    end
  end

  # Look for PythonInstalled requirements for this formula
  python_reqs = requirements.select{ |r| r.kind_of?(PythonInstalled) }
  if python_reqs.empty?
    raise "If you use python in the formula, you have to add `depends_on :python` (or :python3)!"
  end

  # Now select those that are satisfied and matching the version.major and
  # check that no two python binaries are the same (which could be the case
  # because more than one `depends_on :python => 'module_name'` may be present).
  filtered_python_reqs = []
  while !python_reqs.empty?
    py = python_reqs.shift
    # this is ugly but Ruby 1.8 has no `uniq! { }`
    # Keep py only if it is not a duplicate binary, is satisfied, has an
    # allowed major version, and is either requested via --with or is a
    # mandatory (non-optional, non-recommended) dependency.
    # Bug fix: the last two tests must be parenthesized — `&&` binds
    # tighter than `||`, so without parentheses a mandatory requirement
    # bypassed the satisfied?/major-version/duplicate filters entirely.
    if !filtered_python_reqs.map{ |fpr| fpr.binary }.include?(py.binary) &&
       py.satisfied? &&
       options[:allowed_major_versions].include?(py.version.major) &&
       (self.build.with?(py.name) || !(py.optional? || py.recommended?))
    then
      filtered_python_reqs << py
    end
  end

  # Allow to use an else-branch like so: `if python do ... end; else ... end`
  return false if filtered_python_reqs.empty?

  # Sort by version, so the older 2.x will be used first and if no
  # block_given? then 2.x is preferred because it is returned.
  # Further note, having 3.x last allows us to run `2to3 --write .`
  # which modifies the sources in-place (for some packages that need this).
  filtered_python_reqs.sort_by{ |py| py.version }.map do |py|
    # Now is the time to set the site_packages to the correct value
    py.site_packages = lib/py.xy/'site-packages'
    return py if !block_given?
    puts "brew: Python block (#{py.binary})..." if ARGV.verbose? && ARGV.debug?
    # Ensure env changes are only temporary
    begin
      old_env = ENV.to_hash
      # In order to install into the Cellar, the dir must exist and be in the
      # PYTHONPATH. This will be executed in the context of the formula
      # so that lib points to the HOMEBREW_PREFIX/Cellar/<formula>/<version>/lib
      puts "brew: Appending to PYTHONPATH: #{py.site_packages}" if ARGV.verbose?
      mkdir_p py.site_packages
      ENV.append 'PYTHONPATH', py.site_packages, ':'
      ENV['PYTHON'] = py.binary
      ENV.prepend 'CMAKE_INCLUDE_PATH', py.incdir, ':'
      ENV.prepend 'PKG_CONFIG_PATH', py.pkg_config_path, ':' if py.pkg_config_path
      ENV.prepend 'PATH', py.binary.dirname, ':' unless py.from_osx?
      # Note: Don't set LDFLAGS to point to the Python.framework, because
      # it breaks builds (for example scipy.)
      # Track the state of the currently selected python for this block,
      # so if this python_helper is called again _inside_ the block,
      # we can just return the right python (see the early-return branch above):
      @current_python = py
      res = instance_eval(&block)
      @current_python = nil
      res
    ensure
      ENV.replace(old_env)
    end
  end
end
|
spec = Gem::Specification.new do |s|
s.name = "xpather"
s.version = "0.0.7"
s.platform = Gem::Platform::RUBY
s.authors = ["Aaron Bedra"]
s.email = ["aaron@aaronbedra.com"]
s.homepage = "https://github.com/abedra/xpather"
s.summary = %q{Quick and painless XPath searching for Ruby}
s.description = %q{Quick and painless XPath searching for Ruby using libxml2}
s.rubyforge_project = "xpather"
s.files = Dir.glob('lib/**/*.rb') + Dir.glob('ext/**/*.c')
s.extensions = %w{ext/xpather/extconf.rb}
s.require_paths = ["lib", "ext"]
end
Version bump
spec = Gem::Specification.new do |s|
s.name = "xpather"
s.version = "0.0.8"
s.platform = Gem::Platform::RUBY
s.authors = ["Aaron Bedra"]
s.email = ["aaron@aaronbedra.com"]
s.homepage = "https://github.com/abedra/xpather"
s.summary = %q{Quick and painless XPath searching for Ruby}
s.description = %q{Quick and painless XPath searching for Ruby using libxml2}
s.rubyforge_project = "xpather"
s.files = Dir.glob('lib/**/*.rb') + Dir.glob('ext/**/*.c')
s.extensions = %w{ext/xpather/extconf.rb}
s.require_paths = ["lib", "ext"]
end
|
# frozen_string_literal: true
require "stringio"
require "uri"
require "active_support/core_ext/kernel/singleton_class"
require "rack/test"
require "minitest"
require "action_dispatch/testing/request_encoder"
module ActionDispatch
module Integration #:nodoc:
module RequestHelpers
  # Performs a GET request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  def get(path, **args)
    process(:get, path, **args)
  end

  # Performs a POST request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  def post(path, **args)
    process(:post, path, **args)
  end

  # Performs a PATCH request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  def patch(path, **args)
    process(:patch, path, **args)
  end

  # Performs a PUT request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  def put(path, **args)
    process(:put, path, **args)
  end

  # Performs a DELETE request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  def delete(path, **args)
    process(:delete, path, **args)
  end

  # Performs a HEAD request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  #
  # Fix: takes keyword arguments (**args) like every other verb helper.
  # The previous positional splat forwarded the options hash positionally
  # into the keyword-only #process, which is inconsistent with its
  # siblings and breaks under Ruby 3 keyword-argument separation.
  def head(path, **args)
    process(:head, path, **args)
  end

  # Follow a single redirect response. If the last response was not a
  # redirect, an exception will be raised. Otherwise, the redirect is
  # performed on the location header. If the redirection is a 307 redirect,
  # the same HTTP verb will be used when redirecting, otherwise a GET request
  # will be performed. Any arguments are passed to the
  # underlying request.
  def follow_redirect!(**args)
    raise "not a redirect! #{status} #{status_message}" unless redirect?

    method = response.status == 307 ? request.method.downcase : :get
    public_send(method, response.location, **args)
    status
  end
end
# An instance of this class represents a set of requests and responses
# performed sequentially by a test process. Because you can instantiate
# multiple sessions and run them side-by-side, you can also mimic (to some
# limited extent) multiple simultaneous users interacting with your system.
#
# Typically, you will instantiate a new session using
# IntegrationTest#open_session, rather than instantiating
# Integration::Session directly.
class Session
DEFAULT_HOST = "www.example.com"
include Minitest::Assertions
include TestProcess, RequestHelpers, Assertions
%w( status status_message headers body redirect? ).each do |method|
delegate method, to: :response, allow_nil: true
end
%w( path ).each do |method|
delegate method, to: :request, allow_nil: true
end
# The hostname used in the last request.
def host
@host || DEFAULT_HOST
end
attr_writer :host
# The remote_addr used in the last request.
attr_accessor :remote_addr
# The Accept header to send.
attr_accessor :accept
# A map of the cookies returned by the last response, and which will be
# sent with the next request.
def cookies
_mock_session.cookie_jar
end
# A reference to the controller instance used by the last request.
attr_reader :controller
# A reference to the request instance used by the last request.
attr_reader :request
# A reference to the response instance used by the last request.
attr_reader :response
# A running counter of the number of requests processed.
attr_accessor :request_count
include ActionDispatch::Routing::UrlFor
# Create and initialize a new Session instance.
def initialize(app)
super()
@app = app
reset!
end
def url_options
@url_options ||= default_url_options.dup.tap do |url_options|
url_options.reverse_merge!(controller.url_options) if controller
if @app.respond_to?(:routes)
url_options.reverse_merge!(@app.routes.default_url_options)
end
url_options.reverse_merge!(host: host, protocol: https? ? "https" : "http")
end
end
# Resets the instance. This can be used to reset the state information
# in an existing session instance, so it can be used from a clean-slate
# condition.
#
# session.reset!
def reset!
@https = false
@controller = @request = @response = nil
@_mock_session = nil
@request_count = 0
@url_options = nil
self.host = DEFAULT_HOST
self.remote_addr = "127.0.0.1"
self.accept = "text/xml,application/xml,application/xhtml+xml," \
"text/html;q=0.9,text/plain;q=0.8,image/png," \
"*/*;q=0.5"
unless defined? @named_routes_configured
# the helpers are made protected by default--we make them public for
# easier access during testing and troubleshooting.
@named_routes_configured = true
end
end
# Specify whether or not the session should mimic a secure HTTPS request.
#
# session.https!
# session.https!(false)
def https!(flag = true)
@https = flag
end
# Returns +true+ if the session is mimicking a secure HTTPS request.
#
# if session.https?
# ...
# end
def https?
@https
end
# Performs the actual request.
#
# - +method+: The HTTP method (GET, POST, PATCH, PUT, DELETE, HEAD, OPTIONS)
# as a symbol.
# - +path+: The URI (as a String) on which you want to perform the
# request.
# - +params+: The HTTP parameters that you want to pass. This may
# be +nil+,
# a Hash, or a String that is appropriately encoded
# (<tt>application/x-www-form-urlencoded</tt> or
# <tt>multipart/form-data</tt>).
# - +headers+: Additional headers to pass, as a Hash. The headers will be
# merged into the Rack env hash.
# - +env+: Additional env to pass, as a Hash. The headers will be
# merged into the Rack env hash.
# - +xhr+: Set to `true` if you want to make and Ajax request.
# Adds request headers characteristic of XMLHttpRequest e.g. HTTP_X_REQUESTED_WITH.
# The headers will be merged into the Rack env hash.
# - +as+: Used for encoding the request with different content type.
# Supports `:json` by default and will set the appropriate request headers.
# The headers will be merged into the Rack env hash.
#
# This method is rarely used directly. Use +#get+, +#post+, or other standard
# HTTP methods in integration tests. +#process+ is only required when using a
# request method that doesn't have a method defined in the integration tests.
#
# This method returns the response status, after performing the request.
# Furthermore, if this method was called from an ActionDispatch::IntegrationTest object,
# then that object's <tt>@response</tt> instance variable will point to a Response object
# which one can use to inspect the details of the response.
#
# Example:
# process :get, '/author', params: { since: 201501011400 }
# Performs the actual HTTP request against the app under test and records
# the resulting request/response state on the session.
#
# Returns the integer response status. As a side effect it populates
# @request, @response, @controller and bumps @request_count, and clears the
# cached @html_document and @url_options.
def process(method, path, params: nil, headers: nil, env: nil, xhr: false, as: nil)
  request_encoder = RequestEncoder.encoder(as)
  headers ||= {}

  # A GET cannot carry a JSON body, so tunnel it as a POST and mark the
  # intended verb with the X-Http-Method-Override header.
  if method == :get && as == :json && params
    headers["X-Http-Method-Override"] = "GET"
    method = :post
  end

  # An absolute URL ("scheme://host/path") updates the session's https?
  # flag and host before being reduced to just the local path (+ query).
  if %r{://}.match?(path)
    path = build_expanded_path(path) do |location|
      https! URI::HTTPS === location if location.scheme

      if url_host = location.host
        default = Rack::Request::DEFAULT_PORTS[location.scheme]
        # Keep an explicit port only when it differs from the scheme default.
        url_host += ":#{location.port}" if default != location.port
        host! url_host
      end
    end
  end

  hostname, port = host.split(":")

  request_env = {
    :method => method,
    :params => request_encoder.encode_params(params),

    "SERVER_NAME" => hostname,
    "SERVER_PORT" => port || (https? ? "443" : "80"),
    "HTTPS" => https? ? "on" : "off",
    "rack.url_scheme" => https? ? "https" : "http",

    "REQUEST_URI" => path,
    "HTTP_HOST" => host,
    "REMOTE_ADDR" => remote_addr,
    "CONTENT_TYPE" => request_encoder.content_type,
    "HTTP_ACCEPT" => request_encoder.accept_header || accept
  }

  wrapped_headers = Http::Headers.from_hash({})
  wrapped_headers.merge!(headers) if headers

  if xhr
    wrapped_headers["HTTP_X_REQUESTED_WITH"] = "XMLHttpRequest"
    # Only set a default XHR Accept header if the caller didn't supply one.
    wrapped_headers["HTTP_ACCEPT"] ||= [Mime[:js], Mime[:html], Mime[:xml], "text/xml", "*/*"].join(", ")
  end

  # This modifies the passed request_env directly.
  if wrapped_headers.present?
    Http::Headers.from_hash(request_env).merge!(wrapped_headers)
  end
  if env.present?
    Http::Headers.from_hash(request_env).merge!(env)
  end

  session = Rack::Test::Session.new(_mock_session)

  # NOTE: rack-test v0.5 doesn't build a default uri correctly
  # Make sure requested path is always a full URI.
  session.request(build_full_uri(path, request_env), request_env)

  @request_count += 1
  @request = ActionDispatch::Request.new(session.last_request.env)
  response = _mock_session.last_response
  @response = ActionDispatch::TestResponse.from_response(response)
  @response.request = @request
  # Invalidate caches derived from the previous request/response.
  @html_document = nil
  @url_options = nil

  @controller = @request.controller_instance

  response.status
end
# Set the host name to use in the next request.
#
#   session.host! "www.example.com"
#
# (+host!+ is simply an alias for the +host=+ writer.)
alias :host! :host=

private
# Lazily builds (and memoizes) the Rack::MockSession that drives requests
# against @app using the session's current host. reset! clears it.
def _mock_session
  @_mock_session ||= Rack::MockSession.new(@app, host)
end
# Assembles an absolute URI string for +path+ from the scheme, server name
# and port already present in the Rack env hash. The port is always
# included explicitly, matching what the env carries.
def build_full_uri(path, env)
  scheme, server, port = env.values_at("rack.url_scheme", "SERVER_NAME", "SERVER_PORT")
  "#{scheme}://#{server}:#{port}#{path}"
end
# Parses an absolute URL, yields the parsed URI to the caller's block (so
# it can pick up scheme/host/port), and returns only the local path,
# re-attaching the query string when one is present.
def build_expanded_path(path)
  uri = URI.parse(path)
  yield uri if block_given?
  local = uri.path
  uri.query ? "#{local}?#{uri.query}" : local
end
end
# Mixed into test cases to manage the underlying Integration::Session and
# delegate the request helpers to it.
module Runner
  include ActionDispatch::Assertions

  # Cache of per-app Session subclasses (with the app's URL helpers mixed in).
  APP_SESSIONS = {}

  attr_reader :app

  def initialize(*args, &blk)
    super(*args, &blk)
    @integration_session = nil
  end

  def before_setup # :nodoc:
    @app = nil
    super
  end

  # The current session, created lazily against +app+.
  def integration_session
    @integration_session ||= create_session(app)
  end

  # Reset the current session. This is useful for testing multiple sessions
  # in a single test case.
  def reset!
    @integration_session = create_session(app)
  end

  def create_session(app)
    klass = APP_SESSIONS[app] ||= Class.new(Integration::Session) {
      # If the app is a Rails app, make url_helpers available on the session.
      # This makes app.url_for and app.foo_path available in the console.
      if app.respond_to?(:routes) && app.routes.is_a?(ActionDispatch::Routing::RouteSet)
        include app.routes.url_helpers
        include app.routes.mounted_helpers
      end
    }
    klass.new(app)
  end

  def remove! # :nodoc:
    @integration_session = nil
  end

  %w(get post patch put head delete cookies assigns follow_redirect!).each do |method|
    define_method(method) do |*args, **options|
      # reset the html_document variable, except for cookies/assigns calls
      unless method == "cookies" || method == "assigns"
        @html_document = nil
      end

      # BUG FIX: forward **options only when there are any. On Ruby <= 2.6
      # an empty keyword splat is passed as a spurious positional `{}` to
      # methods that accept no keywords (e.g. +cookies+), raising
      # ArgumentError.
      result = if options.any?
        integration_session.__send__(method, *args, **options)
      else
        integration_session.__send__(method, *args)
      end
      copy_session_variables!
      result
    end
  end

  # Open a new session instance. If a block is given, the new session is
  # yielded to the block before being returned.
  #
  #   session = open_session do |sess|
  #     sess.extend(CustomAssertions)
  #   end
  #
  # By default, a single session is automatically created for you, but you
  # can use this method to open multiple sessions that ought to be tested
  # simultaneously.
  def open_session
    dup.tap do |session|
      session.reset!
      yield session if block_given?
    end
  end

  # Copy the instance variables from the current session instance into the
  # test instance.
  def copy_session_variables! #:nodoc:
    @controller = @integration_session.controller
    @response = @integration_session.response
    @request = @integration_session.request
  end

  def default_url_options
    integration_session.default_url_options
  end

  def default_url_options=(options)
    integration_session.default_url_options = options
  end

  private
    def respond_to_missing?(method, _)
      integration_session.respond_to?(method) || super
    end

    # Delegate unhandled messages to the current session instance.
    def method_missing(method, *args, &block)
      if integration_session.respond_to?(method)
        integration_session.public_send(method, *args, &block).tap do
          copy_session_variables!
        end
      else
        super
      end
    end
end
end
# An integration test spans multiple controllers and actions,
# tying them all together to ensure they work together as expected. It tests
# more completely than either unit or functional tests do, exercising the
# entire stack, from the dispatcher to the database.
#
# At its simplest, you simply extend <tt>IntegrationTest</tt> and write your tests
# using the get/post methods:
#
# require "test_helper"
#
# class ExampleTest < ActionDispatch::IntegrationTest
# fixtures :people
#
# def test_login
# # get the login page
# get "/login"
# assert_equal 200, status
#
# # post the login and follow through to the home page
# post "/login", params: { username: people(:jamis).username,
# password: people(:jamis).password }
# follow_redirect!
# assert_equal 200, status
# assert_equal "/home", path
# end
# end
#
# However, you can also have multiple session instances open per test, and
# even extend those instances with assertions and methods to create a very
# powerful testing DSL that is specific for your application. You can even
# reference any named routes you happen to have defined.
#
# require "test_helper"
#
# class AdvancedTest < ActionDispatch::IntegrationTest
# fixtures :people, :rooms
#
# def test_login_and_speak
# jamis, david = login(:jamis), login(:david)
# room = rooms(:office)
#
# jamis.enter(room)
# jamis.speak(room, "anybody home?")
#
# david.enter(room)
# david.speak(room, "hello!")
# end
#
# private
#
# module CustomAssertions
# def enter(room)
# # reference a named route, for maximum internal consistency!
# get(room_url(id: room.id))
# assert(...)
# ...
# end
#
# def speak(room, message)
# post "/say/#{room.id}", xhr: true, params: { message: message }
# assert(...)
# ...
# end
# end
#
# def login(who)
# open_session do |sess|
# sess.extend(CustomAssertions)
# who = people(who)
# sess.post "/login", params: { username: who.username,
# password: who.password }
# assert(...)
# end
# end
# end
#
# Another longer example would be:
#
# A simple integration test that exercises multiple controllers:
#
# require 'test_helper'
#
# class UserFlowsTest < ActionDispatch::IntegrationTest
# test "login and browse site" do
# # login via https
# https!
# get "/login"
# assert_response :success
#
# post "/login", params: { username: users(:david).username, password: users(:david).password }
# follow_redirect!
# assert_equal '/welcome', path
# assert_equal 'Welcome david!', flash[:notice]
#
# https!(false)
# get "/articles/all"
# assert_response :success
# assert_select 'h1', 'Articles'
# end
# end
#
# As you can see the integration test involves multiple controllers and
# exercises the entire stack from database to dispatcher. In addition you can
# have multiple session instances open simultaneously in a test and extend
# those instances with assertion methods to create a very powerful testing
# DSL (domain-specific language) just for your application.
#
# Here's an example of multiple sessions and custom DSL in an integration test
#
# require 'test_helper'
#
# class UserFlowsTest < ActionDispatch::IntegrationTest
# test "login and browse site" do
# # User david logs in
# david = login(:david)
# # User guest logs in
# guest = login(:guest)
#
# # Both are now available in different sessions
# assert_equal 'Welcome david!', david.flash[:notice]
# assert_equal 'Welcome guest!', guest.flash[:notice]
#
# # User david can browse site
# david.browses_site
# # User guest can browse site as well
# guest.browses_site
#
# # Continue with other assertions
# end
#
# private
#
# module CustomDsl
# def browses_site
# get "/products/all"
# assert_response :success
# assert_select 'h1', 'Products'
# end
# end
#
# def login(user)
# open_session do |sess|
# sess.extend(CustomDsl)
# u = users(user)
# sess.https!
# sess.post "/login", params: { username: u.username, password: u.password }
# assert_equal '/welcome', sess.path
# sess.https!(false)
# end
# end
# end
#
# See the {request helpers documentation}[rdoc-ref:ActionDispatch::Integration::RequestHelpers] for help on how to
# use +get+, etc.
#
# === Changing the request encoding
#
# You can also test your JSON API easily by setting what the request should
# be encoded as:
#
# require "test_helper"
#
# class ApiTest < ActionDispatch::IntegrationTest
# test "creates articles" do
# assert_difference -> { Article.count } do
# post articles_path, params: { article: { title: "Ahoy!" } }, as: :json
# end
#
# assert_response :success
# assert_equal({ id: Article.last.id, title: "Ahoy!" }, response.parsed_body)
# end
# end
#
# The +as+ option passes an "application/json" Accept header (thereby setting
# the request format to JSON unless overridden), sets the content type to
# "application/json" and encodes the parameters as JSON.
#
# Calling +parsed_body+ on the response parses the response body based on the
# last response MIME type.
#
# Out of the box, only <tt>:json</tt> is supported. But for any custom MIME
# types you've registered, you can add your own encoders with:
#
# ActionDispatch::IntegrationTest.register_encoder :wibble,
# param_encoder: -> params { params.to_wibble },
# response_parser: -> body { body }
#
# Where +param_encoder+ defines how the params should be encoded and
# +response_parser+ defines how the response body should be parsed through
# +parsed_body+.
#
# Consult the Rails Testing Guide for more.
class IntegrationTest < ActiveSupport::TestCase
  include TestProcess::FixtureFile

  # Exposes the session's url_options on the test case itself.
  module UrlOptions
    extend ActiveSupport::Concern
    def url_options
      integration_session.url_options
    end
  end

  # Shared integration-test behavior, extracted so it can be mixed into
  # other test base classes as well.
  module Behavior
    extend ActiveSupport::Concern

    include Integration::Runner
    include ActionController::TemplateAssertions

    included do
      include ActionDispatch::Routing::UrlFor
      include UrlOptions # don't let UrlFor override the url_options method
      ActiveSupport.run_load_hooks(:action_dispatch_integration_test, self)
      # NOTE(review): class variable — the app set via +app=+ is shared
      # across the whole inheritance tree.
      @@app = nil
    end

    module ClassMethods
      # The Rack app under test; falls back to ActionDispatch.test_app when
      # none has been assigned.
      def app
        if defined?(@@app) && @@app
          @@app
        else
          ActionDispatch.test_app
        end
      end

      # Explicitly sets the Rack app under test.
      def app=(app)
        @@app = app
      end

      # Registers a custom request encoder (see RequestEncoder).
      def register_encoder(*args, **options)
        RequestEncoder.register_encoder(*args, **options)
      end
    end

    # An app assigned on the instance wins over the class-level one.
    def app
      super || self.class.app
    end

    # Root element used by assert_select and friends.
    def document_root_element
      html_document.root
    end
  end

  include Behavior
end
end
Revert "Revert "send with **options only when options is not empty""
This reverts commit 2f141ab09ea0509895c5426ae3010819d2b4fc48.
Ruby 2.6 and 2.5 do not work without this...
# frozen_string_literal: true
require "stringio"
require "uri"
require "active_support/core_ext/kernel/singleton_class"
require "rack/test"
require "minitest"
require "action_dispatch/testing/request_encoder"
module ActionDispatch
module Integration #:nodoc:
# Convenience wrappers around Session#process, one per HTTP verb.
module RequestHelpers
  # Performs a GET request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  def get(path, **args)
    process(:get, path, **args)
  end

  # Performs a POST request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  def post(path, **args)
    process(:post, path, **args)
  end

  # Performs a PATCH request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  def patch(path, **args)
    process(:patch, path, **args)
  end

  # Performs a PUT request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  def put(path, **args)
    process(:put, path, **args)
  end

  # Performs a DELETE request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  def delete(path, **args)
    process(:delete, path, **args)
  end

  # Performs a HEAD request with the given parameters. See ActionDispatch::Integration::Session#process
  # for more details.
  #
  # BUG FIX: use a keyword splat like every other verb helper. With *args a
  # trailing options hash was forwarded positionally, which is inconsistent
  # with +get+/+post+/etc. and raises under Ruby 3's keyword-argument
  # separation.
  def head(path, **args)
    process(:head, path, **args)
  end

  # Follow a single redirect response. If the last response was not a
  # redirect, an exception will be raised. Otherwise, the redirect is
  # performed on the location header. If the redirection is a 307 redirect,
  # the same HTTP verb will be used when redirecting, otherwise a GET request
  # will be performed. Any arguments are passed to the
  # underlying request.
  def follow_redirect!(**args)
    raise "not a redirect! #{status} #{status_message}" unless redirect?

    method = response.status == 307 ? request.method.downcase : :get
    public_send(method, response.location, **args)
    status
  end
end
# An instance of this class represents a set of requests and responses
# performed sequentially by a test process. Because you can instantiate
# multiple sessions and run them side-by-side, you can also mimic (to some
# limited extent) multiple simultaneous users interacting with your system.
#
# Typically, you will instantiate a new session using
# IntegrationTest#open_session, rather than instantiating
# Integration::Session directly.
class Session
  # Host used when none has been set explicitly.
  DEFAULT_HOST = "www.example.com"

  include Minitest::Assertions
  include TestProcess, RequestHelpers, Assertions

  # Delegate response inspection helpers to the last response.
  %w( status status_message headers body redirect? ).each do |method|
    delegate method, to: :response, allow_nil: true
  end

  # Delegate +path+ to the last request.
  %w( path ).each do |method|
    delegate method, to: :request, allow_nil: true
  end

  # The hostname used in the last request.
  def host
    @host || DEFAULT_HOST
  end
  attr_writer :host

  # The remote_addr used in the last request.
  attr_accessor :remote_addr

  # The Accept header to send.
  attr_accessor :accept

  # A map of the cookies returned by the last response, and which will be
  # sent with the next request.
  def cookies
    _mock_session.cookie_jar
  end

  # A reference to the controller instance used by the last request.
  attr_reader :controller

  # A reference to the request instance used by the last request.
  attr_reader :request

  # A reference to the response instance used by the last request.
  attr_reader :response

  # A running counter of the number of requests processed.
  attr_accessor :request_count

  include ActionDispatch::Routing::UrlFor

  # Create and initialize a new Session instance.
  def initialize(app)
    super()
    @app = app

    reset!
  end

  # URL options derived from the controller, the app's routes and the
  # session's current host/protocol, memoized until the next request.
  def url_options
    @url_options ||= default_url_options.dup.tap do |url_options|
      url_options.reverse_merge!(controller.url_options) if controller

      if @app.respond_to?(:routes)
        url_options.reverse_merge!(@app.routes.default_url_options)
      end

      url_options.reverse_merge!(host: host, protocol: https? ? "https" : "http")
    end
  end

  # Resets the instance. This can be used to reset the state information
  # in an existing session instance, so it can be used from a clean-slate
  # condition.
  #
  #   session.reset!
  def reset!
    @https = false
    @controller = @request = @response = nil
    @_mock_session = nil
    @request_count = 0
    @url_options = nil

    self.host = DEFAULT_HOST
    self.remote_addr = "127.0.0.1"
    self.accept = "text/xml,application/xml,application/xhtml+xml," \
      "text/html;q=0.9,text/plain;q=0.8,image/png," \
      "*/*;q=0.5"

    unless defined? @named_routes_configured
      # the helpers are made protected by default--we make them public for
      # easier access during testing and troubleshooting.
      @named_routes_configured = true
    end
  end

  # Specify whether or not the session should mimic a secure HTTPS request.
  #
  #   session.https!
  #   session.https!(false)
  def https!(flag = true)
    @https = flag
  end

  # Returns +true+ if the session is mimicking a secure HTTPS request.
  #
  #   if session.https?
  #     ...
  #   end
  def https?
    @https
  end

  # Performs the actual request.
  #
  # - +method+: The HTTP method (GET, POST, PATCH, PUT, DELETE, HEAD, OPTIONS)
  #   as a symbol.
  # - +path+: The URI (as a String) on which you want to perform the
  #   request.
  # - +params+: The HTTP parameters that you want to pass. This may
  #   be +nil+,
  #   a Hash, or a String that is appropriately encoded
  #   (<tt>application/x-www-form-urlencoded</tt> or
  #   <tt>multipart/form-data</tt>).
  # - +headers+: Additional headers to pass, as a Hash. The headers will be
  #   merged into the Rack env hash.
  # - +env+: Additional env to pass, as a Hash. The headers will be
  #   merged into the Rack env hash.
  # - +xhr+: Set to `true` if you want to make an Ajax request.
  #   Adds request headers characteristic of XMLHttpRequest e.g. HTTP_X_REQUESTED_WITH.
  #   The headers will be merged into the Rack env hash.
  # - +as+: Used for encoding the request with different content type.
  #   Supports `:json` by default and will set the appropriate request headers.
  #   The headers will be merged into the Rack env hash.
  #
  # This method is rarely used directly. Use +#get+, +#post+, or other standard
  # HTTP methods in integration tests. +#process+ is only required when using a
  # request method that doesn't have a method defined in the integration tests.
  #
  # This method returns the response status, after performing the request.
  # Furthermore, if this method was called from an ActionDispatch::IntegrationTest object,
  # then that object's <tt>@response</tt> instance variable will point to a Response object
  # which one can use to inspect the details of the response.
  #
  # Example:
  #   process :get, '/author', params: { since: 201501011400 }
  def process(method, path, params: nil, headers: nil, env: nil, xhr: false, as: nil)
    request_encoder = RequestEncoder.encoder(as)
    headers ||= {}

    # A GET cannot carry a JSON body, so tunnel it as a POST and mark the
    # intended verb with the X-Http-Method-Override header.
    if method == :get && as == :json && params
      headers["X-Http-Method-Override"] = "GET"
      method = :post
    end

    # An absolute URL updates the session's https? flag and host before
    # being reduced to just the local path (+ query).
    if %r{://}.match?(path)
      path = build_expanded_path(path) do |location|
        https! URI::HTTPS === location if location.scheme

        if url_host = location.host
          default = Rack::Request::DEFAULT_PORTS[location.scheme]
          # Keep an explicit port only when it differs from the scheme default.
          url_host += ":#{location.port}" if default != location.port
          host! url_host
        end
      end
    end

    hostname, port = host.split(":")

    request_env = {
      :method => method,
      :params => request_encoder.encode_params(params),

      "SERVER_NAME" => hostname,
      "SERVER_PORT" => port || (https? ? "443" : "80"),
      "HTTPS" => https? ? "on" : "off",
      "rack.url_scheme" => https? ? "https" : "http",

      "REQUEST_URI" => path,
      "HTTP_HOST" => host,
      "REMOTE_ADDR" => remote_addr,
      "CONTENT_TYPE" => request_encoder.content_type,
      "HTTP_ACCEPT" => request_encoder.accept_header || accept
    }

    wrapped_headers = Http::Headers.from_hash({})
    wrapped_headers.merge!(headers) if headers

    if xhr
      wrapped_headers["HTTP_X_REQUESTED_WITH"] = "XMLHttpRequest"
      # Only set a default XHR Accept header if the caller didn't supply one.
      wrapped_headers["HTTP_ACCEPT"] ||= [Mime[:js], Mime[:html], Mime[:xml], "text/xml", "*/*"].join(", ")
    end

    # This modifies the passed request_env directly.
    if wrapped_headers.present?
      Http::Headers.from_hash(request_env).merge!(wrapped_headers)
    end
    if env.present?
      Http::Headers.from_hash(request_env).merge!(env)
    end

    session = Rack::Test::Session.new(_mock_session)

    # NOTE: rack-test v0.5 doesn't build a default uri correctly
    # Make sure requested path is always a full URI.
    session.request(build_full_uri(path, request_env), request_env)

    @request_count += 1
    @request = ActionDispatch::Request.new(session.last_request.env)
    response = _mock_session.last_response
    @response = ActionDispatch::TestResponse.from_response(response)
    @response.request = @request
    # Invalidate caches derived from the previous request/response.
    @html_document = nil
    @url_options = nil

    @controller = @request.controller_instance

    response.status
  end

  # Set the host name to use in the next request.
  #
  #   session.host! "www.example.com"
  #
  # (+host!+ is simply an alias for the +host=+ writer.)
  alias :host! :host=

  private
    # Lazily builds (and memoizes) the Rack::MockSession that drives
    # requests against @app using the session's current host.
    def _mock_session
      @_mock_session ||= Rack::MockSession.new(@app, host)
    end

    # Assembles an absolute URI string from the scheme/host/port in the env.
    def build_full_uri(path, env)
      "#{env['rack.url_scheme']}://#{env['SERVER_NAME']}:#{env['SERVER_PORT']}#{path}"
    end

    # Parses an absolute URL, yields the URI to the block, and returns only
    # the local path (re-attaching any query string).
    def build_expanded_path(path)
      location = URI.parse(path)
      yield location if block_given?
      path = location.path
      location.query ? "#{path}?#{location.query}" : path
    end
end
# Mixed into test cases to manage the underlying Integration::Session and
# delegate the request helpers to it.
module Runner
  include ActionDispatch::Assertions

  # Cache of per-app Session subclasses (with the app's URL helpers mixed in).
  APP_SESSIONS = {}

  attr_reader :app

  def initialize(*args, &blk)
    super(*args, &blk)
    @integration_session = nil
  end

  def before_setup # :nodoc:
    @app = nil
    super
  end

  # The current session, created lazily against +app+.
  def integration_session
    @integration_session ||= create_session(app)
  end

  # Reset the current session. This is useful for testing multiple sessions
  # in a single test case.
  def reset!
    @integration_session = create_session(app)
  end

  def create_session(app)
    klass = APP_SESSIONS[app] ||= Class.new(Integration::Session) {
      # If the app is a Rails app, make url_helpers available on the session.
      # This makes app.url_for and app.foo_path available in the console.
      if app.respond_to?(:routes) && app.routes.is_a?(ActionDispatch::Routing::RouteSet)
        include app.routes.url_helpers
        include app.routes.mounted_helpers
      end
    }
    klass.new(app)
  end

  def remove! # :nodoc:
    @integration_session = nil
  end

  %w(get post patch put head delete cookies assigns follow_redirect!).each do |method|
    define_method(method) do |*args, **options|
      # reset the html_document variable, except for cookies/assigns calls
      unless method == "cookies" || method == "assigns"
        @html_document = nil
      end

      # Forward **options only when there are any: on Ruby <= 2.6 an empty
      # keyword splat is passed as a spurious positional `{}` to methods
      # that accept no keywords (e.g. +cookies+).
      result = if options.any?
        integration_session.__send__(method, *args, **options)
      else
        integration_session.__send__(method, *args)
      end
      copy_session_variables!
      result
    end
  end

  # Open a new session instance. If a block is given, the new session is
  # yielded to the block before being returned.
  #
  #   session = open_session do |sess|
  #     sess.extend(CustomAssertions)
  #   end
  #
  # By default, a single session is automatically created for you, but you
  # can use this method to open multiple sessions that ought to be tested
  # simultaneously.
  def open_session
    dup.tap do |session|
      session.reset!
      yield session if block_given?
    end
  end

  # Copy the instance variables from the current session instance into the
  # test instance.
  def copy_session_variables! #:nodoc:
    @controller = @integration_session.controller
    @response = @integration_session.response
    @request = @integration_session.request
  end

  def default_url_options
    integration_session.default_url_options
  end

  def default_url_options=(options)
    integration_session.default_url_options = options
  end

  private
    def respond_to_missing?(method, _)
      integration_session.respond_to?(method) || super
    end

    # Delegate unhandled messages to the current session instance.
    def method_missing(method, *args, &block)
      if integration_session.respond_to?(method)
        integration_session.public_send(method, *args, &block).tap do
          copy_session_variables!
        end
      else
        super
      end
    end
end
end
# An integration test spans multiple controllers and actions,
# tying them all together to ensure they work together as expected. It tests
# more completely than either unit or functional tests do, exercising the
# entire stack, from the dispatcher to the database.
#
# At its simplest, you simply extend <tt>IntegrationTest</tt> and write your tests
# using the get/post methods:
#
# require "test_helper"
#
# class ExampleTest < ActionDispatch::IntegrationTest
# fixtures :people
#
# def test_login
# # get the login page
# get "/login"
# assert_equal 200, status
#
# # post the login and follow through to the home page
# post "/login", params: { username: people(:jamis).username,
# password: people(:jamis).password }
# follow_redirect!
# assert_equal 200, status
# assert_equal "/home", path
# end
# end
#
# However, you can also have multiple session instances open per test, and
# even extend those instances with assertions and methods to create a very
# powerful testing DSL that is specific for your application. You can even
# reference any named routes you happen to have defined.
#
# require "test_helper"
#
# class AdvancedTest < ActionDispatch::IntegrationTest
# fixtures :people, :rooms
#
# def test_login_and_speak
# jamis, david = login(:jamis), login(:david)
# room = rooms(:office)
#
# jamis.enter(room)
# jamis.speak(room, "anybody home?")
#
# david.enter(room)
# david.speak(room, "hello!")
# end
#
# private
#
# module CustomAssertions
# def enter(room)
# # reference a named route, for maximum internal consistency!
# get(room_url(id: room.id))
# assert(...)
# ...
# end
#
# def speak(room, message)
# post "/say/#{room.id}", xhr: true, params: { message: message }
# assert(...)
# ...
# end
# end
#
# def login(who)
# open_session do |sess|
# sess.extend(CustomAssertions)
# who = people(who)
# sess.post "/login", params: { username: who.username,
# password: who.password }
# assert(...)
# end
# end
# end
#
# Another longer example would be:
#
# A simple integration test that exercises multiple controllers:
#
# require 'test_helper'
#
# class UserFlowsTest < ActionDispatch::IntegrationTest
# test "login and browse site" do
# # login via https
# https!
# get "/login"
# assert_response :success
#
# post "/login", params: { username: users(:david).username, password: users(:david).password }
# follow_redirect!
# assert_equal '/welcome', path
# assert_equal 'Welcome david!', flash[:notice]
#
# https!(false)
# get "/articles/all"
# assert_response :success
# assert_select 'h1', 'Articles'
# end
# end
#
# As you can see the integration test involves multiple controllers and
# exercises the entire stack from database to dispatcher. In addition you can
# have multiple session instances open simultaneously in a test and extend
# those instances with assertion methods to create a very powerful testing
# DSL (domain-specific language) just for your application.
#
# Here's an example of multiple sessions and custom DSL in an integration test
#
# require 'test_helper'
#
# class UserFlowsTest < ActionDispatch::IntegrationTest
# test "login and browse site" do
# # User david logs in
# david = login(:david)
# # User guest logs in
# guest = login(:guest)
#
# # Both are now available in different sessions
# assert_equal 'Welcome david!', david.flash[:notice]
# assert_equal 'Welcome guest!', guest.flash[:notice]
#
# # User david can browse site
# david.browses_site
# # User guest can browse site as well
# guest.browses_site
#
# # Continue with other assertions
# end
#
# private
#
# module CustomDsl
# def browses_site
# get "/products/all"
# assert_response :success
# assert_select 'h1', 'Products'
# end
# end
#
# def login(user)
# open_session do |sess|
# sess.extend(CustomDsl)
# u = users(user)
# sess.https!
# sess.post "/login", params: { username: u.username, password: u.password }
# assert_equal '/welcome', sess.path
# sess.https!(false)
# end
# end
# end
#
# See the {request helpers documentation}[rdoc-ref:ActionDispatch::Integration::RequestHelpers] for help on how to
# use +get+, etc.
#
# === Changing the request encoding
#
# You can also test your JSON API easily by setting what the request should
# be encoded as:
#
# require "test_helper"
#
# class ApiTest < ActionDispatch::IntegrationTest
# test "creates articles" do
# assert_difference -> { Article.count } do
# post articles_path, params: { article: { title: "Ahoy!" } }, as: :json
# end
#
# assert_response :success
# assert_equal({ id: Article.last.id, title: "Ahoy!" }, response.parsed_body)
# end
# end
#
# The +as+ option passes an "application/json" Accept header (thereby setting
# the request format to JSON unless overridden), sets the content type to
# "application/json" and encodes the parameters as JSON.
#
# Calling +parsed_body+ on the response parses the response body based on the
# last response MIME type.
#
# Out of the box, only <tt>:json</tt> is supported. But for any custom MIME
# types you've registered, you can add your own encoders with:
#
# ActionDispatch::IntegrationTest.register_encoder :wibble,
# param_encoder: -> params { params.to_wibble },
# response_parser: -> body { body }
#
# Where +param_encoder+ defines how the params should be encoded and
# +response_parser+ defines how the response body should be parsed through
# +parsed_body+.
#
# Consult the Rails Testing Guide for more.
class IntegrationTest < ActiveSupport::TestCase
  include TestProcess::FixtureFile

  # Exposes the session's url_options on the test case itself.
  module UrlOptions
    extend ActiveSupport::Concern
    def url_options
      integration_session.url_options
    end
  end

  # Shared integration-test behavior, extracted so it can be mixed into
  # other test base classes as well.
  module Behavior
    extend ActiveSupport::Concern

    include Integration::Runner
    include ActionController::TemplateAssertions

    included do
      include ActionDispatch::Routing::UrlFor
      include UrlOptions # don't let UrlFor override the url_options method
      ActiveSupport.run_load_hooks(:action_dispatch_integration_test, self)
      # NOTE(review): class variable — the app set via +app=+ is shared
      # across the whole inheritance tree.
      @@app = nil
    end

    module ClassMethods
      # The Rack app under test; falls back to ActionDispatch.test_app when
      # none has been assigned.
      def app
        if defined?(@@app) && @@app
          @@app
        else
          ActionDispatch.test_app
        end
      end

      # Explicitly sets the Rack app under test.
      def app=(app)
        @@app = app
      end

      # Registers a custom request encoder (see RequestEncoder).
      def register_encoder(*args, **options)
        RequestEncoder.register_encoder(*args, **options)
      end
    end

    # An app assigned on the instance wins over the class-level one.
    def app
      super || self.class.app
    end

    # Root element used by assert_select and friends.
    def document_root_element
      html_document.root
    end
  end

  include Behavior
end
end
|
# = test_yars_basic_query.rb
#
# Test Unit of Yars Adapter query method
#
# == Project
#
# * ActiveRDF
# <http://m3pe.org/activerdf/>
#
# == Authors
#
# * Eyal Oren <first dot last at deri dot org>
# * Renaud Delbru <first dot last at deri dot org>
#
# == Copyright
#
# (c) 2005-2006 by Eyal Oren and Renaud Delbru - All Rights Reserved
#
require 'test/unit'
require 'active_rdf'
require 'node_factory'
require 'test/adapter/yars/manage_yars_db'
class TestYarsAdapterBasicQuery < Test::Unit::TestCase
@context = 'test_query'
def setup
setup_yars(@context)
params = { :adapter => :yars, :host => DB_HOST, :port => 8080, :context => @context }
NodeFactory.connection(params)
end
def teardown
delete_yars(@context)
end
def test_A_query_all
qs = query_test_A
results = NodeFactory.connection.query(qs)
assert_not_nil(results)
assert_instance_of(Array, results)
assert_equal(46, results.size)
result = results.first
assert_instance_of(Array, result)
assert_equal(3, result.size)
end
def test_B_query_subjects
qs = query_test_B
results = NodeFactory.connection.query(qs)
assert_not_nil(results)
assert_instance_of(Array, results)
results.uniq!
assert_equal(11, results.size)
result = results.first
assert_kind_of(Node, result)
end
def test_C_query_predicates
qs = query_test_C
results = NodeFactory.connection.query(qs)
assert_not_nil(results)
assert_instance_of(Array, results)
results.uniq!
assert_equal(9, results.size)
result = results.first
assert_kind_of(Node, result)
end
def test_D_query_objects
qs = query_test_D
results = NodeFactory.connection.query(qs)
assert_not_nil(results)
assert_instance_of(Array, results)
results.uniq!
assert_equal(31, results.size)
result = results.first
assert_kind_of(Node, result)
end
def test_E_query_subject_by_predicate_and_literal_object
qs = query_test_E
results = NodeFactory.connection.query(qs)
assert_not_nil(results)
assert_instance_of(Array, results)
assert_equal(1, results.size)
assert_equal('http://m3pe.org/activerdf/test/test_set_Instance_7', results.first.uri)
end
def test_F_query_subject_by_predicate_and_resource_object
qs = query_test_F
results = NodeFactory.connection.query(qs)
assert_not_nil(results)
assert_instance_of(Array, results)
assert_equal(2, results.size)
for result in results
assert_match(/http:\/\/m3pe\.org\/activerdf\/test\/test_set_Instance_(7|10)/, result.uri)
end
end
def test_G_query_predicate_by_subject_and_literal_object
qs = query_test_G
results = NodeFactory.connection.query(qs)
assert_not_nil(results)
assert_instance_of(Array, results)
assert_equal(1, results.size)
assert_kind_of(Resource, results.first)
assert_equal('http://m3pe.org/activerdf/test/name', results.first.uri)
end
def test_H_query_predicate_by_subject_and_resource_object
qs = query_test_H
results = NodeFactory.connection.query(qs)
assert_not_nil(results)
assert_instance_of(Array, results)
assert_equal(1, results.size)
assert_kind_of(Resource, results.first)
assert_equal('http://m3pe.org/activerdf/test/knows', results.first.uri)
end
def test_I_query_literal_object_by_subject_and_predicate
qs = query_test_I
results = NodeFactory.connection.query(qs)
assert_not_nil(results)
assert_instance_of(Array, results)
assert_equal(1, results.size)
assert_kind_of(Literal, results.first)
assert_equal('renaud', results.first.value)
end
def test_J_query_resource_object_by_subject_and_predicate
qs = query_test_J
results = NodeFactory.connection.query(qs)
assert_not_nil(results)
assert_instance_of(Array, results)
assert_equal(1, results.size)
assert_kind_of(Resource, results.first)
assert_equal('http://m3pe.org/activerdf/test/test_set_Instance_9', results.first.uri)
end
private
def query_test_A
qe = QueryEngine.new
qe.add_binding_variables(:s, :p, :o)
qe.add_condition(:s, :p, :o)
return qe.generate
end
def query_test_B
qe = QueryEngine.new
qe.add_binding_variables(:s)
qe.add_condition(:s, :p, :o)
return qe.generate
end
def query_test_C
qe = QueryEngine.new
qe.add_binding_variables(:p)
qe.add_condition(:s, :p, :o)
return qe.generate
end
def query_test_D
qe = QueryEngine.new
qe.add_binding_variables(:o)
qe.add_condition(:s, :p, :o)
return qe.generate
end
def query_test_E
predicate = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/age')
object = NodeFactory.create_literal("23", 'xsd:integer')
qe = QueryEngine.new
qe.add_binding_variables(:s)
qe.add_condition(:s, predicate, object)
return qe.generate
end
def query_test_F
predicate = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/knows')
object = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_9')
qe = QueryEngine.new
qe.add_binding_variables(:s)
qe.add_condition(:s, predicate, object)
return qe.generate
end
# Query for predicates linking test_set_Instance_7 to the literal 'renaud'.
def query_test_G
  subj = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_7')
  target = NodeFactory.create_literal('renaud', 'xsd:string')
  engine = QueryEngine.new
  engine.add_binding_variables(:p)
  engine.add_condition(subj, :p, target)
  engine.generate
end
# Query for predicates linking test_set_Instance_7 to test_set_Instance_9.
def query_test_H
  subj = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_7')
  target = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_9')
  engine = QueryEngine.new
  engine.add_binding_variables(:p)
  engine.add_condition(subj, :p, target)
  engine.generate
end
# Query for objects of test_set_Instance_7's 'name' predicate.
def query_test_I
  subj = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_7')
  name = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/name')
  engine = QueryEngine.new
  engine.add_binding_variables(:o)
  engine.add_condition(subj, name, :o)
  engine.generate
end
# Query for objects of test_set_Instance_7's 'knows' predicate.
def query_test_J
  subj = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_7')
  knows = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/knows')
  engine = QueryEngine.new
  engine.add_binding_variables(:o)
  engine.add_condition(subj, knows, :o)
  engine.generate
end
end
Updated tests to the new API.
# = test_yars_basic_query.rb
#
# Test Unit of Yars Adapter query method
#
# == Project
#
# * ActiveRDF
# <http://m3pe.org/activerdf/>
#
# == Authors
#
# * Eyal Oren <first dot last at deri dot org>
# * Renaud Delbru <first dot last at deri dot org>
#
# == Copyright
#
# (c) 2005-2006 by Eyal Oren and Renaud Delbru - All Rights Reserved
#
require 'test/unit'
require 'active_rdf'
require 'node_factory'
require 'test/adapter/yars/manage_yars_db'
# Test Unit of the YARS adapter's query method. Each test generates a query
# via the private query_test_* builders and runs it through the shared
# NodeFactory connection against a scratch YARS context.
class TestYarsAdapterBasicQuery < Test::Unit::TestCase
  # FIX: the original wrote `@context = 'test_query'` in the class body,
  # which creates a *class-level* instance variable. Instance methods such as
  # setup/teardown have their own (nil) @context, so setup_yars/delete_yars
  # and the connection params all received nil. A constant is visible from
  # both class and instance scope.
  CONTEXT = 'test_query'

  def setup
    setup_yars(CONTEXT)
    params = { :adapter => :yars, :host => DB_HOST, :port => 8080, :context => CONTEXT }
    NodeFactory.connection(params)
  end

  def teardown
    delete_yars(CONTEXT)
  end

  # A full wildcard query returns every triple in the fixture (46),
  # each as a 3-element [s, p, o] array.
  def test_A_query_all
    qs = query_test_A
    results = NodeFactory.connection.query(qs)
    assert_not_nil(results)
    assert_instance_of(Array, results)
    assert_equal(46, results.size)
    result = results.first
    assert_instance_of(Array, result)
    assert_equal(3, result.size)
  end

  # 11 distinct subjects exist in the fixture.
  def test_B_query_subjects
    qs = query_test_B
    results = NodeFactory.connection.query(qs)
    assert_not_nil(results)
    assert_instance_of(Array, results)
    results.uniq!
    assert_equal(11, results.size)
    result = results.first
    assert_kind_of(Node, result)
  end

  # 9 distinct predicates exist in the fixture.
  def test_C_query_predicates
    qs = query_test_C
    results = NodeFactory.connection.query(qs)
    assert_not_nil(results)
    assert_instance_of(Array, results)
    results.uniq!
    assert_equal(9, results.size)
    result = results.first
    assert_kind_of(Node, result)
  end

  # 25 distinct objects exist in the fixture.
  def test_D_query_objects
    qs = query_test_D
    results = NodeFactory.connection.query(qs)
    assert_not_nil(results)
    assert_instance_of(Array, results)
    results.uniq!
    assert_equal(25, results.size)
    result = results.first
    assert_kind_of(Node, result)
  end

  def test_E_query_subject_by_predicate_and_literal_object
    qs = query_test_E
    results = NodeFactory.connection.query(qs)
    assert_not_nil(results)
    assert_instance_of(Array, results)
    assert_equal(1, results.size)
    assert_equal('http://m3pe.org/activerdf/test/test_set_Instance_7', results.first.uri)
  end

  def test_F_query_subject_by_predicate_and_resource_object
    qs = query_test_F
    results = NodeFactory.connection.query(qs)
    assert_not_nil(results)
    assert_instance_of(Array, results)
    assert_equal(2, results.size)
    for result in results
      assert_match(/http:\/\/m3pe\.org\/activerdf\/test\/test_set_Instance_(7|10)/, result.uri)
    end
  end

  def test_G_query_predicate_by_subject_and_literal_object
    qs = query_test_G
    results = NodeFactory.connection.query(qs)
    assert_not_nil(results)
    assert_instance_of(Array, results)
    assert_equal(1, results.size)
    assert_kind_of(Resource, results.first)
    assert_equal('http://m3pe.org/activerdf/test/name', results.first.uri)
  end

  def test_H_query_predicate_by_subject_and_resource_object
    qs = query_test_H
    results = NodeFactory.connection.query(qs)
    assert_not_nil(results)
    assert_instance_of(Array, results)
    assert_equal(1, results.size)
    assert_kind_of(Resource, results.first)
    assert_equal('http://m3pe.org/activerdf/test/knows', results.first.uri)
  end

  def test_I_query_literal_object_by_subject_and_predicate
    qs = query_test_I
    results = NodeFactory.connection.query(qs)
    assert_not_nil(results)
    assert_instance_of(Array, results)
    assert_equal(1, results.size)
    assert_kind_of(Literal, results.first)
    assert_equal('renaud', results.first.value)
  end

  def test_J_query_resource_object_by_subject_and_predicate
    qs = query_test_J
    results = NodeFactory.connection.query(qs)
    assert_not_nil(results)
    assert_instance_of(Array, results)
    assert_equal(1, results.size)
    assert_kind_of(Resource, results.first)
    assert_equal('http://m3pe.org/activerdf/test/test_set_Instance_9', results.first.uri)
  end

  private

  # Query binding all of ?s ?p ?o over a single wildcard pattern.
  def query_test_A
    qe = QueryEngine.new
    qe.add_binding_variables(:s, :p, :o)
    qe.add_condition(:s, :p, :o)
    return qe.generate
  end

  # Query binding only ?s.
  def query_test_B
    qe = QueryEngine.new
    qe.add_binding_variables(:s)
    qe.add_condition(:s, :p, :o)
    return qe.generate
  end

  # Query binding only ?p.
  def query_test_C
    qe = QueryEngine.new
    qe.add_binding_variables(:p)
    qe.add_condition(:s, :p, :o)
    return qe.generate
  end

  # Query binding only ?o.
  def query_test_D
    qe = QueryEngine.new
    qe.add_binding_variables(:o)
    qe.add_condition(:s, :p, :o)
    return qe.generate
  end

  # Subjects whose 'age' is the integer literal 23.
  def query_test_E
    predicate = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/age')
    object = NodeFactory.create_literal("23", 'xsd:integer')
    qe = QueryEngine.new
    qe.add_binding_variables(:s)
    qe.add_condition(:s, predicate, object)
    return qe.generate
  end

  # Subjects that 'know' test_set_Instance_9.
  def query_test_F
    predicate = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/knows')
    object = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_9')
    qe = QueryEngine.new
    qe.add_binding_variables(:s)
    qe.add_condition(:s, predicate, object)
    return qe.generate
  end

  # Predicates linking test_set_Instance_7 to the literal 'renaud'.
  def query_test_G
    subject = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_7')
    object = NodeFactory.create_literal('renaud', 'xsd:string')
    qe = QueryEngine.new
    qe.add_binding_variables(:p)
    qe.add_condition(subject, :p, object)
    return qe.generate
  end

  # Predicates linking test_set_Instance_7 to test_set_Instance_9.
  def query_test_H
    subject = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_7')
    object = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_9')
    qe = QueryEngine.new
    qe.add_binding_variables(:p)
    qe.add_condition(subject, :p, object)
    return qe.generate
  end

  # Objects of test_set_Instance_7's 'name' predicate.
  def query_test_I
    subject = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_7')
    predicate = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/name')
    qe = QueryEngine.new
    qe.add_binding_variables(:o)
    qe.add_condition(subject, predicate, :o)
    return qe.generate
  end

  # Objects of test_set_Instance_7's 'knows' predicate.
  def query_test_J
    subject = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/test_set_Instance_7')
    predicate = NodeFactory.create_identified_resource('http://m3pe.org/activerdf/test/knows')
    qe = QueryEngine.new
    qe.add_binding_variables(:o)
    qe.add_condition(subject, predicate, :o)
    return qe.generate
  end
end
|
module ActiveSupport
# Backtraces often include many lines that are not relevant for the context
# under review. This makes it hard to find the signal amongst the backtrace
# noise, and adds debugging time. With a BacktraceCleaner, filters and
# silencers are used to remove the noisy lines, so that only the most relevant
# lines remain.
#
# Filters are used to modify lines of data, while silencers are used to remove
# lines entirely. The typical filter use case is to remove lengthy path
# information from the start of each line, and view file paths relevant to the
# app directory instead of the file system root. The typical silencer use case
# is to exclude the output of a noisy library from the backtrace, so that you
# can focus on the rest.
#
# bc = BacktraceCleaner.new
# bc.add_filter { |line| line.gsub(Rails.root, '') } # strip the Rails.root prefix
# bc.add_silencer { |line| line =~ /mongrel|rubygems/ } # skip any lines from mongrel or rubygems
# bc.clean(exception.backtrace) # perform the cleanup
#
# To reconfigure an existing BacktraceCleaner (like the default one in Rails)
# and show as much data as possible, you can always call
# <tt>BacktraceCleaner#remove_silencers!</tt>, which will restore the
# backtrace to a pristine state. If you need to reconfigure an existing
# BacktraceCleaner so that it does not filter or modify the paths of any lines
# of the backtrace, you can call <tt>BacktraceCleaner#remove_filters!</tt>
# These two methods will give you a completely untouched backtrace.
#
# Inspired by the Quiet Backtrace gem by Thoughtbot.
class BacktraceCleaner
def initialize
@filters, @silencers = [], []
end
# Returns the backtrace after all filters and silencers have been run
# against it. Filters run first, then silencers.
def clean(backtrace, kind = :silent)
filtered = filter_backtrace(backtrace)
case kind
when :silent
silence(filtered)
when :noise
noise(filtered)
else
filtered
end
end
alias :filter :clean
# Adds a filter from the block provided. Each line in the backtrace will be
# mapped against this filter.
#
# # Will turn "/my/rails/root/app/models/person.rb" into "/app/models/person.rb"
# backtrace_cleaner.add_filter { |line| line.gsub(Rails.root, '') }
def add_filter(&block)
@filters << block
end
# Adds a silencer from the block provided. If the silencer returns +true+
# for a given line, it will be excluded from the clean backtrace.
#
# # Will reject all lines that include the word "mongrel", like "/gems/mongrel/server.rb" or "/app/my_mongrel_server/rb"
# backtrace_cleaner.add_silencer { |line| line =~ /mongrel/ }
def add_silencer(&block)
@silencers << block
end
# Removes all silencers, but leaves in the filters. Useful if your
# context of debugging suddenly expands as you suspect a bug in one of
# the libraries you use.
def remove_silencers!
@silencers = []
end
# Removes all filters, but leaves in the silencers. Useful if you suddenly
# need to see entire filepaths in the backtrace that you had already
# filtered out.
def remove_filters!
@filters = []
end
private
# Maps every line through each registered filter, in registration order.
def filter_backtrace(backtrace)
@filters.each do |f|
backtrace = backtrace.map { |line| f.call(line) }
end
backtrace
end
# Drops every line for which any silencer returns truthy.
def silence(backtrace)
@silencers.each do |s|
backtrace = backtrace.reject { |line| s.call(line) }
end
backtrace
end
# The complement of +silence+: only the lines the silencers would remove.
def noise(backtrace)
backtrace - silence(backtrace)
end
end
end
Keep method's description/documentation consistent [ci skip]
module ActiveSupport
  # Backtraces often contain many frames that are irrelevant to the problem
  # being investigated, which buries the signal in noise. BacktraceCleaner
  # removes that noise with two kinds of hooks: filters, which rewrite
  # individual lines (e.g. stripping a long common path prefix so paths read
  # relative to the app directory), and silencers, which drop lines entirely
  # (e.g. frames produced by a noisy library).
  #
  #   bc = BacktraceCleaner.new
  #   bc.add_filter { |line| line.gsub(Rails.root, '') } # strip the Rails.root prefix
  #   bc.add_silencer { |line| line =~ /mongrel|rubygems/ } # skip any lines from mongrel or rubygems
  #   bc.clean(exception.backtrace) # perform the cleanup
  #
  # To reconfigure an existing BacktraceCleaner (like the default one in
  # Rails) and show as much data as possible, call
  # <tt>BacktraceCleaner#remove_silencers!</tt>; to additionally stop lines
  # from being modified, call <tt>BacktraceCleaner#remove_filters!</tt>.
  # Together they yield a completely untouched backtrace.
  #
  # Inspired by the Quiet Backtrace gem by Thoughtbot.
  class BacktraceCleaner
    def initialize
      @filters = []
      @silencers = []
    end

    # Returns the backtrace after all filters and silencers have run.
    # Filters are applied first; then +kind+ selects the view: :silent keeps
    # only un-silenced lines, :noise keeps only the silenced ones, and any
    # other value returns the filtered backtrace untouched.
    def clean(backtrace, kind = :silent)
      filtered = filter_backtrace(backtrace)
      if kind == :silent
        silence(filtered)
      elsif kind == :noise
        noise(filtered)
      else
        filtered
      end
    end
    alias :filter :clean

    # Registers a filter block. Each line in the backtrace is mapped
    # through every registered filter.
    #
    #   # Turns "/my/rails/root/app/models/person.rb" into "/app/models/person.rb"
    #   backtrace_cleaner.add_filter { |line| line.gsub(Rails.root, '') }
    def add_filter(&block)
      @filters.push(block)
    end

    # Registers a silencer block. Lines for which it returns +true+ are
    # excluded from the clean backtrace.
    #
    #   # Rejects all lines that include the word "mongrel"
    #   backtrace_cleaner.add_silencer { |line| line =~ /mongrel/ }
    def add_silencer(&block)
      @silencers.push(block)
    end

    # Removes all silencers, but leaves in the filters. Useful if your
    # context of debugging suddenly expands as you suspect a bug in one of
    # the libraries you use.
    def remove_silencers!
      @silencers = []
    end

    # Removes all filters, but leaves in the silencers. Useful if you
    # suddenly need to see entire filepaths that you had filtered out.
    def remove_filters!
      @filters = []
    end

    private

    # Maps every line through each registered filter, in registration order.
    def filter_backtrace(backtrace)
      @filters.inject(backtrace) do |trace, filter|
        trace.map { |line| filter.call(line) }
      end
    end

    # Drops every line for which any silencer returns truthy.
    def silence(backtrace)
      @silencers.inject(backtrace) do |trace, silencer|
        trace.reject { |line| silencer.call(line) }
      end
    end

    # The complement of +silence+: only the lines the silencers would remove.
    def noise(backtrace)
      backtrace - silence(backtrace)
    end
  end
end
|
# Gem specification for license_finder.
# Put lib/ on the load path so the gem's own platform/version files can be
# required while this spec is evaluated.
lib = File.expand_path('../lib/', __FILE__)
$:.unshift lib unless $:.include?(lib)
require 'license_finder/platform'
require 'license_finder/version'
Gem::Specification.new do |s|
s.required_ruby_version = '>= 1.9.3'
s.name = "license_finder"
s.version = LicenseFinder::VERSION
s.authors = [
"Jacob Maine",
"Matthew Kane Parker",
"Ian Lesperance",
"David Edwards",
"Paul Meskers",
"Brent Wheeldon",
"Trevor John",
"David Tengdin",
"William Ramsey",
"David Dening",
"Geoff Pleiss",
"Mike Chinigo",
"Mike Dalessio"
]
s.email = ["labs-commoncode@pivotal.io"]
s.homepage = "https://github.com/pivotal/LicenseFinder"
s.summary = "Audit the OSS licenses of your application's dependencies."
s.description = <<-DESCRIPTION
LicenseFinder works with your package managers to find
dependencies, detect the licenses of the packages in them, compare
those licenses against a user-defined whitelist, and give you an
actionable exception report.
DESCRIPTION
s.license = "MIT"
# Runtime dependencies.
s.add_dependency "bundler"
s.add_dependency "thor"
s.add_dependency "httparty"
s.add_dependency "xml-simple"
s.add_dependency "rubyzip"
s.add_dependency "with_env"
# Development dependencies (CocoaPods only installs on macOS).
s.add_development_dependency "capybara", "~> 2.0.0"
s.add_development_dependency "cocoapods", "0.34.0" if LicenseFinder::Platform.darwin?
s.add_development_dependency "fakefs", "~> 0.6.7"
s.add_development_dependency "pry"
s.add_development_dependency "rake"
s.add_development_dependency "rspec", "~> 3"
s.add_development_dependency "rspec-its"
s.add_development_dependency "webmock", "~> 1.13"
# to preserve ruby < 2.2.2 support.
s.add_development_dependency 'rack', ((RUBY_VERSION < '2.2.2') ? '1.6.0' : '> 1.6')
# temporary to preserve ruby 1.9.3 support.
s.add_development_dependency "mime-types", "< 3.0"
# Package contents come straight from git's index.
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
end
Restrict `with_env` dependency to preserve ruby 1.9.3 support
# Gem specification for license_finder.
# Put lib/ on the load path so the gem's own platform/version files can be
# required while this spec is evaluated.
lib = File.expand_path('../lib/', __FILE__)
$:.unshift lib unless $:.include?(lib)
require 'license_finder/platform'
require 'license_finder/version'
Gem::Specification.new do |s|
s.required_ruby_version = '>= 1.9.3'
s.name = "license_finder"
s.version = LicenseFinder::VERSION
s.authors = [
"Jacob Maine",
"Matthew Kane Parker",
"Ian Lesperance",
"David Edwards",
"Paul Meskers",
"Brent Wheeldon",
"Trevor John",
"David Tengdin",
"William Ramsey",
"David Dening",
"Geoff Pleiss",
"Mike Chinigo",
"Mike Dalessio"
]
s.email = ["labs-commoncode@pivotal.io"]
s.homepage = "https://github.com/pivotal/LicenseFinder"
s.summary = "Audit the OSS licenses of your application's dependencies."
s.description = <<-DESCRIPTION
LicenseFinder works with your package managers to find
dependencies, detect the licenses of the packages in them, compare
those licenses against a user-defined whitelist, and give you an
actionable exception report.
DESCRIPTION
s.license = "MIT"
# Runtime dependencies.
s.add_dependency "bundler"
s.add_dependency "thor"
s.add_dependency "httparty"
s.add_dependency "xml-simple"
s.add_dependency "rubyzip"
# to preserve ruby 1.9.3 support
s.add_dependency 'with_env', ((RUBY_VERSION <= '1.9.3') ? '1.0.0' : '> 1.1')
# Development dependencies (CocoaPods only installs on macOS).
s.add_development_dependency "capybara", "~> 2.0.0"
s.add_development_dependency "cocoapods", "0.34.0" if LicenseFinder::Platform.darwin?
s.add_development_dependency "fakefs", "~> 0.6.7"
s.add_development_dependency "pry"
s.add_development_dependency "rake"
s.add_development_dependency "rspec", "~> 3"
s.add_development_dependency "rspec-its"
s.add_development_dependency "webmock", "~> 1.13"
# to preserve ruby < 2.2.2 support.
s.add_development_dependency 'rack', ((RUBY_VERSION < '2.2.2') ? '1.6.0' : '> 1.6')
# temporary to preserve ruby 1.9.3 support.
s.add_development_dependency "mime-types", "< 3.0"
# Package contents come straight from git's index.
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
end
|
# frozen_string_literal: true
# Gem specification for license_finder. The version is read from the VERSION
# file next to this spec rather than from a required Ruby file.
version = File.read(File.expand_path('VERSION', __dir__)).strip
Gem::Specification.new do |s|
s.required_ruby_version = '>= 2.3.3'
s.name = 'license_finder'
s.version = version
s.authors = [
'Ryan Collins',
'Vikram Yadav',
'Daniil Kouznetsov',
'Andy Shen',
'Shane Lattanzio',
'Li Sheng Tai',
'Vlad vassilovski',
'Jacob Maine',
'Matthew Kane Parker',
'Ian Lesperance',
'David Edwards',
'Paul Meskers',
'Brent Wheeldon',
'Trevor John',
'David Tengdin',
'William Ramsey',
'David Dening',
'Geoff Pleiss',
'Mike Chinigo',
'Mike Dalessio'
]
s.email = ['labs-commoncode@pivotal.io']
s.homepage = 'https://github.com/pivotal-legacy/LicenseFinder'
s.summary = "Audit the OSS licenses of your application's dependencies."
s.description = <<-DESCRIPTION
LicenseFinder works with your package managers to find
dependencies, detect the licenses of the packages in them, compare
those licenses against a user-defined whitelist, and give you an
actionable exception report.
DESCRIPTION
s.license = 'MIT'
# Runtime dependencies.
s.add_dependency 'bundler'
s.add_dependency 'rubyzip'
s.add_dependency 'thor'
s.add_dependency 'toml', '0.2.0'
s.add_dependency 'with_env', '1.1.0'
s.add_dependency 'xml-simple'
# Development dependencies (CocoaPods only installs on macOS).
s.add_development_dependency 'addressable', '2.6.0'
s.add_development_dependency 'capybara', '~> 3.14.0'
s.add_development_dependency 'cocoapods', '>= 1.0.0' if RUBY_PLATFORM =~ /darwin/
s.add_development_dependency 'fakefs', '~> 0.19.2'
s.add_development_dependency 'mime-types', '3.2.2'
s.add_development_dependency 'pry'
s.add_development_dependency 'rake'
s.add_development_dependency 'rspec', '~> 3'
s.add_development_dependency 'rspec-its'
s.add_development_dependency 'rubocop', '~> 0.66.0'
s.add_development_dependency 'webmock', '~> 1.13'
# to preserve ruby < 2.2.2 support.
s.add_development_dependency 'rack', (RUBY_VERSION < '2.2.2' ? '1.6.0' : '> 1.6')
s.add_development_dependency 'rack-test', (RUBY_VERSION < '2.2.2' ? '0.7.0' : '> 0.7')
# Ship what git tracks, minus the test suites.
s.files = `git ls-files`.split("\n").reject { |f| f.start_with?('spec', 'features') }
s.executables = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
end
Update capybara requirement from ~> 3.14.0 to ~> 3.15.0
Updates the requirements on [capybara](https://github.com/teamcapybara/capybara) to permit the latest version.
- [Release notes](https://github.com/teamcapybara/capybara/releases)
- [Changelog](https://github.com/teamcapybara/capybara/blob/master/History.md)
- [Commits](https://github.com/teamcapybara/capybara/compare/3.14.0...3.15.0)
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com>
# frozen_string_literal: true
# Gem specification for license_finder. The version is read from the VERSION
# file next to this spec rather than from a required Ruby file.
version = File.read(File.expand_path('VERSION', __dir__)).strip
Gem::Specification.new do |s|
s.required_ruby_version = '>= 2.3.3'
s.name = 'license_finder'
s.version = version
s.authors = [
'Ryan Collins',
'Vikram Yadav',
'Daniil Kouznetsov',
'Andy Shen',
'Shane Lattanzio',
'Li Sheng Tai',
'Vlad vassilovski',
'Jacob Maine',
'Matthew Kane Parker',
'Ian Lesperance',
'David Edwards',
'Paul Meskers',
'Brent Wheeldon',
'Trevor John',
'David Tengdin',
'William Ramsey',
'David Dening',
'Geoff Pleiss',
'Mike Chinigo',
'Mike Dalessio'
]
s.email = ['labs-commoncode@pivotal.io']
s.homepage = 'https://github.com/pivotal-legacy/LicenseFinder'
s.summary = "Audit the OSS licenses of your application's dependencies."
s.description = <<-DESCRIPTION
LicenseFinder works with your package managers to find
dependencies, detect the licenses of the packages in them, compare
those licenses against a user-defined whitelist, and give you an
actionable exception report.
DESCRIPTION
s.license = 'MIT'
# Runtime dependencies.
s.add_dependency 'bundler'
s.add_dependency 'rubyzip'
s.add_dependency 'thor'
s.add_dependency 'toml', '0.2.0'
s.add_dependency 'with_env', '1.1.0'
s.add_dependency 'xml-simple'
# Development dependencies (CocoaPods only installs on macOS).
s.add_development_dependency 'addressable', '2.6.0'
s.add_development_dependency 'capybara', '~> 3.15.0'
s.add_development_dependency 'cocoapods', '>= 1.0.0' if RUBY_PLATFORM =~ /darwin/
s.add_development_dependency 'fakefs', '~> 0.19.2'
s.add_development_dependency 'mime-types', '3.2.2'
s.add_development_dependency 'pry'
s.add_development_dependency 'rake'
s.add_development_dependency 'rspec', '~> 3'
s.add_development_dependency 'rspec-its'
s.add_development_dependency 'rubocop', '~> 0.66.0'
s.add_development_dependency 'webmock', '~> 1.13'
# to preserve ruby < 2.2.2 support.
s.add_development_dependency 'rack', (RUBY_VERSION < '2.2.2' ? '1.6.0' : '> 1.6')
s.add_development_dependency 'rack-test', (RUBY_VERSION < '2.2.2' ? '0.7.0' : '> 0.7')
# Ship what git tracks, minus the test suites.
s.files = `git ls-files`.split("\n").reject { |f| f.start_with?('spec', 'features') }
s.executables = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
end
|
# Manages temperature-control sessions for a Brewbit device. Every action is
# scoped to @device, which authorize_action resolves from params[:device_id];
# #show additionally allows access via the session's access token.
module Brewbit
  class DeviceSessionsController < ApplicationController
    layout 'brewbit/layouts/devices'
    before_action :authorize_action

    # GET /sessions -- lists finished (inactive) sessions, newest first.
    def index
      @device_sessions = @device.sessions.where(active: false).order( 'created_at DESC' )
    end

    # GET /sessions/1
    def show
    end

    # GET /sessions/new
    def new
      @device_session = Defaults.build_device_session @device, brewbit_current_user.temperature_scale
    end

    # GET /sessions/1/edit
    def edit
    end

    # POST /sessions -- creates a session, deactivating any session already
    # active on the same sensor, then pushes the new session to the device.
    def create
      begin
        DeviceSession.transaction do
          # Setpoints are stored in Fahrenheit; convert the submitted value
          # when the user works in Celsius.
          if brewbit_current_user.temperature_scale == 'C'
            params[:device_session][:static_setpoint] = TemperatureConverter.celsius_to_fahrenheit params[:device_session][:static_setpoint]
          end
          # create the new session, but don't save it yet because we need to
          # find and deactivate the old session first
          @device_session = DeviceSession.new(device_session_params)
          @device_session.active = true
          # deactivate old session
          old_session = @device.active_session_for @device_session.sensor_index
          if old_session
            old_session.active = false
            old_session.save!
          end
          # save the new session
          @device_session.save!
          # BUG FIX: this call was accidentally commented out, so new sessions
          # were saved but never pushed to the device -- even though the
          # success/error messages below and #stop_session all assume the
          # device is notified. Running it inside the transaction keeps the
          # save and the device push atomic (the generic rescue reports a
          # failed push as "could not be sent").
          DeviceService.send_session @device, @device_session
        end
      rescue ActiveRecord::RecordInvalid => invalid
        @device_session = reset_device_session_on_error
        flash[:error] = invalid.record.errors.full_messages.to_sentence
        render action: 'new'
      rescue => e
        # NOTE(review): stray debug output to stdout; consider removing.
        puts $!.inspect, $@
        logger.debug e.inspect
        @device_session = reset_device_session_on_error
        flash[:error] = 'Session could not be sent to the device.'
        render action: 'new'
      else
        redirect_to @device, notice: 'Device session was successfully sent.'
      end
    end

    # DELETE /sessions/1
    def destroy
      @device_session.destroy
      redirect_to device_sessions_path, notice: 'Device session was successfully destroyed.'
    end

    # Deactivates the current session and pushes a copy with no output
    # settings to the device -- presumably to clear the device's outputs;
    # confirm against DeviceService.
    def stop_session
      begin
        DeviceSession.transaction do
          empty_session = @device_session.clone
          empty_session.output_settings = []
          @device_session.active = false
          @device_session.save!
          DeviceService.send_session @device, empty_session
        end
        redirect_to @device, error: 'Device session was successfully destroyed.'
      rescue => exception
        logger.debug "--- #{exception.inspect}"
        redirect_to @device, error: 'Session could not be sent to the device.'
      end
    end

    private

    # Loads @device (and @device_session for member actions); grants access to
    # the device's owner, or on #show to anyone presenting the session's
    # access token. Redirects to the root path otherwise.
    def authorize_action
      @device = Device.find(params[:device_id])
      @active_session_output_info = @device.active_session_output_info
      unless ['new', 'create', 'index'].include? action_name
        @device_session = @device.sessions.find(params[:id])
        @token_authenticated = action_name == 'show' && params[:token] && @device_session.access_token == params[:token]
      end
      user = brewbit_current_user
      @user_authenticated = user && @device.user == user
      unless @user_authenticated or @token_authenticated
        flash[:error] = "You don't have access to that resource"
        redirect_to '/'
      end
    end

    # Only allow a trusted parameter "white list" through.
    def device_session_params
      params.require(:device_session).permit(
        :name, :device_id, :sensor_index, :setpoint_type, :static_setpoint, :temp_profile_id,
        output_settings_attributes: [:id, :output_index, :function, :cycle_delay, :_destroy])
    end

    # Rebuilds an unsaved session from the submitted params for re-rendering
    # the form; strips the _destroy flags from the nested output settings.
    def reset_device_session_on_error
      dsp = device_session_params
      dsp[:output_settings_attributes].each {|id, attrs| attrs.delete '_destroy' }
      DeviceSession.new(dsp)
    end
  end
end
Undo accidental comment.
# Manages temperature-control sessions for a Brewbit device. Every action is
# scoped to @device, which authorize_action resolves from params[:device_id];
# #show additionally allows access via the session's access token.
module Brewbit
class DeviceSessionsController < ApplicationController
layout 'brewbit/layouts/devices'
before_action :authorize_action
# GET /sessions -- lists finished (inactive) sessions, newest first.
def index
@device_sessions = @device.sessions.where(active: false).order( 'created_at DESC' )
end
# GET /sessions/1
def show
end
# GET /sessions/new
def new
@device_session = Defaults.build_device_session @device, brewbit_current_user.temperature_scale
end
# GET /sessions/1/edit
def edit
end
# POST /sessions -- creates a session, deactivating any session already
# active on the same sensor, then pushes the new session to the device.
def create
begin
DeviceSession.transaction do
# Setpoints are stored in Fahrenheit; convert submitted Celsius input.
if brewbit_current_user.temperature_scale == 'C'
params[:device_session][:static_setpoint] = TemperatureConverter.celsius_to_fahrenheit params[:device_session][:static_setpoint]
end
# create the new session, but don't save it yet because we need to
# find and deactivate the old session first
@device_session = DeviceSession.new(device_session_params)
@device_session.active = true
# deactivate old session
old_session = @device.active_session_for @device_session.sensor_index
if old_session
old_session.active = false
old_session.save!
end
# save the new session
@device_session.save!
# Push the new session to the hardware inside the transaction so a
# failed push rolls back the save (reported by the rescue below).
DeviceService.send_session @device, @device_session
end
rescue ActiveRecord::RecordInvalid => invalid
@device_session = reset_device_session_on_error
flash[:error] = invalid.record.errors.full_messages.to_sentence
render action: 'new'
rescue => e
# NOTE(review): stray debug output to stdout; consider removing.
puts $!.inspect, $@
logger.debug e.inspect
@device_session = reset_device_session_on_error
flash[:error] = 'Session could not be sent to the device.'
render action: 'new'
else
# begin/else: runs only when no exception was raised.
redirect_to @device, notice: 'Device session was successfully sent.'
end
end
# DELETE /sessions/1
def destroy
@device_session.destroy
redirect_to device_sessions_path, notice: 'Device session was successfully destroyed.'
end
# Deactivates the current session and pushes a copy with no output settings
# to the device -- presumably to clear the device's outputs; confirm against
# DeviceService.
def stop_session
begin
DeviceSession.transaction do
empty_session = @device_session.clone
empty_session.output_settings = []
@device_session.active = false
@device_session.save!
DeviceService.send_session @device, empty_session
end
# NOTE(review): redirect_to only supports :notice/:alert flash options by
# default, so :error here may be dropped unless add_flash_types :error is
# configured -- and this success-path message reads like a destroy notice.
# Confirm intent.
redirect_to @device, error: 'Device session was successfully destroyed.'
rescue => exception
logger.debug "--- #{exception.inspect}"
redirect_to @device, error: 'Session could not be sent to the device.'
end
end
private
# Loads @device (and @device_session for member actions); grants access to
# the device's owner, or on #show to anyone presenting the session's access
# token. Redirects to the root path otherwise.
def authorize_action
@device = Device.find(params[:device_id])
@active_session_output_info = @device.active_session_output_info
unless ['new', 'create', 'index'].include? action_name
@device_session = @device.sessions.find(params[:id])
@token_authenticated = action_name == 'show' && params[:token] && @device_session.access_token == params[:token]
end
user = brewbit_current_user
@user_authenticated = user && @device.user == user
unless @user_authenticated or @token_authenticated
flash[:error] = "You don't have access to that resource"
redirect_to '/'
end
end
# Only allow a trusted parameter "white list" through.
def device_session_params
params.require(:device_session).permit(
:name, :device_id, :sensor_index, :setpoint_type, :static_setpoint, :temp_profile_id,
output_settings_attributes: [:id, :output_index, :function, :cycle_delay, :_destroy])
end
# Rebuilds an unsaved session from the submitted params for re-rendering the
# form; strips the _destroy flags from the nested output settings.
def reset_device_session_on_error
dsp = device_session_params
dsp[:output_settings_attributes].each {|id, attrs| attrs.delete '_destroy' }
DeviceSession.new(dsp)
end
end
end
|
module Effective
module CrudController
extend ActiveSupport::Concern
included do
class << self
# Memoized Effective::Resource describing this controller (built from its path).
def effective_resource
@_effective_resource ||= Effective::Resource.new(controller_path)
end
# Hash of commit label => options driving submit buttons; delegates to the
# effective_resource and is customized via the .submit class method.
def submits
effective_resource.submits
end
end
define_actions_from_routes
define_callbacks :resource_render, :resource_save, :resource_error
end
module ClassMethods
# Automatically respond to any action defined via the routes file
# Automatically respond to any action defined via the routes file: declares
# a controller action for every member and collection route of this resource.
def define_actions_from_routes
  routed = Effective::Resource.new(controller_path)
  routed.member_actions.each { |name| member_action(name) }
  routed.collection_actions.each { |name| collection_action(name) }
end
# https://github.com/rails/rails/blob/v5.1.4/actionpack/lib/abstract_controller/callbacks.rb
# Registers callbacks to run before a resource action renders.
# (Same _insert_callbacks plumbing as AbstractController::Callbacks.)
def before_render(*names, &blk)
  _insert_callbacks(names, blk) do |name, options|
    set_callback(:resource_render, :before, name, options)
  end
end
# Registers callbacks to run after a resource is saved.
def after_save(*names, &blk)
  _insert_callbacks(names, blk) do |name, options|
    set_callback(:resource_save, :after, name, options)
  end
end
# Registers callbacks to run after a resource error.
def after_error(*names, &blk)
  _insert_callbacks(names, blk) do |name, options|
    set_callback(:resource_error, :after, name, options)
  end
end
# This controls the form submit options of effective_submit
# It also controls the redirect path for any actions
#
# Effective::Resource will populate this with all member_post_actions
# And you can control the details with this DSL:
#
# submit :approve, 'Save and Approve', unless: -> { approved? }, redirect: :show
#
# submit :toggle, 'Blacklist', if: -> { sync? }, class: 'btn btn-primary'
# submit :toggle, 'Whitelist', if: -> { !sync? }, class: 'btn btn-primary'
# submit :save, 'Save', success: -> { "#{self} was saved okay!" }
# Registers, replaces or removes a commit (submit button) for +action+.
# Controls the options of effective_submit and the redirect path of actions:
#
#   submit :approve, 'Save and Approve', unless: -> { approved? }, redirect: :show
#   submit :toggle, 'Blacklist', if: -> { sync? }, class: 'btn btn-primary'
#   submit :approve, false                 # remove every commit for :approve
#   submit :approve, 'Label', false        # remove just that commit label
#
# if:/unless: must be callable; :redirect_to is normalized to :redirect.
# FIX (idiom): the delete_if block parameters used to shadow the method's
# own +commit+/+args+ parameters; they are renamed here to avoid the
# shadowing warning and the resulting confusion. Behavior is unchanged.
def submit(action, commit = nil, args = {})
  raise 'expected args to be a Hash or false' unless args.kind_of?(Hash) || args == false

  # submit :action, false -> remove every commit registered for this action.
  if commit == false
    submits.delete_if { |_label, opts| opts[:action] == action }; return
  end

  # submit :action, 'Label', false -> remove just this commit label.
  if args == false
    submits.delete(commit); return
  end

  if commit # Overwrite the default member action when given a custom commit
    submits.delete_if { |_label, opts| opts[:default] && opts[:action] == action }
  end

  if args.key?(:if) && args[:if].respond_to?(:call) == false
    raise "expected if: to be callable. Try submit :approve, 'Save and Approve', if: -> { finished? }"
  end

  if args.key?(:unless) && args[:unless].respond_to?(:call) == false
    raise "expected unless: to be callable. Try submit :approve, 'Save and Approve', unless: -> { declined? }"
  end

  redirect = args.delete(:redirect_to) || args.delete(:redirect) # Remove redirect_to keyword. use redirect.
  args.merge!(action: action, redirect: redirect)

  (submits[commit] ||= {}).merge!(args)
end
# page_title 'My Title', only: [:new]
# DSL: assigns @page_title from a before_action. Accepts a literal label or a
# block evaluated in the controller instance; opts (only:/except:) pass through.
def page_title(label = nil, opts = {}, &block)
raise 'expected a label or block' unless (label || block_given?)
instance_exec do
before_action(opts) do
# ||= so an action that already set @page_title wins over the DSL default.
@page_title ||= (block_given? ? instance_exec(&block) : label)
end
end
end
# resource_scope -> { current_user.things }
# resource_scope -> { Thing.active.where(user: current_user) }
# resource_scope do
# { user_id: current_user.id }
# end
# Nested controllers? sure
# resource_scope -> { User.find(params[:user_id]).things }
# Return value should be:
# a Relation: Thing.where(user: current_user)
# a Hash: { user_id: current_user.id }
# DSL: memoizes @_effective_resource_scope in a before_action; the private
# instance-level #resource_scope method later turns it into a Relation.
def resource_scope(obj = nil, opts = {}, &block)
raise 'expected a proc or block' unless (obj.respond_to?(:call) || block_given?)
instance_exec do
before_action(opts) do
# Evaluated in the controller instance; a block wins over the proc argument.
@_effective_resource_scope ||= instance_exec(&(block_given? ? block : obj))
end
end
end
# Defines a function to handle a GET and POST request on this URL
# Just add a member action to your routes, you shouldn't need to call this directly
def member_action(action)
  define_method(action) do
    self.resource ||= resource_scope.find(params[:id])

    EffectiveResources.authorize!(self, action, resource)
    @page_title ||= "#{action.to_s.titleize} #{resource}"

    # GET simply renders; any write verb performs the action.
    return if request.get?
    member_post_action(action)
  end
end
# Defines a function to handle a GET and POST request on this URL
# Handles bulk_ actions
# Just add a member action to your routes, you shouldn't need to call this directly
# You shouldn't need to call this directly
def collection_action(action)
define_method(action) do
# Order matters: ids param narrows the scope first, then a same-named model
# scope, then everything (||= keeps whichever assigned first).
if params[:ids].present?
self.resources ||= resource_scope.where(id: params[:ids])
end
if effective_resource.scope?(action)
self.resources ||= resource_scope.public_send(action)
end
self.resources ||= resource_scope.all
EffectiveResources.authorize!(self, action, resource_klass)
@page_title ||= "#{action.to_s.titleize} #{resource_plural_name.titleize}"
# POST/PATCH/PUT performs the bulk action; GET just renders.
collection_post_action(action) unless request.get?
end
end
end
# GET /things. Builds the datatable when a matching datatable class exists.
def index
@page_title ||= resource_plural_name.titleize
EffectiveDatatables.authorize!(self, :index, resource_klass)
self.resources ||= resource_scope.all
if resource_datatable_class
@datatable ||= resource_datatable_class.new(self, resource_datatable_attributes)
end
run_callbacks(:resource_render)
end
# GET /things/new. Pre-assigns whitelisted query params onto the new resource,
# and supports building it from an existing record via ?duplicate_id=.
def new
self.resource ||= resource_scope.new
# Only assign params the resource actually has writers for.
self.resource.assign_attributes(
params.to_unsafe_h.except(:controller, :action, :id).select { |k, v| resource.respond_to?("#{k}=") }
)
if params[:duplicate_id]
duplicate = resource_scope.find(params[:duplicate_id])
EffectiveResources.authorize!(self, :show, duplicate)
self.resource = duplicate_resource(duplicate)
raise "expected duplicate_resource to return an unsaved new #{resource_klass} resource" unless resource.kind_of?(resource_klass) && resource.new_record?
# Re-word the previous action's success flash for the duplicate form.
if (message = flash[:success].to_s).present?
flash.delete(:success)
flash.now[:success] = "#{message.chomp('.')}. Adding another #{resource_name.titleize} based on previous."
end
end
@page_title ||= "New #{resource_name.titleize}"
EffectiveResources.authorize!(self, :new, resource)
run_callbacks(:resource_render)
end
# POST /things. The commit button may map to a custom member action via the
# submit DSL; that action is authorized instead of :create for a plain save.
def create
self.resource ||= resource_scope.new
@page_title ||= "New #{resource_name.titleize}"
action = commit_action[:action]
EffectiveResources.authorize!(self, action, resource) unless action == :save
EffectiveResources.authorize!(self, :create, resource) if action == :save
resource.created_by ||= current_user if resource.respond_to?(:created_by=)
respond_to do |format|
if save_resource(resource, action, send(resource_params_method_name))
format.html do
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
end
format.js do
# JS only redirects when the submit DSL asked for a specific path.
if specific_redirect_path?
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
else
flash.now[:success] ||= resource_flash(:success, resource, action)
reload_resource
# create.js.erb
end
end
else
flash.delete(:success)
flash.now[:danger] ||= resource_flash(:danger, resource, action)
format.html { render :new }
format.js {} # create.js.erb
end
end
end
# GET /things/:id
def show
self.resource ||= resource_scope.find(params[:id])
@page_title ||= resource.to_s
EffectiveResources.authorize!(self, :show, resource)
run_callbacks(:resource_render)
end
# GET /things/:id/edit
def edit
self.resource ||= resource_scope.find(params[:id])
@page_title ||= "Edit #{resource}"
EffectiveResources.authorize!(self, :edit, resource)
run_callbacks(:resource_render)
end
# PATCH/PUT /things/:id. Mirrors #create: the commit button may map to a
# custom member action (submit DSL) which is authorized instead of :update.
def update
  self.resource ||= resource_scope.find(params[:id])

  # ||= (was =) so the page_title DSL or an earlier before_action can
  # override the default, consistent with #edit and #create.
  @page_title ||= "Edit #{resource}"

  action = commit_action[:action]
  EffectiveResources.authorize!(self, action, resource) unless action == :save
  EffectiveResources.authorize!(self, :update, resource) if action == :save

  respond_to do |format|
    if save_resource(resource, action, send(resource_params_method_name))
      format.html do
        flash[:success] ||= resource_flash(:success, resource, action)
        redirect_to(resource_redirect_path)
      end

      format.js do
        # JS only redirects when the submit DSL asked for a specific path.
        if specific_redirect_path?
          flash[:success] ||= resource_flash(:success, resource, action)
          redirect_to(resource_redirect_path)
        else
          flash.now[:success] ||= resource_flash(:success, resource, action)
          reload_resource
          # update.js.erb
        end
      end
    else
      flash.delete(:success)
      flash.now[:danger] ||= resource_flash(:danger, resource, action)

      format.html { render :edit }
      format.js { } # update.js.erb
    end
  end
end
# DELETE /things/:id. Goes through save_resource so destroy! gets the same
# transaction, flash and callback handling as every other member action.
def destroy
self.resource = resource_scope.find(params[:id])
action = :destroy
@page_title ||= "Destroy #{resource}"
EffectiveResources.authorize!(self, action, resource)
respond_to do |format|
if save_resource(resource, action)
format.html do
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
end
format.js do
if specific_redirect_path?
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
else
flash.now[:success] ||= resource_flash(:success, resource, action)
# destroy.js.erb
end
end
else
# Failure: HTML redirects away (the record still exists); JS re-renders.
flash.delete(:success)
format.html do
flash[:danger] ||= resource_flash(:danger, resource, action)
redirect_to(resource_redirect_path)
end
format.js do
flash.now[:danger] ||= resource_flash(:danger, resource, action)
# destroy.js.erb
end
end
end
end
# Handles the POST/PATCH/PUT side of a routed member action (e.g. :approve).
# Params are optional - a bare button with no form still works (rescue {}).
def member_post_action(action)
raise 'expected post, patch or put http action' unless (request.post? || request.patch? || request.put?)
respond_to do |format|
if save_resource(resource, action, (send(resource_params_method_name) rescue {}))
format.html do
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
end
format.js do
if specific_redirect_path?(action)
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
else
flash.now[:success] ||= resource_flash(:success, resource, action)
reload_resource
render_member_action_view(action)
end
end
else
flash.delete(:success)
flash.now[:danger] ||= resource_flash(:danger, resource, action)
format.html do
# Re-render whichever screen the request came from (edit/new/show);
# otherwise redirect, promoting the danger flash so it survives.
if resource_edit_path && (referer_redirect_path || '').end_with?(resource_edit_path)
@page_title ||= "Edit #{resource}"
render :edit
elsif resource_new_path && (referer_redirect_path || '').end_with?(resource_new_path)
@page_title ||= "New #{resource_name.titleize}"
render :new
elsif resource_show_path && (referer_redirect_path || '').end_with?(resource_show_path)
@page_title ||= resource_name.titleize
render :show
else
@page_title ||= resource.to_s
flash[:danger] = flash.now[:danger]
redirect_to(referer_redirect_path || resource_redirect_path)
end
end
format.js { render_member_action_view(action) }
end
end
end
# Which member javascript view to render: #{action}.js or effective_resources member_action.js
def render_member_action_view(action)
  template = action
  template = :member_action unless lookup_context.template_exists?(action, _prefixes)

  render(template, locals: { action: action })
end
# No attributes are assigned or saved. We purely call action! on the resource
# Renders a JSON summary of how many of the selected resources succeeded.
def collection_post_action(action)
  # sub with \A (was gsub) so only a leading 'bulk_' prefix is stripped,
  # never an occurrence inside the action name.
  action = action.to_s.sub(/\Abulk_/, '').to_sym

  raise 'expected post, patch or put http action' unless (request.post? || request.patch? || request.put?)
  raise "expected #{resource_name} to respond to #{action}!" if resources.to_a.present? && !resources.first.respond_to?("#{action}!")

  successes = 0

  ActiveRecord::Base.transaction do
    successes = resources.select do |resource|
      begin
        # nil (unauthorized) and false both count as failures.
        resource.public_send("#{action}!") if EffectiveResources.authorized?(self, action, resource)
      rescue
        false # Best-effort: one failure must not abort the rest
      end
    end.length
  end

  render json: { status: 200, message: "Successfully #{action_verb(action)} #{successes} / #{resources.length} selected #{resource_plural_name}" }
end
protected
# This calls the appropriate member action, probably save!, on the resource.
# Returns true on success. On any error the transaction is rolled back, a
# danger flash is built from the exception, :resource_error callbacks run,
# and false is returned.
def save_resource(resource, action = :save, to_assign = {}, &block)
raise "expected @#{resource_name} to respond to #{action}!" unless resource.respond_to?("#{action}!")
resource.current_user ||= current_user if resource.respond_to?(:current_user=)
ActiveRecord::Base.transaction do
begin
resource.assign_attributes(to_assign) if to_assign.present?
# The bang action must return truthy; an explicit false is a failure.
if resource.public_send("#{action}!") == false
raise("failed to #{action} #{resource}")
end
yield if block_given?
run_callbacks(:resource_save)
return true
rescue => e
# Roll back state-machine style columns so the re-rendered form is consistent.
if resource.respond_to?(:restore_attributes) && resource.persisted?
resource.restore_attributes(['status', 'state'])
end
flash.delete(:success)
flash.now[:danger] = flash_danger(resource, action, e: e)
raise ActiveRecord::Rollback
end
end
# Only reached after ActiveRecord::Rollback (success returns above).
run_callbacks(:resource_error)
false
end
# Refreshes the in-memory resource from the database, when it supports reload.
def reload_resource
  resource.reload if resource.respond_to?(:reload)
end

# Should return a new resource based on the passed one
def duplicate_resource(resource)
  duplicate = resource.dup
  duplicate
end
# Resolves the flash message for this status (:success or :danger).
# Priority: the submit DSL's status option (callable or literal), falling
# back to the generic flash_success / flash_danger helpers.
def resource_flash(status, resource, action)
  submitted = commit_action[status]
  message = submitted.respond_to?(:call) ? instance_exec(&submitted) : submitted

  return message if message.present?

  # BUG FIX: previously `message || case ...` returned a blank-but-truthy
  # message (e.g. "") instead of falling through to the default helpers.
  case status
  when :success then flash_success(resource, action)
  when :danger then flash_danger(resource, action)
  else
    raise "unknown resource flash status: #{status}"
  end
end
# Where to go after a successful action:
# 1) the submit DSL's :redirect option (symbol, path string, or callable),
# 2) otherwise inferred from the commit button label,
# 3) first existing path in the candidate list wins; root_path as last resort.
def resource_redirect_path
redirect = commit_action[:redirect].respond_to?(:call) ? instance_exec(&commit_action[:redirect]) : commit_action[:redirect]
commit_action_redirect = case redirect
when :index ; resource_index_path
when :edit ; resource_edit_path
when :show ; resource_show_path
when :new ; resource_new_path
when :duplicate ; resource_duplicate_path
when :back ; referer_redirect_path
when :save ; [resource_edit_path, resource_show_path].compact.first
when Symbol ; resource_action_path(commit_action[:action])
when String ; redirect
else ; nil
end
return commit_action_redirect if commit_action_redirect.present?
case params[:commit].to_s
when 'Save'
[resource_edit_path, resource_show_path, resource_index_path]
when 'Save and Add New', 'Add New'
[resource_new_path, resource_index_path]
when 'Duplicate'
[resource_duplicate_path, resource_index_path]
when 'Continue', 'Save and Continue'
[resource_index_path]
else
[referer_redirect_path, resource_edit_path, resource_show_path, resource_index_path]
end.compact.first.presence || root_path
end
# The HTTP referer, when it is safe to redirect back to: not the page of a
# just-destroyed resource, not a duplicate form, and recognized by our routes.
def referer_redirect_path
url = request.referer.to_s
return if (resource && resource.respond_to?(:destroyed?) && resource.destroyed? && url.include?("/#{resource.to_param}"))
return if url.include?('duplicate_id=')
# Malformed URLs or unroutable paths are rejected (rescue false).
return unless (Rails.application.routes.recognize_path(URI(url).path) rescue false)
url
end
# Route helpers delegated to Effective::Resource; each returns a path or nil
# when the route doesn't exist for this controller.
def resource_index_path
effective_resource.action_path(:index)
end
def resource_new_path
effective_resource.action_path(:new)
end
def resource_duplicate_path
effective_resource.action_path(:new, duplicate_id: resource.id)
end
def resource_edit_path
effective_resource.action_path(:edit, resource)
end
def resource_show_path
effective_resource.action_path(:show, resource)
end
def resource_destroy_path
effective_resource.action_path(:destroy, resource)
end
def resource_action_path(action)
effective_resource.action_path(action.to_sym, resource)
end
# Accessors for the conventionally-named instance variables (@thing/@things).
# The plural pair previously went through send(:instance_variable_get, ...);
# that indirection added nothing - call the methods directly, matching the
# singular pair.
def resource # @thing
  instance_variable_get("@#{resource_name}")
end

def resource=(instance)
  instance_variable_set("@#{resource_name}", instance)
end

def resources # @things
  instance_variable_get("@#{resource_plural_name}")
end

def resources=(instance)
  instance_variable_set("@#{resource_plural_name}", instance)
end
private
# Memoized Effective::Resource built from this controller's path.
def effective_resource
@_effective_resource ||= Effective::Resource.new(controller_path)
end
def resource_name # 'thing'
effective_resource.name
end
def resource_klass # Thing
effective_resource.klass
end
def resource_human_name
effective_resource.human_name
end
def resource_plural_name # 'things'
effective_resource.plural_name
end
# Past-tenses a member action for user-facing messages:
# :approve -> 'approved', :print -> 'printed'.
def action_verb(action)
  word = action.to_s
  suffix = word.end_with?('e') ? 'd' : 'ed'
  word + suffix
end
# The submit DSL entry matching the pressed commit button,
# falling back to the default :save entry, then a bare {action: :save}.
def commit_action
self.class.submits[params[:commit].to_s] ||
self.class.submits.find { |_, v| v[:action] == :save }&.last ||
{ action: :save }
end
# The submit DSL entry for a given action (used by member_post_action).
def submit_action(action)
self.class.submits[action.to_s] ||
self.class.submits.find { |_, v| v[:action] == action }&.last ||
{ action: action }
end
# true when the submit DSL supplied an explicit :redirect for this request.
def specific_redirect_path?(action = nil)
submit = (action.nil? ? commit_action : submit_action(action))
(submit[:redirect].respond_to?(:call) ? instance_exec(&submit[:redirect]) : submit[:redirect]).present?
end
# Returns an ActiveRecord relation based on the computed value of `resource_scope` dsl method
# Accepts a Relation, a Hash of where-conditions, a Symbol naming a model
# scope, or nil (klass.all); anything else raises. Memoized per request.
def resource_scope # Thing
@_effective_resource_relation ||= (
relation = case @_effective_resource_scope # If this was initialized by the resource_scope before_filter
when ActiveRecord::Relation
@_effective_resource_scope
when Hash
effective_resource.klass.where(@_effective_resource_scope)
when Symbol
effective_resource.klass.send(@_effective_resource_scope)
when nil
effective_resource.klass.all
else
raise "expected resource_scope method to return an ActiveRecord::Relation or Hash"
end
unless relation.kind_of?(ActiveRecord::Relation)
raise("unable to build resource_scope for #{effective_resource.klass || 'unknown klass'}.")
end
relation
)
end
# Seed attributes handed to the datatable: the scope's where-conditions.
def resource_datatable_attributes
resource_scope.where_values_hash.symbolize_keys
end
def resource_datatable_class # ThingsDatatable
effective_resource.datatable_klass
end
# Name of the strong-params method to call; the literal 'params' when none
# of the conventional methods is defined (respond_to? includes private).
def resource_params_method_name
["#{resource_name}_params", "#{resource_plural_name}_params", 'permitted_params'].find { |name| respond_to?(name, true) } || 'params'
end
end
end
Add render_member_action helper (rename of render_member_action_view)
module Effective
module CrudController
extend ActiveSupport::Concern
included do
class << self
# Memoized class-level Effective::Resource for this controller.
def effective_resource
@_effective_resource ||= Effective::Resource.new(controller_path)
end
# The submit DSL registry, keyed by commit button label.
def submits
effective_resource.submits
end
end
# Generate controller actions from routes, and declare the CRUD callbacks.
define_actions_from_routes
define_callbacks :resource_render, :resource_save, :resource_error
end
module ClassMethods
# Automatically respond to any action defined via the routes file
def define_actions_from_routes
  routed = Effective::Resource.new(controller_path)

  routed.member_actions.each { |routed_action| member_action(routed_action) }
  routed.collection_actions.each { |routed_action| collection_action(routed_action) }
end
# https://github.com/rails/rails/blob/v5.1.4/actionpack/lib/abstract_controller/callbacks.rb
def before_render(*names, &blk)
_insert_callbacks(names, blk) { |name, options| set_callback(:resource_render, :before, name, options) }
end
def after_save(*names, &blk)
_insert_callbacks(names, blk) { |name, options| set_callback(:resource_save, :after, name, options) }
end
def after_error(*names, &blk)
_insert_callbacks(names, blk) { |name, options| set_callback(:resource_error, :after, name, options) }
end
# This controls the form submit options of effective_submit
# It also controls the redirect path for any actions
#
# Effective::Resource will populate this with all member_post_actions
# And you can control the details with this DSL:
#
# submit :approve, 'Save and Approve', unless: -> { approved? }, redirect: :show
#
# submit :toggle, 'Blacklist', if: -> { sync? }, class: 'btn btn-primary'
# submit :toggle, 'Whitelist', if: -> { !sync? }, class: 'btn btn-primary'
# submit :save, 'Save', success: -> { "#{self} was saved okay!" }
def submit(action, commit = nil, args = {})
raise 'expected args to be a Hash or false' unless args.kind_of?(Hash) || args == false
if commit == false
submits.delete_if { |commit, args| args[:action] == action }; return
end
if args == false
submits.delete(commit); return
end
if commit # Overwrite the default member action when given a custom commit
submits.delete_if { |commit, args| args[:default] && args[:action] == action }
end
if args.key?(:if) && args[:if].respond_to?(:call) == false
raise "expected if: to be callable. Try submit :approve, 'Save and Approve', if: -> { finished? }"
end
if args.key?(:unless) && args[:unless].respond_to?(:call) == false
raise "expected unless: to be callable. Try submit :approve, 'Save and Approve', unless: -> { declined? }"
end
redirect = args.delete(:redirect_to) || args.delete(:redirect) # Remove redirect_to keyword. use redirect.
args.merge!(action: action, redirect: redirect)
(submits[commit] ||= {}).merge!(args)
end
# page_title 'My Title', only: [:new]
def page_title(label = nil, opts = {}, &block)
raise 'expected a label or block' unless (label || block_given?)
instance_exec do
before_action(opts) do
@page_title ||= (block_given? ? instance_exec(&block) : label)
end
end
end
# resource_scope -> { current_user.things }
# resource_scope -> { Thing.active.where(user: current_user) }
# resource_scope do
# { user_id: current_user.id }
# end
# Nested controllers? sure
# resource_scope -> { User.find(params[:user_id]).things }
# Return value should be:
# a Relation: Thing.where(user: current_user)
# a Hash: { user_id: current_user.id }
def resource_scope(obj = nil, opts = {}, &block)
raise 'expected a proc or block' unless (obj.respond_to?(:call) || block_given?)
instance_exec do
before_action(opts) do
@_effective_resource_scope ||= instance_exec(&(block_given? ? block : obj))
end
end
end
# Defines a function to handle a GET and POST request on this URL
# Just add a member action to your routes, you shouldn't need to call this directly
def member_action(action)
define_method(action) do
self.resource ||= resource_scope.find(params[:id])
EffectiveResources.authorize!(self, action, resource)
@page_title ||= "#{action.to_s.titleize} #{resource}"
member_post_action(action) unless request.get?
end
end
# Defines a function to handle a GET and POST request on this URL
# Handles bulk_ actions
# Just add a member action to your routes, you shouldn't need to call this directly
# You shouldn't need to call this directly
def collection_action(action)
define_method(action) do
if params[:ids].present?
self.resources ||= resource_scope.where(id: params[:ids])
end
if effective_resource.scope?(action)
self.resources ||= resource_scope.public_send(action)
end
self.resources ||= resource_scope.all
EffectiveResources.authorize!(self, action, resource_klass)
@page_title ||= "#{action.to_s.titleize} #{resource_plural_name.titleize}"
collection_post_action(action) unless request.get?
end
end
end
def index
@page_title ||= resource_plural_name.titleize
EffectiveDatatables.authorize!(self, :index, resource_klass)
self.resources ||= resource_scope.all
if resource_datatable_class
@datatable ||= resource_datatable_class.new(self, resource_datatable_attributes)
end
run_callbacks(:resource_render)
end
def new
self.resource ||= resource_scope.new
self.resource.assign_attributes(
params.to_unsafe_h.except(:controller, :action, :id).select { |k, v| resource.respond_to?("#{k}=") }
)
if params[:duplicate_id]
duplicate = resource_scope.find(params[:duplicate_id])
EffectiveResources.authorize!(self, :show, duplicate)
self.resource = duplicate_resource(duplicate)
raise "expected duplicate_resource to return an unsaved new #{resource_klass} resource" unless resource.kind_of?(resource_klass) && resource.new_record?
if (message = flash[:success].to_s).present?
flash.delete(:success)
flash.now[:success] = "#{message.chomp('.')}. Adding another #{resource_name.titleize} based on previous."
end
end
@page_title ||= "New #{resource_name.titleize}"
EffectiveResources.authorize!(self, :new, resource)
run_callbacks(:resource_render)
end
def create
self.resource ||= resource_scope.new
@page_title ||= "New #{resource_name.titleize}"
action = commit_action[:action]
EffectiveResources.authorize!(self, action, resource) unless action == :save
EffectiveResources.authorize!(self, :create, resource) if action == :save
resource.created_by ||= current_user if resource.respond_to?(:created_by=)
respond_to do |format|
if save_resource(resource, action, send(resource_params_method_name))
format.html do
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
end
format.js do
if specific_redirect_path?
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
else
flash.now[:success] ||= resource_flash(:success, resource, action)
reload_resource
# create.js.erb
end
end
else
flash.delete(:success)
flash.now[:danger] ||= resource_flash(:danger, resource, action)
format.html { render :new }
format.js {} # create.js.erb
end
end
end
def show
self.resource ||= resource_scope.find(params[:id])
@page_title ||= resource.to_s
EffectiveResources.authorize!(self, :show, resource)
run_callbacks(:resource_render)
end
def edit
self.resource ||= resource_scope.find(params[:id])
@page_title ||= "Edit #{resource}"
EffectiveResources.authorize!(self, :edit, resource)
run_callbacks(:resource_render)
end
def update
self.resource ||= resource_scope.find(params[:id])
@page_title = "Edit #{resource}"
action = commit_action[:action]
EffectiveResources.authorize!(self, action, resource) unless action == :save
EffectiveResources.authorize!(self, :update, resource) if action == :save
respond_to do |format|
if save_resource(resource, action, send(resource_params_method_name))
format.html do
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
end
format.js do
if specific_redirect_path?
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
else
flash.now[:success] ||= resource_flash(:success, resource, action)
reload_resource
# update.js.erb
end
end
else
flash.delete(:success)
flash.now[:danger] ||= resource_flash(:danger, resource, action)
format.html { render :edit }
format.js { } # update.js.erb
end
end
end
def destroy
self.resource = resource_scope.find(params[:id])
action = :destroy
@page_title ||= "Destroy #{resource}"
EffectiveResources.authorize!(self, action, resource)
respond_to do |format|
if save_resource(resource, action)
format.html do
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
end
format.js do
if specific_redirect_path?
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
else
flash.now[:success] ||= resource_flash(:success, resource, action)
# destroy.js.erb
end
end
else
flash.delete(:success)
format.html do
flash[:danger] ||= resource_flash(:danger, resource, action)
redirect_to(resource_redirect_path)
end
format.js do
flash.now[:danger] ||= resource_flash(:danger, resource, action)
# destroy.js.erb
end
end
end
end
def member_post_action(action)
raise 'expected post, patch or put http action' unless (request.post? || request.patch? || request.put?)
respond_to do |format|
if save_resource(resource, action, (send(resource_params_method_name) rescue {}))
format.html do
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
end
format.js do
if specific_redirect_path?(action)
flash[:success] ||= resource_flash(:success, resource, action)
redirect_to(resource_redirect_path)
else
flash.now[:success] ||= resource_flash(:success, resource, action)
reload_resource
render_member_action(action)
end
end
else
flash.delete(:success)
flash.now[:danger] ||= resource_flash(:danger, resource, action)
format.html do
if resource_edit_path && (referer_redirect_path || '').end_with?(resource_edit_path)
@page_title ||= "Edit #{resource}"
render :edit
elsif resource_new_path && (referer_redirect_path || '').end_with?(resource_new_path)
@page_title ||= "New #{resource_name.titleize}"
render :new
elsif resource_show_path && (referer_redirect_path || '').end_with?(resource_show_path)
@page_title ||= resource_name.titleize
render :show
else
@page_title ||= resource.to_s
flash[:danger] = flash.now[:danger]
redirect_to(referer_redirect_path || resource_redirect_path)
end
end
format.js { render_member_action(action) }
end
end
end
# Which member javascript view to render: #{action}.js or effective_resources member_action.js
def render_member_action(action)
  template = action
  template = :member_action unless lookup_context.template_exists?(action, _prefixes)

  render(template, locals: { action: action })
end
# No attributes are assigned or saved. We purely call action! on the resource
def collection_post_action(action)
action = action.to_s.gsub('bulk_', '').to_sym
raise 'expected post, patch or put http action' unless (request.post? || request.patch? || request.put?)
raise "expected #{resource_name} to respond to #{action}!" if resources.to_a.present? && !resources.first.respond_to?("#{action}!")
successes = 0
ActiveRecord::Base.transaction do
successes = resources.select do |resource|
begin
resource.public_send("#{action}!") if EffectiveResources.authorized?(self, action, resource)
rescue => e
false
end
end.length
end
render json: { status: 200, message: "Successfully #{action_verb(action)} #{successes} / #{resources.length} selected #{resource_plural_name}" }
end
protected
# This calls the appropriate member action, probably save!, on the resource.
def save_resource(resource, action = :save, to_assign = {}, &block)
raise "expected @#{resource_name} to respond to #{action}!" unless resource.respond_to?("#{action}!")
resource.current_user ||= current_user if resource.respond_to?(:current_user=)
ActiveRecord::Base.transaction do
begin
resource.assign_attributes(to_assign) if to_assign.present?
if resource.public_send("#{action}!") == false
raise("failed to #{action} #{resource}")
end
yield if block_given?
run_callbacks(:resource_save)
return true
rescue => e
if resource.respond_to?(:restore_attributes) && resource.persisted?
resource.restore_attributes(['status', 'state'])
end
flash.delete(:success)
flash.now[:danger] = flash_danger(resource, action, e: e)
raise ActiveRecord::Rollback
end
end
run_callbacks(:resource_error)
false
end
def reload_resource
self.resource.reload if resource.respond_to?(:reload)
end
# Should return a new resource based on the passed one
def duplicate_resource(resource)
resource.dup
end
# Resolves the flash message for this status (:success or :danger).
# Priority: the submit DSL's status option (callable or literal), falling
# back to the generic flash_success / flash_danger helpers.
def resource_flash(status, resource, action)
  submitted = commit_action[status]
  message = submitted.respond_to?(:call) ? instance_exec(&submitted) : submitted

  return message if message.present?

  # BUG FIX: previously `message || case ...` returned a blank-but-truthy
  # message (e.g. "") instead of falling through to the default helpers.
  case status
  when :success then flash_success(resource, action)
  when :danger then flash_danger(resource, action)
  else
    raise "unknown resource flash status: #{status}"
  end
end
def resource_redirect_path
redirect = commit_action[:redirect].respond_to?(:call) ? instance_exec(&commit_action[:redirect]) : commit_action[:redirect]
commit_action_redirect = case redirect
when :index ; resource_index_path
when :edit ; resource_edit_path
when :show ; resource_show_path
when :new ; resource_new_path
when :duplicate ; resource_duplicate_path
when :back ; referer_redirect_path
when :save ; [resource_edit_path, resource_show_path].compact.first
when Symbol ; resource_action_path(commit_action[:action])
when String ; redirect
else ; nil
end
return commit_action_redirect if commit_action_redirect.present?
case params[:commit].to_s
when 'Save'
[resource_edit_path, resource_show_path, resource_index_path]
when 'Save and Add New', 'Add New'
[resource_new_path, resource_index_path]
when 'Duplicate'
[resource_duplicate_path, resource_index_path]
when 'Continue', 'Save and Continue'
[resource_index_path]
else
[referer_redirect_path, resource_edit_path, resource_show_path, resource_index_path]
end.compact.first.presence || root_path
end
def referer_redirect_path
url = request.referer.to_s
return if (resource && resource.respond_to?(:destroyed?) && resource.destroyed? && url.include?("/#{resource.to_param}"))
return if url.include?('duplicate_id=')
return unless (Rails.application.routes.recognize_path(URI(url).path) rescue false)
url
end
def resource_index_path
effective_resource.action_path(:index)
end
def resource_new_path
effective_resource.action_path(:new)
end
def resource_duplicate_path
effective_resource.action_path(:new, duplicate_id: resource.id)
end
def resource_edit_path
effective_resource.action_path(:edit, resource)
end
def resource_show_path
effective_resource.action_path(:show, resource)
end
def resource_destroy_path
effective_resource.action_path(:destroy, resource)
end
def resource_action_path(action)
effective_resource.action_path(action.to_sym, resource)
end
# Accessors for the conventionally-named instance variables (@thing/@things).
# The plural pair previously went through send(:instance_variable_get, ...);
# that indirection added nothing - call the methods directly, matching the
# singular pair.
def resource # @thing
  instance_variable_get("@#{resource_name}")
end

def resource=(instance)
  instance_variable_set("@#{resource_name}", instance)
end

def resources # @things
  instance_variable_get("@#{resource_plural_name}")
end

def resources=(instance)
  instance_variable_set("@#{resource_plural_name}", instance)
end
private
def effective_resource
@_effective_resource ||= Effective::Resource.new(controller_path)
end
def resource_name # 'thing'
effective_resource.name
end
def resource_klass # Thing
effective_resource.klass
end
def resource_human_name
effective_resource.human_name
end
def resource_plural_name # 'things'
effective_resource.plural_name
end
def action_verb(action)
(action.to_s + (action.to_s.end_with?('e') ? 'd' : 'ed'))
end
def commit_action
self.class.submits[params[:commit].to_s] ||
self.class.submits.find { |_, v| v[:action] == :save }&.last ||
{ action: :save }
end
def submit_action(action)
self.class.submits[action.to_s] ||
self.class.submits.find { |_, v| v[:action] == action }&.last ||
{ action: action }
end
def specific_redirect_path?(action = nil)
submit = (action.nil? ? commit_action : submit_action(action))
(submit[:redirect].respond_to?(:call) ? instance_exec(&submit[:redirect]) : submit[:redirect]).present?
end
# Returns an ActiveRecord relation based on the computed value of `resource_scope` dsl method
def resource_scope # Thing
@_effective_resource_relation ||= (
relation = case @_effective_resource_scope # If this was initialized by the resource_scope before_filter
when ActiveRecord::Relation
@_effective_resource_scope
when Hash
effective_resource.klass.where(@_effective_resource_scope)
when Symbol
effective_resource.klass.send(@_effective_resource_scope)
when nil
effective_resource.klass.all
else
raise "expected resource_scope method to return an ActiveRecord::Relation or Hash"
end
unless relation.kind_of?(ActiveRecord::Relation)
raise("unable to build resource_scope for #{effective_resource.klass || 'unknown klass'}.")
end
relation
)
end
def resource_datatable_attributes
resource_scope.where_values_hash.symbolize_keys
end
def resource_datatable_class # ThingsDatatable
effective_resource.datatable_klass
end
def resource_params_method_name
["#{resource_name}_params", "#{resource_plural_name}_params", 'permitted_params'].find { |name| respond_to?(name, true) } || 'params'
end
end
end
|
# Shared controller filter for portal portlet CRUD.
module Gws::Portal::PortletFilter
  extend ActiveSupport::Concern

  included do
    menu_view 'gws/portal/common/portlets/menu'
    before_action :set_portlet_addons
  end

  private

  # Enforced attributes merged into every portlet build.
  def fix_params
    { cur_user: @cur_user, cur_site: @cur_site, setting_id: @portal.try(:id) }
  end

  # Defaults copied from the owning portal onto a new portlet.
  def pre_params
    { group_ids: @portal.group_ids, user_ids: @portal.user_ids }
  end

  def set_portlet_addons
    portlet_model = params[:portlet_model].presence
    # An existing item's model wins over the request param.
    portlet_model = @item.portlet_model if @item
    @addons = @model.portlet_addons(portlet_model) if portlet_model
  end

  def new_portlet
    @item = @model.new pre_params.merge(fix_params)
    @item.portlet_model = params[:portlet_model]
    raise "403" unless @item.allowed?(:edit, @cur_user, site: @cur_site)

    @item.name = @item.label(:portlet_model)
    # Without a valid portlet model, show the model chooser instead.
    render file: 'gws/portal/common/portlets/select_model' unless @item.portlet_model_enabled?
  end

  public

  def index
    @items = @portal.portlets.
      search(params[:s])
  end

  def new
    new_portlet
  end

  # Confirms on GET, then destroys all portlets and rebuilds the defaults.
  def reset
    raise '403' unless @portal.allowed?(:edit, @cur_user, site: @cur_site)
    return render(file: 'gws/portal/common/portlets/reset') unless request.post?

    @portal.portlets.destroy_all
    @portal.save_default_portlets
    # BUG FIX: `redirect_to action: :index, notice: ...` folded :notice into
    # the URL options, so it became a query parameter instead of a flash
    # message. Split the hashes so :notice is passed as a flash option.
    redirect_to({ action: :index }, { notice: I18n.t('ss.notice.initialized') })
  end
end
[fix] gws portlet reset notice (#2466)
# Shared controller behaviour for GWS portal portlet CRUD screens.
module Gws::Portal::PortletFilter
  extend ActiveSupport::Concern

  included do
    menu_view 'gws/portal/common/portlets/menu'
    before_action :set_portlet_addons
  end

  private

  # Attributes forced onto every portlet: current user/site and owning portal.
  def fix_params
    { cur_user: @cur_user, cur_site: @cur_site, setting_id: @portal.try(:id) }
  end

  # Default attributes for a newly built portlet, inherited from the portal.
  def pre_params
    { group_ids: @portal.group_ids, user_ids: @portal.user_ids }
  end

  # Resolves the portlet model (from params, or from the loaded item) and
  # loads its addon list for the form.
  def set_portlet_addons
    portlet_model = params[:portlet_model].presence
    portlet_model = @item.portlet_model if @item
    @addons = @model.portlet_addons(portlet_model) if portlet_model
  end

  # Builds a new portlet and enforces edit permission; falls back to the
  # model selection screen when no enabled portlet model was chosen.
  def new_portlet
    @item = @model.new pre_params.merge(fix_params)
    @item.portlet_model = params[:portlet_model]
    raise "403" unless @item.allowed?(:edit, @cur_user, site: @cur_site)
    @item.name = @item.label(:portlet_model)
    render file: 'gws/portal/common/portlets/select_model' unless @item.portlet_model_enabled?
  end

  public

  def index
    @items = @portal.portlets.
      search(params[:s])
  end

  def new
    new_portlet
  end

  # Destroys all portlets and restores the defaults. GET shows a
  # confirmation screen; only POST performs the reset.
  def reset
    raise '403' unless @portal.allowed?(:edit, @cur_user, site: @cur_site)
    return render(file: 'gws/portal/common/portlets/reset') unless request.post?
    @portal.portlets.destroy_all
    @portal.save_default_portlets
    # URL options and flash options are two separate hashes so :notice
    # becomes a flash message rather than a URL query parameter.
    redirect_to({ action: :index }, { notice: I18n.t('ss.notice.initialized') })
  end
end
|
# JSON API for comments on a news feed item; created comments are also
# forwarded as events/activities to the item's target.
class NewsFeedItemCommentsController < ProductController
  include MarkdownHelper

  before_action :find_product!
  before_action :set_news_feed_item!
  before_action :authenticate_user!, only: [:create]

  respond_to :json

  # Creates a comment by the current user and forwards it to the item's target.
  def create
    @item = @news_feed_item.news_feed_item_comments.create(
      user: current_user,
      body: params[:body]
    )
    forward_comment
    respond_with @item, location: product_updates_url(@product)
  end

  # Serializes the item's comments, oldest first.
  def index
    comments = ActiveModel::ArraySerializer.new(
      # Explicit order: relations are unordered by default, so without this
      # the comment order would be database-dependent.
      @news_feed_item.news_feed_item_comments.order(created_at: :asc),
      each_serializer: NewsFeedItemCommentSerializer
    )
    respond_with comments, location: product_url(@product)
  end

  # Publishes an event/activity for the freshly created comment (@item),
  # depending on the kind of object the news feed item targets.
  def forward_comment
    if target = @news_feed_item.target
      if target.is_a? TeamMembership
        # Team membership comments go to the product's main chat thread.
        wip = target.product.main_thread
        event = Event.create_from_comment(
          wip,
          Event::Comment,
          product_markdown(target.product,
            "_@#{target.user.username}: " + @item.body + "_"
          ),
          current_user
        )
        Activities::Chat.publish!(
          actor: event.user,
          subject: event,
          target: wip
        )
      elsif target.is_a? Wip
        event = Event.create_from_comment(
          target,
          Event::Comment,
          @item.body,
          current_user
        )
        Activities::Comment.publish!(
          actor: event.user,
          subject: event,
          target: target
        )
      else
        # Unknown target kinds get a plain comment activity, no event.
        Activities::Comment.publish!(
          actor: @item.user,
          subject: @item,
          target: target
        )
      end
    end
  end

  def set_news_feed_item!
    @news_feed_item = NewsFeedItem.find(params[:update_id])
  end
end
Set explicit order on NewsFeedItemComments
# JSON API for comments on a news feed item; created comments are also
# forwarded as events/activities to the item's target.
class NewsFeedItemCommentsController < ProductController
  include MarkdownHelper

  before_action :find_product!
  before_action :set_news_feed_item!
  before_action :authenticate_user!, only: [:create]

  respond_to :json

  # Creates a comment by the current user and forwards it to the item's target.
  def create
    @item = @news_feed_item.news_feed_item_comments.create(
      user: current_user,
      body: params[:body]
    )
    forward_comment
    respond_with @item, location: product_updates_url(@product)
  end

  # Serializes the item's comments, explicitly ordered oldest first.
  def index
    comments = ActiveModel::ArraySerializer.new(
      @news_feed_item.news_feed_item_comments.order(created_at: :asc),
      each_serializer: NewsFeedItemCommentSerializer
    )
    respond_with comments, location: product_url(@product)
  end

  # Publishes an event/activity for the freshly created comment (@item),
  # depending on the kind of object the news feed item targets.
  def forward_comment
    if target = @news_feed_item.target
      if target.is_a? TeamMembership
        # Team membership comments go to the product's main chat thread.
        wip = target.product.main_thread
        event = Event.create_from_comment(
          wip,
          Event::Comment,
          product_markdown(target.product,
            "_@#{target.user.username}: " + @item.body + "_"
          ),
          current_user
        )
        Activities::Chat.publish!(
          actor: event.user,
          subject: event,
          target: wip
        )
      elsif target.is_a? Wip
        event = Event.create_from_comment(
          target,
          Event::Comment,
          @item.body,
          current_user
        )
        Activities::Comment.publish!(
          actor: event.user,
          subject: event,
          target: target
        )
      else
        # Unknown target kinds get a plain comment activity, no event.
        Activities::Comment.publish!(
          actor: @item.user,
          subject: @item,
          target: target
        )
      end
    end
  end

  def set_news_feed_item!
    @news_feed_item = NewsFeedItem.find(params[:update_id])
  end
end
|
module Projects
  module Settings
    # Project-level CI/CD settings page, plus project cache reset.
    class CiCdController < Projects::ApplicationController
      before_action :authorize_admin_pipeline!

      # Assembles every instance variable the settings view renders.
      def show
        define_runners_variables
        define_secret_variables
        define_triggers_variables
        define_badges_variables
        define_auto_devops_variables
      end

      # Resets the project cache and reports the outcome via flash.
      def reset_cache
        if ResetProjectCacheService.new(@project, current_user).execute
          flash[:notice] = _("Project cache successfully reset.")
        else
          flash[:error] = _("Unable to reset project cache.")
        end
        redirect_to project_pipelines_path(@project)
      end

      private

      # Project runners, user-assignable runners (paginated, 20/page), and
      # active shared runners with their count.
      def define_runners_variables
        @project_runners = @project.runners.ordered
        @assignable_runners = current_user.ci_authorized_runners
          .assignable_for(project).ordered.page(params[:page]).per(20)
        @shared_runners = Ci::Runner.shared.active
        @shared_runners_count = @shared_runners.count(:all)
      end

      # Secret CI variables, decorated ("presented") for the current user.
      def define_secret_variables
        @variable = Ci::Variable.new(project: project)
          .present(current_user: current_user)
        @variables = project.variables.order_key_asc
          .map { |variable| variable.present(current_user: current_user) }
      end

      def define_triggers_variables
        @triggers = @project.triggers
        @trigger = Ci::Trigger.new
      end

      # Badge metadata for the requested ref (default branch when absent).
      def define_badges_variables
        @ref = params[:ref] || @project.default_branch || 'master'
        @badges = [Gitlab::Badge::Pipeline::Status,
                   Gitlab::Badge::Coverage::Report]
        @badges.map! do |badge|
          badge.new(@project, @ref).metadata
        end
      end

      def define_auto_devops_variables
        @auto_devops = @project.auto_devops || ProjectAutoDevops.new
      end
    end
  end
end
Add missing empty line in #reset_cache
module Projects
  module Settings
    # Project-level CI/CD settings page, plus project cache reset.
    class CiCdController < Projects::ApplicationController
      before_action :authorize_admin_pipeline!

      # Assembles every instance variable the settings view renders.
      def show
        define_runners_variables
        define_secret_variables
        define_triggers_variables
        define_badges_variables
        define_auto_devops_variables
      end

      # Resets the project cache and reports the outcome via flash.
      def reset_cache
        reset_succeeded = ResetProjectCacheService.new(@project, current_user).execute

        if reset_succeeded
          flash[:notice] = _("Project cache successfully reset.")
        else
          flash[:error] = _("Unable to reset project cache.")
        end

        redirect_to project_pipelines_path(@project)
      end

      private

      # Project runners, user-assignable runners (paginated, 20/page), and
      # active shared runners with their count.
      def define_runners_variables
        @project_runners = @project.runners.ordered
        @assignable_runners = current_user.ci_authorized_runners
          .assignable_for(project).ordered.page(params[:page]).per(20)
        @shared_runners = Ci::Runner.shared.active
        @shared_runners_count = @shared_runners.count(:all)
      end

      # Secret CI variables, decorated ("presented") for the current user.
      def define_secret_variables
        @variable = Ci::Variable.new(project: project).present(current_user: current_user)
        @variables = project.variables.order_key_asc.map do |variable|
          variable.present(current_user: current_user)
        end
      end

      def define_triggers_variables
        @triggers = @project.triggers
        @trigger = Ci::Trigger.new
      end

      # Badge metadata for the requested ref (default branch when absent).
      def define_badges_variables
        @ref = params[:ref] || @project.default_branch || 'master'

        badge_classes = [Gitlab::Badge::Pipeline::Status, Gitlab::Badge::Coverage::Report]
        @badges = badge_classes.map { |badge| badge.new(@project, @ref).metadata }
      end

      def define_auto_devops_variables
        @auto_devops = @project.auto_devops || ProjectAutoDevops.new
      end
    end
  end
end
|
module SparkPlugs
  # Base controller for the SparkPlugs engine. Inherits directly from
  # ActionController::Base, so the host application's ApplicationController
  # (its filters and default layout) does not apply here.
  class ApplicationController < ActionController::Base
  end
end
Allowing you to use default layout
# Base controller for the SparkPlugs engine. Inherits from the top-level
# ApplicationController so the host application's defaults (e.g. layout)
# apply. The compact class name form is deliberate: with it, the
# `ApplicationController` superclass reference resolves at the top level
# rather than inside the SparkPlugs namespace.
class SparkPlugs::ApplicationController < ApplicationController
end
|
module Spree
  module Admin
    # Registers this store for integration: provisions a dedicated
    # integrator user and records a store id in Spree::Config.
    class IntegrationController < Spree::Admin::BaseController
      def register
        email = 'integrator@spreecommerce.com'

        # Ensure the integrator user exists, with every role and an API key.
        if user = Spree::User.where('email' => email).first
          # do nothing, for now....
        else
          passwd = SecureRandom.hex(32)
          user = Spree::User.create('email' => email,
            'password' => passwd,
            'password_confirmation' => passwd)
          user.spree_roles << Spree::Role.all
          user.generate_spree_api_key!
        end

        # HTTParty.post to see if store is already registered (and get store_if so)
        #
        # if not another post to register
        #
        # NOTE(review): registration POST is not implemented yet; the store
        # id below is a hard-coded placeholder.
        Spree::Config[:store_id] = '123' #should be real store_id returned either post above.
        redirect_to :action => :show
      end
    end
  end
end
Use augury admin api for integration registration
module Spree
  module Admin
    # Registers this store with the augury admin service: provisions a
    # dedicated integrator user, then POSTs a signup and persists the
    # returned store id / API key in Spree::Config.
    class IntegrationController < Spree::Admin::BaseController
      include HTTParty
      base_uri 'http://augury-admin.dev/api'

      def register
        email = 'integrator@spreecommerce.com'

        # Ensure the integrator user exists, with every role and an API key.
        if user = Spree::User.where('email' => email).first
          # do nothing, for now....
        else
          passwd = SecureRandom.hex(32)
          user = Spree::User.create('email' => email,
            'password' => passwd,
            'password_confirmation' => passwd)
          user.spree_roles << Spree::Role.all
          user.generate_spree_api_key!
        end

        # Register Store if Spree::Config[:store_id] isn't present
        unless Spree::Config[:store_id].present?
          options = {
            body: {
              signup: {
                name: Spree::Config[:site_name],
                url: request.url,
                version: Spree.version,
                api_key: user.spree_api_key,
                email: email,
              }
            }
          }
          response = self.class.post('/signups.json', options)
          # 201 Created: persist the identifiers returned by the service.
          if response.code == 201
            Spree::Config[:store_id] = response["store_id"]
            Spree::Config[:pro_api_key] = response["auth_token"]
          end
        end

        redirect_to :action => :show
      end
    end
  end
end
|
module Spree
  module Admin
    # Admin CRUD for sale prices, applied per-taxon to every product.
    class SalePricesController < BaseController
      # before_filter :load_product
      respond_to :js, :html

      def index
        @sale_prices = Spree::SalePrice.all
        @taxons = Spree::Taxon.all
      end

      # Puts every product of each selected taxon on sale.
      def create
        # BUGFIX: accumulate across all taxons. This was previously
        # initialized inside the loop, discarding every taxon's results
        # except the last one before respond_with.
        @sale_prices = []

        params[:sale_price][:taxons].each do |taxon|
          @taxon = Spree::Taxon.find(taxon)
          @taxon.products.each do |product|
            @sale_price = product.put_on_sale(params[:sale_price][:value], sale_price_params)
            # NOTE(review): relies on SalePrice.last being the record just
            # created by put_on_sale — racy under concurrent writes; verify
            # whether put_on_sale can return the created record instead.
            @sale = Spree::SalePrice.last
            # Join column is sale_prices_id (not sale_price_id) — matches
            # the Spree::SalePriceTaxon schema.
            Spree::SalePriceTaxon.create({sale_prices_id: @sale.id, taxon_id: taxon})
            @sale_prices << @sale_price
          end
        end

        respond_with(@sale_prices)
        # @sale_price = @product.put_on_sale params[:sale_price][:value], sale_price_params
        # respond_with(@sale_price)
      end

      def destroy
        @sale_price = Spree::SalePrice.find(params[:id])
        @sale_price.destroy
        respond_with(@sale_price)
      end

      private

      # Loads a product by slug; bounces back to the referer when missing.
      def load_product
        @product = Spree::Product.find_by(slug: params[:product_id])
        redirect_to request.referer unless @product.present?
      end

      # Strong parameters for a sale price.
      def sale_price_params
        params.require(:sale_price).permit(
          :id,
          :value,
          :currency,
          :start_at,
          :end_at,
          :enabled
        )
      end
    end
  end
end
Arrumando o nome do campo. (Fixing the field name.)
module Spree
  module Admin
    # Admin CRUD for sale prices, applied per-taxon to every product.
    class SalePricesController < BaseController
      # before_filter :load_product
      respond_to :js, :html

      def index
        @sale_prices = Spree::SalePrice.all
        @taxons = Spree::Taxon.all
      end

      # Puts every product of each selected taxon on sale.
      def create
        # BUGFIX: accumulate across all taxons. This was previously
        # initialized inside the loop, discarding every taxon's results
        # except the last one before respond_with.
        @sale_prices = []

        params[:sale_price][:taxons].each do |taxon|
          @taxon = Spree::Taxon.find(taxon)
          @taxon.products.each do |product|
            @sale_price = product.put_on_sale(params[:sale_price][:value], sale_price_params)
            # NOTE(review): relies on SalePrice.last being the record just
            # created by put_on_sale — racy under concurrent writes; verify
            # whether put_on_sale can return the created record instead.
            @sale = Spree::SalePrice.last
            Spree::SalePriceTaxon.create({sale_prices_id: @sale.id, taxon_id: taxon})
            @sale_prices << @sale_price
          end
        end

        respond_with(@sale_prices)
        # @sale_price = @product.put_on_sale params[:sale_price][:value], sale_price_params
        # respond_with(@sale_price)
      end

      def destroy
        @sale_price = Spree::SalePrice.find(params[:id])
        @sale_price.destroy
        respond_with(@sale_price)
      end

      private

      # Loads a product by slug; bounces back to the referer when missing.
      def load_product
        @product = Spree::Product.find_by(slug: params[:product_id])
        redirect_to request.referer unless @product.present?
      end

      # Strong parameters for a sale price.
      def sale_price_params
        params.require(:sale_price).permit(
          :id,
          :value,
          :currency,
          :start_at,
          :end_at,
          :enabled
        )
      end
    end
  end
end
|
# Textual summary groups and fields for the physical server detail page.
module PhysicalServerHelper::TextualSummary
  def textual_group_properties
    TextualGroup.new(
      _("Properties"),
      %i(name model product_name manufacturer machine_type serial_number ems_ref capacity memory cores network_devices health_state loc_led_state)
    )
  end

  def textual_group_relationships
    TextualGroup.new(
      _("Relationships"),
      %i(host ext_management_system)
    )
  end

  # Intentionally empty: physical servers expose no compliance group.
  def textual_group_compliance
  end

  def textual_group_networks
    TextualGroup.new(_("Management Networks"), %i(mac ipv4 ipv6))
  end

  def textual_group_asset_details
    TextualGroup.new(
      _("Asset Details"),
      %i(support_contact description location room_id rack_name lowest_rack_unit)
    )
  end

  def textual_group_power_management
    TextualGroup.new(
      _("Power Management"),
      %i(power_state)
    )
  end

  def textual_group_firmware_details
    TextualTable.new(_('Firmware'), firmware_details, [_('Name'), _('Version')])
  end

  def textual_group_smart_management
    TextualTags.new(_("Smart Management"), %i(tags))
  end

  # Host field, linked to the host's show page when a host exists.
  def textual_host
    h = {:label => _("Host"), :value => @record.host.try(:name), :icon => "pficon pficon-virtual-machine"}
    unless @record.host.nil?
      h[:link] = url_for(:controller => 'host', :action => 'show', :id => @record.host.try(:id))
    end
    h
  end

  def textual_ext_management_system
    textual_link(ExtManagementSystem.find(@record.ems_id))
  end

  def textual_name
    {:label => _("Server name"), :value => @record.name }
  end

  def textual_product_name
    {:label => _("Product Name"), :value => @record.product_name }
  end

  def textual_manufacturer
    {:label => _("Manufacturer"), :value => @record.manufacturer }
  end

  def textual_machine_type
    {:label => _("Machine Type"), :value => @record.machine_type }
  end

  def textual_serial_number
    {:label => _("Serial Number"), :value => @record.serial_number }
  end

  def textual_ems_ref
    {:label => _("UUID"), :value => @record.ems_ref }
  end

  def textual_model
    {:label => _("Model"), :value => @record.model}
  end

  def textual_capacity
    {:label => _("Disk Capacity (bytes)"), :value => @record.hardware.disk_capacity}
  end

  def textual_memory
    {:label => _("Total memory (mb)"), :value => @record.hardware.memory_mb }
  end

  def textual_cores
    {:label => _("CPU total cores"), :value => @record.hardware.cpu_total_cores }
  end

  def textual_power_state
    {:label => _("Power State"), :value => @record.power_state}
  end

  def textual_mac
    # Currently, we only want to display the MAC addresses of devices that
    # are associated with an entry in the networks table. This ensures that
    # we only display management addresses.
    {:label => _("MAC Address"), :value => @record.hardware.guest_devices.reject { |device| device.network.nil? }.collect { |device| device[:address] }.join(", ") }
  end

  def textual_ipv4
    # It is possible for guest devices not to have network data (or a network
    # hash). As a result, we need to exclude guest devices that don't have
    # network data to prevent a nil class error from occurring.
    # BUGFIX: access hardware through the physical server's computer_system
    # association rather than @record.hardware directly.
    devices_with_networks = @record.computer_system.hardware.guest_devices.reject { |device| device.network.nil? }
    ip_addresses = devices_with_networks.collect { |device| device.network.ipaddress }

    # It is possible that each network entry can have multiple IP addresses, separated
    # by commas, so first convert the array into a string, separating each array element
    # with a comma. Then split this string back into an array using a comma, possibly
    # followed by whitespace as the delimiter. Finally, iterate through the array
    # and convert each element into a URL containing an IP address.
    ip_addresses = ip_addresses.join(",").split(/,\s*/)
    ip_address_urls = ip_addresses.collect { |ip_address| create_https_url(ip_address) }
    {:label => _("IPv4 Address"), :value => ip_address_urls.join(", ").html_safe }
  end

  def textual_ipv6
    # It is possible for guest devices not to have network data (or a network
    # hash). As a result, we need to exclude guest devices that don't have
    # network data to prevent a nil class error from occurring.
    {:label => _("IPv6 Address"), :value => @record.hardware.guest_devices.reject { |device| device.network.nil? }.collect { |device| device.network.ipv6address }.join(", ") }
  end

  def textual_loc_led_state
    {:label => _("Identify LED State"), :value => @record.location_led_state}
  end

  def textual_support_contact
    {:label => _("Support contact"), :value => @record.asset_detail['contact']}
  end

  def textual_description
    {:label => _("Description"), :value => @record.asset_detail['description']}
  end

  def textual_location
    {:label => _("Location"), :value => @record.asset_detail['location']}
  end

  def textual_room_id
    {:label => _("Room"), :value => @record.asset_detail['room_id']}
  end

  def textual_rack_name
    {:label => _("Rack name"), :value => @record.asset_detail['rack_name']}
  end

  def textual_lowest_rack_unit
    {:label => _("Lowest rack name"), :value => @record.asset_detail['lowest_rack_unit']}
  end

  def textual_health_state
    {:label => _("Health State"), :value => @record.health_state}
  end

  # NIC count, linked to the guest devices view when non-zero.
  def textual_network_devices
    hardware_nics_count = @record.hardware.nics.count
    device = {:label => _("Network Devices"), :value => hardware_nics_count, :icon => "ff ff-network-card"}
    if hardware_nics_count.positive?
      device[:link] = "/physical_server/show/#{@record.id}?display=guest_devices"
    end
    device
  end

  # [name, version] pairs for the firmware table.
  def firmware_details
    @record.hardware.firmwares.collect { |fw| [fw.name, fw.version] }
  end

  private

  def create_https_url(ip)
    # A target argument with a value of "_blank" is passed so that the
    # page loads in a new tab when the link is clicked.
    ip.present? ? link_to(ip, URI::HTTPS.build(:host => ip).to_s, :target => "_blank") : ''
  end
end
Access hardware through physical_server's computer_system association so that test works
# Textual summary groups and fields for the physical server detail page.
#
# NOTE(review): textual_ipv4 reaches hardware via
# @record.computer_system.hardware while textual_mac / textual_ipv6 /
# textual_memory etc. use @record.hardware directly — confirm whether the
# remaining methods should also go through computer_system.
module PhysicalServerHelper::TextualSummary
  def textual_group_properties
    TextualGroup.new(
      _("Properties"),
      %i(name model product_name manufacturer machine_type serial_number ems_ref capacity memory cores network_devices health_state loc_led_state)
    )
  end

  def textual_group_relationships
    TextualGroup.new(
      _("Relationships"),
      %i(host ext_management_system)
    )
  end

  # Intentionally empty: physical servers expose no compliance group.
  def textual_group_compliance
  end

  def textual_group_networks
    TextualGroup.new(_("Management Networks"), %i(mac ipv4 ipv6))
  end

  def textual_group_asset_details
    TextualGroup.new(
      _("Asset Details"),
      %i(support_contact description location room_id rack_name lowest_rack_unit)
    )
  end

  def textual_group_power_management
    TextualGroup.new(
      _("Power Management"),
      %i(power_state)
    )
  end

  def textual_group_firmware_details
    TextualTable.new(_('Firmware'), firmware_details, [_('Name'), _('Version')])
  end

  def textual_group_smart_management
    TextualTags.new(_("Smart Management"), %i(tags))
  end

  # Host field, linked to the host's show page when a host exists.
  def textual_host
    h = {:label => _("Host"), :value => @record.host.try(:name), :icon => "pficon pficon-virtual-machine"}
    unless @record.host.nil?
      h[:link] = url_for(:controller => 'host', :action => 'show', :id => @record.host.try(:id))
    end
    h
  end

  def textual_ext_management_system
    textual_link(ExtManagementSystem.find(@record.ems_id))
  end

  def textual_name
    {:label => _("Server name"), :value => @record.name }
  end

  def textual_product_name
    {:label => _("Product Name"), :value => @record.product_name }
  end

  def textual_manufacturer
    {:label => _("Manufacturer"), :value => @record.manufacturer }
  end

  def textual_machine_type
    {:label => _("Machine Type"), :value => @record.machine_type }
  end

  def textual_serial_number
    {:label => _("Serial Number"), :value => @record.serial_number }
  end

  def textual_ems_ref
    {:label => _("UUID"), :value => @record.ems_ref }
  end

  def textual_model
    {:label => _("Model"), :value => @record.model}
  end

  def textual_capacity
    {:label => _("Disk Capacity (bytes)"), :value => @record.hardware.disk_capacity}
  end

  def textual_memory
    {:label => _("Total memory (mb)"), :value => @record.hardware.memory_mb }
  end

  def textual_cores
    {:label => _("CPU total cores"), :value => @record.hardware.cpu_total_cores }
  end

  def textual_power_state
    {:label => _("Power State"), :value => @record.power_state}
  end

  def textual_mac
    # Currently, we only want to display the MAC addresses of devices that
    # are associated with an entry in the networks table. This ensures that
    # we only display management addresses.
    {:label => _("MAC Address"), :value => @record.hardware.guest_devices.reject { |device| device.network.nil? }.collect { |device| device[:address] }.join(", ") }
  end

  def textual_ipv4
    # It is possible for guest devices not to have network data (or a network
    # hash). As a result, we need to exclude guest devices that don't have
    # network data to prevent a nil class error from occurring.
    devices_with_networks = @record.computer_system.hardware.guest_devices.reject { |device| device.network.nil? }
    ip_addresses = devices_with_networks.collect { |device| device.network.ipaddress }
    # It is possible that each network entry can have multiple IP addresses, separated
    # by commas, so first convert the array into a string, separating each array element
    # with a comma. Then split this string back into an array using a comma, possibly
    # followed by whitespace as the delimiter. Finally, iterate through the array
    # and convert each element into a URL containing an IP address.
    ip_addresses = ip_addresses.join(",").split(/,\s*/)
    ip_address_urls = ip_addresses.collect { |ip_address| create_https_url(ip_address) }
    {:label => _("IPv4 Address"), :value => ip_address_urls.join(", ").html_safe }
  end

  def textual_ipv6
    # It is possible for guest devices not to have network data (or a network
    # hash). As a result, we need to exclude guest devices that don't have
    # network data to prevent a nil class error from occurring.
    {:label => _("IPv6 Address"), :value => @record.hardware.guest_devices.reject { |device| device.network.nil? }.collect { |device| device.network.ipv6address }.join(", ") }
  end

  def textual_loc_led_state
    {:label => _("Identify LED State"), :value => @record.location_led_state}
  end

  def textual_support_contact
    {:label => _("Support contact"), :value => @record.asset_detail['contact']}
  end

  def textual_description
    {:label => _("Description"), :value => @record.asset_detail['description']}
  end

  def textual_location
    {:label => _("Location"), :value => @record.asset_detail['location']}
  end

  def textual_room_id
    {:label => _("Room"), :value => @record.asset_detail['room_id']}
  end

  def textual_rack_name
    {:label => _("Rack name"), :value => @record.asset_detail['rack_name']}
  end

  def textual_lowest_rack_unit
    {:label => _("Lowest rack name"), :value => @record.asset_detail['lowest_rack_unit']}
  end

  def textual_health_state
    {:label => _("Health State"), :value => @record.health_state}
  end

  # NIC count, linked to the guest devices view when non-zero.
  def textual_network_devices
    hardware_nics_count = @record.hardware.nics.count
    device = {:label => _("Network Devices"), :value => hardware_nics_count, :icon => "ff ff-network-card"}
    if hardware_nics_count.positive?
      device[:link] = "/physical_server/show/#{@record.id}?display=guest_devices"
    end
    device
  end

  # [name, version] pairs for the firmware table.
  def firmware_details
    @record.hardware.firmwares.collect { |fw| [fw.name, fw.version] }
  end

  private

  def create_https_url(ip)
    # A target argument with a value of "_blank" is passed so that the
    # page loads in a new tab when the link is clicked.
    ip.present? ? link_to(ip, URI::HTTPS.build(:host => ip).to_s, :target => "_blank") : ''
  end
end
|
# This is extended as class level into Datatable
module Effective
  module EffectiveDatatable
    module Rendering
      # Hook: post-process the fully assembled table data. Identity by default.
      def finalize(collection) # Override me if you like
        collection
      end

      protected

      # So the idea here is that we want to do as much as possible on the database in ActiveRecord
      # And then run any array_columns through in post-processed results
      def table_data
        col = collection

        if active_record_collection?
          col = table_tool.order(col)
          col = table_tool.search(col)

          # When only SQL-searchable terms are present, count the filtered
          # rows with a wrapped COUNT(*) query instead of loading them all.
          if table_tool.search_terms.present? && array_tool.search_terms.blank?
            if collection_class.connection.respond_to?(:unprepared_statement)
              # https://github.com/rails/rails/issues/15331
              col_sql = collection_class.connection.unprepared_statement { col.to_sql }
              self.display_records = (collection_class.connection.execute("SELECT COUNT(*) FROM (#{col_sql}) AS datatables_filtered_count").first['count'] rescue 1).to_i
            else
              self.display_records = (collection_class.connection.execute("SELECT COUNT(*) FROM (#{col.to_sql}) AS datatables_filtered_count").first['count'] rescue 1).to_i
            end
          end
        end

        # Array-side search/order require the collection in array form first.
        if array_tool.search_terms.present?
          col = self.arrayize(col)
          col = array_tool.search(col)
          self.display_records = col.size
        end

        if array_tool.order_column.present?
          col = self.arrayize(col)
          col = array_tool.order(col)
        end

        self.display_records ||= total_records

        # Paginate with whichever tool matches the collection's current form.
        if col.kind_of?(Array)
          col = array_tool.paginate(col)
        else
          col = table_tool.paginate(col)
          col = self.arrayize(col)
        end

        col = self.finalize(col)
      end

      # Converts the collection into an array of per-row arrays, one cell
      # value per visible column, rendering partial columns in bulk.
      def arrayize(collection)
        return collection if @arrayized # Prevent the collection from being arrayized more than once
        @arrayized = true

        # We want to use the render :collection for each column that renders partials
        rendered = {}
        (display_table_columns || table_columns).each do |name, opts|
          if opts[:partial] && opts[:visible]
            locals = {
              datatable: self,
              table_column: table_columns[name],
              controller_namespace: view.controller_path.split('/')[0...-1].map { |path| path.downcase.to_sym if path.present? }.compact,
              show_action: (opts[:partial_locals] || {})[:show_action],
              edit_action: (opts[:partial_locals] || {})[:edit_action],
              destroy_action: (opts[:partial_locals] || {})[:destroy_action],
              unarchive_action: (opts[:partial_locals] || {})[:unarchive_action]
            }
            locals.merge!(opts[:partial_locals]) if opts[:partial_locals]

            # :authorize placeholders are resolved once per column, not per row.
            if active_record_collection?
              if locals[:show_action] == :authorize
                locals[:show_action] = (EffectiveDatatables.authorized?(controller, :show, collection_class) rescue false)
              end
              if locals[:edit_action] == :authorize
                locals[:edit_action] = (EffectiveDatatables.authorized?(controller, :edit, collection_class) rescue false)
              end
              if locals[:destroy_action] == :authorize
                locals[:destroy_action] = (EffectiveDatatables.authorized?(controller, :destroy, collection_class) rescue false)
              end
              if locals[:unarchive_action] == :authorize
                locals[:unarchive_action] = (EffectiveDatatables.authorized?(controller, :unarchive, collection_class) rescue false)
              end
            end

            # Render the whole column in one pass, splitting on the spacer marker.
            rendered[name] = (render(
              :partial => opts[:partial],
              :as => opts[:partial_local],
              :collection => collection,
              :formats => :html,
              :locals => locals,
              :spacer_template => '/effective/datatables/spacer_template',
            ) || '').split('EFFECTIVEDATATABLESSPACER')
          end
        end

        collection.each_with_index.map do |obj, index|
          (display_table_columns || table_columns).map do |name, opts|
            if opts[:visible] == false
              ''
            elsif opts[:partial]
              rendered[name][index]
            elsif opts[:block]
              view.instance_exec(obj, collection, self, &opts[:block])
            elsif opts[:proc]
              view.instance_exec(obj, collection, self, &opts[:proc])
            elsif opts[:type] == :belongs_to
              (obj.send(name) rescue nil).to_s
            elsif opts[:type] == :belongs_to_polymorphic
              (obj.send(name) rescue nil).to_s
            elsif opts[:type] == :has_many
              objs = (obj.send(name).map { |x| x.to_s }.sort rescue [])
              objs.length == 1 ? objs.first : (opts[:sentence] ? objs.to_sentence : objs.join('<br>'))
            elsif opts[:type] == :obfuscated_id
              (obj.send(:to_param) rescue nil).to_s
            elsif opts[:type] == :effective_roles
              (obj.send(:roles) rescue []).join(', ')
            else
              # Normal value, but we still may want to format it
              value = (obj.send(name) rescue (obj[name] rescue (obj[opts[:array_index]] rescue nil)))

              case opts[:type]
              when :datetime
                value.strftime(EffectiveDatatables.datetime_format) rescue nil
              when :date
                value.strftime(EffectiveDatatables.date_format) rescue nil
              when :price
                # This is an integer value, "number of cents"
                value ||= 0
                raise 'column type: price expects an Integer representing the number of cents' unless value.kind_of?(Integer)
                number_to_currency(value / 100.0)
              when :currency
                number_to_currency(value || 0)
              when :integer
                # Integer format may be a view-helper name or a callable.
                if EffectiveDatatables.integer_format.kind_of?(Symbol)
                  view.instance_exec { public_send(EffectiveDatatables.integer_format, value) }
                elsif EffectiveDatatables.integer_format.respond_to?(:call)
                  view.instance_exec { EffectiveDatatables.integer_format.call(value) }
                else
                  value
                end
              when :boolean
                if EffectiveDatatables.boolean_format == :yes_no && value == true
                  'Yes'
                elsif EffectiveDatatables.boolean_format == :yes_no && value == false
                  'No'
                else
                  value
                end
              else
                value
              end
            end
          end
        end
      end
    end # / Rendering
  end
end
Use mail_to for email fields
# This is extended as class level into Datatable
module Effective
module EffectiveDatatable
module Rendering
# Hook for post-processing the fully assembled table data.
# The default implementation is the identity.
def finalize(rows) # Override me if you like
  rows
end
protected
# So the idea here is that we want to do as much as possible on the database in ActiveRecord
# And then run any array_columns through in post-processed results
# So the idea here is that we want to do as much as possible on the database in ActiveRecord
# And then run any array_columns through in post-processed results
def table_data
  col = collection

  if active_record_collection?
    col = table_tool.order(col)
    col = table_tool.search(col)

    # When only SQL-searchable terms are present, count the filtered rows
    # with a wrapped COUNT(*) query instead of loading them all.
    if table_tool.search_terms.present? && array_tool.search_terms.blank?
      if collection_class.connection.respond_to?(:unprepared_statement)
        # https://github.com/rails/rails/issues/15331
        col_sql = collection_class.connection.unprepared_statement { col.to_sql }
        self.display_records = (collection_class.connection.execute("SELECT COUNT(*) FROM (#{col_sql}) AS datatables_filtered_count").first['count'] rescue 1).to_i
      else
        self.display_records = (collection_class.connection.execute("SELECT COUNT(*) FROM (#{col.to_sql}) AS datatables_filtered_count").first['count'] rescue 1).to_i
      end
    end
  end

  # Array-side search/order require the collection in array form first.
  if array_tool.search_terms.present?
    col = self.arrayize(col)
    col = array_tool.search(col)
    self.display_records = col.size
  end

  if array_tool.order_column.present?
    col = self.arrayize(col)
    col = array_tool.order(col)
  end

  self.display_records ||= total_records

  # Paginate with whichever tool matches the collection's current form.
  if col.kind_of?(Array)
    col = array_tool.paginate(col)
  else
    col = table_tool.paginate(col)
    col = self.arrayize(col)
  end

  col = self.finalize(col)
end
def arrayize(collection)
return collection if @arrayized # Prevent the collection from being arrayized more than once
@arrayized = true
# We want to use the render :collection for each column that renders partials
rendered = {}
(display_table_columns || table_columns).each do |name, opts|
if opts[:partial] && opts[:visible]
locals = {
datatable: self,
table_column: table_columns[name],
controller_namespace: view.controller_path.split('/')[0...-1].map { |path| path.downcase.to_sym if path.present? }.compact,
show_action: (opts[:partial_locals] || {})[:show_action],
edit_action: (opts[:partial_locals] || {})[:edit_action],
destroy_action: (opts[:partial_locals] || {})[:destroy_action],
unarchive_action: (opts[:partial_locals] || {})[:unarchive_action]
}
locals.merge!(opts[:partial_locals]) if opts[:partial_locals]
if active_record_collection?
if locals[:show_action] == :authorize
locals[:show_action] = (EffectiveDatatables.authorized?(controller, :show, collection_class) rescue false)
end
if locals[:edit_action] == :authorize
locals[:edit_action] = (EffectiveDatatables.authorized?(controller, :edit, collection_class) rescue false)
end
if locals[:destroy_action] == :authorize
locals[:destroy_action] = (EffectiveDatatables.authorized?(controller, :destroy, collection_class) rescue false)
end
if locals[:unarchive_action] == :authorize
locals[:unarchive_action] = (EffectiveDatatables.authorized?(controller, :unarchive, collection_class) rescue false)
end
end
rendered[name] = (render(
:partial => opts[:partial],
:as => opts[:partial_local],
:collection => collection,
:formats => :html,
:locals => locals,
:spacer_template => '/effective/datatables/spacer_template',
) || '').split('EFFECTIVEDATATABLESSPACER')
end
end
collection.each_with_index.map do |obj, index|
(display_table_columns || table_columns).map do |name, opts|
if opts[:visible] == false
''
elsif opts[:partial]
rendered[name][index]
elsif opts[:block]
view.instance_exec(obj, collection, self, &opts[:block])
elsif opts[:proc]
view.instance_exec(obj, collection, self, &opts[:proc])
elsif opts[:type] == :belongs_to
(obj.send(name) rescue nil).to_s
elsif opts[:type] == :belongs_to_polymorphic
(obj.send(name) rescue nil).to_s
elsif opts[:type] == :has_many
objs = (obj.send(name).map { |x| x.to_s }.sort rescue [])
objs.length == 1 ? objs.first : (opts[:sentence] ? objs.to_sentence : objs.join('<br>'))
elsif opts[:type] == :obfuscated_id
(obj.send(:to_param) rescue nil).to_s
elsif opts[:type] == :effective_roles
(obj.send(:roles) rescue []).join(', ')
else
# Normal value, but we still may want to format it
value = (obj.send(name) rescue (obj[name] rescue (obj[opts[:array_index]] rescue nil)))
case opts[:type]
when :datetime
value.strftime(EffectiveDatatables.datetime_format) rescue nil
when :date
value.strftime(EffectiveDatatables.date_format) rescue nil
when :price
# This is an integer value, "number of cents"
value ||= 0
raise 'column type: price expects an Integer representing the number of cents' unless value.kind_of?(Integer)
number_to_currency(value / 100.0)
when :currency
number_to_currency(value || 0)
when :integer
if EffectiveDatatables.integer_format.kind_of?(Symbol)
view.instance_exec { public_send(EffectiveDatatables.integer_format, value) }
elsif EffectiveDatatables.integer_format.respond_to?(:call)
view.instance_exec { EffectiveDatatables.integer_format.call(value) }
else
value
end
when :boolean
if EffectiveDatatables.boolean_format == :yes_no && value == true
'Yes'
elsif EffectiveDatatables.boolean_format == :yes_no && value == false
'No'
else
value
end
when :string
if name == 'email' && value.present?
mail_to(value)
else
value
end
else # Other col_type
value
end
end
end
end
end
end # / Rendering
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.