CombinedText
stringlengths
4
3.42M
require 'test/unit' class XRubyTest < Test::Unit::TestCase def test_array_pack a = [ "a", "b", "c" ] #TODO #assert_equal("a\000\000b\000\000c\000\000", a.pack("a3a3a3")) end end more comment #The tests here are created by xruby team members #Most other tests in the 'test' folder are copied from c ruby require 'test/unit' class XRubyTest < Test::Unit::TestCase def test_array_pack a = [ "a", "b", "c" ] #TODO #assert_equal("a\000\000b\000\000c\000\000", a.pack("a3a3a3")) end end
# -*- encoding: utf-8 -*- lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'touchpunch-rails/version' Gem::Specification.new do |gem| gem.name = "touchpunch-rails" gem.version = Touchpunch::Rails::VERSION gem.authors = ["Geo"] gem.email = ["geo.marshall@gmail.com"] gem.description = %q{Simple asset gem containing jquery mobile ui touch punch. This allows for enabling touch drag and drop for jquery sortable.} gem.summary = %q{Simple asset gem containing jquery mobile ui touch punch.} gem.homepage = "" gem.files = Dir["{lib,vendor}/**/*"] + ["README.md"] gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) } gem.test_files = gem.files.grep(%r{^(test|spec|features)/}) gem.require_paths = ["lib"] gem.add_dependency "railties", "~> 3.1" end support rails 4.0 # -*- encoding: utf-8 -*- lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'touchpunch-rails/version' Gem::Specification.new do |gem| gem.name = "touchpunch-rails" gem.version = Touchpunch::Rails::VERSION gem.authors = ["Geo"] gem.email = ["geo.marshall@gmail.com"] gem.description = %q{Simple asset gem containing jquery mobile ui touch punch. This allows for enabling touch drag and drop for jquery sortable.} gem.summary = %q{Simple asset gem containing jquery mobile ui touch punch.} gem.homepage = "" gem.files = Dir["{lib,vendor}/**/*"] + ["README.md"] gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) } gem.test_files = gem.files.grep(%r{^(test|spec|features)/}) gem.require_paths = ["lib"] gem.add_dependency "railties", ">= 3.1", "< 4.1" end
Fix bad merge
# @todo Go over :attr_accessor and decide on correct access. # @todo Review method names and determine if need '?' or '!' # @todo Decide if methods should return nil or self. module GemCov # This class implements a Hash that uses a gem name as a key, # and stores the GemCoverageEntry for that gem name. # The GemCoverageEntry for a gem stores the gemsets a gem is # found in and the versions of the gem found in those gemset. # # Gems matching various conditions can be listed using the list_* methods. class GemHash < Hash # The update! method takes a gem listing line of the kind produced by # the 'gem list' command and adds or updates the GemCoverageEntry # for a given gem name. def update!(gem_listing_line,gemset) gem_listing = GemListing.new(gem_listing_line) if self.has_key? gem_listing.name self[gem_listing.name].add_gemset_versions(gemset,gem_listing.versions) else self[gem_listing.name] = GemCoverageEntry.new(gem_listing.name) self[gem_listing.name].add_gemset_versions(gemset,gem_listing.versions) end self end # List all gems installed across all gemsets, ordered alphabetically. def list_all_gems() puts "All Gems:" self.sort.each {|k,g| puts g } end # List the locations of the gems specified when using the --gems option. def list_desired_gems(list_of_gems) gem_list_string = list_of_gems.join(', ') puts "Listing selected Gems: #{gem_list_string}" list_of_gems.each do |g| if self.has_key? g puts self[g] else puts "#{g} not found in any gemset" end end end # List all gems flagged as being found in all gemsets # If in 'global' gemset, exclude from listing. def list_common_gems(gemsets) puts "Gems found in all gemsets, but not in 'global':" common_gems = self.each_value.find_all do |gce| gce.in_all_gemsets_but_global? gemsets end common_gems.each {|g| puts g } end # List all gems found in the 'default' gemset, i.e. gems installed to # the ruby instance, but not in a gemset. 
def list_default_gems() puts "Gems found in default gem install location:" gems_in_default = self.each_value.find_all do |gce| gce.gemsets_containing.include? 'default' end gems_in_default.each {|g| puts g } end end # This class contains information describing which gemsets a gem # is found in and what versions of the gem are installed in each gemset. class GemCoverageEntry attr_accessor :name, :gemset_versions, :in_all_gemsets, :gemsets_containing # Requires a gem name and initializes instance variables. def initialize(name) @name = name @gemset_versions = {} @gemsets_containing = [] end # Given a gemset name and an Array of installed versions, # adds this to the GemCoverageEntry. def add_gemset_versions(gemset,versions) @gemset_versions[gemset] = versions @gemsets_containing << gemset end # Returns a string representation of a GemCoverageEntry def to_s "#{@name}: #{@gemset_versions.inspect}" end # Is gem found in all gemsets for this ruby instance? def in_all_gemsets_but_global?(gemsets) in_all_gemsets = false remaining = gemsets - @gemsets_containing if (remaining.empty? or remaining == ['default']) in_all_gemsets = true end if gemsets.include? 'global' in_all_gemsets = false end in_all_gemsets end end # This class holds the gem name and installed versions for a gem. class GemListing attr_accessor :name, :versions # Requires a gem listing line and initializes instance vars. def initialize(gem_listing_line) @name = '' @versions = [] add_gem_versions(gem_listing_line) end # Given a line produced by running 'gem list' parses it into a gem # name and an array of installed versions. def add_gem_versions(line) @name, version_string = line.split(/\s/,2) @versions = version_string[1..-2].split(/,\s/) end end # This class has one purpose: to find all gems in a ruby instance # and an optional gemset. Only contains a class method. class GemsetGems # Returns a list of installed gems for a given ruby instance and # optionally a specified gemset. 
def GemsetGems.gem_list(ruby_version,gemset = '') if gemset == '' rvm_version = ruby_version else rvm_version = ruby_version + '@' + gemset end current_gems = RVM.environment("#{rvm_version}").run_command('gem list')[1].split(/\n/) current_gems end end end Fixed attr accessor after refactoring # @todo Go over :attr_accessor and decide on correct access. # @todo Review method names and determine if need '?' or '!' # @todo Decide if methods should return nil or self. module GemCov # This class implements a Hash that uses a gem name as a key, # and stores the GemCoverageEntry for that gem name. # The GemCoverageEntry for a gem stores the gemsets a gem is # found in and the versions of the gem found in those gemset. # # Gems matching various conditions can be listed using the list_* methods. class GemHash < Hash # The update! method takes a gem listing line of the kind produced by # the 'gem list' command and adds or updates the GemCoverageEntry # for a given gem name. def update!(gem_listing_line,gemset) gem_listing = GemListing.new(gem_listing_line) if self.has_key? gem_listing.name self[gem_listing.name].add_gemset_versions(gemset,gem_listing.versions) else self[gem_listing.name] = GemCoverageEntry.new(gem_listing.name) self[gem_listing.name].add_gemset_versions(gemset,gem_listing.versions) end self end # List all gems installed across all gemsets, ordered alphabetically. def list_all_gems() puts "All Gems:" self.sort.each {|k,g| puts g } end # List the locations of the gems specified when using the --gems option. def list_desired_gems(list_of_gems) gem_list_string = list_of_gems.join(', ') puts "Listing selected Gems: #{gem_list_string}" list_of_gems.each do |g| if self.has_key? g puts self[g] else puts "#{g} not found in any gemset" end end end # List all gems flagged as being found in all gemsets # If in 'global' gemset, exclude from listing. 
def list_common_gems(gemsets) puts "Gems found in all gemsets, but not in 'global':" common_gems = self.each_value.find_all do |gce| gce.in_all_gemsets_but_global? gemsets end common_gems.each {|g| puts g } end # List all gems found in the 'default' gemset, i.e. gems installed to # the ruby instance, but not in a gemset. def list_default_gems() puts "Gems found in default gem install location:" gems_in_default = self.each_value.find_all do |gce| gce.gemsets_containing.include? 'default' end gems_in_default.each {|g| puts g } end end # This class contains information describing which gemsets a gem # is found in and what versions of the gem are installed in each gemset. class GemCoverageEntry attr_accessor :name, :gemset_versions, :gemsets_containing # Requires a gem name and initializes instance variables. def initialize(name) @name = name @gemset_versions = {} @gemsets_containing = [] end # Given a gemset name and an Array of installed versions, # adds this to the GemCoverageEntry. def add_gemset_versions(gemset,versions) @gemset_versions[gemset] = versions @gemsets_containing << gemset end # Returns a string representation of a GemCoverageEntry def to_s "#{@name}: #{@gemset_versions.inspect}" end # Is gem found in all gemsets for this ruby instance? def in_all_gemsets_but_global?(gemsets) in_all_gemsets = false remaining = gemsets - @gemsets_containing if (remaining.empty? or remaining == ['default']) in_all_gemsets = true end if gemsets.include? 'global' in_all_gemsets = false end in_all_gemsets end end # This class holds the gem name and installed versions for a gem. class GemListing attr_accessor :name, :versions # Requires a gem listing line and initializes instance vars. def initialize(gem_listing_line) @name = '' @versions = [] add_gem_versions(gem_listing_line) end # Given a line produced by running 'gem list' parses it into a gem # name and an array of installed versions. 
def add_gem_versions(line) @name, version_string = line.split(/\s/,2) @versions = version_string[1..-2].split(/,\s/) end end # This class has one purpose: to find all gems in a ruby instance # and an optional gemset. Only contains a class method. class GemsetGems # Returns a list of installed gems for a given ruby instance and # optionally a specified gemset. def GemsetGems.gem_list(ruby_version,gemset = '') if gemset == '' rvm_version = ruby_version else rvm_version = ruby_version + '@' + gemset end current_gems = RVM.environment("#{rvm_version}").run_command('gem list')[1].split(/\n/) current_gems end end end
support ltsv escape
updated rdocs
# Be sure to restart your server when you modify this file. # Version of your assets, change this if you want to expire all your assets. Rails.application.config.assets.version = '1.0' # Add additional assets to the asset load path # Rails.application.config.assets.paths << Emoji.images_path # Precompile additional assets. # application.js, application.css, and all non-JS/CSS in app/assets folder are already added. # Rails.application.config.assets.precompile += %w( search.js ) Rails.application.config.assets.precompile += %w( admin.css admin.js) Rails.application.config.assets.precompile += %w( events.css events.js) Rails.application.config.assets.precompile += %w( login.css login.js) Fix - fontawesome assets # Be sure to restart your server when you modify this file. # Version of your assets, change this if you want to expire all your assets. Rails.application.config.assets.version = '1.0' # Add additional assets to the asset load path # Rails.application.config.assets.paths << Emoji.images_path # Precompile additional assets. # application.js, application.css, and all non-JS/CSS in app/assets folder are already added. # Rails.application.config.assets.precompile += %w( search.js ) Rails.application.config.assets.precompile += %w( admin.css admin.js) Rails.application.config.assets.precompile += %w( events.css events.js) Rails.application.config.assets.precompile += %w( login.css login.js) Rails.application.config.assets.precompile += ["fontawesome-webfont.ttf", "fontawesome-webfont.eot", "fontawesome-webfont.svg", "fontawesome-webfont.woff"]
# Be sure to restart your server when you modify this file. # Version of your assets, change this if you want to expire all your assets. Rails.application.config.assets.version = '1.0' # Add additional assets to the asset load path # Rails.application.config.assets.paths << Emoji.images_path # Precompile additional assets. # application.js, application.css, and all non-JS/CSS in app/assets folder are already added. # Rails.application.config.assets.precompile += %w( search.js ) Fix asset paths for production # Be sure to restart your server when you modify this file. # Version of your assets, change this if you want to expire all your assets. Rails.application.config.assets.version = '1.0' # Add additional assets to the asset load path Rails.application.config.assets.paths << "public/assets" # Precompile additional assets. # application.js, application.css, and all non-JS/CSS in app/assets folder are already added. # Rails.application.config.assets.precompile += %w( search.js )
Spree::Config.set(:blog_entries_per_page => 3) Spree::Config.set(:blog_entries_recent_sidebar => 6) avoid rake complaints about Spree::Config for now (2 config options should be settable via backend) if Spree::Config Spree::Config.set(:blog_entries_per_page => 3) Spree::Config.set(:blog_entries_recent_sidebar => 6) end
# Application-wide settings and constants require "#{Rails.root}/lib/alm_request.rb" require "#{Rails.root}/lib/solr_request.rb" # Number of articles to display per page on the add-articles and preview-list # pages. (The results metrics page uses a different, smaller value.) # This constant must be kept in sync with the constant of the same name in script.js. $RESULTS_PER_PAGE = 25 # Hack required to get this constant into SolrRequest, since it's in lib/ # and doesn't depend on this module. SolrRequest.set_page_size($RESULTS_PER_PAGE) # Maximum number of articles that can be stored in a single report. # This constant must be kept in sync with the constant of the same name in script.js. $ARTICLE_LIMIT = 500 ALM-269: whoops, forgot to commit this. # Application-wide settings and constants require "#{Rails.root}/lib/alm_request.rb" require "#{Rails.root}/lib/geocode_request.rb" require "#{Rails.root}/lib/solr_request.rb" # Number of articles to display per page on the add-articles and preview-list # pages. (The results metrics page uses a different, smaller value.) # This constant must be kept in sync with the constant of the same name in script.js. $RESULTS_PER_PAGE = 25 # Hack required to get this constant into SolrRequest, since it's in lib/ # and doesn't depend on this module. SolrRequest.set_page_size($RESULTS_PER_PAGE) # Maximum number of articles that can be stored in a single report. # This constant must be kept in sync with the constant of the same name in script.js. $ARTICLE_LIMIT = 500
# Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. # Devise will use the `secret_key_base` on Rails 4+ applications as its `secret_key` # by default. You can change it below and use your own secret key. # config.secret_key = '6e9f5f51707a82d2f07f0cd6464a854aa3261c4cb01246b2b837352907822ab9b1b84d4bb669d51a6af055f9e1b52f5761a2d0948c7b6cb2e6d60587ce1d9113' # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. config.mailer_sender = 'please-change-me-at-config-initializers-devise@example.com' # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. # You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [:email] # Configure parameters from the request object used for authentication. 
Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [:email] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [:email] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If 401 status code should be returned for AJAX requests. True by default. # config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. 
# config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. You can skip storage for # particular strategies by setting this option. # Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing skip: :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. # config.clean_up_csrf_token_on_authentication = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 10. If # using other encryptors, it sets how many times you want the password re-encrypted. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. Note that, for bcrypt (the default # encryptor), the cost increases exponentially with the number of stretches (e.g. # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation). config.stretches = Rails.env.test? ? 1 : 10 # Setup a pepper to generate the encrypted password. # config.pepper = '46f06331c47c2e1756af1eeb742771b8c188af251b37c378d9688bb4d4164131b9d7f95aef0737e5f33f060d27681789569b470f46436e8fa0035e705d6bce6a' # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming their account. 
For instance, if set to 2.days, the user will be # able to access the website for two days without confirming their account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming their account. # config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. # config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed, new email is stored in # unconfirmed_email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [:email] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. # config.remember_for = 2.weeks # Invalidates all the remember me tokens when the user signs out. config.expire_all_remember_me_on_sign_out = true # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # secure: true in order to force SSL only cookies. # config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. config.password_length = 8..72 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. 
This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@]+@[^@]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. # config.timeout_in = 30.minutes # If true, expires auth token on session timeout. # config.expire_auth_token_on_timeout = false # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [:email] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. # config.last_attempt_warning = true # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [:email] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. config.reset_password_within = 6.hours # When set to false, does not sign a user in automatically after their password is # reset. 
Defaults to true, so a user is signed in automatically after a reset. # config.sign_in_after_reset_password = true # ==> Configuration for :encryptable # Allow you to use another encryption algorithm besides bcrypt (default). You can use # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1, # :authlogic_sha512 (then you should set stretches above to 20 for default behavior) # and :restful_authentication_sha1 (then you should set stretches to 10, and copy # REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. Check the wiki for more information on setting # up on your models and hooks. 
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. # # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(scope: :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. # The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using OmniAuth, Devise cannot automatically set OmniAuth path, # so you need to do it manually. For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end change mailer # Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. # Devise will use the `secret_key_base` on Rails 4+ applications as its `secret_key` # by default. You can change it below and use your own secret key. # config.secret_key = '6e9f5f51707a82d2f07f0cd6464a854aa3261c4cb01246b2b837352907822ab9b1b84d4bb669d51a6af055f9e1b52f5761a2d0948c7b6cb2e6d60587ce1d9113' # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. 
config.mailer_sender = 'no-reply@viewer.bioqrator.org' # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. # You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [:email] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [:email] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. 
config.strip_whitespace_keys = [:email] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If 401 status code should be returned for AJAX requests. True by default. # config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. # config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. You can skip storage for # particular strategies by setting this option. # Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing skip: :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. 
# config.clean_up_csrf_token_on_authentication = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 10. If # using other encryptors, it sets how many times you want the password re-encrypted. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. Note that, for bcrypt (the default # encryptor), the cost increases exponentially with the number of stretches (e.g. # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation). config.stretches = Rails.env.test? ? 1 : 10 # Setup a pepper to generate the encrypted password. # config.pepper = '46f06331c47c2e1756af1eeb742771b8c188af251b37c378d9688bb4d4164131b9d7f95aef0737e5f33f060d27681789569b470f46436e8fa0035e705d6bce6a' # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming their account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming their account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming their account. # config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. # config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. 
Requires additional unconfirmed_email # db field (see migrations). Until confirmed, new email is stored in # unconfirmed_email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [:email] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. # config.remember_for = 2.weeks # Invalidates all the remember me tokens when the user signs out. config.expire_all_remember_me_on_sign_out = true # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # secure: true in order to force SSL only cookies. # config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. config.password_length = 8..72 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@]+@[^@]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. # config.timeout_in = 30.minutes # If true, expires auth token on session timeout. # config.expire_auth_token_on_timeout = false # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [:email] # Defines which strategy will be used to unlock an account. 
# :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. # config.last_attempt_warning = true # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [:email] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. config.reset_password_within = 6.hours # When set to false, does not sign a user in automatically after their password is # reset. Defaults to true, so a user is signed in automatically after a reset. # config.sign_in_after_reset_password = true # ==> Configuration for :encryptable # Allow you to use another encryption algorithm besides bcrypt (default). You can use # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1, # :authlogic_sha512 (then you should set stretches above to 20 for default behavior) # and :restful_authentication_sha1 (then you should set stretches to 10, and copy # REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. 
By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. Check the wiki for more information on setting # up on your models and hooks. # config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. # # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(scope: :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. # The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using OmniAuth, Devise cannot automatically set OmniAuth path, # so you need to do it manually. 
For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end
# Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| config.omniauth :google_oauth2, ENV["GOOGLE_KEY"], ENV["GOOGLE_SECRET"],{} config.omniauth :facebook, ENV["FACEBOOK_KEY"], ENV["FACEBOOK_SECRET"],{} config.omniauth :github, ENV["GITHUB_KEY"], ENV["GITHUB_SECRET"],{} config.omniauth :linkedin, ENV["LINKEDIN_KEY"], ENV["LINKEDIN_SECRET"],{:scope => 'r_basicprofile r_emailaddress'} # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. # Devise will use the `secret_key_base` on Rails 4+ applications as its `secret_key` # by default. You can change it below and use your own secret key. # config.secret_key = '5124617cd8686ff51c84baa25f73275782cd1c51074c9b4195556214926ef9cb83e1f00d5a26a898168f95ef26c0d4e9530b9d64221342e231d2e506dc664539' # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. config.mailer_sender = 'inscricoes@gorails.com.br' # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. 
# You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [:email] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [:email] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [:email] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If 401 status code should be returned for AJAX requests. True by default. 
# config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. # config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. You can skip storage for # particular strategies by setting this option. # Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing skip: :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. # config.clean_up_csrf_token_on_authentication = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 10. If # using other encryptors, it sets how many times you want the password re-encrypted. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. Note that, for bcrypt (the default # encryptor), the cost increases exponentially with the number of stretches (e.g. # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation). config.stretches = Rails.env.test? ? 1 : 10 # Setup a pepper to generate the encrypted password. 
# config.pepper = '3f3ba9103db6c2a642ebabfcd271a84102554ad86371f6bd40a56f757e8da7ddab19695f1bef9142e3daac997ac80bd94ab139767ffef59c5244ba478f209679' # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming their account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming their account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming their account. # config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. # config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed, new email is stored in # unconfirmed_email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [:email] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. # config.remember_for = 2.weeks # Invalidates all the remember me tokens when the user signs out. config.expire_all_remember_me_on_sign_out = true # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # secure: true in order to force SSL only cookies. 
# config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. config.password_length = 8..72 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@]+@[^@]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. # config.timeout_in = 30.minutes # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [:email] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. # config.last_attempt_warning = true # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [:email] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. 
config.reset_password_within = 6.hours # When set to false, does not sign a user in automatically after their password is # reset. Defaults to true, so a user is signed in automatically after a reset. # config.sign_in_after_reset_password = true # ==> Configuration for :encryptable # Allow you to use another encryption algorithm besides bcrypt (default). You can use # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1, # :authlogic_sha512 (then you should set stretches above to 20 for default behavior) # and :restful_authentication_sha1 (then you should set stretches to 10, and copy # REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. 
Check the wiki for more information on setting # up on your models and hooks. # config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. # # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(scope: :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. # The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using OmniAuth, Devise cannot automatically set OmniAuth path, # so you need to do it manually. For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end [config/initializers] Add config.omniauth do twitter # Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| config.omniauth :google_oauth2, ENV["GOOGLE_KEY"], ENV["GOOGLE_SECRET"],{} config.omniauth :facebook, ENV["FACEBOOK_KEY"], ENV["FACEBOOK_SECRET"],{} config.omniauth :github, ENV["GITHUB_KEY"], ENV["GITHUB_SECRET"],{} config.omniauth :linkedin, ENV["LINKEDIN_KEY"], ENV["LINKEDIN_SECRET"],{:scope => 'r_basicprofile r_emailaddress'} config.omniauth :twitter, ENV["TWITTER_KEY"], ENV["TWITTER_SECRET"],{} # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. 
# Devise will use the `secret_key_base` on Rails 4+ applications as its `secret_key` # by default. You can change it below and use your own secret key. # config.secret_key = '5124617cd8686ff51c84baa25f73275782cd1c51074c9b4195556214926ef9cb83e1f00d5a26a898168f95ef26c0d4e9530b9d64221342e231d2e506dc664539' # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. config.mailer_sender = 'inscricoes@gorails.com.br' # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. # You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [:email] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. 
# config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [:email] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [:email] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If 401 status code should be returned for AJAX requests. True by default. # config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. # config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. You can skip storage for # particular strategies by setting this option. 
# Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing skip: :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. # config.clean_up_csrf_token_on_authentication = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 10. If # using other encryptors, it sets how many times you want the password re-encrypted. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. Note that, for bcrypt (the default # encryptor), the cost increases exponentially with the number of stretches (e.g. # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation). config.stretches = Rails.env.test? ? 1 : 10 # Setup a pepper to generate the encrypted password. # config.pepper = '3f3ba9103db6c2a642ebabfcd271a84102554ad86371f6bd40a56f757e8da7ddab19695f1bef9142e3daac997ac80bd94ab139767ffef59c5244ba478f209679' # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming their account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming their account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming their account. 
# config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. # config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed, new email is stored in # unconfirmed_email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [:email] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. # config.remember_for = 2.weeks # Invalidates all the remember me tokens when the user signs out. config.expire_all_remember_me_on_sign_out = true # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # secure: true in order to force SSL only cookies. # config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. config.password_length = 8..72 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@]+@[^@]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. 
# config.timeout_in = 30.minutes # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [:email] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. # config.last_attempt_warning = true # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [:email] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. config.reset_password_within = 6.hours # When set to false, does not sign a user in automatically after their password is # reset. Defaults to true, so a user is signed in automatically after a reset. # config.sign_in_after_reset_password = true # ==> Configuration for :encryptable # Allow you to use another encryption algorithm besides bcrypt (default). 
You can use # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1, # :authlogic_sha512 (then you should set stretches above to 20 for default behavior) # and :restful_authentication_sha1 (then you should set stretches to 10, and copy # REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. Check the wiki for more information on setting # up on your models and hooks. # config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. 
# # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(scope: :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. # The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using OmniAuth, Devise cannot automatically set OmniAuth path, # so you need to do it manually. For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end
# Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. # Devise will use the `secret_key_base` on Rails 4+ applications as its `secret_key` # by default. You can change it below and use your own secret key. config.secret_key = ENV['DEVISE_SECRET_KEY'] # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. config.mailer_sender = ENV['DEVISE_MAILER_SENDER'] # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. # You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [:email] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. 
For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [:email] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [:email] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If 401 status code should be returned for AJAX requests. True by default. # config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. # config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. 
You can skip storage for # particular strategies by setting this option. # Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing skip: :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. # config.clean_up_csrf_token_on_authentication = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 10. If # using other encryptors, it sets how many times you want the password re-encrypted. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. Note that, for bcrypt (the default # encryptor), the cost increases exponentially with the number of stretches (e.g. # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation). config.stretches = Rails.env.test? ? 1 : 10 # Setup a pepper to generate the encrypted password. # config.pepper = '4663bd8bbcf5d28b1cc93400fe1315fc5d2f4b3f1b39f78c9c4bed1c2b245ac885d4f053c56199f78332bbaaf428ff0d454b58a733e3cc51fe2a3c0c5561d559' # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming their account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming their account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming their account. 
# config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. # config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed, new email is stored in # unconfirmed_email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [:email] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. # config.remember_for = 2.weeks # Invalidates all the remember me tokens when the user signs out. config.expire_all_remember_me_on_sign_out = true # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # secure: true in order to force SSL only cookies. # config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. config.password_length = 8..72 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@]+@[^@]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. 
# config.timeout_in = 30.minutes # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [:email] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. # config.last_attempt_warning = true # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [:email] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. config.reset_password_within = 6.hours # When set to false, does not sign a user in automatically after their password is # reset. Defaults to true, so a user is signed in automatically after a reset. # config.sign_in_after_reset_password = true # ==> Configuration for :encryptable # Allow you to use another encryption algorithm besides bcrypt (default). 
You can use # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1, # :authlogic_sha512 (then you should set stretches above to 20 for default behavior) # and :restful_authentication_sha1 (then you should set stretches to 10, and copy # REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. Check the wiki for more information on setting # up on your models and hooks. # config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. 
# # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(scope: :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. # The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using OmniAuth, Devise cannot automatically set OmniAuth path, # so you need to do it manually. For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end login timeout on 30 minutes # Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. # Devise will use the `secret_key_base` on Rails 4+ applications as its `secret_key` # by default. You can change it below and use your own secret key. config.secret_key = ENV['DEVISE_SECRET_KEY'] # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. config.mailer_sender = ENV['DEVISE_MAILER_SENDER'] # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. 
require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. # You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [:email] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [:email] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [:email] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. 
# config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If 401 status code should be returned for AJAX requests. True by default. # config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. # config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. You can skip storage for # particular strategies by setting this option. # Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing skip: :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. # config.clean_up_csrf_token_on_authentication = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 10. If # using other encryptors, it sets how many times you want the password re-encrypted. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. 
However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. Note that, for bcrypt (the default # encryptor), the cost increases exponentially with the number of stretches (e.g. # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation). config.stretches = Rails.env.test? ? 1 : 10 # Setup a pepper to generate the encrypted password. # config.pepper = '4663bd8bbcf5d28b1cc93400fe1315fc5d2f4b3f1b39f78c9c4bed1c2b245ac885d4f053c56199f78332bbaaf428ff0d454b58a733e3cc51fe2a3c0c5561d559' # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming their account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming their account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming their account. # config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. # config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed, new email is stored in # unconfirmed_email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [:email] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. 
# config.remember_for = 2.weeks # Invalidates all the remember me tokens when the user signs out. config.expire_all_remember_me_on_sign_out = true # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # secure: true in order to force SSL only cookies. # config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. config.password_length = 8..72 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@]+@[^@]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. config.timeout_in = 30.minutes # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [:email] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. 
# config.last_attempt_warning = true # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [:email] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. config.reset_password_within = 6.hours # When set to false, does not sign a user in automatically after their password is # reset. Defaults to true, so a user is signed in automatically after a reset. # config.sign_in_after_reset_password = true # ==> Configuration for :encryptable # Allow you to use another encryption algorithm besides bcrypt (default). You can use # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1, # :authlogic_sha512 (then you should set stretches above to 20 for default behavior) # and :restful_authentication_sha1 (then you should set stretches to 10, and copy # REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. 
# # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. Check the wiki for more information on setting # up on your models and hooks. # config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. # # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(scope: :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. # The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using OmniAuth, Devise cannot automatically set OmniAuth path, # so you need to do it manually. For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end
# Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class with default "from" parameter. config.mailer_sender = "please-change-me-at-config-initializers-devise@example.com" # Configure the class responsible to send e-mails. # config.mailer = "Devise::Mailer" # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. # You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [ :email ] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. 
# These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [ :email ] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [ :email ] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Basic Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:token]` will # enable it only for token authentication. # config.http_authenticatable = false # If http headers should be returned for AJAX requests. True by default. # config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. "Application" by default. # config.http_authentication_realm = "Application" # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. You can skip storage for # :http_auth and :token_auth by adding those symbols to the array below. 
# Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing :skip => :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 10. If # using other encryptors, it sets how many times you want the password re-encrypted. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. config.stretches = Rails.env.test? ? 1 : 10 # Setup a pepper to generate the encrypted password. # config.pepper = "05ffcee711ccd15279d8dec19afe6030246e3399c601e8daee2d961ace9a83b80304c93cc21dccc045781c831dd18242afc4c0939c17e356c06c1fb755020ad9" # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming his account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming his account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming his account. # config.allow_unconfirmed_access_for = 2.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed new email is stored in # unconfirmed email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [ :email ] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. 
# config.remember_for = 2.weeks # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # :secure => true in order to force SSL only cookies. # config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. Default is 6..128. # config.password_length = 6..128 # Email regex used to validate email formats. It simply asserts that # an one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@]+@[^@]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. # config.timeout_in = 30.minutes # If true, expires auth token on session timeout. # config.expire_auth_token_on_timeout = false # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [ :email ] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. 
# config.unlock_in = 1.hour # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [ :email ] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. config.reset_password_within = 6.hours # ==> Configuration for :encryptable # Allow you to use another encryption algorithm besides bcrypt (default). You can use # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1, # :authlogic_sha512 (then you should set stretches above to 20 for default behavior) # and :restful_authentication_sha1 (then you should set stretches to 10, and copy # REST_AUTH_SITE_KEY to pepper) # config.encryptor = :sha512 # ==> Configuration for :token_authenticatable # Defines name of the authentication token params key # config.token_authentication_key = :auth_token # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. 
# config.navigational_formats = ["*/*", :html]

  # The default HTTP method used to sign out a resource. Default is :delete.
  config.sign_out_via = :delete

  # ==> OmniAuth
  # Add a new OmniAuth provider. Check the wiki for more information on setting
  # up on your models and hooks.
  # config.omniauth :github, 'APP_ID', 'APP_SECRET', :scope => 'user,public_repo'

  # ==> Warden configuration
  # If you want to use other strategies, that are not supported by Devise, or
  # change the failure app, you can configure them inside the config.warden block.
  #
  # config.warden do |manager|
  #   manager.intercept_401 = false
  #   manager.default_strategies(:scope => :user).unshift :some_external_strategy
  # end

  # ==> Mountable engine configurations
  # When using Devise inside an engine, let's call it `MyEngine`, and this engine
  # is mountable, there are some extra configurations to be taken into account.
  # The following options are available, assuming the engine is mounted as:
  #
  #     mount MyEngine, at: "/my_engine"
  #
  # The router that invoked `devise_for`, in the example above, would be:
  # config.router_name = :my_engine
  #
  # When using omniauth, Devise cannot automatically set Omniauth path,
  # so you need to do it manually. For the users scope, it would be:
  # config.omniauth_path_prefix = "/my_engine/users/auth"
end

# NOTE(review): the line below was a stray VCS commit message fused into the
# file; left as bare text it is a Ruby syntax error, so it is preserved here
# as a comment instead of being deleted.
# Fix devise mail sender

# Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
  # ==> Mailer Configuration
  # Configure the e-mail address which will be shown in Devise::Mailer,
  # note that it will be overwritten if you use your own mailer class
  # with default "from" parameter.
  config.mailer_sender = "no-reply@holderdeord.no"

  # Configure the class responsible to send e-mails.
  # config.mailer = "Devise::Mailer"

  # ==> ORM configuration
  # Load and configure the ORM. Supports :active_record (default) and
  # :mongoid (bson_ext recommended) by default. Other ORMs may be
  # available as additional gems.
  require 'devise/orm/active_record'

  # ==> Configuration for any authentication mechanism
  # Configure which keys are used when authenticating a user. The default is
  # just :email. You can configure it to use [:username, :subdomain], so for
  # authenticating a user, both parameters are required. Remember that those
  # parameters are used only when authenticating and not when retrieving from
  # session. If you need permissions, you should implement that in a before filter.
  # You can also supply a hash where the value is a boolean determining whether
  # or not authentication should be aborted when the value is not present.
  # config.authentication_keys = [ :email ]

  # Configure parameters from the request object used for authentication. Each entry
  # given should be a request method and it will automatically be passed to the
  # find_for_authentication method and considered in your model lookup. For instance,
  # if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
  # The same considerations mentioned for authentication_keys also apply to request_keys.
  # config.request_keys = []

  # Configure which authentication keys should be case-insensitive.
  # These keys will be downcased upon creating or modifying a user and when used
  # to authenticate or find a user. Default is :email.
  config.case_insensitive_keys = [ :email ]

  # Configure which authentication keys should have whitespace stripped.
  # These keys will have whitespace before and after removed upon creating or
  # modifying a user and when used to authenticate or find a user. Default is :email.
  config.strip_whitespace_keys = [ :email ]

  # Tell if authentication through request.params is enabled. True by default.
  # It can be set to an array that will enable params authentication only for the
  # given strategies, for example, `config.params_authenticatable = [:database]` will
  # enable it only for database (email + password) authentication.
  # config.params_authenticatable = true

  # Tell if authentication through HTTP Basic Auth is enabled. False by default.
  # It can be set to an array that will enable http authentication only for the
  # given strategies, for example, `config.http_authenticatable = [:token]` will
  # enable it only for token authentication.
  # config.http_authenticatable = false

  # If http headers should be returned for AJAX requests. True by default.
  # config.http_authenticatable_on_xhr = true

  # The realm used in Http Basic Authentication. "Application" by default.
  # config.http_authentication_realm = "Application"

  # It will change confirmation, password recovery and other workflows
  # to behave the same regardless if the e-mail provided was right or wrong.
  # Does not affect registerable.
  # config.paranoid = true

  # By default Devise will store the user in session. You can skip storage for
  # :http_auth and :token_auth by adding those symbols to the array below.
  # Notice that if you are skipping storage for all authentication paths, you
  # may want to disable generating routes to Devise's sessions controller by
  # passing :skip => :sessions to `devise_for` in your config/routes.rb
  config.skip_session_storage = [:http_auth]

  # ==> Configuration for :database_authenticatable
  # For bcrypt, this is the cost for hashing the password and defaults to 10. If
  # using other encryptors, it sets how many times you want the password re-encrypted.
  #
  # Limiting the stretches to just one in testing will increase the performance of
  # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
  # a value less than 10 in other environments.
  config.stretches = Rails.env.test? ? 1 : 10

  # Setup a pepper to generate the encrypted password.
  # config.pepper = "05ffcee711ccd15279d8dec19afe6030246e3399c601e8daee2d961ace9a83b80304c93cc21dccc045781c831dd18242afc4c0939c17e356c06c1fb755020ad9"

  # ==> Configuration for :confirmable
  # A period that the user is allowed to access the website even without
  # confirming his account. For instance, if set to 2.days, the user will be
  # able to access the website for two days without confirming his account,
  # access will be blocked just in the third day. Default is 0.days, meaning
  # the user cannot access the website without confirming his account.
  # config.allow_unconfirmed_access_for = 2.days

  # If true, requires any email changes to be confirmed (exactly the same way as
  # initial account confirmation) to be applied. Requires additional unconfirmed_email
  # db field (see migrations). Until confirmed new email is stored in
  # unconfirmed email column, and copied to email column on successful confirmation.
  config.reconfirmable = true

  # Defines which key will be used when confirming an account
  # config.confirmation_keys = [ :email ]

  # ==> Configuration for :rememberable
  # The time the user will be remembered without asking for credentials again.
  # config.remember_for = 2.weeks

  # If true, extends the user's remember period when remembered via cookie.
  # config.extend_remember_period = false

  # Options to be passed to the created cookie. For instance, you can set
  # :secure => true in order to force SSL only cookies.
  # config.rememberable_options = {}

  # ==> Configuration for :validatable
  # Range for password length. Default is 6..128.
  # config.password_length = 6..128

  # Email regex used to validate email formats. It simply asserts that
  # one (and only one) @ exists in the given string. This is mainly
  # to give user feedback and not to assert the e-mail validity.
  # config.email_regexp = /\A[^@]+@[^@]+\z/

  # ==> Configuration for :timeoutable
  # The time you want to timeout the user session without activity. After this
  # time the user will be asked for credentials again. Default is 30 minutes.
  # config.timeout_in = 30.minutes

  # If true, expires auth token on session timeout.
  # config.expire_auth_token_on_timeout = false

  # ==> Configuration for :lockable
  # Defines which strategy will be used to lock an account.
  # :failed_attempts = Locks an account after a number of failed attempts to sign in.
  # :none            = No lock strategy. You should handle locking by yourself.
  # config.lock_strategy = :failed_attempts

  # Defines which key will be used when locking and unlocking an account
  # config.unlock_keys = [ :email ]

  # Defines which strategy will be used to unlock an account.
  # :email = Sends an unlock link to the user email
  # :time  = Re-enables login after a certain amount of time (see :unlock_in below)
  # :both  = Enables both strategies
  # :none  = No unlock strategy. You should handle unlocking by yourself.
  # config.unlock_strategy = :both

  # Number of authentication tries before locking an account if lock_strategy
  # is failed attempts.
  # config.maximum_attempts = 20

  # Time interval to unlock the account if :time is enabled as unlock_strategy.
  # config.unlock_in = 1.hour

  # ==> Configuration for :recoverable
  #
  # Defines which key will be used when recovering the password for an account
  # config.reset_password_keys = [ :email ]

  # Time interval you can reset your password with a reset password key.
  # Don't put a too small interval or your users won't have the time to
  # change their passwords.
  config.reset_password_within = 6.hours

  # ==> Configuration for :encryptable
  # Allow you to use another encryption algorithm besides bcrypt (default). You can use
  # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1,
  # :authlogic_sha512 (then you should set stretches above to 20 for default behavior)
  # and :restful_authentication_sha1 (then you should set stretches to 10, and copy
  # REST_AUTH_SITE_KEY to pepper)
  # config.encryptor = :sha512

  # ==> Configuration for :token_authenticatable
  # Defines name of the authentication token params key
  # config.token_authentication_key = :auth_token

  # ==> Scopes configuration
  # Turn scoped views on. Before rendering "sessions/new", it will first check for
  # "users/sessions/new". It's turned off by default because it's slower if you
  # are using only default views.
  # config.scoped_views = false

  # Configure the default scope given to Warden. By default it's the first
  # devise role declared in your routes (usually :user).
  # config.default_scope = :user

  # Set this configuration to false if you want /users/sign_out to sign out
  # only the current scope. By default, Devise signs out all scopes.
  # config.sign_out_all_scopes = true

  # ==> Navigation configuration
  # Lists the formats that should be treated as navigational. Formats like
  # :html, should redirect to the sign in page when the user does not have
  # access, but formats like :xml or :json, should return 401.
  #
  # If you have any extra navigational formats, like :iphone or :mobile, you
  # should add them to the navigational formats lists.
  #
  # The "*/*" below is required to match Internet Explorer requests.
  # config.navigational_formats = ["*/*", :html]

  # The default HTTP method used to sign out a resource. Default is :delete.
  config.sign_out_via = :delete

  # ==> OmniAuth
  # Add a new OmniAuth provider. Check the wiki for more information on setting
  # up on your models and hooks.
  # config.omniauth :github, 'APP_ID', 'APP_SECRET', :scope => 'user,public_repo'

  # ==> Warden configuration
  # If you want to use other strategies, that are not supported by Devise, or
  # change the failure app, you can configure them inside the config.warden block.
  #
  # config.warden do |manager|
  #   manager.intercept_401 = false
  #   manager.default_strategies(:scope => :user).unshift :some_external_strategy
  # end

  # ==> Mountable engine configurations
  # When using Devise inside an engine, let's call it `MyEngine`, and this engine
  # is mountable, there are some extra configurations to be taken into account.
  # The following options are available, assuming the engine is mounted as:
  #
  #     mount MyEngine, at: "/my_engine"
  #
  # The router that invoked `devise_for`, in the example above, would be:
  # config.router_name = :my_engine
  #
  # When using omniauth, Devise cannot automatically set Omniauth path,
  # so you need to do it manually. For the users scope, it would be:
  # config.omniauth_path_prefix = "/my_engine/users/auth"
end
# Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| config.omniauth :facebook, "417848128426013", "8b1dc4f4e9f36ca0a7a0ea6c3137307b", scope: 'email', info_fields: 'email, name, first_name, last_name' config.omniauth :vkontakte, "5277782", "QIywkEf3MpTj6yhzRp9K", scope: 'email' #callback_url: 'devise/registrations#edit' # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. # Devise will use the `secret_key_base` on Rails 4+ applications as its `secret_key` # by default. You can change it below and use your own secret key. # config.secret_key = 'f718c5e687c5599983326aea44c1c6c8aba45bc6495ddfa211267b123e6467eb0ad09ad27e2e4c4c177f684d028850c6f13cecaf98848a0ba118e33d34a5d316' # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. config.mailer_sender = 'please-change-me-at-config-initializers-devise@example.com' # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. 
# You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [:email] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [:email] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [:email] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If 401 status code should be returned for AJAX requests. True by default. 
# config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. # config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. You can skip storage for # particular strategies by setting this option. # Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing skip: :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. # config.clean_up_csrf_token_on_authentication = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 10. If # using other encryptors, it sets how many times you want the password re-encrypted. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. Note that, for bcrypt (the default # encryptor), the cost increases exponentially with the number of stretches (e.g. # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation). config.stretches = Rails.env.test? ? 1 : 10 # Setup a pepper to generate the encrypted password. 
# config.pepper = '7dcd1b5612573922d13773729b166a0c55b49d139b2f95da3c599f0ead7e3a72a00987eff9619a6f7d1fe9393032b72a00eda1638c9210cbdc9739c2bfb5feef' # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming their account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming their account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming their account. # config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. # config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed, new email is stored in # unconfirmed_email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [:email] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. # config.remember_for = 2.weeks # Invalidates all the remember me tokens when the user signs out. config.expire_all_remember_me_on_sign_out = true # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # secure: true in order to force SSL only cookies. 
# config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. config.password_length = 8..72 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@]+@[^@]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. # config.timeout_in = 30.minutes # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [:email] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. # config.last_attempt_warning = true # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [:email] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. 
config.reset_password_within = 6.hours # When set to false, does not sign a user in automatically after their password is # reset. Defaults to true, so a user is signed in automatically after a reset. # config.sign_in_after_reset_password = true # ==> Configuration for :encryptable # Allow you to use another encryption algorithm besides bcrypt (default). You can use # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1, # :authlogic_sha512 (then you should set stretches above to 20 for default behavior) # and :restful_authentication_sha1 (then you should set stretches to 10, and copy # REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. 
Check the wiki for more information on setting # up on your models and hooks. # config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. # # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(scope: :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. # The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using OmniAuth, Devise cannot automatically set OmniAuth path, # so you need to do it manually. For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end update vkontakte auth # Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| config.omniauth :facebook, "417848128426013", "8b1dc4f4e9f36ca0a7a0ea6c3137307b", scope: 'email', info_fields: 'email, name, first_name, last_name' config.omniauth :vkontakte, "5277782", "QIywkEf3MpTj6yhzRp9K", scope: 'email, name, first_name, last_name' #callback_url: 'devise/registrations#edit' # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. # Devise will use the `secret_key_base` on Rails 4+ applications as its `secret_key` # by default. You can change it below and use your own secret key. 
# config.secret_key = 'f718c5e687c5599983326aea44c1c6c8aba45bc6495ddfa211267b123e6467eb0ad09ad27e2e4c4c177f684d028850c6f13cecaf98848a0ba118e33d34a5d316' # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. config.mailer_sender = 'please-change-me-at-config-initializers-devise@example.com' # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. # You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [:email] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. 
Default is :email. config.case_insensitive_keys = [:email] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [:email] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If 401 status code should be returned for AJAX requests. True by default. # config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. # config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. You can skip storage for # particular strategies by setting this option. 
# Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing skip: :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. # config.clean_up_csrf_token_on_authentication = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 10. If # using other encryptors, it sets how many times you want the password re-encrypted. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. Note that, for bcrypt (the default # encryptor), the cost increases exponentially with the number of stretches (e.g. # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation). config.stretches = Rails.env.test? ? 1 : 10 # Setup a pepper to generate the encrypted password. # config.pepper = '7dcd1b5612573922d13773729b166a0c55b49d139b2f95da3c599f0ead7e3a72a00987eff9619a6f7d1fe9393032b72a00eda1638c9210cbdc9739c2bfb5feef' # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming their account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming their account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming their account. 
# config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. # config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed, new email is stored in # unconfirmed_email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [:email] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. # config.remember_for = 2.weeks # Invalidates all the remember me tokens when the user signs out. config.expire_all_remember_me_on_sign_out = true # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # secure: true in order to force SSL only cookies. # config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. config.password_length = 8..72 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@]+@[^@]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. 
# config.timeout_in = 30.minutes # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [:email] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. # config.last_attempt_warning = true # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [:email] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. config.reset_password_within = 6.hours # When set to false, does not sign a user in automatically after their password is # reset. Defaults to true, so a user is signed in automatically after a reset. # config.sign_in_after_reset_password = true # ==> Configuration for :encryptable # Allow you to use another encryption algorithm besides bcrypt (default). 
You can use # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1, # :authlogic_sha512 (then you should set stretches above to 20 for default behavior) # and :restful_authentication_sha1 (then you should set stretches to 10, and copy # REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. Check the wiki for more information on setting # up on your models and hooks. # config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. 
# # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(scope: :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. # The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using OmniAuth, Devise cannot automatically set OmniAuth path, # so you need to do it manually. For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end
# Subscribes to the business Pusher channel and schedules a SaniGetWorker
# job whenever a 'new_transaccion' event is broadcast.
#
# Reads the Pusher application key from config/pusher.yml; expects a
# top-level "key" entry in that file.
key = YAML.load_file(Rails.root.join("config", "pusher.yml"))["key"]

socket = PusherClient::Socket.new(key)
socket.connect(true) # Connect asynchronously

# Subscribe to the business channel (token comes from the SANI settings hash).
socket.subscribe(SANI[:business_token])

# Bind to a global event. The event payload is not used here — the worker
# is simply scheduled to run (presumably it fetches what it needs itself;
# NOTE(review): confirm against SaniGetWorker).
socket.bind('new_transaccion') do |_data|
  SaniGetWorker.schedule
end
# BoringSSL CocoaPods podspec # Copyright 2015, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Pod::Spec.new do |s| s.name = 'BoringSSL' version = '5.0' s.version = version s.summary = 'BoringSSL is a fork of OpenSSL that is designed to meet Google’s needs.' # Adapted from the homepage: s.description = <<-DESC BoringSSL is a fork of OpenSSL that is designed to meet Google’s needs. Although BoringSSL is an open source project, it is not intended for general use, as OpenSSL is. We don’t recommend that third parties depend upon it. 
Doing so is likely to be frustrating because there are no guarantees of API stability. Only the latest version of this pod is supported, and every new version is a new major version. We update Google libraries and programs that use BoringSSL as needed when deciding to make API changes. This allows us to mostly avoid compromises in the name of compatibility. It works for us, but it may not work for you. As a Cocoapods pod, it has the advantage over OpenSSL's pods that the library doesn't need to be precompiled. This eliminates the 10 - 20 minutes of wait the first time a user does "pod install", lets it be used as a dynamic framework (pending solution of Cocoapods' issue #4605), and works with bitcode automatically. It's also thought to be smaller than OpenSSL (which takes 1MB - 2MB per ARM architecture), but we don't have specific numbers yet. BoringSSL arose because Google used OpenSSL for many years in various ways and, over time, built up a large number of patches that were maintained while tracking upstream OpenSSL. As Google’s product portfolio became more complex, more copies of OpenSSL sprung up and the effort involved in maintaining all these patches in multiple places was growing steadily. Currently BoringSSL is the SSL library in Chrome/Chromium, Android (but it’s not part of the NDK) and a number of other apps/programs. DESC s.homepage = 'https://boringssl.googlesource.com/boringssl/' s.documentation_url = 'https://commondatastorage.googleapis.com/chromium-boringssl-docs/headers.html' s.license = { :type => 'Mixed', :file => 'LICENSE' } # "The name and email addresses of the library maintainers, not the Podspec maintainer." s.authors = 'Adam Langley', 'David Benjamin', 'Matt Braithwaite' s.source = { :git => 'https://boringssl.googlesource.com/boringssl', :tag => "version_for_cocoapods_#{version}" } name = 'openssl' # When creating a dynamic framework, name it openssl.framework instead of BoringSSL.framework. 
# This lets users write their includes like `#include <openssl/ssl.h>` as opposed to `#include # <BoringSSL/ssl.h>`. s.module_name = name # When creating a dynamic framework, copy the headers under `include/openssl/` into the root of # the `Headers/` directory of the framework (i.e., not under `Headers/include/openssl`). # # TODO(jcanizales): Debug why this doesn't work on macOS. s.header_mappings_dir = 'include/openssl' # The above has an undesired effect when creating a static library: It forces users to write # includes like `#include <BoringSSL/ssl.h>`. `s.header_dir` adds a path prefix to that, and # because Cocoapods lets omit the pod name when including headers of static libraries, the # following lets users write `#include <openssl/ssl.h>`. s.header_dir = name # The module map and umbrella header created automatically by Cocoapods don't work for C libraries # like this one. The following file, and a correct umbrella header, are created on the fly by the # `prepare_command` of this pod. s.module_map = 'include/openssl/module.modulemap' # We don't need to inhibit all warnings; only -Wno-shorten-64-to-32. But Cocoapods' linter doesn't # want that for some reason. s.compiler_flags = '-DOPENSSL_NO_ASM', '-GCC_WARN_INHIBIT_ALL_WARNINGS', '-w' s.requires_arc = false # Like many other C libraries, BoringSSL has its public headers under `include/<libname>/` and its # sources and private headers in other directories outside `include/`. Cocoapods' linter doesn't # allow any header to be listed outside the `header_mappings_dir` (even though doing so works in # practice). Because we need our `header_mappings_dir` to be `include/openssl/` for the reason # mentioned above, we work around the linter limitation by dividing the pod into two subspecs, one # for public headers and the other for implementation. Each gets its own `header_mappings_dir`, # making the linter happy. 
s.subspec 'Interface' do |ss| ss.header_mappings_dir = 'include/openssl' ss.source_files = 'include/openssl/*.h' end s.subspec 'Implementation' do |ss| ss.header_mappings_dir = '.' ss.source_files = 'ssl/*.{h,c}', 'ssl/**/*.{h,c}', '*.{h,c}', 'crypto/*.{h,c}', 'crypto/**/*.{h,c}' ss.private_header_files = 'ssl/*.h', 'ssl/**/*.h', '*.h', 'crypto/*.h', 'crypto/**/*.h' ss.exclude_files = '**/*_test.*', '**/test_*.*', '**/test/*.*' ss.dependency "#{s.name}/Interface", version end s.prepare_command = <<-END_OF_COMMAND # Replace "const BIGNUM *I" in rsa.h with a lowercase i, as the former fails when including # OpenSSL in a Swift bridging header (complex.h defines "I", and it's as if the compiler # included it in every bridged header). sed -E -i '.back' 's/\\*I,/*i,/g' include/openssl/rsa.h # Replace `#include "../crypto/internal.h"` in e_tls.c with `#include "../internal.h"`. The # former assumes crypto/ is in the headers search path, which is hard to enforce when using # dynamic frameworks. The latters always works, being relative to the current file. sed -E -i '.back' 's/crypto\\///g' crypto/cipher/e_tls.c # Add a module map and an umbrella header cat > include/openssl/umbrella.h <<EOF #include "ssl.h" #include "crypto.h" #include "aes.h" /* The following macros are defined by base.h. The latter is the first file included by the other headers. 
*/ #if defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64) # include "arm_arch.h" #endif #include "asn1.h" #include "asn1_mac.h" #include "asn1t.h" #include "blowfish.h" #include "cast.h" #include "chacha.h" #include "cmac.h" #include "conf.h" #include "cpu.h" #include "curve25519.h" #include "des.h" #include "dtls1.h" #include "hkdf.h" #include "md4.h" #include "md5.h" #include "newhope.h" #include "obj_mac.h" #include "objects.h" #include "opensslv.h" #include "ossl_typ.h" #include "pkcs12.h" #include "pkcs7.h" #include "pkcs8.h" #include "poly1305.h" #include "rand.h" #include "rc4.h" #include "ripemd.h" #include "safestack.h" #include "srtp.h" #include "time_support.h" #include "x509.h" #include "x509v3.h" EOF cat > include/openssl/module.modulemap <<EOF framework module openssl { umbrella header "umbrella.h" export * module * { export * } } EOF # #include <inttypes.h> fails to compile when building a dynamic framework. libgit2 in # https://github.com/libgit2/libgit2/commit/1ddada422caf8e72ba97dca2568d2bf879fed5f2 and libvpx # in https://chromium.googlesource.com/webm/libvpx/+/1bec0c5a7e885ec792f6bb658eb3f34ad8f37b15 # work around it by removing the include. We need four of its macros, so we expand them here. sed -E -i '.back' '/<inttypes.h>/d' include/openssl/bn.h sed -E -i '.back' 's/PRIu32/"u"/g' include/openssl/bn.h sed -E -i '.back' 's/PRIx32/"x"/g' include/openssl/bn.h sed -E -i '.back' 's/PRIu64/"llu"/g' include/openssl/bn.h sed -E -i '.back' 's/PRIx64/"llx"/g' include/openssl/bn.h # This is a bit ridiculous, but requiring people to install Go in order to build is slightly # more ridiculous IMO. To save you from scrolling, this is the last part of the podspec. # TODO(jcanizales): Translate err_data_generate.go into a Bash or Ruby script. cat > err_data.c <<EOF /* Copyright (c) 2015, Google Inc. 
* * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ /* This file was generated by err_data_generate.go. */ #include <openssl/base.h> #include <openssl/err.h> #include <openssl/type_check.h> OPENSSL_COMPILE_ASSERT(ERR_LIB_NONE == 1, library_values_changed_1); OPENSSL_COMPILE_ASSERT(ERR_LIB_SYS == 2, library_values_changed_2); OPENSSL_COMPILE_ASSERT(ERR_LIB_BN == 3, library_values_changed_3); OPENSSL_COMPILE_ASSERT(ERR_LIB_RSA == 4, library_values_changed_4); OPENSSL_COMPILE_ASSERT(ERR_LIB_DH == 5, library_values_changed_5); OPENSSL_COMPILE_ASSERT(ERR_LIB_EVP == 6, library_values_changed_6); OPENSSL_COMPILE_ASSERT(ERR_LIB_BUF == 7, library_values_changed_7); OPENSSL_COMPILE_ASSERT(ERR_LIB_OBJ == 8, library_values_changed_8); OPENSSL_COMPILE_ASSERT(ERR_LIB_PEM == 9, library_values_changed_9); OPENSSL_COMPILE_ASSERT(ERR_LIB_DSA == 10, library_values_changed_10); OPENSSL_COMPILE_ASSERT(ERR_LIB_X509 == 11, library_values_changed_11); OPENSSL_COMPILE_ASSERT(ERR_LIB_ASN1 == 12, library_values_changed_12); OPENSSL_COMPILE_ASSERT(ERR_LIB_CONF == 13, library_values_changed_13); OPENSSL_COMPILE_ASSERT(ERR_LIB_CRYPTO == 14, library_values_changed_14); OPENSSL_COMPILE_ASSERT(ERR_LIB_EC == 15, library_values_changed_15); OPENSSL_COMPILE_ASSERT(ERR_LIB_SSL == 16, library_values_changed_16); 
OPENSSL_COMPILE_ASSERT(ERR_LIB_BIO == 17, library_values_changed_17); OPENSSL_COMPILE_ASSERT(ERR_LIB_PKCS7 == 18, library_values_changed_18); OPENSSL_COMPILE_ASSERT(ERR_LIB_PKCS8 == 19, library_values_changed_19); OPENSSL_COMPILE_ASSERT(ERR_LIB_X509V3 == 20, library_values_changed_20); OPENSSL_COMPILE_ASSERT(ERR_LIB_RAND == 21, library_values_changed_21); OPENSSL_COMPILE_ASSERT(ERR_LIB_ENGINE == 22, library_values_changed_22); OPENSSL_COMPILE_ASSERT(ERR_LIB_OCSP == 23, library_values_changed_23); OPENSSL_COMPILE_ASSERT(ERR_LIB_UI == 24, library_values_changed_24); OPENSSL_COMPILE_ASSERT(ERR_LIB_COMP == 25, library_values_changed_25); OPENSSL_COMPILE_ASSERT(ERR_LIB_ECDSA == 26, library_values_changed_26); OPENSSL_COMPILE_ASSERT(ERR_LIB_ECDH == 27, library_values_changed_27); OPENSSL_COMPILE_ASSERT(ERR_LIB_HMAC == 28, library_values_changed_28); OPENSSL_COMPILE_ASSERT(ERR_LIB_DIGEST == 29, library_values_changed_29); OPENSSL_COMPILE_ASSERT(ERR_LIB_CIPHER == 30, library_values_changed_30); OPENSSL_COMPILE_ASSERT(ERR_LIB_HKDF == 31, library_values_changed_31); OPENSSL_COMPILE_ASSERT(ERR_LIB_USER == 32, library_values_changed_32); OPENSSL_COMPILE_ASSERT(ERR_NUM_LIBS == 33, library_values_changed_num); const uint32_t kOpenSSLReasonValues[] = { 0xc320838, 0xc328852, 0xc330861, 0xc338871, 0xc340880, 0xc348899, 0xc3508a5, 0xc3588c2, 0xc3608d4, 0xc3688e2, 0xc3708f2, 0xc3788ff, 0xc38090f, 0xc38891a, 0xc390930, 0xc39893f, 0xc3a0953, 0xc3a8845, 0xc3b00ea, 0x10320845, 0x103293ab, 0x103313b7, 0x103393d0, 0x103413e3, 0x10348e8b, 0x10350c19, 0x103593f6, 0x1036140b, 0x1036941e, 0x1037143d, 0x10379456, 0x1038146b, 0x10389489, 0x10391498, 0x103994b4, 0x103a14cf, 0x103a94de, 0x103b14fa, 0x103b9515, 0x103c152c, 0x103c80ea, 0x103d153d, 0x103d9551, 0x103e1570, 0x103e957f, 0x103f1596, 0x103f95a9, 0x10400bea, 0x104095bc, 0x104115da, 0x104195ed, 0x10421607, 0x10429617, 0x1043162b, 0x10439641, 0x10441659, 0x1044966e, 0x10451682, 0x10459694, 0x104605fb, 0x1046893f, 0x104716a9, 0x104796c0, 
0x104816d5, 0x104896e3, 0x14320bcd, 0x14328bdb, 0x14330bea, 0x14338bfc, 0x143400ac, 0x143480ea, 0x18320083, 0x18328ee1, 0x183300ac, 0x18338ef7, 0x18340f0b, 0x183480ea, 0x18350f20, 0x18358f38, 0x18360f4d, 0x18368f61, 0x18370f85, 0x18378f9b, 0x18380faf, 0x18388fbf, 0x18390a57, 0x18398fcf, 0x183a0fe4, 0x183a8ff8, 0x183b0c25, 0x183b9005, 0x183c1017, 0x183c9022, 0x183d1032, 0x183d9043, 0x183e1054, 0x183e9066, 0x183f108f, 0x183f90a8, 0x184010c0, 0x184086d3, 0x203210e7, 0x243210f3, 0x24328985, 0x24331105, 0x24339112, 0x2434111f, 0x24349131, 0x24351140, 0x2435915d, 0x2436116a, 0x24369178, 0x24371186, 0x24379194, 0x2438119d, 0x243891aa, 0x243911bd, 0x28320c0d, 0x28328c25, 0x28330bea, 0x28338c38, 0x28340c19, 0x283480ac, 0x283500ea, 0x2c322775, 0x2c32a783, 0x2c332795, 0x2c33a7a7, 0x2c3427bb, 0x2c34a7cd, 0x2c3527e8, 0x2c35a7fa, 0x2c36280d, 0x2c36832d, 0x2c37281a, 0x2c37a82c, 0x2c38283f, 0x2c38a856, 0x2c392864, 0x2c39a874, 0x2c3a2886, 0x2c3aa89a, 0x2c3b28ab, 0x2c3ba8ca, 0x2c3c28de, 0x2c3ca8f4, 0x2c3d290d, 0x2c3da92a, 0x2c3e293b, 0x2c3ea949, 0x2c3f2961, 0x2c3fa979, 0x2c402986, 0x2c4090e7, 0x2c412997, 0x2c41a9aa, 0x2c4210c0, 0x2c42a9bb, 0x2c430720, 0x2c43a8bc, 0x30320000, 0x30328015, 0x3033001f, 0x30338038, 0x3034004a, 0x30348064, 0x3035006b, 0x30358083, 0x30360094, 0x303680ac, 0x303700b9, 0x303780c8, 0x303800ea, 0x303880f7, 0x3039010a, 0x30398125, 0x303a013a, 0x303a814e, 0x303b0162, 0x303b8173, 0x303c018c, 0x303c81a9, 0x303d01b7, 0x303d81cb, 0x303e01db, 0x303e81f4, 0x303f0204, 0x303f8217, 0x30400226, 0x30408232, 0x30410247, 0x30418257, 0x3042026e, 0x3042827b, 0x3043028e, 0x3043829d, 0x304402b2, 0x304482d3, 0x304502e6, 0x304582f9, 0x30460312, 0x3046832d, 0x3047034a, 0x30478363, 0x30480371, 0x30488382, 0x30490391, 0x304983a9, 0x304a03bb, 0x304a83cf, 0x304b03ee, 0x304b8401, 0x304c040c, 0x304c841d, 0x304d0429, 0x304d843f, 0x304e044d, 0x304e8463, 0x304f0475, 0x304f8487, 0x3050049a, 0x305084ad, 0x305104be, 0x305184ce, 0x305204e6, 0x305284fb, 0x30530513, 0x30538527, 0x3054053f, 
0x30548558, 0x30550571, 0x3055858e, 0x30560599, 0x305685b1, 0x305705c1, 0x305785d2, 0x305805e5, 0x305885fb, 0x30590604, 0x30598619, 0x305a062c, 0x305a863b, 0x305b065b, 0x305b866a, 0x305c068b, 0x305c86a7, 0x305d06b3, 0x305d86d3, 0x305e06ef, 0x305e8700, 0x305f0716, 0x305f8720, 0x34320b47, 0x34328b5b, 0x34330b78, 0x34338b8b, 0x34340b9a, 0x34348bb7, 0x3c320083, 0x3c328c62, 0x3c330c7b, 0x3c338c96, 0x3c340cb3, 0x3c348cdd, 0x3c350cf8, 0x3c358d1e, 0x3c360d37, 0x3c368d4f, 0x3c370d60, 0x3c378d6e, 0x3c380d7b, 0x3c388d8f, 0x3c390c25, 0x3c398da3, 0x3c3a0db7, 0x3c3a88ff, 0x3c3b0dc7, 0x3c3b8de2, 0x3c3c0df4, 0x3c3c8e0a, 0x3c3d0e14, 0x3c3d8e28, 0x3c3e0e36, 0x3c3e8e5b, 0x3c3f0c4e, 0x3c3f8e44, 0x3c4000ac, 0x3c4080ea, 0x3c410cce, 0x3c418d0d, 0x403216fa, 0x40329710, 0x4033173e, 0x40339748, 0x4034175f, 0x4034977d, 0x4035178d, 0x4035979f, 0x403617ac, 0x403697b8, 0x403717cd, 0x403797df, 0x403817ea, 0x403897fc, 0x40390e8b, 0x4039980c, 0x403a181f, 0x403a9840, 0x403b1851, 0x403b9861, 0x403c0064, 0x403c8083, 0x403d186d, 0x403d9883, 0x403e1892, 0x403e98a5, 0x403f18bf, 0x403f98cd, 0x404018e2, 0x404098f6, 0x40411913, 0x4041992e, 0x40421947, 0x4042995a, 0x4043196e, 0x40439986, 0x4044199d, 0x404480ac, 0x404519b2, 0x404599c4, 0x404619e8, 0x40469a08, 0x40471a16, 0x40479a2a, 0x40481a3f, 0x40489a58, 0x40491a6f, 0x40499a89, 0x404a1aa0, 0x404a9abe, 0x404b1ad6, 0x404b9aed, 0x404c1b03, 0x404c9b15, 0x404d1b36, 0x404d9b58, 0x404e1b6c, 0x404e9b79, 0x404f1b90, 0x404f9ba0, 0x40501bca, 0x40509bde, 0x40511bf9, 0x40519c09, 0x40521c20, 0x40529c32, 0x40531c4a, 0x40539c5d, 0x40541c72, 0x40549c95, 0x40551ca3, 0x40559cc0, 0x40561ccd, 0x40569ce6, 0x40571cfe, 0x40579d11, 0x40581d26, 0x40589d38, 0x40591d48, 0x40599d61, 0x405a1d75, 0x405a9d85, 0x405b1d9d, 0x405b9dae, 0x405c1dc1, 0x405c9dd2, 0x405d1ddf, 0x405d9df6, 0x405e1e16, 0x405e8a95, 0x405f1e37, 0x405f9e44, 0x40601e52, 0x40609e74, 0x40611e9c, 0x40619eb1, 0x40621ec8, 0x40629ed9, 0x40631eea, 0x40639eff, 0x40641f16, 0x40649f27, 0x40651f42, 0x40659f59, 0x40661f71, 
0x40669f9b, 0x40671fc6, 0x40679fe7, 0x40681ffa, 0x4068a01b, 0x4069204d, 0x4069a07b, 0x406a209c, 0x406aa0bc, 0x406b2244, 0x406ba267, 0x406c227d, 0x406ca4a9, 0x406d24d8, 0x406da500, 0x406e2519, 0x406ea531, 0x406f2550, 0x406fa565, 0x40702578, 0x4070a595, 0x40710800, 0x4071a5a7, 0x407225ba, 0x4072a5d3, 0x407325eb, 0x4073936d, 0x407425ff, 0x4074a619, 0x4075262a, 0x4075a63e, 0x4076264c, 0x407691aa, 0x40772671, 0x4077a693, 0x407826ae, 0x4078a6c3, 0x407926da, 0x4079a6f0, 0x407a26fc, 0x407aa70f, 0x407b2724, 0x407ba736, 0x407c274b, 0x407ca754, 0x407d2036, 0x407d9bb0, 0x41f4216f, 0x41f92201, 0x41fe20f4, 0x41fea2d0, 0x41ff23c1, 0x42032188, 0x420821aa, 0x4208a1e6, 0x420920d8, 0x4209a220, 0x420a212f, 0x420aa10f, 0x420b214f, 0x420ba1c8, 0x420c23dd, 0x420ca29d, 0x420d22b7, 0x420da2ee, 0x42122308, 0x421723a4, 0x4217a34a, 0x421c236c, 0x421f2327, 0x422123f4, 0x42262387, 0x422b248d, 0x422ba456, 0x422c2475, 0x422ca430, 0x422d240f, 0x4432072b, 0x4432873a, 0x44330746, 0x44338754, 0x44340767, 0x44348778, 0x4435077f, 0x44358789, 0x4436079c, 0x443687b2, 0x443707c4, 0x443787d1, 0x443807e0, 0x443887e8, 0x44390800, 0x4439880e, 0x443a0821, 0x4c3211d4, 0x4c3291e4, 0x4c3311f7, 0x4c339217, 0x4c3400ac, 0x4c3480ea, 0x4c351223, 0x4c359231, 0x4c36124d, 0x4c369260, 0x4c37126f, 0x4c37927d, 0x4c381292, 0x4c38929e, 0x4c3912be, 0x4c3992e8, 0x4c3a1301, 0x4c3a931a, 0x4c3b05fb, 0x4c3b9333, 0x4c3c1345, 0x4c3c9354, 0x4c3d136d, 0x4c3d937c, 0x4c3e1389, 0x503229cd, 0x5032a9dc, 0x503329e7, 0x5033a9f7, 0x50342a10, 0x5034aa2a, 0x50352a38, 0x5035aa4e, 0x50362a60, 0x5036aa76, 0x50372a8f, 0x5037aaa2, 0x50382aba, 0x5038aacb, 0x50392ae0, 0x5039aaf4, 0x503a2b14, 0x503aab2a, 0x503b2b42, 0x503bab54, 0x503c2b70, 0x503cab87, 0x503d2ba0, 0x503dabb6, 0x503e2bc3, 0x503eabd9, 0x503f2beb, 0x503f8382, 0x50402bfe, 0x5040ac0e, 0x50412c28, 0x5041ac37, 0x50422c51, 0x5042ac6e, 0x50432c7e, 0x5043ac8e, 0x50442c9d, 0x5044843f, 0x50452cb1, 0x5045accf, 0x50462ce2, 0x5046acf8, 0x50472d0a, 0x5047ad1f, 0x50482d45, 0x5048ad53, 0x50492d66, 
0x5049ad7b, 0x504a2d91, 0x504aada1, 0x504b2dc1, 0x504badd4, 0x504c2df7, 0x504cae25, 0x504d2e37, 0x504dae54, 0x504e2e6f, 0x504eae8b, 0x504f2e9d, 0x504faeb4, 0x50502ec3, 0x505086ef, 0x50512ed6, 0x58320ec9, 0x68320e8b, 0x68328c25, 0x68330c38, 0x68338e99, 0x68340ea9, 0x683480ea, 0x6c320e67, 0x6c328bfc, 0x6c330e72, 0x74320a0b, 0x78320970, 0x78328985, 0x78330991, 0x78338083, 0x783409a0, 0x783489b5, 0x783509d4, 0x783589f6, 0x78360a0b, 0x78368a21, 0x78370a31, 0x78378a44, 0x78380a57, 0x78388a69, 0x78390a76, 0x78398a95, 0x783a0aaa, 0x783a8ab8, 0x783b0ac2, 0x783b8ad6, 0x783c0aed, 0x783c8b02, 0x783d0b19, 0x783d8b2e, 0x783e0a84, 0x7c3210d6, }; const size_t kOpenSSLReasonValuesLen = sizeof(kOpenSSLReasonValues) / sizeof(kOpenSSLReasonValues[0]); const char kOpenSSLReasonStringData[] = "ASN1_LENGTH_MISMATCH\\0" "AUX_ERROR\\0" "BAD_GET_ASN1_OBJECT_CALL\\0" "BAD_OBJECT_HEADER\\0" "BMPSTRING_IS_WRONG_LENGTH\\0" "BN_LIB\\0" "BOOLEAN_IS_WRONG_LENGTH\\0" "BUFFER_TOO_SMALL\\0" "CONTEXT_NOT_INITIALISED\\0" "DECODE_ERROR\\0" "DEPTH_EXCEEDED\\0" "DIGEST_AND_KEY_TYPE_NOT_SUPPORTED\\0" "ENCODE_ERROR\\0" "ERROR_GETTING_TIME\\0" "EXPECTING_AN_ASN1_SEQUENCE\\0" "EXPECTING_AN_INTEGER\\0" "EXPECTING_AN_OBJECT\\0" "EXPECTING_A_BOOLEAN\\0" "EXPECTING_A_TIME\\0" "EXPLICIT_LENGTH_MISMATCH\\0" "EXPLICIT_TAG_NOT_CONSTRUCTED\\0" "FIELD_MISSING\\0" "FIRST_NUM_TOO_LARGE\\0" "HEADER_TOO_LONG\\0" "ILLEGAL_BITSTRING_FORMAT\\0" "ILLEGAL_BOOLEAN\\0" "ILLEGAL_CHARACTERS\\0" "ILLEGAL_FORMAT\\0" "ILLEGAL_HEX\\0" "ILLEGAL_IMPLICIT_TAG\\0" "ILLEGAL_INTEGER\\0" "ILLEGAL_NESTED_TAGGING\\0" "ILLEGAL_NULL\\0" "ILLEGAL_NULL_VALUE\\0" "ILLEGAL_OBJECT\\0" "ILLEGAL_OPTIONAL_ANY\\0" "ILLEGAL_OPTIONS_ON_ITEM_TEMPLATE\\0" "ILLEGAL_TAGGED_ANY\\0" "ILLEGAL_TIME_VALUE\\0" "INTEGER_NOT_ASCII_FORMAT\\0" "INTEGER_TOO_LARGE_FOR_LONG\\0" "INVALID_BIT_STRING_BITS_LEFT\\0" "INVALID_BMPSTRING_LENGTH\\0" "INVALID_DIGIT\\0" "INVALID_MODIFIER\\0" "INVALID_NUMBER\\0" "INVALID_OBJECT_ENCODING\\0" "INVALID_SEPARATOR\\0" 
"INVALID_TIME_FORMAT\\0" "INVALID_UNIVERSALSTRING_LENGTH\\0" "INVALID_UTF8STRING\\0" "LIST_ERROR\\0" "MISSING_ASN1_EOS\\0" "MISSING_EOC\\0" "MISSING_SECOND_NUMBER\\0" "MISSING_VALUE\\0" "MSTRING_NOT_UNIVERSAL\\0" "MSTRING_WRONG_TAG\\0" "NESTED_ASN1_ERROR\\0" "NESTED_ASN1_STRING\\0" "NON_HEX_CHARACTERS\\0" "NOT_ASCII_FORMAT\\0" "NOT_ENOUGH_DATA\\0" "NO_MATCHING_CHOICE_TYPE\\0" "NULL_IS_WRONG_LENGTH\\0" "OBJECT_NOT_ASCII_FORMAT\\0" "ODD_NUMBER_OF_CHARS\\0" "SECOND_NUMBER_TOO_LARGE\\0" "SEQUENCE_LENGTH_MISMATCH\\0" "SEQUENCE_NOT_CONSTRUCTED\\0" "SEQUENCE_OR_SET_NEEDS_CONFIG\\0" "SHORT_LINE\\0" "STREAMING_NOT_SUPPORTED\\0" "STRING_TOO_LONG\\0" "STRING_TOO_SHORT\\0" "TAG_VALUE_TOO_HIGH\\0" "TIME_NOT_ASCII_FORMAT\\0" "TOO_LONG\\0" "TYPE_NOT_CONSTRUCTED\\0" "TYPE_NOT_PRIMITIVE\\0" "UNEXPECTED_EOC\\0" "UNIVERSALSTRING_IS_WRONG_LENGTH\\0" "UNKNOWN_FORMAT\\0" "UNKNOWN_MESSAGE_DIGEST_ALGORITHM\\0" "UNKNOWN_SIGNATURE_ALGORITHM\\0" "UNKNOWN_TAG\\0" "UNSUPPORTED_ANY_DEFINED_BY_TYPE\\0" "UNSUPPORTED_PUBLIC_KEY_TYPE\\0" "UNSUPPORTED_TYPE\\0" "WRONG_PUBLIC_KEY_TYPE\\0" "WRONG_TAG\\0" "WRONG_TYPE\\0" "BAD_FOPEN_MODE\\0" "BROKEN_PIPE\\0" "CONNECT_ERROR\\0" "ERROR_SETTING_NBIO\\0" "INVALID_ARGUMENT\\0" "IN_USE\\0" "KEEPALIVE\\0" "NBIO_CONNECT_ERROR\\0" "NO_HOSTNAME_SPECIFIED\\0" "NO_PORT_SPECIFIED\\0" "NO_SUCH_FILE\\0" "NULL_PARAMETER\\0" "SYS_LIB\\0" "UNABLE_TO_CREATE_SOCKET\\0" "UNINITIALIZED\\0" "UNSUPPORTED_METHOD\\0" "WRITE_TO_READ_ONLY_BIO\\0" "ARG2_LT_ARG3\\0" "BAD_ENCODING\\0" "BAD_RECIPROCAL\\0" "BIGNUM_TOO_LONG\\0" "BITS_TOO_SMALL\\0" "CALLED_WITH_EVEN_MODULUS\\0" "DIV_BY_ZERO\\0" "EXPAND_ON_STATIC_BIGNUM_DATA\\0" "INPUT_NOT_REDUCED\\0" "INVALID_RANGE\\0" "NEGATIVE_NUMBER\\0" "NOT_A_SQUARE\\0" "NOT_INITIALIZED\\0" "NO_INVERSE\\0" "PRIVATE_KEY_TOO_LARGE\\0" "P_IS_NOT_PRIME\\0" "TOO_MANY_ITERATIONS\\0" "TOO_MANY_TEMPORARY_VARIABLES\\0" "AES_KEY_SETUP_FAILED\\0" "BAD_DECRYPT\\0" "BAD_KEY_LENGTH\\0" "CTRL_NOT_IMPLEMENTED\\0" "CTRL_OPERATION_NOT_IMPLEMENTED\\0" 
"DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH\\0" "INITIALIZATION_ERROR\\0" "INPUT_NOT_INITIALIZED\\0" "INVALID_AD_SIZE\\0" "INVALID_KEY_LENGTH\\0" "INVALID_NONCE_SIZE\\0" "INVALID_OPERATION\\0" "IV_TOO_LARGE\\0" "NO_CIPHER_SET\\0" "NO_DIRECTION_SET\\0" "OUTPUT_ALIASES_INPUT\\0" "TAG_TOO_LARGE\\0" "TOO_LARGE\\0" "UNSUPPORTED_AD_SIZE\\0" "UNSUPPORTED_INPUT_SIZE\\0" "UNSUPPORTED_KEY_SIZE\\0" "UNSUPPORTED_NONCE_SIZE\\0" "UNSUPPORTED_TAG_SIZE\\0" "WRONG_FINAL_BLOCK_LENGTH\\0" "LIST_CANNOT_BE_NULL\\0" "MISSING_CLOSE_SQUARE_BRACKET\\0" "MISSING_EQUAL_SIGN\\0" "NO_CLOSE_BRACE\\0" "UNABLE_TO_CREATE_NEW_SECTION\\0" "VARIABLE_HAS_NO_VALUE\\0" "BAD_GENERATOR\\0" "INVALID_PUBKEY\\0" "MODULUS_TOO_LARGE\\0" "NO_PRIVATE_VALUE\\0" "BAD_Q_VALUE\\0" "BAD_VERSION\\0" "MISSING_PARAMETERS\\0" "NEED_NEW_SETUP_VALUES\\0" "BIGNUM_OUT_OF_RANGE\\0" "COORDINATES_OUT_OF_RANGE\\0" "D2I_ECPKPARAMETERS_FAILURE\\0" "EC_GROUP_NEW_BY_NAME_FAILURE\\0" "GROUP2PKPARAMETERS_FAILURE\\0" "GROUP_MISMATCH\\0" "I2D_ECPKPARAMETERS_FAILURE\\0" "INCOMPATIBLE_OBJECTS\\0" "INVALID_COFACTOR\\0" "INVALID_COMPRESSED_POINT\\0" "INVALID_COMPRESSION_BIT\\0" "INVALID_ENCODING\\0" "INVALID_FIELD\\0" "INVALID_FORM\\0" "INVALID_GROUP_ORDER\\0" "INVALID_PRIVATE_KEY\\0" "MISSING_PRIVATE_KEY\\0" "NON_NAMED_CURVE\\0" "PKPARAMETERS2GROUP_FAILURE\\0" "POINT_AT_INFINITY\\0" "POINT_IS_NOT_ON_CURVE\\0" "SLOT_FULL\\0" "UNDEFINED_GENERATOR\\0" "UNKNOWN_GROUP\\0" "UNKNOWN_ORDER\\0" "WRONG_CURVE_PARAMETERS\\0" "WRONG_ORDER\\0" "KDF_FAILED\\0" "POINT_ARITHMETIC_FAILURE\\0" "BAD_SIGNATURE\\0" "NOT_IMPLEMENTED\\0" "RANDOM_NUMBER_GENERATION_FAILED\\0" "OPERATION_NOT_SUPPORTED\\0" "COMMAND_NOT_SUPPORTED\\0" "DIFFERENT_KEY_TYPES\\0" "DIFFERENT_PARAMETERS\\0" "EXPECTING_AN_EC_KEY_KEY\\0" "EXPECTING_AN_RSA_KEY\\0" "EXPECTING_A_DSA_KEY\\0" "ILLEGAL_OR_UNSUPPORTED_PADDING_MODE\\0" "INVALID_DIGEST_LENGTH\\0" "INVALID_DIGEST_TYPE\\0" "INVALID_KEYBITS\\0" "INVALID_MGF1_MD\\0" "INVALID_PADDING_MODE\\0" "INVALID_PSS_SALTLEN\\0" "KEYS_NOT_SET\\0" 
"NO_DEFAULT_DIGEST\\0" "NO_KEY_SET\\0" "NO_MDC2_SUPPORT\\0" "NO_NID_FOR_CURVE\\0" "NO_OPERATION_SET\\0" "NO_PARAMETERS_SET\\0" "OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE\\0" "OPERATON_NOT_INITIALIZED\\0" "UNKNOWN_PUBLIC_KEY_TYPE\\0" "UNSUPPORTED_ALGORITHM\\0" "OUTPUT_TOO_LARGE\\0" "UNKNOWN_NID\\0" "BAD_BASE64_DECODE\\0" "BAD_END_LINE\\0" "BAD_IV_CHARS\\0" "BAD_PASSWORD_READ\\0" "CIPHER_IS_NULL\\0" "ERROR_CONVERTING_PRIVATE_KEY\\0" "NOT_DEK_INFO\\0" "NOT_ENCRYPTED\\0" "NOT_PROC_TYPE\\0" "NO_START_LINE\\0" "READ_KEY\\0" "SHORT_HEADER\\0" "UNSUPPORTED_CIPHER\\0" "UNSUPPORTED_ENCRYPTION\\0" "BAD_PKCS12_DATA\\0" "BAD_PKCS12_VERSION\\0" "CIPHER_HAS_NO_OBJECT_IDENTIFIER\\0" "CRYPT_ERROR\\0" "ENCRYPT_ERROR\\0" "ERROR_SETTING_CIPHER_PARAMS\\0" "INCORRECT_PASSWORD\\0" "KEYGEN_FAILURE\\0" "KEY_GEN_ERROR\\0" "METHOD_NOT_SUPPORTED\\0" "MISSING_MAC\\0" "MULTIPLE_PRIVATE_KEYS_IN_PKCS12\\0" "PKCS12_PUBLIC_KEY_INTEGRITY_NOT_SUPPORTED\\0" "PKCS12_TOO_DEEPLY_NESTED\\0" "PRIVATE_KEY_DECODE_ERROR\\0" "PRIVATE_KEY_ENCODE_ERROR\\0" "UNKNOWN_ALGORITHM\\0" "UNKNOWN_CIPHER\\0" "UNKNOWN_CIPHER_ALGORITHM\\0" "UNKNOWN_DIGEST\\0" "UNKNOWN_HASH\\0" "UNSUPPORTED_PRIVATE_KEY_ALGORITHM\\0" "BAD_E_VALUE\\0" "BAD_FIXED_HEADER_DECRYPT\\0" "BAD_PAD_BYTE_COUNT\\0" "BAD_RSA_PARAMETERS\\0" "BLOCK_TYPE_IS_NOT_01\\0" "BN_NOT_INITIALIZED\\0" "CANNOT_RECOVER_MULTI_PRIME_KEY\\0" "CRT_PARAMS_ALREADY_GIVEN\\0" "CRT_VALUES_INCORRECT\\0" "DATA_LEN_NOT_EQUAL_TO_MOD_LEN\\0" "DATA_TOO_LARGE\\0" "DATA_TOO_LARGE_FOR_KEY_SIZE\\0" "DATA_TOO_LARGE_FOR_MODULUS\\0" "DATA_TOO_SMALL\\0" "DATA_TOO_SMALL_FOR_KEY_SIZE\\0" "DIGEST_TOO_BIG_FOR_RSA_KEY\\0" "D_E_NOT_CONGRUENT_TO_1\\0" "EMPTY_PUBLIC_KEY\\0" "FIRST_OCTET_INVALID\\0" "INCONSISTENT_SET_OF_CRT_VALUES\\0" "INTERNAL_ERROR\\0" "INVALID_MESSAGE_LENGTH\\0" "KEY_SIZE_TOO_SMALL\\0" "LAST_OCTET_INVALID\\0" "MUST_HAVE_AT_LEAST_TWO_PRIMES\\0" "NO_PUBLIC_EXPONENT\\0" "NULL_BEFORE_BLOCK_MISSING\\0" "N_NOT_EQUAL_P_Q\\0" "OAEP_DECODING_ERROR\\0" "ONLY_ONE_OF_P_Q_GIVEN\\0" 
"OUTPUT_BUFFER_TOO_SMALL\\0" "PADDING_CHECK_FAILED\\0" "PKCS_DECODING_ERROR\\0" "SLEN_CHECK_FAILED\\0" "SLEN_RECOVERY_FAILED\\0" "UNKNOWN_ALGORITHM_TYPE\\0" "UNKNOWN_PADDING_TYPE\\0" "VALUE_MISSING\\0" "WRONG_SIGNATURE_LENGTH\\0" "APP_DATA_IN_HANDSHAKE\\0" "ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT\\0" "BAD_ALERT\\0" "BAD_CHANGE_CIPHER_SPEC\\0" "BAD_DATA_RETURNED_BY_CALLBACK\\0" "BAD_DH_P_LENGTH\\0" "BAD_DIGEST_LENGTH\\0" "BAD_ECC_CERT\\0" "BAD_ECPOINT\\0" "BAD_HANDSHAKE_RECORD\\0" "BAD_HELLO_REQUEST\\0" "BAD_LENGTH\\0" "BAD_PACKET_LENGTH\\0" "BAD_RSA_ENCRYPT\\0" "BAD_SRTP_MKI_VALUE\\0" "BAD_SRTP_PROTECTION_PROFILE_LIST\\0" "BAD_SSL_FILETYPE\\0" "BAD_WRITE_RETRY\\0" "BIO_NOT_SET\\0" "CA_DN_LENGTH_MISMATCH\\0" "CA_DN_TOO_LONG\\0" "CCS_RECEIVED_EARLY\\0" "CERTIFICATE_VERIFY_FAILED\\0" "CERT_CB_ERROR\\0" "CERT_LENGTH_MISMATCH\\0" "CHANNEL_ID_NOT_P256\\0" "CHANNEL_ID_SIGNATURE_INVALID\\0" "CIPHER_OR_HASH_UNAVAILABLE\\0" "CLIENTHELLO_PARSE_FAILED\\0" "CLIENTHELLO_TLSEXT\\0" "CONNECTION_REJECTED\\0" "CONNECTION_TYPE_NOT_SET\\0" "CUSTOM_EXTENSION_ERROR\\0" "DATA_LENGTH_TOO_LONG\\0" "DECRYPTION_FAILED\\0" "DECRYPTION_FAILED_OR_BAD_RECORD_MAC\\0" "DH_PUBLIC_VALUE_LENGTH_IS_WRONG\\0" "DH_P_TOO_LONG\\0" "DIGEST_CHECK_FAILED\\0" "DTLS_MESSAGE_TOO_BIG\\0" "ECC_CERT_NOT_FOR_SIGNING\\0" "EMS_STATE_INCONSISTENT\\0" "ENCRYPTED_LENGTH_TOO_LONG\\0" "ERROR_ADDING_EXTENSION\\0" "ERROR_IN_RECEIVED_CIPHER_LIST\\0" "ERROR_PARSING_EXTENSION\\0" "EXCESSIVE_MESSAGE_SIZE\\0" "EXTRA_DATA_IN_MESSAGE\\0" "FRAGMENT_MISMATCH\\0" "GOT_NEXT_PROTO_WITHOUT_EXTENSION\\0" "HANDSHAKE_FAILURE_ON_CLIENT_HELLO\\0" "HTTPS_PROXY_REQUEST\\0" "HTTP_REQUEST\\0" "INAPPROPRIATE_FALLBACK\\0" "INVALID_COMMAND\\0" "INVALID_MESSAGE\\0" "INVALID_OUTER_RECORD_TYPE\\0" "INVALID_SSL_SESSION\\0" "INVALID_TICKET_KEYS_LENGTH\\0" "LENGTH_MISMATCH\\0" "LIBRARY_HAS_NO_CIPHERS\\0" "MISSING_EXTENSION\\0" "MISSING_RSA_CERTIFICATE\\0" "MISSING_TMP_DH_KEY\\0" "MISSING_TMP_ECDH_KEY\\0" "MIXED_SPECIAL_OPERATOR_WITH_GROUPS\\0" 
"MTU_TOO_SMALL\\0" "NEGOTIATED_BOTH_NPN_AND_ALPN\\0" "NESTED_GROUP\\0" "NO_CERTIFICATES_RETURNED\\0" "NO_CERTIFICATE_ASSIGNED\\0" "NO_CERTIFICATE_SET\\0" "NO_CIPHERS_AVAILABLE\\0" "NO_CIPHERS_PASSED\\0" "NO_CIPHER_MATCH\\0" "NO_COMPRESSION_SPECIFIED\\0" "NO_METHOD_SPECIFIED\\0" "NO_P256_SUPPORT\\0" "NO_PRIVATE_KEY_ASSIGNED\\0" "NO_RENEGOTIATION\\0" "NO_REQUIRED_DIGEST\\0" "NO_SHARED_CIPHER\\0" "NULL_SSL_CTX\\0" "NULL_SSL_METHOD_PASSED\\0" "OLD_SESSION_CIPHER_NOT_RETURNED\\0" "OLD_SESSION_VERSION_NOT_RETURNED\\0" "PARSE_TLSEXT\\0" "PATH_TOO_LONG\\0" "PEER_DID_NOT_RETURN_A_CERTIFICATE\\0" "PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE\\0" "PROTOCOL_IS_SHUTDOWN\\0" "PSK_IDENTITY_NOT_FOUND\\0" "PSK_NO_CLIENT_CB\\0" "PSK_NO_SERVER_CB\\0" "READ_TIMEOUT_EXPIRED\\0" "RECORD_LENGTH_MISMATCH\\0" "RECORD_TOO_LARGE\\0" "RENEGOTIATION_ENCODING_ERR\\0" "RENEGOTIATION_MISMATCH\\0" "REQUIRED_CIPHER_MISSING\\0" "RESUMED_EMS_SESSION_WITHOUT_EMS_EXTENSION\\0" "RESUMED_NON_EMS_SESSION_WITH_EMS_EXTENSION\\0" "SCSV_RECEIVED_WHEN_RENEGOTIATING\\0" "SERVERHELLO_TLSEXT\\0" "SESSION_ID_CONTEXT_UNINITIALIZED\\0" "SESSION_MAY_NOT_BE_CREATED\\0" "SHUTDOWN_WHILE_IN_INIT\\0" "SIGNATURE_ALGORITHMS_EXTENSION_SENT_BY_SERVER\\0" "SRTP_COULD_NOT_ALLOCATE_PROFILES\\0" "SRTP_UNKNOWN_PROTECTION_PROFILE\\0" "SSL3_EXT_INVALID_SERVERNAME\\0" "SSLV3_ALERT_BAD_CERTIFICATE\\0" "SSLV3_ALERT_BAD_RECORD_MAC\\0" "SSLV3_ALERT_CERTIFICATE_EXPIRED\\0" "SSLV3_ALERT_CERTIFICATE_REVOKED\\0" "SSLV3_ALERT_CERTIFICATE_UNKNOWN\\0" "SSLV3_ALERT_CLOSE_NOTIFY\\0" "SSLV3_ALERT_DECOMPRESSION_FAILURE\\0" "SSLV3_ALERT_HANDSHAKE_FAILURE\\0" "SSLV3_ALERT_ILLEGAL_PARAMETER\\0" "SSLV3_ALERT_NO_CERTIFICATE\\0" "SSLV3_ALERT_UNEXPECTED_MESSAGE\\0" "SSLV3_ALERT_UNSUPPORTED_CERTIFICATE\\0" "SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION\\0" "SSL_HANDSHAKE_FAILURE\\0" "SSL_SESSION_ID_CONTEXT_TOO_LONG\\0" "TLSV1_ALERT_ACCESS_DENIED\\0" "TLSV1_ALERT_DECODE_ERROR\\0" "TLSV1_ALERT_DECRYPTION_FAILED\\0" "TLSV1_ALERT_DECRYPT_ERROR\\0" 
"TLSV1_ALERT_EXPORT_RESTRICTION\\0" "TLSV1_ALERT_INAPPROPRIATE_FALLBACK\\0" "TLSV1_ALERT_INSUFFICIENT_SECURITY\\0" "TLSV1_ALERT_INTERNAL_ERROR\\0" "TLSV1_ALERT_NO_RENEGOTIATION\\0" "TLSV1_ALERT_PROTOCOL_VERSION\\0" "TLSV1_ALERT_RECORD_OVERFLOW\\0" "TLSV1_ALERT_UNKNOWN_CA\\0" "TLSV1_ALERT_USER_CANCELLED\\0" "TLSV1_BAD_CERTIFICATE_HASH_VALUE\\0" "TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE\\0" "TLSV1_CERTIFICATE_UNOBTAINABLE\\0" "TLSV1_UNRECOGNIZED_NAME\\0" "TLSV1_UNSUPPORTED_EXTENSION\\0" "TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST\\0" "TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG\\0" "TOO_MANY_EMPTY_FRAGMENTS\\0" "TOO_MANY_WARNING_ALERTS\\0" "UNABLE_TO_FIND_ECDH_PARAMETERS\\0" "UNEXPECTED_EXTENSION\\0" "UNEXPECTED_MESSAGE\\0" "UNEXPECTED_OPERATOR_IN_GROUP\\0" "UNEXPECTED_RECORD\\0" "UNKNOWN_ALERT_TYPE\\0" "UNKNOWN_CERTIFICATE_TYPE\\0" "UNKNOWN_CIPHER_RETURNED\\0" "UNKNOWN_CIPHER_TYPE\\0" "UNKNOWN_KEY_EXCHANGE_TYPE\\0" "UNKNOWN_PROTOCOL\\0" "UNKNOWN_SSL_VERSION\\0" "UNKNOWN_STATE\\0" "UNSAFE_LEGACY_RENEGOTIATION_DISABLED\\0" "UNSUPPORTED_COMPRESSION_ALGORITHM\\0" "UNSUPPORTED_ELLIPTIC_CURVE\\0" "UNSUPPORTED_PROTOCOL\\0" "WRONG_CERTIFICATE_TYPE\\0" "WRONG_CIPHER_RETURNED\\0" "WRONG_CURVE\\0" "WRONG_MESSAGE_TYPE\\0" "WRONG_SIGNATURE_TYPE\\0" "WRONG_SSL_VERSION\\0" "WRONG_VERSION_NUMBER\\0" "X509_LIB\\0" "X509_VERIFICATION_SETUP_PROBLEMS\\0" "AKID_MISMATCH\\0" "BAD_PKCS7_VERSION\\0" "BAD_X509_FILETYPE\\0" "BASE64_DECODE_ERROR\\0" "CANT_CHECK_DH_KEY\\0" "CERT_ALREADY_IN_HASH_TABLE\\0" "CRL_ALREADY_DELTA\\0" "CRL_VERIFY_FAILURE\\0" "IDP_MISMATCH\\0" "INVALID_DIRECTORY\\0" "INVALID_FIELD_NAME\\0" "INVALID_PSS_PARAMETERS\\0" "INVALID_TRUST\\0" "ISSUER_MISMATCH\\0" "KEY_TYPE_MISMATCH\\0" "KEY_VALUES_MISMATCH\\0" "LOADING_CERT_DIR\\0" "LOADING_DEFAULTS\\0" "NAME_TOO_LONG\\0" "NEWER_CRL_NOT_NEWER\\0" "NOT_PKCS7_SIGNED_DATA\\0" "NO_CERTIFICATES_INCLUDED\\0" "NO_CERT_SET_FOR_US_TO_VERIFY\\0" "NO_CRLS_INCLUDED\\0" "NO_CRL_NUMBER\\0" "PUBLIC_KEY_DECODE_ERROR\\0" 
"PUBLIC_KEY_ENCODE_ERROR\\0" "SHOULD_RETRY\\0" "UNKNOWN_KEY_TYPE\\0" "UNKNOWN_PURPOSE_ID\\0" "UNKNOWN_TRUST_ID\\0" "WRONG_LOOKUP_TYPE\\0" "BAD_IP_ADDRESS\\0" "BAD_OBJECT\\0" "BN_DEC2BN_ERROR\\0" "BN_TO_ASN1_INTEGER_ERROR\\0" "CANNOT_FIND_FREE_FUNCTION\\0" "DIRNAME_ERROR\\0" "DISTPOINT_ALREADY_SET\\0" "DUPLICATE_ZONE_ID\\0" "ERROR_CONVERTING_ZONE\\0" "ERROR_CREATING_EXTENSION\\0" "ERROR_IN_EXTENSION\\0" "EXPECTED_A_SECTION_NAME\\0" "EXTENSION_EXISTS\\0" "EXTENSION_NAME_ERROR\\0" "EXTENSION_NOT_FOUND\\0" "EXTENSION_SETTING_NOT_SUPPORTED\\0" "EXTENSION_VALUE_ERROR\\0" "ILLEGAL_EMPTY_EXTENSION\\0" "ILLEGAL_HEX_DIGIT\\0" "INCORRECT_POLICY_SYNTAX_TAG\\0" "INVALID_BOOLEAN_STRING\\0" "INVALID_EXTENSION_STRING\\0" "INVALID_MULTIPLE_RDNS\\0" "INVALID_NAME\\0" "INVALID_NULL_ARGUMENT\\0" "INVALID_NULL_NAME\\0" "INVALID_NULL_VALUE\\0" "INVALID_NUMBERS\\0" "INVALID_OBJECT_IDENTIFIER\\0" "INVALID_OPTION\\0" "INVALID_POLICY_IDENTIFIER\\0" "INVALID_PROXY_POLICY_SETTING\\0" "INVALID_PURPOSE\\0" "INVALID_SECTION\\0" "INVALID_SYNTAX\\0" "ISSUER_DECODE_ERROR\\0" "NEED_ORGANIZATION_AND_NUMBERS\\0" "NO_CONFIG_DATABASE\\0" "NO_ISSUER_CERTIFICATE\\0" "NO_ISSUER_DETAILS\\0" "NO_POLICY_IDENTIFIER\\0" "NO_PROXY_CERT_POLICY_LANGUAGE_DEFINED\\0" "NO_PUBLIC_KEY\\0" "NO_SUBJECT_DETAILS\\0" "ODD_NUMBER_OF_DIGITS\\0" "OPERATION_NOT_DEFINED\\0" "OTHERNAME_ERROR\\0" "POLICY_LANGUAGE_ALREADY_DEFINED\\0" "POLICY_PATH_LENGTH\\0" "POLICY_PATH_LENGTH_ALREADY_DEFINED\\0" "POLICY_WHEN_PROXY_LANGUAGE_REQUIRES_NO_POLICY\\0" "SECTION_NOT_FOUND\\0" "UNABLE_TO_GET_ISSUER_DETAILS\\0" "UNABLE_TO_GET_ISSUER_KEYID\\0" "UNKNOWN_BIT_STRING_ARGUMENT\\0" "UNKNOWN_EXTENSION\\0" "UNKNOWN_EXTENSION_NAME\\0" "UNKNOWN_OPTION\\0" "UNSUPPORTED_OPTION\\0" "USER_TOO_LONG\\0" ""; EOF END_OF_COMMAND end Update err_data.c to BoringSSL commit 8d343b44bbab # BoringSSL CocoaPods podspec # Copyright 2015, Google Inc. # All rights reserved. 
# # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Pod::Spec.new do |s| s.name = 'BoringSSL' version = '5.0' s.version = version s.summary = 'BoringSSL is a fork of OpenSSL that is designed to meet Google’s needs.' # Adapted from the homepage: s.description = <<-DESC BoringSSL is a fork of OpenSSL that is designed to meet Google’s needs. Although BoringSSL is an open source project, it is not intended for general use, as OpenSSL is. We don’t recommend that third parties depend upon it. Doing so is likely to be frustrating because there are no guarantees of API stability. 
Only the latest version of this pod is supported, and every new version is a new major version. We update Google libraries and programs that use BoringSSL as needed when deciding to make API changes. This allows us to mostly avoid compromises in the name of compatibility. It works for us, but it may not work for you. As a Cocoapods pod, it has the advantage over OpenSSL's pods that the library doesn't need to be precompiled. This eliminates the 10 - 20 minutes of wait the first time a user does "pod install", lets it be used as a dynamic framework (pending solution of Cocoapods' issue #4605), and works with bitcode automatically. It's also thought to be smaller than OpenSSL (which takes 1MB - 2MB per ARM architecture), but we don't have specific numbers yet. BoringSSL arose because Google used OpenSSL for many years in various ways and, over time, built up a large number of patches that were maintained while tracking upstream OpenSSL. As Google’s product portfolio became more complex, more copies of OpenSSL sprung up and the effort involved in maintaining all these patches in multiple places was growing steadily. Currently BoringSSL is the SSL library in Chrome/Chromium, Android (but it’s not part of the NDK) and a number of other apps/programs. DESC s.homepage = 'https://boringssl.googlesource.com/boringssl/' s.documentation_url = 'https://commondatastorage.googleapis.com/chromium-boringssl-docs/headers.html' s.license = { :type => 'Mixed', :file => 'LICENSE' } # "The name and email addresses of the library maintainers, not the Podspec maintainer." s.authors = 'Adam Langley', 'David Benjamin', 'Matt Braithwaite' s.source = { :git => 'https://boringssl.googlesource.com/boringssl', :tag => "version_for_cocoapods_#{version}" } name = 'openssl' # When creating a dynamic framework, name it openssl.framework instead of BoringSSL.framework. # This lets users write their includes like `#include <openssl/ssl.h>` as opposed to `#include # <BoringSSL/ssl.h>`. 
s.module_name = name # When creating a dynamic framework, copy the headers under `include/openssl/` into the root of # the `Headers/` directory of the framework (i.e., not under `Headers/include/openssl`). # # TODO(jcanizales): Debug why this doesn't work on macOS. s.header_mappings_dir = 'include/openssl' # The above has an undesired effect when creating a static library: It forces users to write # includes like `#include <BoringSSL/ssl.h>`. `s.header_dir` adds a path prefix to that, and # because Cocoapods lets omit the pod name when including headers of static libraries, the # following lets users write `#include <openssl/ssl.h>`. s.header_dir = name # The module map and umbrella header created automatically by Cocoapods don't work for C libraries # like this one. The following file, and a correct umbrella header, are created on the fly by the # `prepare_command` of this pod. s.module_map = 'include/openssl/module.modulemap' # We don't need to inhibit all warnings; only -Wno-shorten-64-to-32. But Cocoapods' linter doesn't # want that for some reason. s.compiler_flags = '-DOPENSSL_NO_ASM', '-GCC_WARN_INHIBIT_ALL_WARNINGS', '-w' s.requires_arc = false # Like many other C libraries, BoringSSL has its public headers under `include/<libname>/` and its # sources and private headers in other directories outside `include/`. Cocoapods' linter doesn't # allow any header to be listed outside the `header_mappings_dir` (even though doing so works in # practice). Because we need our `header_mappings_dir` to be `include/openssl/` for the reason # mentioned above, we work around the linter limitation by dividing the pod into two subspecs, one # for public headers and the other for implementation. Each gets its own `header_mappings_dir`, # making the linter happy. s.subspec 'Interface' do |ss| ss.header_mappings_dir = 'include/openssl' ss.source_files = 'include/openssl/*.h' end s.subspec 'Implementation' do |ss| ss.header_mappings_dir = '.' 
    ss.source_files = 'ssl/*.{h,c}', 'ssl/**/*.{h,c}', '*.{h,c}', 'crypto/*.{h,c}', 'crypto/**/*.{h,c}' ss.private_header_files = 'ssl/*.h', 'ssl/**/*.h', '*.h', 'crypto/*.h', 'crypto/**/*.h' ss.exclude_files = '**/*_test.*', '**/test_*.*', '**/test/*.*' ss.dependency "#{s.name}/Interface", version end s.prepare_command = <<-END_OF_COMMAND # Replace "const BIGNUM *I" in rsa.h with a lowercase i, as the former fails when including # OpenSSL in a Swift bridging header (complex.h defines "I", and it's as if the compiler # included it in every bridged header). sed -E -i '.back' 's/\\*I,/*i,/g' include/openssl/rsa.h # Replace `#include "../crypto/internal.h"` in e_tls.c with `#include "../internal.h"`. The # former assumes crypto/ is in the headers search path, which is hard to enforce when using # dynamic frameworks. The latter always works, being relative to the current file. sed -E -i '.back' 's/crypto\\///g' crypto/cipher/e_tls.c # Add a module map and an umbrella header cat > include/openssl/umbrella.h <<EOF #include "ssl.h" #include "crypto.h" #include "aes.h" /* The following macros are defined by base.h. The latter is the first file included by the other headers.
*/ #if defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64) # include "arm_arch.h" #endif #include "asn1.h" #include "asn1_mac.h" #include "asn1t.h" #include "blowfish.h" #include "cast.h" #include "chacha.h" #include "cmac.h" #include "conf.h" #include "cpu.h" #include "curve25519.h" #include "des.h" #include "dtls1.h" #include "hkdf.h" #include "md4.h" #include "md5.h" #include "newhope.h" #include "obj_mac.h" #include "objects.h" #include "opensslv.h" #include "ossl_typ.h" #include "pkcs12.h" #include "pkcs7.h" #include "pkcs8.h" #include "poly1305.h" #include "rand.h" #include "rc4.h" #include "ripemd.h" #include "safestack.h" #include "srtp.h" #include "time_support.h" #include "x509.h" #include "x509v3.h" EOF cat > include/openssl/module.modulemap <<EOF framework module openssl { umbrella header "umbrella.h" export * module * { export * } } EOF # #include <inttypes.h> fails to compile when building a dynamic framework. libgit2 in # https://github.com/libgit2/libgit2/commit/1ddada422caf8e72ba97dca2568d2bf879fed5f2 and libvpx # in https://chromium.googlesource.com/webm/libvpx/+/1bec0c5a7e885ec792f6bb658eb3f34ad8f37b15 # work around it by removing the include. We need four of its macros, so we expand them here. sed -E -i '.back' '/<inttypes.h>/d' include/openssl/bn.h sed -E -i '.back' 's/PRIu32/"u"/g' include/openssl/bn.h sed -E -i '.back' 's/PRIx32/"x"/g' include/openssl/bn.h sed -E -i '.back' 's/PRIu64/"llu"/g' include/openssl/bn.h sed -E -i '.back' 's/PRIx64/"llx"/g' include/openssl/bn.h # This is a bit ridiculous, but requiring people to install Go in order to build is slightly # more ridiculous IMO. To save you from scrolling, this is the last part of the podspec. # TODO(jcanizales): Translate err_data_generate.go into a Bash or Ruby script. cat > err_data.c <<EOF /* Copyright (c) 2015, Google Inc. 
* * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ /* This file was generated by err_data_generate.go. */ #include <openssl/base.h> #include <openssl/err.h> #include <openssl/type_check.h> OPENSSL_COMPILE_ASSERT(ERR_LIB_NONE == 1, library_values_changed_1); OPENSSL_COMPILE_ASSERT(ERR_LIB_SYS == 2, library_values_changed_2); OPENSSL_COMPILE_ASSERT(ERR_LIB_BN == 3, library_values_changed_3); OPENSSL_COMPILE_ASSERT(ERR_LIB_RSA == 4, library_values_changed_4); OPENSSL_COMPILE_ASSERT(ERR_LIB_DH == 5, library_values_changed_5); OPENSSL_COMPILE_ASSERT(ERR_LIB_EVP == 6, library_values_changed_6); OPENSSL_COMPILE_ASSERT(ERR_LIB_BUF == 7, library_values_changed_7); OPENSSL_COMPILE_ASSERT(ERR_LIB_OBJ == 8, library_values_changed_8); OPENSSL_COMPILE_ASSERT(ERR_LIB_PEM == 9, library_values_changed_9); OPENSSL_COMPILE_ASSERT(ERR_LIB_DSA == 10, library_values_changed_10); OPENSSL_COMPILE_ASSERT(ERR_LIB_X509 == 11, library_values_changed_11); OPENSSL_COMPILE_ASSERT(ERR_LIB_ASN1 == 12, library_values_changed_12); OPENSSL_COMPILE_ASSERT(ERR_LIB_CONF == 13, library_values_changed_13); OPENSSL_COMPILE_ASSERT(ERR_LIB_CRYPTO == 14, library_values_changed_14); OPENSSL_COMPILE_ASSERT(ERR_LIB_EC == 15, library_values_changed_15); OPENSSL_COMPILE_ASSERT(ERR_LIB_SSL == 16, library_values_changed_16); 
OPENSSL_COMPILE_ASSERT(ERR_LIB_BIO == 17, library_values_changed_17); OPENSSL_COMPILE_ASSERT(ERR_LIB_PKCS7 == 18, library_values_changed_18); OPENSSL_COMPILE_ASSERT(ERR_LIB_PKCS8 == 19, library_values_changed_19); OPENSSL_COMPILE_ASSERT(ERR_LIB_X509V3 == 20, library_values_changed_20); OPENSSL_COMPILE_ASSERT(ERR_LIB_RAND == 21, library_values_changed_21); OPENSSL_COMPILE_ASSERT(ERR_LIB_ENGINE == 22, library_values_changed_22); OPENSSL_COMPILE_ASSERT(ERR_LIB_OCSP == 23, library_values_changed_23); OPENSSL_COMPILE_ASSERT(ERR_LIB_UI == 24, library_values_changed_24); OPENSSL_COMPILE_ASSERT(ERR_LIB_COMP == 25, library_values_changed_25); OPENSSL_COMPILE_ASSERT(ERR_LIB_ECDSA == 26, library_values_changed_26); OPENSSL_COMPILE_ASSERT(ERR_LIB_ECDH == 27, library_values_changed_27); OPENSSL_COMPILE_ASSERT(ERR_LIB_HMAC == 28, library_values_changed_28); OPENSSL_COMPILE_ASSERT(ERR_LIB_DIGEST == 29, library_values_changed_29); OPENSSL_COMPILE_ASSERT(ERR_LIB_CIPHER == 30, library_values_changed_30); OPENSSL_COMPILE_ASSERT(ERR_LIB_HKDF == 31, library_values_changed_31); OPENSSL_COMPILE_ASSERT(ERR_LIB_USER == 32, library_values_changed_32); OPENSSL_COMPILE_ASSERT(ERR_NUM_LIBS == 33, library_values_changed_num); const uint32_t kOpenSSLReasonValues[] = { 0xc320838, 0xc328852, 0xc330861, 0xc338871, 0xc340880, 0xc348899, 0xc3508a5, 0xc3588c2, 0xc3608d4, 0xc3688e2, 0xc3708f2, 0xc3788ff, 0xc38090f, 0xc38891a, 0xc390930, 0xc39893f, 0xc3a0953, 0xc3a8845, 0xc3b00ea, 0x10320845, 0x103293ab, 0x103313b7, 0x103393d0, 0x103413e3, 0x10348e8b, 0x10350c19, 0x103593f6, 0x1036140b, 0x1036941e, 0x1037143d, 0x10379456, 0x1038146b, 0x10389489, 0x10391498, 0x103994b4, 0x103a14cf, 0x103a94de, 0x103b14fa, 0x103b9515, 0x103c152c, 0x103c80ea, 0x103d153d, 0x103d9551, 0x103e1570, 0x103e957f, 0x103f1596, 0x103f95a9, 0x10400bea, 0x104095bc, 0x104115da, 0x104195ed, 0x10421607, 0x10429617, 0x1043162b, 0x10439641, 0x10441659, 0x1044966e, 0x10451682, 0x10459694, 0x104605fb, 0x1046893f, 0x104716a9, 0x104796c0, 
0x104816d5, 0x104896e3, 0x14320bcd, 0x14328bdb, 0x14330bea, 0x14338bfc, 0x143400ac, 0x143480ea, 0x18320083, 0x18328ee1, 0x183300ac, 0x18338ef7, 0x18340f0b, 0x183480ea, 0x18350f20, 0x18358f38, 0x18360f4d, 0x18368f61, 0x18370f85, 0x18378f9b, 0x18380faf, 0x18388fbf, 0x18390a57, 0x18398fcf, 0x183a0fe4, 0x183a8ff8, 0x183b0c25, 0x183b9005, 0x183c1017, 0x183c9022, 0x183d1032, 0x183d9043, 0x183e1054, 0x183e9066, 0x183f108f, 0x183f90a8, 0x184010c0, 0x184086d3, 0x203210e7, 0x243210f3, 0x24328985, 0x24331105, 0x24339112, 0x2434111f, 0x24349131, 0x24351140, 0x2435915d, 0x2436116a, 0x24369178, 0x24371186, 0x24379194, 0x2438119d, 0x243891aa, 0x243911bd, 0x28320c0d, 0x28328c25, 0x28330bea, 0x28338c38, 0x28340c19, 0x283480ac, 0x283500ea, 0x2c3227cb, 0x2c32a7d9, 0x2c3327eb, 0x2c33a7fd, 0x2c342811, 0x2c34a823, 0x2c35283e, 0x2c35a850, 0x2c362863, 0x2c36832d, 0x2c372870, 0x2c37a882, 0x2c382895, 0x2c38a8ac, 0x2c3928ba, 0x2c39a8ca, 0x2c3a28dc, 0x2c3aa8f0, 0x2c3b2901, 0x2c3ba920, 0x2c3c2934, 0x2c3ca94a, 0x2c3d2963, 0x2c3da980, 0x2c3e2991, 0x2c3ea99f, 0x2c3f29b7, 0x2c3fa9cf, 0x2c4029dc, 0x2c4090e7, 0x2c4129ed, 0x2c41aa00, 0x2c4210c0, 0x2c42aa11, 0x2c430720, 0x2c43a912, 0x30320000, 0x30328015, 0x3033001f, 0x30338038, 0x3034004a, 0x30348064, 0x3035006b, 0x30358083, 0x30360094, 0x303680ac, 0x303700b9, 0x303780c8, 0x303800ea, 0x303880f7, 0x3039010a, 0x30398125, 0x303a013a, 0x303a814e, 0x303b0162, 0x303b8173, 0x303c018c, 0x303c81a9, 0x303d01b7, 0x303d81cb, 0x303e01db, 0x303e81f4, 0x303f0204, 0x303f8217, 0x30400226, 0x30408232, 0x30410247, 0x30418257, 0x3042026e, 0x3042827b, 0x3043028e, 0x3043829d, 0x304402b2, 0x304482d3, 0x304502e6, 0x304582f9, 0x30460312, 0x3046832d, 0x3047034a, 0x30478363, 0x30480371, 0x30488382, 0x30490391, 0x304983a9, 0x304a03bb, 0x304a83cf, 0x304b03ee, 0x304b8401, 0x304c040c, 0x304c841d, 0x304d0429, 0x304d843f, 0x304e044d, 0x304e8463, 0x304f0475, 0x304f8487, 0x3050049a, 0x305084ad, 0x305104be, 0x305184ce, 0x305204e6, 0x305284fb, 0x30530513, 0x30538527, 0x3054053f, 
0x30548558, 0x30550571, 0x3055858e, 0x30560599, 0x305685b1, 0x305705c1, 0x305785d2, 0x305805e5, 0x305885fb, 0x30590604, 0x30598619, 0x305a062c, 0x305a863b, 0x305b065b, 0x305b866a, 0x305c068b, 0x305c86a7, 0x305d06b3, 0x305d86d3, 0x305e06ef, 0x305e8700, 0x305f0716, 0x305f8720, 0x34320b47, 0x34328b5b, 0x34330b78, 0x34338b8b, 0x34340b9a, 0x34348bb7, 0x3c320083, 0x3c328c62, 0x3c330c7b, 0x3c338c96, 0x3c340cb3, 0x3c348cdd, 0x3c350cf8, 0x3c358d1e, 0x3c360d37, 0x3c368d4f, 0x3c370d60, 0x3c378d6e, 0x3c380d7b, 0x3c388d8f, 0x3c390c25, 0x3c398da3, 0x3c3a0db7, 0x3c3a88ff, 0x3c3b0dc7, 0x3c3b8de2, 0x3c3c0df4, 0x3c3c8e0a, 0x3c3d0e14, 0x3c3d8e28, 0x3c3e0e36, 0x3c3e8e5b, 0x3c3f0c4e, 0x3c3f8e44, 0x3c4000ac, 0x3c4080ea, 0x3c410cce, 0x3c418d0d, 0x403216fa, 0x40329710, 0x4033173e, 0x40339748, 0x4034175f, 0x4034977d, 0x4035178d, 0x4035979f, 0x403617ac, 0x403697b8, 0x403717cd, 0x403797df, 0x403817ea, 0x403897fc, 0x40390e8b, 0x4039980c, 0x403a181f, 0x403a9840, 0x403b1851, 0x403b9861, 0x403c0064, 0x403c8083, 0x403d186d, 0x403d9883, 0x403e1892, 0x403e98a5, 0x403f18bf, 0x403f98cd, 0x404018e2, 0x404098f6, 0x40411913, 0x4041992e, 0x40421947, 0x4042995a, 0x4043196e, 0x40439986, 0x4044199d, 0x404480ac, 0x404519b2, 0x404599c4, 0x404619e8, 0x40469a08, 0x40471a16, 0x40479a3d, 0x40481a52, 0x40489a6b, 0x40491a82, 0x40499a9c, 0x404a1ab3, 0x404a9ad1, 0x404b1ae9, 0x404b9b00, 0x404c1b16, 0x404c9b28, 0x404d1b49, 0x404d9b6b, 0x404e1b7f, 0x404e9b8c, 0x404f1ba3, 0x404f9bb3, 0x40501bdd, 0x40509bf1, 0x40511c0c, 0x40519c1c, 0x40521c33, 0x40529c45, 0x40531c5d, 0x40539c70, 0x40541c85, 0x40549ca8, 0x40551cb6, 0x40559cd3, 0x40561ce0, 0x40569cf9, 0x40571d11, 0x40579d24, 0x40581d39, 0x40589d4b, 0x40591d7a, 0x40599d93, 0x405a1da7, 0x405a9db7, 0x405b1dcf, 0x405b9de0, 0x405c1df3, 0x405c9e04, 0x405d1e11, 0x405d9e28, 0x405e1e48, 0x405e8a95, 0x405f1e69, 0x405f9e76, 0x40601e84, 0x40609ea6, 0x40611ece, 0x40619ee3, 0x40621efa, 0x40629f0b, 0x40631f1c, 0x40639f31, 0x40641f48, 0x40649f59, 0x40651f74, 0x40659f8b, 0x40661fa3, 
0x40669fcd, 0x40671ff8, 0x4067a019, 0x4068202c, 0x4068a04d, 0x4069207f, 0x4069a0ad, 0x406a20ce, 0x406aa0ee, 0x406b2276, 0x406ba299, 0x406c22af, 0x406ca4db, 0x406d250a, 0x406da532, 0x406e254b, 0x406ea563, 0x406f2582, 0x406fa597, 0x407025aa, 0x4070a5c7, 0x40710800, 0x4071a5d9, 0x407225ec, 0x4072a605, 0x4073261d, 0x4073936d, 0x40742631, 0x4074a64b, 0x4075265c, 0x4075a670, 0x4076267e, 0x407691aa, 0x407726a3, 0x4077a6c5, 0x407826e0, 0x4078a719, 0x40792730, 0x4079a746, 0x407a2752, 0x407aa765, 0x407b277a, 0x407ba78c, 0x407c27a1, 0x407ca7aa, 0x407d2068, 0x407d9bc3, 0x407e26f5, 0x407e9d5b, 0x407f1a2a, 0x41f421a1, 0x41f92233, 0x41fe2126, 0x41fea302, 0x41ff23f3, 0x420321ba, 0x420821dc, 0x4208a218, 0x4209210a, 0x4209a252, 0x420a2161, 0x420aa141, 0x420b2181, 0x420ba1fa, 0x420c240f, 0x420ca2cf, 0x420d22e9, 0x420da320, 0x4212233a, 0x421723d6, 0x4217a37c, 0x421c239e, 0x421f2359, 0x42212426, 0x422623b9, 0x422b24bf, 0x422ba488, 0x422c24a7, 0x422ca462, 0x422d2441, 0x4432072b, 0x4432873a, 0x44330746, 0x44338754, 0x44340767, 0x44348778, 0x4435077f, 0x44358789, 0x4436079c, 0x443687b2, 0x443707c4, 0x443787d1, 0x443807e0, 0x443887e8, 0x44390800, 0x4439880e, 0x443a0821, 0x4c3211d4, 0x4c3291e4, 0x4c3311f7, 0x4c339217, 0x4c3400ac, 0x4c3480ea, 0x4c351223, 0x4c359231, 0x4c36124d, 0x4c369260, 0x4c37126f, 0x4c37927d, 0x4c381292, 0x4c38929e, 0x4c3912be, 0x4c3992e8, 0x4c3a1301, 0x4c3a931a, 0x4c3b05fb, 0x4c3b9333, 0x4c3c1345, 0x4c3c9354, 0x4c3d136d, 0x4c3d937c, 0x4c3e1389, 0x50322a23, 0x5032aa32, 0x50332a3d, 0x5033aa4d, 0x50342a66, 0x5034aa80, 0x50352a8e, 0x5035aaa4, 0x50362ab6, 0x5036aacc, 0x50372ae5, 0x5037aaf8, 0x50382b10, 0x5038ab21, 0x50392b36, 0x5039ab4a, 0x503a2b6a, 0x503aab80, 0x503b2b98, 0x503babaa, 0x503c2bc6, 0x503cabdd, 0x503d2bf6, 0x503dac0c, 0x503e2c19, 0x503eac2f, 0x503f2c41, 0x503f8382, 0x50402c54, 0x5040ac64, 0x50412c7e, 0x5041ac8d, 0x50422ca7, 0x5042acc4, 0x50432cd4, 0x5043ace4, 0x50442cf3, 0x5044843f, 0x50452d07, 0x5045ad25, 0x50462d38, 0x5046ad4e, 0x50472d60, 0x5047ad75, 
0x50482d9b, 0x5048ada9, 0x50492dbc, 0x5049add1, 0x504a2de7, 0x504aadf7, 0x504b2e17, 0x504bae2a, 0x504c2e4d, 0x504cae7b, 0x504d2e8d, 0x504daeaa, 0x504e2ec5, 0x504eaee1, 0x504f2ef3, 0x504faf0a, 0x50502f19, 0x505086ef, 0x50512f2c, 0x58320ec9, 0x68320e8b, 0x68328c25, 0x68330c38, 0x68338e99, 0x68340ea9, 0x683480ea, 0x6c320e67, 0x6c328bfc, 0x6c330e72, 0x74320a0b, 0x78320970, 0x78328985, 0x78330991, 0x78338083, 0x783409a0, 0x783489b5, 0x783509d4, 0x783589f6, 0x78360a0b, 0x78368a21, 0x78370a31, 0x78378a44, 0x78380a57, 0x78388a69, 0x78390a76, 0x78398a95, 0x783a0aaa, 0x783a8ab8, 0x783b0ac2, 0x783b8ad6, 0x783c0aed, 0x783c8b02, 0x783d0b19, 0x783d8b2e, 0x783e0a84, 0x7c3210d6, }; const size_t kOpenSSLReasonValuesLen = sizeof(kOpenSSLReasonValues) / sizeof(kOpenSSLReasonValues[0]); const char kOpenSSLReasonStringData[] = "ASN1_LENGTH_MISMATCH\\0" "AUX_ERROR\\0" "BAD_GET_ASN1_OBJECT_CALL\\0" "BAD_OBJECT_HEADER\\0" "BMPSTRING_IS_WRONG_LENGTH\\0" "BN_LIB\\0" "BOOLEAN_IS_WRONG_LENGTH\\0" "BUFFER_TOO_SMALL\\0" "CONTEXT_NOT_INITIALISED\\0" "DECODE_ERROR\\0" "DEPTH_EXCEEDED\\0" "DIGEST_AND_KEY_TYPE_NOT_SUPPORTED\\0" "ENCODE_ERROR\\0" "ERROR_GETTING_TIME\\0" "EXPECTING_AN_ASN1_SEQUENCE\\0" "EXPECTING_AN_INTEGER\\0" "EXPECTING_AN_OBJECT\\0" "EXPECTING_A_BOOLEAN\\0" "EXPECTING_A_TIME\\0" "EXPLICIT_LENGTH_MISMATCH\\0" "EXPLICIT_TAG_NOT_CONSTRUCTED\\0" "FIELD_MISSING\\0" "FIRST_NUM_TOO_LARGE\\0" "HEADER_TOO_LONG\\0" "ILLEGAL_BITSTRING_FORMAT\\0" "ILLEGAL_BOOLEAN\\0" "ILLEGAL_CHARACTERS\\0" "ILLEGAL_FORMAT\\0" "ILLEGAL_HEX\\0" "ILLEGAL_IMPLICIT_TAG\\0" "ILLEGAL_INTEGER\\0" "ILLEGAL_NESTED_TAGGING\\0" "ILLEGAL_NULL\\0" "ILLEGAL_NULL_VALUE\\0" "ILLEGAL_OBJECT\\0" "ILLEGAL_OPTIONAL_ANY\\0" "ILLEGAL_OPTIONS_ON_ITEM_TEMPLATE\\0" "ILLEGAL_TAGGED_ANY\\0" "ILLEGAL_TIME_VALUE\\0" "INTEGER_NOT_ASCII_FORMAT\\0" "INTEGER_TOO_LARGE_FOR_LONG\\0" "INVALID_BIT_STRING_BITS_LEFT\\0" "INVALID_BMPSTRING_LENGTH\\0" "INVALID_DIGIT\\0" "INVALID_MODIFIER\\0" "INVALID_NUMBER\\0" "INVALID_OBJECT_ENCODING\\0" 
"INVALID_SEPARATOR\\0" "INVALID_TIME_FORMAT\\0" "INVALID_UNIVERSALSTRING_LENGTH\\0" "INVALID_UTF8STRING\\0" "LIST_ERROR\\0" "MISSING_ASN1_EOS\\0" "MISSING_EOC\\0" "MISSING_SECOND_NUMBER\\0" "MISSING_VALUE\\0" "MSTRING_NOT_UNIVERSAL\\0" "MSTRING_WRONG_TAG\\0" "NESTED_ASN1_ERROR\\0" "NESTED_ASN1_STRING\\0" "NON_HEX_CHARACTERS\\0" "NOT_ASCII_FORMAT\\0" "NOT_ENOUGH_DATA\\0" "NO_MATCHING_CHOICE_TYPE\\0" "NULL_IS_WRONG_LENGTH\\0" "OBJECT_NOT_ASCII_FORMAT\\0" "ODD_NUMBER_OF_CHARS\\0" "SECOND_NUMBER_TOO_LARGE\\0" "SEQUENCE_LENGTH_MISMATCH\\0" "SEQUENCE_NOT_CONSTRUCTED\\0" "SEQUENCE_OR_SET_NEEDS_CONFIG\\0" "SHORT_LINE\\0" "STREAMING_NOT_SUPPORTED\\0" "STRING_TOO_LONG\\0" "STRING_TOO_SHORT\\0" "TAG_VALUE_TOO_HIGH\\0" "TIME_NOT_ASCII_FORMAT\\0" "TOO_LONG\\0" "TYPE_NOT_CONSTRUCTED\\0" "TYPE_NOT_PRIMITIVE\\0" "UNEXPECTED_EOC\\0" "UNIVERSALSTRING_IS_WRONG_LENGTH\\0" "UNKNOWN_FORMAT\\0" "UNKNOWN_MESSAGE_DIGEST_ALGORITHM\\0" "UNKNOWN_SIGNATURE_ALGORITHM\\0" "UNKNOWN_TAG\\0" "UNSUPPORTED_ANY_DEFINED_BY_TYPE\\0" "UNSUPPORTED_PUBLIC_KEY_TYPE\\0" "UNSUPPORTED_TYPE\\0" "WRONG_PUBLIC_KEY_TYPE\\0" "WRONG_TAG\\0" "WRONG_TYPE\\0" "BAD_FOPEN_MODE\\0" "BROKEN_PIPE\\0" "CONNECT_ERROR\\0" "ERROR_SETTING_NBIO\\0" "INVALID_ARGUMENT\\0" "IN_USE\\0" "KEEPALIVE\\0" "NBIO_CONNECT_ERROR\\0" "NO_HOSTNAME_SPECIFIED\\0" "NO_PORT_SPECIFIED\\0" "NO_SUCH_FILE\\0" "NULL_PARAMETER\\0" "SYS_LIB\\0" "UNABLE_TO_CREATE_SOCKET\\0" "UNINITIALIZED\\0" "UNSUPPORTED_METHOD\\0" "WRITE_TO_READ_ONLY_BIO\\0" "ARG2_LT_ARG3\\0" "BAD_ENCODING\\0" "BAD_RECIPROCAL\\0" "BIGNUM_TOO_LONG\\0" "BITS_TOO_SMALL\\0" "CALLED_WITH_EVEN_MODULUS\\0" "DIV_BY_ZERO\\0" "EXPAND_ON_STATIC_BIGNUM_DATA\\0" "INPUT_NOT_REDUCED\\0" "INVALID_RANGE\\0" "NEGATIVE_NUMBER\\0" "NOT_A_SQUARE\\0" "NOT_INITIALIZED\\0" "NO_INVERSE\\0" "PRIVATE_KEY_TOO_LARGE\\0" "P_IS_NOT_PRIME\\0" "TOO_MANY_ITERATIONS\\0" "TOO_MANY_TEMPORARY_VARIABLES\\0" "AES_KEY_SETUP_FAILED\\0" "BAD_DECRYPT\\0" "BAD_KEY_LENGTH\\0" "CTRL_NOT_IMPLEMENTED\\0" 
"CTRL_OPERATION_NOT_IMPLEMENTED\\0" "DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH\\0" "INITIALIZATION_ERROR\\0" "INPUT_NOT_INITIALIZED\\0" "INVALID_AD_SIZE\\0" "INVALID_KEY_LENGTH\\0" "INVALID_NONCE_SIZE\\0" "INVALID_OPERATION\\0" "IV_TOO_LARGE\\0" "NO_CIPHER_SET\\0" "NO_DIRECTION_SET\\0" "OUTPUT_ALIASES_INPUT\\0" "TAG_TOO_LARGE\\0" "TOO_LARGE\\0" "UNSUPPORTED_AD_SIZE\\0" "UNSUPPORTED_INPUT_SIZE\\0" "UNSUPPORTED_KEY_SIZE\\0" "UNSUPPORTED_NONCE_SIZE\\0" "UNSUPPORTED_TAG_SIZE\\0" "WRONG_FINAL_BLOCK_LENGTH\\0" "LIST_CANNOT_BE_NULL\\0" "MISSING_CLOSE_SQUARE_BRACKET\\0" "MISSING_EQUAL_SIGN\\0" "NO_CLOSE_BRACE\\0" "UNABLE_TO_CREATE_NEW_SECTION\\0" "VARIABLE_HAS_NO_VALUE\\0" "BAD_GENERATOR\\0" "INVALID_PUBKEY\\0" "MODULUS_TOO_LARGE\\0" "NO_PRIVATE_VALUE\\0" "BAD_Q_VALUE\\0" "BAD_VERSION\\0" "MISSING_PARAMETERS\\0" "NEED_NEW_SETUP_VALUES\\0" "BIGNUM_OUT_OF_RANGE\\0" "COORDINATES_OUT_OF_RANGE\\0" "D2I_ECPKPARAMETERS_FAILURE\\0" "EC_GROUP_NEW_BY_NAME_FAILURE\\0" "GROUP2PKPARAMETERS_FAILURE\\0" "GROUP_MISMATCH\\0" "I2D_ECPKPARAMETERS_FAILURE\\0" "INCOMPATIBLE_OBJECTS\\0" "INVALID_COFACTOR\\0" "INVALID_COMPRESSED_POINT\\0" "INVALID_COMPRESSION_BIT\\0" "INVALID_ENCODING\\0" "INVALID_FIELD\\0" "INVALID_FORM\\0" "INVALID_GROUP_ORDER\\0" "INVALID_PRIVATE_KEY\\0" "MISSING_PRIVATE_KEY\\0" "NON_NAMED_CURVE\\0" "PKPARAMETERS2GROUP_FAILURE\\0" "POINT_AT_INFINITY\\0" "POINT_IS_NOT_ON_CURVE\\0" "SLOT_FULL\\0" "UNDEFINED_GENERATOR\\0" "UNKNOWN_GROUP\\0" "UNKNOWN_ORDER\\0" "WRONG_CURVE_PARAMETERS\\0" "WRONG_ORDER\\0" "KDF_FAILED\\0" "POINT_ARITHMETIC_FAILURE\\0" "BAD_SIGNATURE\\0" "NOT_IMPLEMENTED\\0" "RANDOM_NUMBER_GENERATION_FAILED\\0" "OPERATION_NOT_SUPPORTED\\0" "COMMAND_NOT_SUPPORTED\\0" "DIFFERENT_KEY_TYPES\\0" "DIFFERENT_PARAMETERS\\0" "EXPECTING_AN_EC_KEY_KEY\\0" "EXPECTING_AN_RSA_KEY\\0" "EXPECTING_A_DSA_KEY\\0" "ILLEGAL_OR_UNSUPPORTED_PADDING_MODE\\0" "INVALID_DIGEST_LENGTH\\0" "INVALID_DIGEST_TYPE\\0" "INVALID_KEYBITS\\0" "INVALID_MGF1_MD\\0" "INVALID_PADDING_MODE\\0" 
"INVALID_PSS_SALTLEN\\0" "KEYS_NOT_SET\\0" "NO_DEFAULT_DIGEST\\0" "NO_KEY_SET\\0" "NO_MDC2_SUPPORT\\0" "NO_NID_FOR_CURVE\\0" "NO_OPERATION_SET\\0" "NO_PARAMETERS_SET\\0" "OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE\\0" "OPERATON_NOT_INITIALIZED\\0" "UNKNOWN_PUBLIC_KEY_TYPE\\0" "UNSUPPORTED_ALGORITHM\\0" "OUTPUT_TOO_LARGE\\0" "UNKNOWN_NID\\0" "BAD_BASE64_DECODE\\0" "BAD_END_LINE\\0" "BAD_IV_CHARS\\0" "BAD_PASSWORD_READ\\0" "CIPHER_IS_NULL\\0" "ERROR_CONVERTING_PRIVATE_KEY\\0" "NOT_DEK_INFO\\0" "NOT_ENCRYPTED\\0" "NOT_PROC_TYPE\\0" "NO_START_LINE\\0" "READ_KEY\\0" "SHORT_HEADER\\0" "UNSUPPORTED_CIPHER\\0" "UNSUPPORTED_ENCRYPTION\\0" "BAD_PKCS12_DATA\\0" "BAD_PKCS12_VERSION\\0" "CIPHER_HAS_NO_OBJECT_IDENTIFIER\\0" "CRYPT_ERROR\\0" "ENCRYPT_ERROR\\0" "ERROR_SETTING_CIPHER_PARAMS\\0" "INCORRECT_PASSWORD\\0" "KEYGEN_FAILURE\\0" "KEY_GEN_ERROR\\0" "METHOD_NOT_SUPPORTED\\0" "MISSING_MAC\\0" "MULTIPLE_PRIVATE_KEYS_IN_PKCS12\\0" "PKCS12_PUBLIC_KEY_INTEGRITY_NOT_SUPPORTED\\0" "PKCS12_TOO_DEEPLY_NESTED\\0" "PRIVATE_KEY_DECODE_ERROR\\0" "PRIVATE_KEY_ENCODE_ERROR\\0" "UNKNOWN_ALGORITHM\\0" "UNKNOWN_CIPHER\\0" "UNKNOWN_CIPHER_ALGORITHM\\0" "UNKNOWN_DIGEST\\0" "UNKNOWN_HASH\\0" "UNSUPPORTED_PRIVATE_KEY_ALGORITHM\\0" "BAD_E_VALUE\\0" "BAD_FIXED_HEADER_DECRYPT\\0" "BAD_PAD_BYTE_COUNT\\0" "BAD_RSA_PARAMETERS\\0" "BLOCK_TYPE_IS_NOT_01\\0" "BN_NOT_INITIALIZED\\0" "CANNOT_RECOVER_MULTI_PRIME_KEY\\0" "CRT_PARAMS_ALREADY_GIVEN\\0" "CRT_VALUES_INCORRECT\\0" "DATA_LEN_NOT_EQUAL_TO_MOD_LEN\\0" "DATA_TOO_LARGE\\0" "DATA_TOO_LARGE_FOR_KEY_SIZE\\0" "DATA_TOO_LARGE_FOR_MODULUS\\0" "DATA_TOO_SMALL\\0" "DATA_TOO_SMALL_FOR_KEY_SIZE\\0" "DIGEST_TOO_BIG_FOR_RSA_KEY\\0" "D_E_NOT_CONGRUENT_TO_1\\0" "EMPTY_PUBLIC_KEY\\0" "FIRST_OCTET_INVALID\\0" "INCONSISTENT_SET_OF_CRT_VALUES\\0" "INTERNAL_ERROR\\0" "INVALID_MESSAGE_LENGTH\\0" "KEY_SIZE_TOO_SMALL\\0" "LAST_OCTET_INVALID\\0" "MUST_HAVE_AT_LEAST_TWO_PRIMES\\0" "NO_PUBLIC_EXPONENT\\0" "NULL_BEFORE_BLOCK_MISSING\\0" "N_NOT_EQUAL_P_Q\\0" 
"OAEP_DECODING_ERROR\\0" "ONLY_ONE_OF_P_Q_GIVEN\\0" "OUTPUT_BUFFER_TOO_SMALL\\0" "PADDING_CHECK_FAILED\\0" "PKCS_DECODING_ERROR\\0" "SLEN_CHECK_FAILED\\0" "SLEN_RECOVERY_FAILED\\0" "UNKNOWN_ALGORITHM_TYPE\\0" "UNKNOWN_PADDING_TYPE\\0" "VALUE_MISSING\\0" "WRONG_SIGNATURE_LENGTH\\0" "APP_DATA_IN_HANDSHAKE\\0" "ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT\\0" "BAD_ALERT\\0" "BAD_CHANGE_CIPHER_SPEC\\0" "BAD_DATA_RETURNED_BY_CALLBACK\\0" "BAD_DH_P_LENGTH\\0" "BAD_DIGEST_LENGTH\\0" "BAD_ECC_CERT\\0" "BAD_ECPOINT\\0" "BAD_HANDSHAKE_RECORD\\0" "BAD_HELLO_REQUEST\\0" "BAD_LENGTH\\0" "BAD_PACKET_LENGTH\\0" "BAD_RSA_ENCRYPT\\0" "BAD_SRTP_MKI_VALUE\\0" "BAD_SRTP_PROTECTION_PROFILE_LIST\\0" "BAD_SSL_FILETYPE\\0" "BAD_WRITE_RETRY\\0" "BIO_NOT_SET\\0" "CA_DN_LENGTH_MISMATCH\\0" "CA_DN_TOO_LONG\\0" "CCS_RECEIVED_EARLY\\0" "CERTIFICATE_VERIFY_FAILED\\0" "CERT_CB_ERROR\\0" "CERT_LENGTH_MISMATCH\\0" "CHANNEL_ID_NOT_P256\\0" "CHANNEL_ID_SIGNATURE_INVALID\\0" "CIPHER_OR_HASH_UNAVAILABLE\\0" "CLIENTHELLO_PARSE_FAILED\\0" "CLIENTHELLO_TLSEXT\\0" "CONNECTION_REJECTED\\0" "CONNECTION_TYPE_NOT_SET\\0" "CUSTOM_EXTENSION_ERROR\\0" "DATA_LENGTH_TOO_LONG\\0" "DECRYPTION_FAILED\\0" "DECRYPTION_FAILED_OR_BAD_RECORD_MAC\\0" "DH_PUBLIC_VALUE_LENGTH_IS_WRONG\\0" "DH_P_TOO_LONG\\0" "DIGEST_CHECK_FAILED\\0" "DOWNGRADE_DETECTED\\0" "DTLS_MESSAGE_TOO_BIG\\0" "ECC_CERT_NOT_FOR_SIGNING\\0" "EMS_STATE_INCONSISTENT\\0" "ENCRYPTED_LENGTH_TOO_LONG\\0" "ERROR_ADDING_EXTENSION\\0" "ERROR_IN_RECEIVED_CIPHER_LIST\\0" "ERROR_PARSING_EXTENSION\\0" "EXCESSIVE_MESSAGE_SIZE\\0" "EXTRA_DATA_IN_MESSAGE\\0" "FRAGMENT_MISMATCH\\0" "GOT_NEXT_PROTO_WITHOUT_EXTENSION\\0" "HANDSHAKE_FAILURE_ON_CLIENT_HELLO\\0" "HTTPS_PROXY_REQUEST\\0" "HTTP_REQUEST\\0" "INAPPROPRIATE_FALLBACK\\0" "INVALID_COMMAND\\0" "INVALID_MESSAGE\\0" "INVALID_OUTER_RECORD_TYPE\\0" "INVALID_SSL_SESSION\\0" "INVALID_TICKET_KEYS_LENGTH\\0" "LENGTH_MISMATCH\\0" "LIBRARY_HAS_NO_CIPHERS\\0" "MISSING_EXTENSION\\0" "MISSING_RSA_CERTIFICATE\\0" 
"MISSING_TMP_DH_KEY\\0" "MISSING_TMP_ECDH_KEY\\0" "MIXED_SPECIAL_OPERATOR_WITH_GROUPS\\0" "MTU_TOO_SMALL\\0" "NEGOTIATED_BOTH_NPN_AND_ALPN\\0" "NESTED_GROUP\\0" "NO_CERTIFICATES_RETURNED\\0" "NO_CERTIFICATE_ASSIGNED\\0" "NO_CERTIFICATE_SET\\0" "NO_CIPHERS_AVAILABLE\\0" "NO_CIPHERS_PASSED\\0" "NO_CIPHER_MATCH\\0" "NO_COMMON_SIGNATURE_ALGORITHMS\\0" "NO_COMPRESSION_SPECIFIED\\0" "NO_METHOD_SPECIFIED\\0" "NO_P256_SUPPORT\\0" "NO_PRIVATE_KEY_ASSIGNED\\0" "NO_RENEGOTIATION\\0" "NO_REQUIRED_DIGEST\\0" "NO_SHARED_CIPHER\\0" "NULL_SSL_CTX\\0" "NULL_SSL_METHOD_PASSED\\0" "OLD_SESSION_CIPHER_NOT_RETURNED\\0" "OLD_SESSION_VERSION_NOT_RETURNED\\0" "PARSE_TLSEXT\\0" "PATH_TOO_LONG\\0" "PEER_DID_NOT_RETURN_A_CERTIFICATE\\0" "PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE\\0" "PROTOCOL_IS_SHUTDOWN\\0" "PSK_IDENTITY_NOT_FOUND\\0" "PSK_NO_CLIENT_CB\\0" "PSK_NO_SERVER_CB\\0" "READ_TIMEOUT_EXPIRED\\0" "RECORD_LENGTH_MISMATCH\\0" "RECORD_TOO_LARGE\\0" "RENEGOTIATION_ENCODING_ERR\\0" "RENEGOTIATION_MISMATCH\\0" "REQUIRED_CIPHER_MISSING\\0" "RESUMED_EMS_SESSION_WITHOUT_EMS_EXTENSION\\0" "RESUMED_NON_EMS_SESSION_WITH_EMS_EXTENSION\\0" "SCSV_RECEIVED_WHEN_RENEGOTIATING\\0" "SERVERHELLO_TLSEXT\\0" "SESSION_ID_CONTEXT_UNINITIALIZED\\0" "SESSION_MAY_NOT_BE_CREATED\\0" "SHUTDOWN_WHILE_IN_INIT\\0" "SIGNATURE_ALGORITHMS_EXTENSION_SENT_BY_SERVER\\0" "SRTP_COULD_NOT_ALLOCATE_PROFILES\\0" "SRTP_UNKNOWN_PROTECTION_PROFILE\\0" "SSL3_EXT_INVALID_SERVERNAME\\0" "SSLV3_ALERT_BAD_CERTIFICATE\\0" "SSLV3_ALERT_BAD_RECORD_MAC\\0" "SSLV3_ALERT_CERTIFICATE_EXPIRED\\0" "SSLV3_ALERT_CERTIFICATE_REVOKED\\0" "SSLV3_ALERT_CERTIFICATE_UNKNOWN\\0" "SSLV3_ALERT_CLOSE_NOTIFY\\0" "SSLV3_ALERT_DECOMPRESSION_FAILURE\\0" "SSLV3_ALERT_HANDSHAKE_FAILURE\\0" "SSLV3_ALERT_ILLEGAL_PARAMETER\\0" "SSLV3_ALERT_NO_CERTIFICATE\\0" "SSLV3_ALERT_UNEXPECTED_MESSAGE\\0" "SSLV3_ALERT_UNSUPPORTED_CERTIFICATE\\0" "SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION\\0" "SSL_HANDSHAKE_FAILURE\\0" "SSL_SESSION_ID_CONTEXT_TOO_LONG\\0" 
"TLSV1_ALERT_ACCESS_DENIED\\0" "TLSV1_ALERT_DECODE_ERROR\\0" "TLSV1_ALERT_DECRYPTION_FAILED\\0" "TLSV1_ALERT_DECRYPT_ERROR\\0" "TLSV1_ALERT_EXPORT_RESTRICTION\\0" "TLSV1_ALERT_INAPPROPRIATE_FALLBACK\\0" "TLSV1_ALERT_INSUFFICIENT_SECURITY\\0" "TLSV1_ALERT_INTERNAL_ERROR\\0" "TLSV1_ALERT_NO_RENEGOTIATION\\0" "TLSV1_ALERT_PROTOCOL_VERSION\\0" "TLSV1_ALERT_RECORD_OVERFLOW\\0" "TLSV1_ALERT_UNKNOWN_CA\\0" "TLSV1_ALERT_USER_CANCELLED\\0" "TLSV1_BAD_CERTIFICATE_HASH_VALUE\\0" "TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE\\0" "TLSV1_CERTIFICATE_UNOBTAINABLE\\0" "TLSV1_UNRECOGNIZED_NAME\\0" "TLSV1_UNSUPPORTED_EXTENSION\\0" "TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST\\0" "TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG\\0" "TOO_MANY_EMPTY_FRAGMENTS\\0" "TOO_MANY_WARNING_ALERTS\\0" "UNABLE_TO_FIND_ECDH_PARAMETERS\\0" "UNEXPECTED_EXTENSION\\0" "UNEXPECTED_MESSAGE\\0" "UNEXPECTED_OPERATOR_IN_GROUP\\0" "UNEXPECTED_RECORD\\0" "UNKNOWN_ALERT_TYPE\\0" "UNKNOWN_CERTIFICATE_TYPE\\0" "UNKNOWN_CIPHER_RETURNED\\0" "UNKNOWN_CIPHER_TYPE\\0" "UNKNOWN_KEY_EXCHANGE_TYPE\\0" "UNKNOWN_PROTOCOL\\0" "UNKNOWN_SSL_VERSION\\0" "UNKNOWN_STATE\\0" "UNSAFE_LEGACY_RENEGOTIATION_DISABLED\\0" "UNSUPPORTED_COMPRESSION_ALGORITHM\\0" "UNSUPPORTED_ELLIPTIC_CURVE\\0" "UNSUPPORTED_PROTOCOL\\0" "UNSUPPORTED_PROTOCOL_FOR_CUSTOM_KEY\\0" "WRONG_CERTIFICATE_TYPE\\0" "WRONG_CIPHER_RETURNED\\0" "WRONG_CURVE\\0" "WRONG_MESSAGE_TYPE\\0" "WRONG_SIGNATURE_TYPE\\0" "WRONG_SSL_VERSION\\0" "WRONG_VERSION_NUMBER\\0" "X509_LIB\\0" "X509_VERIFICATION_SETUP_PROBLEMS\\0" "AKID_MISMATCH\\0" "BAD_PKCS7_VERSION\\0" "BAD_X509_FILETYPE\\0" "BASE64_DECODE_ERROR\\0" "CANT_CHECK_DH_KEY\\0" "CERT_ALREADY_IN_HASH_TABLE\\0" "CRL_ALREADY_DELTA\\0" "CRL_VERIFY_FAILURE\\0" "IDP_MISMATCH\\0" "INVALID_DIRECTORY\\0" "INVALID_FIELD_NAME\\0" "INVALID_PSS_PARAMETERS\\0" "INVALID_TRUST\\0" "ISSUER_MISMATCH\\0" "KEY_TYPE_MISMATCH\\0" "KEY_VALUES_MISMATCH\\0" "LOADING_CERT_DIR\\0" "LOADING_DEFAULTS\\0" "NAME_TOO_LONG\\0" "NEWER_CRL_NOT_NEWER\\0" 
"NOT_PKCS7_SIGNED_DATA\\0" "NO_CERTIFICATES_INCLUDED\\0" "NO_CERT_SET_FOR_US_TO_VERIFY\\0" "NO_CRLS_INCLUDED\\0" "NO_CRL_NUMBER\\0" "PUBLIC_KEY_DECODE_ERROR\\0" "PUBLIC_KEY_ENCODE_ERROR\\0" "SHOULD_RETRY\\0" "UNKNOWN_KEY_TYPE\\0" "UNKNOWN_PURPOSE_ID\\0" "UNKNOWN_TRUST_ID\\0" "WRONG_LOOKUP_TYPE\\0" "BAD_IP_ADDRESS\\0" "BAD_OBJECT\\0" "BN_DEC2BN_ERROR\\0" "BN_TO_ASN1_INTEGER_ERROR\\0" "CANNOT_FIND_FREE_FUNCTION\\0" "DIRNAME_ERROR\\0" "DISTPOINT_ALREADY_SET\\0" "DUPLICATE_ZONE_ID\\0" "ERROR_CONVERTING_ZONE\\0" "ERROR_CREATING_EXTENSION\\0" "ERROR_IN_EXTENSION\\0" "EXPECTED_A_SECTION_NAME\\0" "EXTENSION_EXISTS\\0" "EXTENSION_NAME_ERROR\\0" "EXTENSION_NOT_FOUND\\0" "EXTENSION_SETTING_NOT_SUPPORTED\\0" "EXTENSION_VALUE_ERROR\\0" "ILLEGAL_EMPTY_EXTENSION\\0" "ILLEGAL_HEX_DIGIT\\0" "INCORRECT_POLICY_SYNTAX_TAG\\0" "INVALID_BOOLEAN_STRING\\0" "INVALID_EXTENSION_STRING\\0" "INVALID_MULTIPLE_RDNS\\0" "INVALID_NAME\\0" "INVALID_NULL_ARGUMENT\\0" "INVALID_NULL_NAME\\0" "INVALID_NULL_VALUE\\0" "INVALID_NUMBERS\\0" "INVALID_OBJECT_IDENTIFIER\\0" "INVALID_OPTION\\0" "INVALID_POLICY_IDENTIFIER\\0" "INVALID_PROXY_POLICY_SETTING\\0" "INVALID_PURPOSE\\0" "INVALID_SECTION\\0" "INVALID_SYNTAX\\0" "ISSUER_DECODE_ERROR\\0" "NEED_ORGANIZATION_AND_NUMBERS\\0" "NO_CONFIG_DATABASE\\0" "NO_ISSUER_CERTIFICATE\\0" "NO_ISSUER_DETAILS\\0" "NO_POLICY_IDENTIFIER\\0" "NO_PROXY_CERT_POLICY_LANGUAGE_DEFINED\\0" "NO_PUBLIC_KEY\\0" "NO_SUBJECT_DETAILS\\0" "ODD_NUMBER_OF_DIGITS\\0" "OPERATION_NOT_DEFINED\\0" "OTHERNAME_ERROR\\0" "POLICY_LANGUAGE_ALREADY_DEFINED\\0" "POLICY_PATH_LENGTH\\0" "POLICY_PATH_LENGTH_ALREADY_DEFINED\\0" "POLICY_WHEN_PROXY_LANGUAGE_REQUIRES_NO_POLICY\\0" "SECTION_NOT_FOUND\\0" "UNABLE_TO_GET_ISSUER_DETAILS\\0" "UNABLE_TO_GET_ISSUER_KEYID\\0" "UNKNOWN_BIT_STRING_ARGUMENT\\0" "UNKNOWN_EXTENSION\\0" "UNKNOWN_EXTENSION_NAME\\0" "UNKNOWN_OPTION\\0" "UNSUPPORTED_OPTION\\0" "USER_TOO_LONG\\0" ""; EOF END_OF_COMMAND end
# This file is auto-generated from the current state of the database. Instead of editing this file, # please use the migrations feature of Active Record to incrementally modify your database, and # then regenerate this schema definition. # # Note that this schema.rb definition is the authoritative source for your database schema. If you need # to create the application database on another system, you should be using db:schema:load, not running # all the migrations from scratch. The latter is a flawed and unsustainable approach (the more migrations # you'll amass, the slower it'll run and the greater likelihood for issues). # # It's strongly recommended to check this file into your version control system. ActiveRecord::Schema.define(:version => 20100513142136) do create_table "activity_limits", :force => true do |t| t.string "contributor_type", :null => false t.integer "contributor_id", :null => false t.string "limit_feature", :null => false t.integer "limit_max" t.integer "limit_frequency" t.integer "current_count", :null => false t.datetime "reset_after" t.datetime "promote_after" end create_table "annotation_attributes", :force => true do |t| t.string "name", :null => false t.datetime "created_at" t.datetime "updated_at" end add_index "annotation_attributes", ["name"], :name => "index_annotation_attributes_on_name" create_table "annotation_value_seeds", :force => true do |t| t.integer "attribute_id", :null => false t.string "value", :null => false t.datetime "created_at" t.datetime "updated_at" end add_index "annotation_value_seeds", ["attribute_id"], :name => "index_annotation_value_seeds_on_attribute_id" create_table "annotation_versions", :force => true do |t| t.integer "annotation_id", :null => false t.integer "version", :null => false t.integer "version_creator_id" t.string "source_type", :null => false t.integer "source_id", :null => false t.string "annotatable_type", :limit => 50, :null => false t.integer "annotatable_id", :null => false t.integer 
"attribute_id", :null => false t.text "value", :null => false t.string "value_type", :limit => 50, :null => false t.datetime "created_at" t.datetime "updated_at" end add_index "annotation_versions", ["annotation_id"], :name => "index_annotation_versions_on_annotation_id" create_table "annotations", :force => true do |t| t.string "source_type", :null => false t.integer "source_id", :null => false t.string "annotatable_type", :limit => 50, :null => false t.integer "annotatable_id", :null => false t.integer "attribute_id", :null => false t.text "value", :null => false t.string "value_type", :limit => 50, :null => false t.integer "version", :null => false t.integer "version_creator_id" t.datetime "created_at" t.datetime "updated_at" end add_index "annotations", ["annotatable_type", "annotatable_id"], :name => "index_annotations_on_annotatable_type_and_annotatable_id" add_index "annotations", ["attribute_id"], :name => "index_annotations_on_attribute_id" add_index "annotations", ["source_type", "source_id"], :name => "index_annotations_on_source_type_and_source_id" create_table "assets", :force => true do |t| t.string "contributor_type" t.integer "contributor_id" t.integer "project_id" t.string "resource_type" t.integer "resource_id" t.string "source_type" t.integer "source_id" t.string "quality" t.integer "policy_id" t.datetime "created_at" t.datetime "updated_at" t.datetime "last_used_at" end create_table "avatars", :force => true do |t| t.string "owner_type" t.integer "owner_id" t.string "original_filename" t.datetime "created_at" t.datetime "updated_at" end create_table "content_blobs", :force => true do |t| t.binary "data", :limit => 2147483647 end create_table "csvarchives", :force => true do |t| t.integer "person_id" t.string "title" t.text "description" t.string "content_type" t.integer "content_blob_id" t.datetime "last_used_at" t.datetime "created_at" t.datetime "updated_at" t.string "filename" t.string "url" t.boolean "complete" t.boolean "failure" t.string 
"contributor_type" t.integer "contributor_id" end create_table "dataset_lists", :force => true do |t| t.integer "user_search_id" t.integer "dataset_id" t.datetime "created_at" t.datetime "updated_at" end create_table "datasets", :force => true do |t| t.integer "survey_id" t.string "name" t.string "filename" t.string "description" t.datetime "created_at" t.datetime "updated_at" t.string "colour" t.string "key_variable" end create_table "extract_to_extract_links", :force => true do |t| t.integer "source_id", :null => false t.integer "target_id", :null => false end create_table "extract_to_extract_lists", :id => false, :force => true do |t| t.integer "source_extract_id" t.integer "linked_extract_id" t.datetime "created_at" t.datetime "updated_at" end create_table "favourite_group_memberships", :force => true do |t| t.integer "person_id" t.integer "favourite_group_id" t.integer "access_type", :limit => 1 t.datetime "created_at" t.datetime "updated_at" end create_table "favourite_groups", :force => true do |t| t.integer "user_id" t.string "name" t.datetime "created_at" t.datetime "updated_at" end create_table "favourites", :force => true do |t| t.integer "asset_id" t.integer "user_id" t.string "model_name" t.datetime "created_at" t.datetime "updated_at" end create_table "forums", :force => true do |t| t.string "name" t.string "description" t.integer "topics_count", :default => 0 t.integer "posts_count", :default => 0 t.integer "position" t.text "description_html" end create_table "group_memberships", :force => true do |t| t.integer "person_id" t.integer "work_group_id" t.datetime "created_at" t.datetime "updated_at" end create_table "group_memberships_roles", :id => false, :force => true do |t| t.integer "group_membership_id" t.integer "role_id" end create_table "links", :force => true do |t| t.string "subject_type", :null => false t.integer "subject_id", :null => false t.string "predicate", :null => false t.string "object_type", :null => false t.integer "object_id", 
:null => false t.datetime "created_at" t.datetime "updated_at" t.string "subject_field_name" t.string "object_field_name" end create_table "messages", :force => true do |t| t.integer "from" t.integer "to" t.string "subject" t.text "body" t.integer "reply_id" t.datetime "created_at" t.datetime "read_at" t.text "body_html" t.boolean "deleted_by_sender", :default => false t.boolean "deleted_by_recipient", :default => false end create_table "moderatorships", :force => true do |t| t.integer "forum_id" t.integer "user_id" end add_index "moderatorships", ["forum_id"], :name => "index_moderatorships_on_forum_id" create_table "monitorships", :force => true do |t| t.integer "topic_id" t.integer "user_id" t.boolean "active", :default => true end create_table "people", :force => true do |t| t.datetime "created_at" t.datetime "updated_at" t.string "first_name" t.string "last_name" t.string "email" t.string "phone" t.string "skype_name" t.string "web_page" t.text "description" t.integer "avatar_id" t.integer "status_id", :default => 0 t.boolean "is_pal", :default => false t.boolean "send_notifications", :default => false t.boolean "dormant", :default => false end create_table "permissions", :force => true do |t| t.string "contributor_type" t.integer "contributor_id" t.integer "policy_id" t.integer "access_type", :limit => 1 t.datetime "created_at" t.datetime "updated_at" end create_table "policies", :force => true do |t| t.string "contributor_type" t.integer "contributor_id" t.string "name" t.integer "sharing_scope", :limit => 1 t.integer "access_type", :limit => 1 t.boolean "use_custom_sharing" t.boolean "use_whitelist" t.boolean "use_blacklist" t.datetime "created_at" t.datetime "updated_at" end create_table "posts", :force => true do |t| t.integer "user_id" t.integer "topic_id" t.text "body" t.datetime "created_at" t.datetime "updated_at" t.integer "forum_id" t.text "body_html" end add_index "posts", ["forum_id", "created_at"], :name => "index_posts_on_forum_id" add_index 
"posts", ["topic_id", "created_at"], :name => "index_posts_on_topic_id" add_index "posts", ["user_id", "created_at"], :name => "index_posts_on_user_id" create_table "projects", :force => true do |t| t.string "name" t.string "web_page" t.string "wiki_page" t.datetime "created_at" t.datetime "updated_at" t.text "description" t.integer "avatar_id" t.integer "default_policy_id" end create_table "relationships", :force => true do |t| t.string "subject_type", :null => false t.integer "subject_id", :null => false t.string "predicate", :null => false t.string "object_type", :null => false t.integer "object_id", :null => false t.datetime "created_at" t.datetime "updated_at" end create_table "script_lists", :force => true do |t| t.integer "csvarchive_id" t.integer "script_id" t.datetime "created_at" t.datetime "updated_at" end create_table "script_to_script_links", :force => true do |t| t.integer "source_id", :null => false t.integer "target_id", :null => false end create_table "scripts", :force => true do |t| t.string "title" t.text "body" t.string "description" t.datetime "created_at" t.datetime "updated_at" t.string "content_type" t.string "contributor_type" t.integer "contributor_id" t.integer "content_blob_id" t.datetime "last_used_at" t.string "original_filename" t.string "method_type" end create_table "search_terms", :force => true do |t| t.string "term" t.datetime "created_at" t.datetime "updated_at" end create_table "search_variable_lists", :force => true do |t| t.integer "user_search_id" t.integer "variable_id" t.datetime "created_at" t.datetime "updated_at" end create_table "sessions", :force => true do |t| t.string "session_id", :null => false t.text "data" t.datetime "created_at" t.datetime "updated_at" end add_index "sessions", ["session_id"], :name => "index_sessions_on_session_id" add_index "sessions", ["updated_at"], :name => "index_sessions_on_updated_at" create_table "survey_lists", :force => true do |t| t.integer "csvarchive_id" t.integer "survey_id" 
t.datetime "created_at" t.datetime "updated_at" end create_table "survey_to_script_lists", :force => true do |t| t.integer "script_id" t.integer "survey_id" t.datetime "created_at" t.datetime "updated_at" end create_table "surveys", :force => true do |t| t.string "title" t.datetime "created_at" t.datetime "updated_at" t.string "content_type" t.string "contributor_type" t.integer "contributor_id" t.integer "content_blob_id" t.integer "script_id" t.string "original_filename" t.text "description" t.datetime "last_used_at" t.string "year" t.string "surveytype" t.string "UKDA_summary" t.string "headline_report" end create_table "taggings", :force => true do |t| t.integer "tag_id" t.integer "taggable_id" t.integer "tagger_id" t.string "tagger_type" t.string "taggable_type" t.string "context" t.datetime "created_at" end add_index "taggings", ["tag_id"], :name => "index_taggings_on_tag_id" add_index "taggings", ["taggable_id", "taggable_type", "context"], :name => "index_taggings_on_taggable_id_and_taggable_type_and_context" create_table "tags", :force => true do |t| t.string "name" end create_table "topics", :force => true do |t| t.integer "forum_id" t.integer "user_id" t.string "title" t.datetime "created_at" t.datetime "updated_at" t.integer "hits", :default => 0 t.integer "sticky", :default => 0 t.integer "posts_count", :default => 0 t.datetime "replied_at" t.boolean "locked", :default => false t.integer "replied_by" t.integer "last_post_id" end add_index "topics", ["forum_id", "replied_at"], :name => "index_topics_on_forum_id_and_replied_at" add_index "topics", ["forum_id", "sticky", "replied_at"], :name => "index_topics_on_sticky_and_replied_at" add_index "topics", ["forum_id"], :name => "index_topics_on_forum_id" create_table "user_searches", :force => true do |t| t.integer "person_id" t.string "terms" t.datetime "created_at" t.datetime "updated_at" end create_table "users", :force => true do |t| t.string "login" t.string "email" t.string "crypted_password", :limit 
=> 40 t.string "salt", :limit => 40 t.datetime "created_at" t.datetime "updated_at" t.string "remember_token" t.datetime "remember_token_expires_at" t.string "activation_code", :limit => 40 t.datetime "activated_at" t.integer "person_id" t.boolean "is_admin", :default => false t.boolean "can_edit_projects", :default => false t.boolean "can_edit_institutions", :default => false t.string "reset_password_code" t.datetime "reset_password_code_until" t.integer "posts_count", :default => 0 t.datetime "last_seen_at" t.boolean "dormant", :default => false end create_table "variable_linkages", :force => true do |t| t.integer "variable_link_id" t.integer "variable_id" t.datetime "created_at" t.datetime "updated_at" end create_table "variable_links", :force => true do |t| t.integer "person_id" t.string "description" t.datetime "created_at" t.datetime "updated_at" end create_table "variable_lists", :force => true do |t| t.integer "csvarchive_id" t.integer "variable_id" t.datetime "created_at" t.datetime "updated_at" end create_table "variables", :force => true do |t| t.string "name" t.string "value" t.datetime "created_at" t.datetime "updated_at" t.integer "dataset_id" t.string "label" t.integer "csvarchive_id" t.string "category" t.string "dertype" t.text "dermethod" t.text "info" t.string "document" t.string "page" end create_table "watched_variables", :force => true do |t| t.integer "person_id" t.integer "variable_id" t.datetime "created_at" t.datetime "updated_at" end create_table "work_groups", :force => true do |t| t.string "name" t.string "info" t.datetime "created_at" t.datetime "updated_at" end end groups have an owner # This file is auto-generated from the current state of the database. Instead of editing this file, # please use the migrations feature of Active Record to incrementally modify your database, and # then regenerate this schema definition. # # Note that this schema.rb definition is the authoritative source for your database schema. 
If you need # to create the application database on another system, you should be using db:schema:load, not running # all the migrations from scratch. The latter is a flawed and unsustainable approach (the more migrations # you'll amass, the slower it'll run and the greater likelihood for issues). # # It's strongly recommended to check this file into your version control system. ActiveRecord::Schema.define(:version => 20100520084746) do create_table "activity_limits", :force => true do |t| t.string "contributor_type", :null => false t.integer "contributor_id", :null => false t.string "limit_feature", :null => false t.integer "limit_max" t.integer "limit_frequency" t.integer "current_count", :null => false t.datetime "reset_after" t.datetime "promote_after" end create_table "annotation_attributes", :force => true do |t| t.string "name", :null => false t.datetime "created_at" t.datetime "updated_at" end add_index "annotation_attributes", ["name"], :name => "index_annotation_attributes_on_name" create_table "annotation_value_seeds", :force => true do |t| t.integer "attribute_id", :null => false t.string "value", :null => false t.datetime "created_at" t.datetime "updated_at" end add_index "annotation_value_seeds", ["attribute_id"], :name => "index_annotation_value_seeds_on_attribute_id" create_table "annotation_versions", :force => true do |t| t.integer "annotation_id", :null => false t.integer "version", :null => false t.integer "version_creator_id" t.string "source_type", :null => false t.integer "source_id", :null => false t.string "annotatable_type", :limit => 50, :null => false t.integer "annotatable_id", :null => false t.integer "attribute_id", :null => false t.text "value", :null => false t.string "value_type", :limit => 50, :null => false t.datetime "created_at" t.datetime "updated_at" end add_index "annotation_versions", ["annotation_id"], :name => "index_annotation_versions_on_annotation_id" create_table "annotations", :force => true do |t| t.string 
"source_type", :null => false t.integer "source_id", :null => false t.string "annotatable_type", :limit => 50, :null => false t.integer "annotatable_id", :null => false t.integer "attribute_id", :null => false t.text "value", :null => false t.string "value_type", :limit => 50, :null => false t.integer "version", :null => false t.integer "version_creator_id" t.datetime "created_at" t.datetime "updated_at" end add_index "annotations", ["annotatable_type", "annotatable_id"], :name => "index_annotations_on_annotatable_type_and_annotatable_id" add_index "annotations", ["attribute_id"], :name => "index_annotations_on_attribute_id" add_index "annotations", ["source_type", "source_id"], :name => "index_annotations_on_source_type_and_source_id" create_table "assets", :force => true do |t| t.string "contributor_type" t.integer "contributor_id" t.integer "project_id" t.string "resource_type" t.integer "resource_id" t.string "source_type" t.integer "source_id" t.string "quality" t.integer "policy_id" t.datetime "created_at" t.datetime "updated_at" t.datetime "last_used_at" end create_table "avatars", :force => true do |t| t.string "owner_type" t.integer "owner_id" t.string "original_filename" t.datetime "created_at" t.datetime "updated_at" end create_table "content_blobs", :force => true do |t| t.binary "data", :limit => 2147483647 end create_table "csvarchives", :force => true do |t| t.integer "person_id" t.string "title" t.text "description" t.string "content_type" t.integer "content_blob_id" t.datetime "last_used_at" t.datetime "created_at" t.datetime "updated_at" t.string "filename" t.string "url" t.boolean "complete" t.boolean "failure" t.string "contributor_type" t.integer "contributor_id" end create_table "dataset_lists", :force => true do |t| t.integer "user_search_id" t.integer "dataset_id" t.datetime "created_at" t.datetime "updated_at" end create_table "datasets", :force => true do |t| t.integer "survey_id" t.string "name" t.string "filename" t.string "description" 
t.datetime "created_at" t.datetime "updated_at" t.string "colour" t.string "key_variable" end create_table "extract_to_extract_links", :force => true do |t| t.integer "source_id", :null => false t.integer "target_id", :null => false end create_table "extract_to_extract_lists", :id => false, :force => true do |t| t.integer "source_extract_id" t.integer "linked_extract_id" t.datetime "created_at" t.datetime "updated_at" end create_table "favourite_group_memberships", :force => true do |t| t.integer "person_id" t.integer "favourite_group_id" t.integer "access_type", :limit => 1 t.datetime "created_at" t.datetime "updated_at" end create_table "favourite_groups", :force => true do |t| t.integer "user_id" t.string "name" t.datetime "created_at" t.datetime "updated_at" end create_table "favourites", :force => true do |t| t.integer "asset_id" t.integer "user_id" t.string "model_name" t.datetime "created_at" t.datetime "updated_at" end create_table "forums", :force => true do |t| t.string "name" t.string "description" t.integer "topics_count", :default => 0 t.integer "posts_count", :default => 0 t.integer "position" t.text "description_html" end create_table "group_memberships", :force => true do |t| t.integer "person_id" t.integer "work_group_id" t.datetime "created_at" t.datetime "updated_at" end create_table "group_memberships_roles", :id => false, :force => true do |t| t.integer "group_membership_id" t.integer "role_id" end create_table "links", :force => true do |t| t.string "subject_type", :null => false t.integer "subject_id", :null => false t.string "predicate", :null => false t.string "object_type", :null => false t.integer "object_id", :null => false t.datetime "created_at" t.datetime "updated_at" t.string "subject_field_name" t.string "object_field_name" end create_table "messages", :force => true do |t| t.integer "from" t.integer "to" t.string "subject" t.text "body" t.integer "reply_id" t.datetime "created_at" t.datetime "read_at" t.text "body_html" t.boolean 
"deleted_by_sender", :default => false t.boolean "deleted_by_recipient", :default => false end create_table "moderatorships", :force => true do |t| t.integer "forum_id" t.integer "user_id" end add_index "moderatorships", ["forum_id"], :name => "index_moderatorships_on_forum_id" create_table "monitorships", :force => true do |t| t.integer "topic_id" t.integer "user_id" t.boolean "active", :default => true end create_table "people", :force => true do |t| t.datetime "created_at" t.datetime "updated_at" t.string "first_name" t.string "last_name" t.string "email" t.string "phone" t.string "skype_name" t.string "web_page" t.text "description" t.integer "avatar_id" t.integer "status_id", :default => 0 t.boolean "is_pal", :default => false t.boolean "send_notifications", :default => false t.boolean "dormant", :default => false end create_table "permissions", :force => true do |t| t.string "contributor_type" t.integer "contributor_id" t.integer "policy_id" t.integer "access_type", :limit => 1 t.datetime "created_at" t.datetime "updated_at" end create_table "policies", :force => true do |t| t.string "contributor_type" t.integer "contributor_id" t.string "name" t.integer "sharing_scope", :limit => 1 t.integer "access_type", :limit => 1 t.boolean "use_custom_sharing" t.boolean "use_whitelist" t.boolean "use_blacklist" t.datetime "created_at" t.datetime "updated_at" end create_table "posts", :force => true do |t| t.integer "user_id" t.integer "topic_id" t.text "body" t.datetime "created_at" t.datetime "updated_at" t.integer "forum_id" t.text "body_html" end add_index "posts", ["forum_id", "created_at"], :name => "index_posts_on_forum_id" add_index "posts", ["topic_id", "created_at"], :name => "index_posts_on_topic_id" add_index "posts", ["user_id", "created_at"], :name => "index_posts_on_user_id" create_table "projects", :force => true do |t| t.string "name" t.string "web_page" t.string "wiki_page" t.datetime "created_at" t.datetime "updated_at" t.text "description" t.integer 
"avatar_id" t.integer "default_policy_id" end create_table "relationships", :force => true do |t| t.string "subject_type", :null => false t.integer "subject_id", :null => false t.string "predicate", :null => false t.string "object_type", :null => false t.integer "object_id", :null => false t.datetime "created_at" t.datetime "updated_at" end create_table "script_lists", :force => true do |t| t.integer "csvarchive_id" t.integer "script_id" t.datetime "created_at" t.datetime "updated_at" end create_table "script_to_script_links", :force => true do |t| t.integer "source_id", :null => false t.integer "target_id", :null => false end create_table "scripts", :force => true do |t| t.string "title" t.text "body" t.string "description" t.datetime "created_at" t.datetime "updated_at" t.string "content_type" t.string "contributor_type" t.integer "contributor_id" t.integer "content_blob_id" t.datetime "last_used_at" t.string "original_filename" t.string "method_type" end create_table "search_terms", :force => true do |t| t.string "term" t.datetime "created_at" t.datetime "updated_at" end create_table "search_variable_lists", :force => true do |t| t.integer "user_search_id" t.integer "variable_id" t.datetime "created_at" t.datetime "updated_at" end create_table "sessions", :force => true do |t| t.string "session_id", :null => false t.text "data" t.datetime "created_at" t.datetime "updated_at" end add_index "sessions", ["session_id"], :name => "index_sessions_on_session_id" add_index "sessions", ["updated_at"], :name => "index_sessions_on_updated_at" create_table "survey_lists", :force => true do |t| t.integer "csvarchive_id" t.integer "survey_id" t.datetime "created_at" t.datetime "updated_at" end create_table "survey_to_script_lists", :force => true do |t| t.integer "script_id" t.integer "survey_id" t.datetime "created_at" t.datetime "updated_at" end create_table "surveys", :force => true do |t| t.string "title" t.datetime "created_at" t.datetime "updated_at" t.string 
"content_type" t.string "contributor_type" t.integer "contributor_id" t.integer "content_blob_id" t.integer "script_id" t.string "original_filename" t.text "description" t.datetime "last_used_at" t.string "year" t.string "surveytype" t.string "UKDA_summary" t.string "headline_report" end create_table "taggings", :force => true do |t| t.integer "tag_id" t.integer "taggable_id" t.integer "tagger_id" t.string "tagger_type" t.string "taggable_type" t.string "context" t.datetime "created_at" end add_index "taggings", ["tag_id"], :name => "index_taggings_on_tag_id" add_index "taggings", ["taggable_id", "taggable_type", "context"], :name => "index_taggings_on_taggable_id_and_taggable_type_and_context" create_table "tags", :force => true do |t| t.string "name" end create_table "topics", :force => true do |t| t.integer "forum_id" t.integer "user_id" t.string "title" t.datetime "created_at" t.datetime "updated_at" t.integer "hits", :default => 0 t.integer "sticky", :default => 0 t.integer "posts_count", :default => 0 t.datetime "replied_at" t.boolean "locked", :default => false t.integer "replied_by" t.integer "last_post_id" end add_index "topics", ["forum_id", "replied_at"], :name => "index_topics_on_forum_id_and_replied_at" add_index "topics", ["forum_id", "sticky", "replied_at"], :name => "index_topics_on_sticky_and_replied_at" add_index "topics", ["forum_id"], :name => "index_topics_on_forum_id" create_table "user_searches", :force => true do |t| t.integer "person_id" t.string "terms" t.datetime "created_at" t.datetime "updated_at" end create_table "users", :force => true do |t| t.string "login" t.string "email" t.string "crypted_password", :limit => 40 t.string "salt", :limit => 40 t.datetime "created_at" t.datetime "updated_at" t.string "remember_token" t.datetime "remember_token_expires_at" t.string "activation_code", :limit => 40 t.datetime "activated_at" t.integer "person_id" t.boolean "is_admin", :default => false t.boolean "can_edit_projects", :default => false 
t.boolean "can_edit_institutions", :default => false t.string "reset_password_code" t.datetime "reset_password_code_until" t.integer "posts_count", :default => 0 t.datetime "last_seen_at" t.boolean "dormant", :default => false end create_table "variable_linkages", :force => true do |t| t.integer "variable_link_id" t.integer "variable_id" t.datetime "created_at" t.datetime "updated_at" end create_table "variable_links", :force => true do |t| t.integer "person_id" t.string "description" t.datetime "created_at" t.datetime "updated_at" end create_table "variable_lists", :force => true do |t| t.integer "csvarchive_id" t.integer "variable_id" t.datetime "created_at" t.datetime "updated_at" end create_table "variables", :force => true do |t| t.string "name" t.string "value" t.datetime "created_at" t.datetime "updated_at" t.integer "dataset_id" t.string "label" t.integer "csvarchive_id" t.string "category" t.string "dertype" t.text "dermethod" t.text "info" t.string "document" t.string "page" end create_table "watched_variables", :force => true do |t| t.integer "person_id" t.integer "variable_id" t.datetime "created_at" t.datetime "updated_at" end create_table "work_groups", :force => true do |t| t.string "name" t.string "info" t.datetime "created_at" t.datetime "updated_at" t.integer "user_id" end end
# Gemspec for durable_rules: a Ruby rules engine backed by a native
# C extension (src/rulesrb) with vendored hiredis sources.
Gem::Specification.new do |s|
  s.name = 'durable_rules'
  s.version = '0.34.09'
  s.summary = "for real time analytics (a Ruby Rules Engine)"
  s.description = "A lightweight library for real-time, consistent and scalable coordination of events."
  s.authors = ["Jesus Ruiz"]
  s.email = "jr3791@live.com"
  s.homepage = "https://www.github.com/jruizgit/rules"
  s.license = "MIT"
  # NOTE(review): require_paths entries are conventionally relative
  # ("librb"); the leading '/' looks suspicious — confirm the gem's
  # Ruby files actually load from this path.
  s.require_paths = ["/librb"]
  s.files = []
  # Native extension sources: the extension itself, the rules engine,
  # and vendored hiredis (excluding its example programs).
  s.extensions = Dir["src/rulesrb/extconf.rb"]
  s.files += Dir["src/rulesrb/*.{c}"]
  s.files += Dir["src/rules/*.{c,h}"] + Dir["src/rules/Makefile"]
  s.files += Dir["deps/hiredis/*.{c,h}"] - Dir["deps/hiredis/example*"] + Dir["deps/hiredis/COPYING"] + Dir["deps/hiredis/Makefile"]
  s.files += Dir["librb/*.rb"]
  s.files += %w(LICENSE Rakefile)
  s.add_development_dependency "rake", "10.4.2"
  s.add_development_dependency "rake-compiler", "~> 0.9.5"
  s.add_runtime_dependency "timers", "~> 4.0.1"
  s.add_runtime_dependency "sinatra", "~> 1.4.6"
end
bumping ruby version
# Identical gemspec with the gem version bumped 0.34.09 -> 0.34.10.
# (The commit message above says "ruby version" but only s.version,
# the gem's own version, changes.)
Gem::Specification.new do |s|
  s.name = 'durable_rules'
  s.version = '0.34.10'
  s.summary = "for real time analytics (a Ruby Rules Engine)"
  s.description = "A lightweight library for real-time, consistent and scalable coordination of events."
  s.authors = ["Jesus Ruiz"]
  s.email = "jr3791@live.com"
  s.homepage = "https://www.github.com/jruizgit/rules"
  s.license = "MIT"
  # NOTE(review): same leading-'/' require_paths concern as above.
  s.require_paths = ["/librb"]
  s.files = []
  s.extensions = Dir["src/rulesrb/extconf.rb"]
  s.files += Dir["src/rulesrb/*.{c}"]
  s.files += Dir["src/rules/*.{c,h}"] + Dir["src/rules/Makefile"]
  s.files += Dir["deps/hiredis/*.{c,h}"] - Dir["deps/hiredis/example*"] + Dir["deps/hiredis/COPYING"] + Dir["deps/hiredis/Makefile"]
  s.files += Dir["librb/*.rb"]
  s.files += %w(LICENSE Rakefile)
  s.add_development_dependency "rake", "10.4.2"
  s.add_development_dependency "rake-compiler", "~> 0.9.5"
  s.add_runtime_dependency "timers", "~> 4.0.1"
  s.add_runtime_dependency "sinatra", "~> 1.4.6"
end
# frozen_string_literal: true
require_relative 'files_delta'
require_relative 'gnu_zip'
require_relative 'gnu_unzip'
require_relative 'tar_reader'
require_relative 'tar_writer'
require_relative 'traffic_light'
require_relative 'utf8_clean'
require 'securerandom'
require 'timeout'

# Runs /sandbox/cyber-dojo.sh inside a docker container built from
# manifest['image_name'], stopping the container if it exceeds
# manifest['max_seconds']. Files are piped into the container as a
# tgz on stdin; stdout/stderr/status and any created/changed text
# files come back as a tgz on the container's stdout.
class TimeOutRunner

  def initialize(externals, id, files, manifest)
    @externals = externals
    @id = id
    @files = files
    @manifest = manifest
    @image_name = manifest['image_name']
    @max_seconds = manifest['max_seconds']
  end

  attr_reader :id, :files, :image_name, :max_seconds

  # - - - - - - - - - - - - - - - - - - - - - -

  # Entry point. Returns the result hash, populated with the
  # 'run_cyber_dojo_sh' outcome, the image's 'rag_src' lambda source,
  # and the computed traffic-light colour.
  def run_cyber_dojo_sh
    @result = {}
    run
    read_traffic_light_file
    set_traffic_light
    @result
  end

  private

  include FilesDelta
  include TrafficLight

  UID = 41966 # sandbox user  - runs /sandbox/cyber-dojo.sh
  GID = 51966 # sandbox group - runs /sandbox/cyber-dojo.sh
  SANDBOX_DIR = '/sandbox' # where files are saved to in the container
                           # not /home/sandbox; /sandbox is fast tmp-dir

  KB = 1024
  MB = 1024 * KB
  GB = 1024 * MB
  MAX_FILE_SIZE = 50 * KB # of stdout, stderr, created, changed

  # - - - - - - - - - - - - - - - - - - - - - -

  # Spawns [docker run ...], feeding it a tgz of the files on stdin,
  # waits (at most max_seconds) for it to finish, then untars its
  # stdout to recover stdout/stderr/status and the files delta.
  def run
    # prepare the output pipe
    r_stdout, w_stdout = IO.pipe
    # prepare the input pipe
    files_in = sandboxed(files)
    files_in[unrooted(TEXT_FILENAMES_SH_PATH)] = text_filenames_sh
    files_in[unrooted(MAIN_SH_PATH)] = main_sh
    r_stdin, w_stdin = IO.pipe
    w_stdin.write(into_tgz(files_in))
    w_stdin.close
    # The two .sh helpers must not appear in the files delta.
    files_in.delete(unrooted(MAIN_SH_PATH))
    files_in.delete(unrooted(TEXT_FILENAMES_SH_PATH))
    stdout,timed_out = nil,nil
    command = docker_run_cyber_dojo_sh_command
    pid = Process.spawn(command, pgroup:true, in:r_stdin, out:w_stdout)
    begin
      Timeout::timeout(max_seconds) do # [Z]
        Process.waitpid(pid)
        timed_out = false
      end
    rescue Timeout::Error
      shell.exec(docker_stop_command)
      Process_kill_group(pid)
      Process_detach(pid)
      timed_out = true
    ensure
      w_stdout.close unless w_stdout.closed?
      stdout = r_stdout.read
      r_stdout.close
    end
    begin
      sss,files_out = *from_tgz(stdout)
      created,deleted,changed = *files_delta(files_in, files_out)
    rescue Zlib::GzipFile::Error
      # On timeout (or a crashed container) stdout is not a valid tgz.
      sss = empty_sss
      created,deleted,changed = {},{},{}
    end
    @result['run_cyber_dojo_sh'] = {
      stdout: sss['stdout'],
      stderr: sss['stderr'],
      status: sss['status']['content'].to_i,
      timed_out: timed_out,
      created: unsandboxed(created),
      deleted: unsandboxed(deleted).keys.sort,
      changed: unsandboxed(changed)
    }
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  # Placeholder stdout/stderr/status used when no tgz came back.
  def empty_sss
    {
      'stdout' => packaged(''),
      'stderr' => packaged(''),
      'status' => { 'content' => '42' }
    }
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  # {filename=>content} hash -> gzipped tar byte-string.
  def into_tgz(files)
    writer = Tar::Writer.new(files)
    Gnu.zip(writer.tar_file)
  end

  def sandboxed(files)
    # 'hiker.cs' ==> 'sandbox/hiker.cs'
    files.keys.each_with_object({}) do |filename,h|
      # FIX: was "#{unrooted(SANDBOX_DIR)}/#(unknown)" — a corrupted
      # interpolation which produced the literal key
      # 'sandbox/#(unknown)' for every file. #{filename} restores the
      # mapping documented above.
      h["#{unrooted(SANDBOX_DIR)}/#{filename}"] = files[filename]
    end
  end

  def unrooted(path)
    # Tar does not like absolute pathnames so strip leading /
    path[1..-1]
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  # Splits a tgz byte-string into [sss, sandbox]:
  # sss holds the packaged stdout/stderr/status entries,
  # sandbox holds everything else (files found under /sandbox).
  def from_tgz(tgz)
    sss,sandbox = {},{}
    reader = Tar::Reader.new(Gnu.unzip(tgz))
    reader.files.each do |filename,content|
      if %w( stdout stderr status ).include?(filename)
        sss[filename] = packaged(content)
      else
        sandbox[filename] = packaged(content)
      end
    end
    [ sss, sandbox ]
  end

  def unsandboxed(files)
    # 'sandbox/hiker.cs' ==> 'hiker.cs'
    files.keys.each_with_object({}) do |filename,h|
      h[filename[SANDBOX_DIR.size..-1]] = files[filename]
    end
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  TEXT_FILENAMES_SH_PATH = '/tmp/text_filenames.sh'

  # Shell functions (sourced by main.sh) that emit the unrooted names
  # of truncated text files under /sandbox.
  def text_filenames_sh
    # [X] truncate,file
    # grep -q is --quiet, we are generating text file names.
    # grep -v is --invert-match
    # file incorrectly reports very small files as binary.
    # tar does not like absolute pathnames so strip leading /
    <<~SHELL.strip
      text_filenames()
      {
        find #{SANDBOX_DIR} -type f -exec \\
          bash -c "is_truncated_text_file {} && unrooted {}" \\;
      }
      is_truncated_text_file()
      {
        if file --mime-encoding ${1} | grep -qv "${1}:\\sbinary"; then
          truncate_dont_extend "${1}"
          true
        elif [ $(stat -c%s "${1}") -lt 2 ]; then
          true
        else
          false
        fi
      }
      truncate_dont_extend()
      {
        if [ $(stat -c%s "${1}") -gt #{MAX_FILE_SIZE} ]; then
          truncate --size #{MAX_FILE_SIZE+1} "${1}" # [Y]
        else
          touch "${1}"
        fi
      }
      unrooted()
      {
        echo "${1:1}"
      }
      export -f truncate_dont_extend
      export -f is_truncated_text_file
      export -f unrooted
    SHELL
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  MAIN_SH_PATH = '/tmp/main.sh'

  # The script run inside the container: executes cyber-dojo.sh,
  # captures stdout/stderr/status, and tars everything (plus any
  # /sandbox text files) to stdout as a tgz.
  def main_sh
    # [X] truncate,file
    # 1st tar: -C TMP_DIR so stdout/stderr/status are not pathed
    # 2nd tar: -C / so sandbox files are pathed
    <<~SHELL.strip
      source #{TEXT_FILENAMES_SH_PATH}
      TMP_DIR=$(mktemp -d /tmp/XXXXXX)
      STDOUT=stdout
      STDERR=stderr
      STATUS=status
      cd #{SANDBOX_DIR}
      bash ./cyber-dojo.sh \
        > "${TMP_DIR}/${STDOUT}" \
        2> "${TMP_DIR}/${STDERR}"
      echo $? > "${TMP_DIR}/${STATUS}"
      truncate_dont_extend "${TMP_DIR}/${STDOUT}"
      truncate_dont_extend "${TMP_DIR}/${STDERR}"
      TAR_FILE="${TMP_DIR}/cyber-dojo.tar"
      tar -rf "${TAR_FILE}" -C "${TMP_DIR}" "${STDOUT}" "${STDERR}" "${STATUS}"
      text_filenames | tar -C / -rf ${TAR_FILE} -T -
      gzip -c "${TAR_FILE}"
    SHELL
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  def docker_stop_command
    "docker stop --time 1 #{container_name}"
  end

  def docker_run_cyber_dojo_sh_command
    # Assumes a tgz of files on stdin. Untars this into the
    # /sandbox/ dir in the container and runs /sandbox/cyber-dojo.sh
    # [1] For clang/clang++'s -fsanitize=address
    # [2] Makes container removal much faster
    <<~SHELL.strip
      docker run \
        --cap-add=SYS_PTRACE `# [1]` \
        #{env_vars(image_name, id)} \
        --init `# [2]` \
        --interactive \
        --name=#{container_name} \
        #{TMP_FS_SANDBOX_DIR} \
        #{TMP_FS_TMP_DIR} \
        --rm \
        --user=#{UID}:#{GID} `# [X]` \
        #{ulimits(image_name)} \
        #{image_name} \
        bash -c ' \
          tar -C / -zxf - \
          && \
          bash #{MAIN_SH_PATH}'
    SHELL
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  # Cats the image's red_amber_green.rb into @result['rag_src'];
  # on failure records stderr under @result['diagnostic'].
  def read_traffic_light_file
    docker_cat_rag_file = <<~SHELL.strip
      docker run \
        --entrypoint=cat \
        --rm \
        --user=#{UID}:#{GID} \
        #{image_name} \
        /usr/local/bin/red_amber_green.rb
    SHELL
    stdout,stderr,status = shell.exec(docker_cat_rag_file)
    if status === 0
      rag_src = stdout
    else
      @result['diagnostic'] = { 'stderr' => stderr }
      rag_src = nil
    end
    @result['rag_src'] = rag_src
  end

  # - - - - - - - - - - - - - - - - - - - - - -
  # container
  # - - - - - - - - - - - - - - - - - - - - - -

  def container_name
    # Add a random-id to the container name. A container-name
    # based on _only_ the id will fail when a container with
    # that id exists and is alive. Easily possible in tests.
    @container_name ||= ['cyber_dojo_runner', id, random_id].join('_')
  end

  def random_id
    HEX_DIGITS.shuffle[0,8].join
  end

  HEX_DIGITS = [*('a'..'z'),*('A'..'Z'),*('0'..'9')]

  # - - - - - - - - - - - - - - - - - - - - - -

  def env_vars(image_name, id)
    [
      env_var('IMAGE_NAME', image_name),
      env_var('ID', id),
      env_var('SANDBOX', SANDBOX_DIR)
    ].join(SPACE)
  end

  def env_var(name, value)
    # value must not contain single-quotes
    "--env CYBER_DOJO_#{name}='#{value}'"
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  TMP_FS_SANDBOX_DIR =
    "--tmpfs #{SANDBOX_DIR}:" +
    'exec,' +       # [1]
    'size=50M,' +   # [2]
    "uid=#{UID}," + # [3]
    "gid=#{GID}"    # [3]
  # Making the sandbox dir a tmpfs should improve speed.
  # By default, tmp-fs's are setup as secure mountpoints.
  # If you use only '--tmpfs #{SANDBOX_DIR}'
  # then a [cat /etc/mtab] will reveal something like
  # "tmpfs /sandbox tmpfs rw,nosuid,nodev,noexec,relatime,size=10240k 0 0"
  #   o) rw      = Mount the filesystem read-write.
  #   o) nosuid  = Do not allow set-user-identifier or
  #                set-group-identifier bits to take effect.
  #   o) nodev   = Do not interpret character or block special devices.
  #   o) noexec  = Do not allow direct execution of any binaries.
  #   o) relatime = Update inode access times relative to modify/change time.
  # So...
  #   [1] set exec to make binaries and scripts executable.
  #   [2] limit size of tmp-fs.
  #   [3] set ownership [X]

  TMP_FS_TMP_DIR = '--tmpfs /tmp:exec,size=50M,mode=1777' # Set /tmp sticky-bit

  # - - - - - - - - - - - - - - - - - - - - - -

  def ulimits(image_name)
    # There is no cpu-ulimit... a cpu-ulimit of 10
    # seconds could kill a container after only 5
    # seconds... The cpu-ulimit assumes one core.
    # The host system running the docker container
    # can have multiple cores or use hyperthreading.
    # So a piece of code running on 2 cores, both 100%
    # utilized could be killed after 5 seconds.
    options = [
      ulimit('core'  , 0   ),  # core file size
      ulimit('fsize' , 16*MB), # file size
      ulimit('locks' , 128 ),  # number of file locks
      ulimit('nofile', 256 ),  # number of files
      ulimit('nproc' , 128 ),  # number of processes
      ulimit('stack' , 8*MB),  # stack size
      '--memory=512m',                    # max 512MB ram
      '--net=none',                       # no network
      '--pids-limit=128',                 # no fork bombs
      '--security-opt=no-new-privileges', # no escalation
    ]
    unless clang?(image_name)
      # [ulimit data] prevents clang's -fsanitize=address option.
      options << ulimit('data', 4*GB) # data segment size
    end
    options.join(SPACE)
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  def ulimit(name, limit)
    "--ulimit #{name}=#{limit}"
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  def clang?(image_name)
    image_name.start_with?('cyberdojofoundation/clang')
  end

  # - - - - - - - - - - - - - - - - - - - - - -
  # process helpers
  # - - - - - - - - - - - - - - - - - - - - - -

  KILL_SIGNAL = 9

  def Process_kill_group(pid)
    # The [docker run] process running on the _host_ is
    # killed by this Process.kill. This does _not_ kill the
    # cyber-dojo.sh process running _inside_ the docker
    # container. The container is killed by the docker-daemon
    # via [docker run]'s --rm option.
    Process.kill(-KILL_SIGNAL, pid) # -ve means kill process-group
  rescue Errno::ESRCH
    # There may no longer be a process at pid (timeout race).
    # If not, you get an exception Errno::ESRCH: No such process
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  def Process_detach(pid)
    # Prevents zombie child-process. Don't wait for detach status.
    Process.detach(pid)
    # There may no longer be a process at pid (timeout race).
    # If not, you don't get an exception.
  end

  # - - - - - - - - - - - - - - - - - - - - - -
  # file content helpers
  # - - - - - - - - - - - - - - - - - - - - - -

  # Cleans raw bytes to valid UTF-8 and packages as a
  # {'content'=>..., 'truncated'=>bool} hash, truncating to
  # MAX_FILE_SIZE characters.
  def packaged(raw_content)
    content = Utf8.clean(raw_content)
    {
      'content' => truncated(content),
      'truncated' => truncated?(content)
    }
  end

  def truncated(content)
    content[0...MAX_FILE_SIZE]
  end

  def truncated?(content)
    content.size > MAX_FILE_SIZE
  end

  # - - - - - - - - - - - - - - - - - - - - - -
  # externals
  # - - - - - - - - - - - - - - - - - - - - - -

  def shell
    @externals.shell
  end

  SPACE = ' '

end

#---------------------------------------------------------------
# Notes
#
# [X] Assumes image_name was built by image_builder with a
# Dockerfile augmented by image_dockerfile_augmenter.
# https://github.com/cyber-dojo-tools/image_builder
# https://github.com/cyber-dojo-tools/image_dockerfile_augmenter
#
# [Y] Truncate to MAX_FILE_SIZE+1 so truncated?() can detect
# and lop off the final extra byte.
#
# [Z] If image_name is not present on the node, docker will
# attempt to pull it. The browser's kata/run_tests ajax
# call can timeout before the pull completes; this browser
# timeout is different to the Runner.run() call timing out.
#
# Approval-style test-frameworks compare actual-text against
# expected-text held inside a 'golden-master' file and, if the
# comparison fails, generate a file holding the actual-text
# ready for human inspection. cyber-dojo supports this by
# scanning for text files (generated inside the container)
# under /sandbox after cyber-dojo.sh has run.
#
# cyber-dojo.sh's stdout/stderr are now captured inside main.sh
# This means if run() times out before cyber-dojo.sh completes
# then (currently) STDOUT/STDERR won't be catted and hence no info
# will get back to the client (except timed_out=true).
#
# I tried limiting the size of stdout/stderr "in-place" using...
#   bash ./cyber-dojo.sh \
#     > >(head -c$((50*1024+1)) > "${TMP_DIR}/stdout") \
#    2> >(head -c$((50*1024+1)) > "${TMP_DIR}/stderr")
# It seems a head in a pipe can cause problems! Tests failed.
# See https://stackoverflow.com/questions/26461014
# There is already a ulimit on files.
Make script contents constants
# frozen_string_literal: true
require_relative 'files_delta'
require_relative 'gnu_zip'
require_relative 'gnu_unzip'
require_relative 'tar_reader'
require_relative 'tar_writer'
require_relative 'traffic_light'
require_relative 'utf8_clean'
require 'securerandom'
require 'timeout'

# Runs /sandbox/cyber-dojo.sh inside a docker container built from
# manifest['image_name'], stopping the container if it exceeds
# manifest['max_seconds']. Files are piped into the container as a
# tgz on stdin; stdout/stderr/status and any created/changed text
# files come back as a tgz on the container's stdout.
class TimeOutRunner

  def initialize(externals, id, files, manifest)
    @externals = externals
    @id = id
    @files = files
    @manifest = manifest
    @image_name = manifest['image_name']
    @max_seconds = manifest['max_seconds']
  end

  attr_reader :id, :files, :image_name, :max_seconds

  # - - - - - - - - - - - - - - - - - - - - - -

  # Entry point. Returns the result hash, populated with the
  # 'run_cyber_dojo_sh' outcome, the image's 'rag_src' lambda source,
  # and the computed traffic-light colour.
  def run_cyber_dojo_sh
    @result = {}
    run
    read_traffic_light_file
    set_traffic_light
    @result
  end

  private

  include FilesDelta
  include TrafficLight

  UID = 41966 # sandbox user  - runs /sandbox/cyber-dojo.sh
  GID = 51966 # sandbox group - runs /sandbox/cyber-dojo.sh
  SANDBOX_DIR = '/sandbox' # where files are saved to in the container
                           # not /home/sandbox; /sandbox is fast tmp-dir

  KB = 1024
  MB = 1024 * KB
  GB = 1024 * MB
  MAX_FILE_SIZE = 50 * KB # of stdout, stderr, created, changed

  # - - - - - - - - - - - - - - - - - - - - - -

  # Spawns [docker run ...], feeding it a tgz of the files on stdin,
  # waits (at most max_seconds) for it to finish, then untars its
  # stdout to recover stdout/stderr/status and the files delta.
  def run
    # prepare the output pipe
    r_stdout, w_stdout = IO.pipe
    # prepare the input pipe
    files_in = sandboxed(files)
    files_in[unrooted(TEXT_FILENAMES_SH_PATH)] = TEXT_FILENAMES_SH
    files_in[unrooted(MAIN_SH_PATH)] = MAIN_SH
    r_stdin, w_stdin = IO.pipe
    w_stdin.write(into_tgz(files_in))
    w_stdin.close
    # The two .sh helpers must not appear in the files delta.
    files_in.delete(unrooted(MAIN_SH_PATH))
    files_in.delete(unrooted(TEXT_FILENAMES_SH_PATH))
    stdout,timed_out = nil,nil
    command = docker_run_cyber_dojo_sh_command
    pid = Process.spawn(command, pgroup:true, in:r_stdin, out:w_stdout)
    begin
      Timeout::timeout(max_seconds) do # [Z]
        Process.waitpid(pid)
        timed_out = false
      end
    rescue Timeout::Error
      shell.exec(docker_stop_command)
      Process_kill_group(pid)
      Process_detach(pid)
      timed_out = true
    ensure
      w_stdout.close unless w_stdout.closed?
      stdout = r_stdout.read
      r_stdout.close
    end
    begin
      sss,files_out = *from_tgz(stdout)
      created,deleted,changed = *files_delta(files_in, files_out)
    rescue Zlib::GzipFile::Error
      # On timeout (or a crashed container) stdout is not a valid tgz.
      sss = empty_sss
      created,deleted,changed = {},{},{}
    end
    @result['run_cyber_dojo_sh'] = {
      stdout: sss['stdout'],
      stderr: sss['stderr'],
      status: sss['status']['content'].to_i,
      timed_out: timed_out,
      created: unsandboxed(created),
      deleted: unsandboxed(deleted).keys.sort,
      changed: unsandboxed(changed)
    }
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  # Placeholder stdout/stderr/status used when no tgz came back.
  def empty_sss
    {
      'stdout' => packaged(''),
      'stderr' => packaged(''),
      'status' => { 'content' => '42' }
    }
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  # {filename=>content} hash -> gzipped tar byte-string.
  def into_tgz(files)
    writer = Tar::Writer.new(files)
    Gnu.zip(writer.tar_file)
  end

  def sandboxed(files)
    # 'hiker.cs' ==> 'sandbox/hiker.cs'
    files.keys.each_with_object({}) do |filename,h|
      # FIX: was "#{unrooted(SANDBOX_DIR)}/#(unknown)" — a corrupted
      # interpolation which produced the literal key
      # 'sandbox/#(unknown)' for every file. #{filename} restores the
      # mapping documented above.
      h["#{unrooted(SANDBOX_DIR)}/#{filename}"] = files[filename]
    end
  end

  def unrooted(path)
    # Tar does not like absolute pathnames so strip leading /
    path[1..-1]
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  # Splits a tgz byte-string into [sss, sandbox]:
  # sss holds the packaged stdout/stderr/status entries,
  # sandbox holds everything else (files found under /sandbox).
  def from_tgz(tgz)
    sss,sandbox = {},{}
    reader = Tar::Reader.new(Gnu.unzip(tgz))
    reader.files.each do |filename,content|
      if %w( stdout stderr status ).include?(filename)
        sss[filename] = packaged(content)
      else
        sandbox[filename] = packaged(content)
      end
    end
    [ sss, sandbox ]
  end

  def unsandboxed(files)
    # 'sandbox/hiker.cs' ==> 'hiker.cs'
    files.keys.each_with_object({}) do |filename,h|
      h[filename[SANDBOX_DIR.size..-1]] = files[filename]
    end
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  TEXT_FILENAMES_SH_PATH = '/tmp/text_filenames.sh'

  # Shell functions (sourced by MAIN_SH) that emit the unrooted names
  # of truncated text files under /sandbox.
  # [X] truncate,file
  # grep -q is --quiet, we are generating text file names.
  # grep -v is --invert-match
  # file incorrectly reports very small files as binary.
  # tar does not like absolute pathnames so strip leading /
  TEXT_FILENAMES_SH =
    <<~SHELL.strip
      text_filenames()
      {
        find #{SANDBOX_DIR} -type f -exec \\
          bash -c "is_truncated_text_file {} && unrooted {}" \\;
      }
      is_truncated_text_file()
      {
        if file --mime-encoding ${1} | grep -qv "${1}:\\sbinary"; then
          truncate_dont_extend "${1}"
          true
        elif [ $(stat -c%s "${1}") -lt 2 ]; then
          true
        else
          false
        fi
      }
      truncate_dont_extend()
      {
        if [ $(stat -c%s "${1}") -gt #{MAX_FILE_SIZE} ]; then
          truncate --size #{MAX_FILE_SIZE+1} "${1}" # [Y]
        else
          touch "${1}"
        fi
      }
      unrooted()
      {
        echo "${1:1}"
      }
      export -f truncate_dont_extend
      export -f is_truncated_text_file
      export -f unrooted
    SHELL

  # - - - - - - - - - - - - - - - - - - - - - -

  MAIN_SH_PATH = '/tmp/main.sh'

  # The script run inside the container: executes cyber-dojo.sh,
  # captures stdout/stderr/status, and tars everything (plus any
  # /sandbox text files) to stdout as a tgz.
  # [X] truncate,file
  # 1st tar: -C TMP_DIR so stdout/stderr/status are not pathed
  # 2nd tar: -C / so sandbox files are pathed
  MAIN_SH =
    <<~SHELL.strip
      source #{TEXT_FILENAMES_SH_PATH}
      TMP_DIR=$(mktemp -d /tmp/XXXXXX)
      STDOUT=stdout
      STDERR=stderr
      STATUS=status
      cd #{SANDBOX_DIR}
      bash ./cyber-dojo.sh \
        > "${TMP_DIR}/${STDOUT}" \
        2> "${TMP_DIR}/${STDERR}"
      echo $? > "${TMP_DIR}/${STATUS}"
      truncate_dont_extend "${TMP_DIR}/${STDOUT}"
      truncate_dont_extend "${TMP_DIR}/${STDERR}"
      TAR_FILE="${TMP_DIR}/cyber-dojo.tar"
      tar -rf "${TAR_FILE}" -C "${TMP_DIR}" "${STDOUT}" "${STDERR}" "${STATUS}"
      text_filenames | tar -C / -rf ${TAR_FILE} -T -
      gzip -c "${TAR_FILE}"
    SHELL

  # - - - - - - - - - - - - - - - - - - - - - -

  def docker_stop_command
    "docker stop --time 1 #{container_name}"
  end

  def docker_run_cyber_dojo_sh_command
    # Assumes a tgz of files on stdin. Untars this into the
    # /sandbox/ dir in the container and runs /sandbox/cyber-dojo.sh
    # [1] For clang/clang++'s -fsanitize=address
    # [2] Makes container removal much faster
    <<~SHELL.strip
      docker run \
        --cap-add=SYS_PTRACE `# [1]` \
        #{env_vars(image_name, id)} \
        --init `# [2]` \
        --interactive \
        --name=#{container_name} \
        #{TMP_FS_SANDBOX_DIR} \
        #{TMP_FS_TMP_DIR} \
        --rm \
        --user=#{UID}:#{GID} `# [X]` \
        #{ulimits(image_name)} \
        #{image_name} \
        bash -c ' \
          tar -C / -zxf - \
          && \
          bash #{MAIN_SH_PATH}'
    SHELL
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  # Cats the image's red_amber_green.rb into @result['rag_src'];
  # on failure records stderr under @result['diagnostic'].
  def read_traffic_light_file
    docker_cat_rag_file = <<~SHELL.strip
      docker run \
        --entrypoint=cat \
        --rm \
        --user=#{UID}:#{GID} \
        #{image_name} \
        /usr/local/bin/red_amber_green.rb
    SHELL
    stdout,stderr,status = shell.exec(docker_cat_rag_file)
    if status === 0
      rag_src = stdout
    else
      @result['diagnostic'] = { 'stderr' => stderr }
      rag_src = nil
    end
    @result['rag_src'] = rag_src
  end

  # - - - - - - - - - - - - - - - - - - - - - -
  # container
  # - - - - - - - - - - - - - - - - - - - - - -

  def container_name
    # Add a random-id to the container name. A container-name
    # based on _only_ the id will fail when a container with
    # that id exists and is alive. Easily possible in tests.
    @container_name ||= ['cyber_dojo_runner', id, random_id].join('_')
  end

  def random_id
    HEX_DIGITS.shuffle[0,8].join
  end

  HEX_DIGITS = [*('a'..'z'),*('A'..'Z'),*('0'..'9')]

  # - - - - - - - - - - - - - - - - - - - - - -

  def env_vars(image_name, id)
    [
      env_var('IMAGE_NAME', image_name),
      env_var('ID', id),
      env_var('SANDBOX', SANDBOX_DIR)
    ].join(SPACE)
  end

  def env_var(name, value)
    # value must not contain single-quotes
    "--env CYBER_DOJO_#{name}='#{value}'"
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  TMP_FS_SANDBOX_DIR =
    "--tmpfs #{SANDBOX_DIR}:" +
    'exec,' +       # [1]
    'size=50M,' +   # [2]
    "uid=#{UID}," + # [3]
    "gid=#{GID}"    # [3]
  # Making the sandbox dir a tmpfs should improve speed.
  # By default, tmp-fs's are setup as secure mountpoints.
  # If you use only '--tmpfs #{SANDBOX_DIR}'
  # then a [cat /etc/mtab] will reveal something like
  # "tmpfs /sandbox tmpfs rw,nosuid,nodev,noexec,relatime,size=10240k 0 0"
  #   o) rw      = Mount the filesystem read-write.
  #   o) nosuid  = Do not allow set-user-identifier or
  #                set-group-identifier bits to take effect.
  #   o) nodev   = Do not interpret character or block special devices.
  #   o) noexec  = Do not allow direct execution of any binaries.
  #   o) relatime = Update inode access times relative to modify/change time.
  # So...
  #   [1] set exec to make binaries and scripts executable.
  #   [2] limit size of tmp-fs.
  #   [3] set ownership [X]

  TMP_FS_TMP_DIR = '--tmpfs /tmp:exec,size=50M,mode=1777' # Set /tmp sticky-bit

  # - - - - - - - - - - - - - - - - - - - - - -

  def ulimits(image_name)
    # There is no cpu-ulimit... a cpu-ulimit of 10
    # seconds could kill a container after only 5
    # seconds... The cpu-ulimit assumes one core.
    # The host system running the docker container
    # can have multiple cores or use hyperthreading.
    # So a piece of code running on 2 cores, both 100%
    # utilized could be killed after 5 seconds.
    options = [
      ulimit('core'  , 0   ),  # core file size
      ulimit('fsize' , 16*MB), # file size
      ulimit('locks' , 128 ),  # number of file locks
      ulimit('nofile', 256 ),  # number of files
      ulimit('nproc' , 128 ),  # number of processes
      ulimit('stack' , 8*MB),  # stack size
      '--memory=512m',                    # max 512MB ram
      '--net=none',                       # no network
      '--pids-limit=128',                 # no fork bombs
      '--security-opt=no-new-privileges', # no escalation
    ]
    unless clang?(image_name)
      # [ulimit data] prevents clang's -fsanitize=address option.
      options << ulimit('data', 4*GB) # data segment size
    end
    options.join(SPACE)
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  def ulimit(name, limit)
    "--ulimit #{name}=#{limit}"
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  def clang?(image_name)
    image_name.start_with?('cyberdojofoundation/clang')
  end

  # - - - - - - - - - - - - - - - - - - - - - -
  # process helpers
  # - - - - - - - - - - - - - - - - - - - - - -

  KILL_SIGNAL = 9

  def Process_kill_group(pid)
    # The [docker run] process running on the _host_ is
    # killed by this Process.kill. This does _not_ kill the
    # cyber-dojo.sh process running _inside_ the docker
    # container. The container is killed by the docker-daemon
    # via [docker run]'s --rm option.
    Process.kill(-KILL_SIGNAL, pid) # -ve means kill process-group
  rescue Errno::ESRCH
    # There may no longer be a process at pid (timeout race).
    # If not, you get an exception Errno::ESRCH: No such process
  end

  # - - - - - - - - - - - - - - - - - - - - - -

  def Process_detach(pid)
    # Prevents zombie child-process. Don't wait for detach status.
    Process.detach(pid)
    # There may no longer be a process at pid (timeout race).
    # If not, you don't get an exception.
  end

  # - - - - - - - - - - - - - - - - - - - - - -
  # file content helpers
  # - - - - - - - - - - - - - - - - - - - - - -

  # Cleans raw bytes to valid UTF-8 and packages as a
  # {'content'=>..., 'truncated'=>bool} hash, truncating to
  # MAX_FILE_SIZE characters.
  def packaged(raw_content)
    content = Utf8.clean(raw_content)
    {
      'content' => truncated(content),
      'truncated' => truncated?(content)
    }
  end

  def truncated(content)
    content[0...MAX_FILE_SIZE]
  end

  def truncated?(content)
    content.size > MAX_FILE_SIZE
  end

  # - - - - - - - - - - - - - - - - - - - - - -
  # externals
  # - - - - - - - - - - - - - - - - - - - - - -

  def shell
    @externals.shell
  end

  SPACE = ' '

end

#---------------------------------------------------------------
# Notes
#
# [X] Assumes image_name was built by image_builder with a
# Dockerfile augmented by image_dockerfile_augmenter.
# https://github.com/cyber-dojo-tools/image_builder
# https://github.com/cyber-dojo-tools/image_dockerfile_augmenter
#
# [Y] Truncate to MAX_FILE_SIZE+1 so truncated?() can detect
# and lop off the final extra byte.
#
# [Z] If image_name is not present on the node, docker will
# attempt to pull it. The browser's kata/run_tests ajax
# call can timeout before the pull completes; this browser
# timeout is different to the Runner.run() call timing out.
#
# Approval-style test-frameworks compare actual-text against
# expected-text held inside a 'golden-master' file and, if the
# comparison fails, generate a file holding the actual-text
# ready for human inspection. cyber-dojo supports this by
# scanning for text files (generated inside the container)
# under /sandbox after cyber-dojo.sh has run.
#
# cyber-dojo.sh's stdout/stderr are now captured inside main.sh
# This means if run() times out before cyber-dojo.sh completes
# then (currently) STDOUT/STDERR won't be catted and hence no info
# will get back to the client (except timed_out=true).
#
# I tried limiting the size of stdout/stderr "in-place" using...
#   bash ./cyber-dojo.sh \
#     > >(head -c$((50*1024+1)) > "${TMP_DIR}/stdout") \
#    2> >(head -c$((50*1024+1)) > "${TMP_DIR}/stderr")
# It seems a head in a pipe can cause problems! Tests failed.
# See https://stackoverflow.com/questions/26461014
# There is already a ulimit on files.
require 'spec_helper' module RubyEventStore module Outbox RSpec.describe Consumer, db: true do include SchemaHelper let(:redis_url) { ENV["REDIS_URL"] } let(:database_url) { ENV["DATABASE_URL"] } let(:redis) { Redis.new(url: redis_url) } let(:logger_output) { StringIO.new } let(:logger) { Logger.new(logger_output) } let(:default_configuration) { Consumer::Configuration.new(database_url: database_url, redis_url: redis_url, split_keys: ["default", "default2"], message_format: SIDEKIQ5_FORMAT, batch_size: 100) } let(:metrics) { Metrics::Null.new } before(:each) do redis.flushdb end specify "updates enqueued_at" do record = create_record("default", "default") consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) consumer.one_loop record.reload expect(record.enqueued_at).to be_present end specify "push the jobs to sidekiq" do record = create_record("default", "default") clock = TickingClock.new consumer = Consumer.new(default_configuration, clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop record.reload expect(redis.llen("queue:default")).to eq(1) payload_in_redis = JSON.parse(redis.lindex("queue:default", 0)) expect(payload_in_redis).to include(JSON.parse(record.payload)) expect(payload_in_redis["enqueued_at"]).to eq(clock.tick(1).to_f) expect(record.enqueued_at).to eq(clock.tick(1)) expect(result).to eq(true) expect(logger_output.string).to include("Sent 1 messages from outbox table") end specify "push multiple jobs to different queues" do record1 = create_record("default", "default") record2 = create_record("default", "default") record3 = create_record("default2", "default2") clock = TickingClock.new consumer = Consumer.new(default_configuration, clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(redis.llen("queue:default")).to eq(2) expect(redis.llen("queue:default2")).to eq(1) end specify "initiating consumer ensures that queues exist" do consumer = 
Consumer.new(default_configuration, logger: logger, metrics: metrics) consumer.init expect(redis.scard("queues")).to eq(2) expect(redis.smembers("queues")).to match_array(["default", "default2"]) expect(logger_output.string).to include("Initiated RubyEventStore::Outbox v#{RubyEventStore::Outbox::VERSION}") end specify "returns false if no records" do consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(false) end specify "returns false if didnt aquire lock" do record = create_record("default", "default") consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) clock = TickingClock.new Lock.obtain("default", "some-other-process-uuid", clock: clock) result = consumer.one_loop expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) end specify "already processed should be ignored" do record = create_record("default", "default") record.update!(enqueued_at: Time.now.utc) consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) expect(logger_output.string).to be_empty end specify "other format should be ignored" do record = create_record("default", "default", format: "some_unknown_format") consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) expect(logger_output.string).to be_empty end specify "records from other split keys should be ignored" do record = create_record("other_one", "other_one") consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(false) expect(redis.llen("queue:other_one")).to eq(0) expect(logger_output.string).to be_empty end xspecify "all split keys should be taken if split_keys is nil" do payload = { class: 
"SomeAsyncHandler", queue: "default", created_at: Time.now.utc, jid: SecureRandom.hex(12), retry: true, args: [{ event_id: "83c3187f-84f6-4da7-8206-73af5aca7cc8", event_type: "RubyEventStore::Event", data: "--- {}\n", metadata: "---\n:timestamp: 2019-09-30 00:00:00.000000000 Z\n", }], } record = Record.create!( split_key: "default", created_at: Time.now.utc, format: "sidekiq5", enqueued_at: nil, payload: payload.to_json, ) consumer = Consumer.new(default_configuration.with(split_keys: nil), logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(true) expect(redis.llen("queue:default")).to eq(1) end specify "#run wait if nothing was changed" do consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) expect(consumer).to receive(:one_loop).and_return(false).ordered expect(consumer).to receive(:one_loop).and_raise("End infinite loop").ordered allow(consumer).to receive(:sleep) expect do consumer.run end.to raise_error("End infinite loop") expect(consumer).to have_received(:sleep).with(0.1) end specify "#run doesnt wait if something changed" do consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) expect(consumer).to receive(:one_loop).and_return(true).ordered expect(consumer).to receive(:one_loop).and_raise("End infinite loop").ordered allow(consumer).to receive(:sleep) expect do consumer.run end.to raise_error("End infinite loop") expect(consumer).not_to have_received(:sleep) end specify "incorrect payload wont cause later messages to schedule" do record1 = create_record("default", "default") record1.update!(payload: "unparsable garbage") record2 = create_record("default", "default") clock = TickingClock.new consumer = Consumer.new(default_configuration, clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(true) expect(record1.reload.enqueued_at).to be_nil expect(record2.reload.enqueued_at).to be_present expect(redis.llen("queue:default")).to eq(1) 
expect(logger_output.string).to include("JSON::ParserError") end specify "deadlock when obtaining lock just skip that attempt" do expect(Lock).to receive(:lock).and_raise(ActiveRecord::Deadlocked) clock = TickingClock.new consumer = Consumer.new(default_configuration.with(split_keys: ["default"]), clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(logger_output.string).to match(/Obtaining lock .* failed \(deadlock\)/) expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) end specify "lock timeout when obtaining lock just skip that attempt" do expect(Lock).to receive(:lock).and_raise(ActiveRecord::LockWaitTimeout) clock = TickingClock.new consumer = Consumer.new(default_configuration.with(split_keys: ["default"]), clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(logger_output.string).to match(/Obtaining lock .* failed \(lock timeout\)/) expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) end specify "obtaining taken lock just skip that attempt" do clock = TickingClock.new Lock.obtain("default", "other-process-uuid", clock: clock) consumer = Consumer.new(default_configuration.with(split_keys: ["default"]), clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(logger_output.string).to match(/Obtaining lock .* unsuccessful \(taken\)/) expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) end specify "deadlock when releasing lock doesnt do anything" do record = create_record("default", "default") allow(Lock).to receive(:lock).and_wrap_original do |m, *args| if caller.any? {|l| l.include? 
"`release'"} raise ActiveRecord::Deadlocked else m.call(*args) end end clock = TickingClock.new consumer = Consumer.new(default_configuration.with(split_keys: ["default"]), clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(logger_output.string).to match(/Releasing lock .* failed \(deadlock\)/) expect(result).to eq(true) expect(redis.llen("queue:default")).to eq(1) end specify "lock timeout when releasing lock doesnt do anything" do record = create_record("default", "default") allow(Lock).to receive(:lock).and_wrap_original do |m, *args| if caller.any? {|l| l.include? "`release'"} raise ActiveRecord::LockWaitTimeout else m.call(*args) end end clock = TickingClock.new consumer = Consumer.new(default_configuration.with(split_keys: ["default"]), clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(logger_output.string).to match(/Releasing lock .* failed \(lock timeout\)/) expect(result).to eq(true) expect(redis.llen("queue:default")).to eq(1) end specify "after successful loop, lock is released" do record = create_record("default", "default") clock = TickingClock.new consumer = Consumer.new(default_configuration, clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop lock = Lock.find_by!(split_key: "default") expect(lock.locked_by).to be_nil expect(lock.locked_at).to be_nil end def create_record(queue, split_key, format: "sidekiq5") payload = { class: "SomeAsyncHandler", queue: queue, created_at: Time.now.utc, jid: SecureRandom.hex(12), retry: true, args: [{ event_id: "83c3187f-84f6-4da7-8206-73af5aca7cc8", event_type: "RubyEventStore::Event", data: "--- {}\n", metadata: "---\n:timestamp: 2019-09-30 00:00:00.000000000 Z\n", }], } record = Record.create!( split_key: split_key, created_at: Time.now.utc, format: format, enqueued_at: nil, payload: payload.to_json ) end end end end Add tests require 'spec_helper' module RubyEventStore module Outbox RSpec.describe Consumer, db: true do include 
SchemaHelper let(:redis_url) { ENV["REDIS_URL"] } let(:database_url) { ENV["DATABASE_URL"] } let(:redis) { Redis.new(url: redis_url) } let(:logger_output) { StringIO.new } let(:logger) { Logger.new(logger_output) } let(:default_configuration) { Consumer::Configuration.new(database_url: database_url, redis_url: redis_url, split_keys: ["default", "default2"], message_format: SIDEKIQ5_FORMAT, batch_size: 100) } let(:metrics) { Metrics::Null.new } before(:each) do redis.flushdb end specify "updates enqueued_at" do record = create_record("default", "default") consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) consumer.one_loop record.reload expect(record.enqueued_at).to be_present end specify "push the jobs to sidekiq" do record = create_record("default", "default") clock = TickingClock.new consumer = Consumer.new(default_configuration, clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop record.reload expect(redis.llen("queue:default")).to eq(1) payload_in_redis = JSON.parse(redis.lindex("queue:default", 0)) expect(payload_in_redis).to include(JSON.parse(record.payload)) expect(payload_in_redis["enqueued_at"]).to eq(clock.tick(1).to_f) expect(record.enqueued_at).to eq(clock.tick(1)) expect(result).to eq(true) expect(logger_output.string).to include("Sent 1 messages from outbox table") end specify "push multiple jobs to different queues" do record1 = create_record("default", "default") record2 = create_record("default", "default") record3 = create_record("default2", "default2") clock = TickingClock.new consumer = Consumer.new(default_configuration, clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(redis.llen("queue:default")).to eq(2) expect(redis.llen("queue:default2")).to eq(1) end specify "initiating consumer ensures that queues exist" do consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) consumer.init expect(redis.scard("queues")).to eq(2) 
expect(redis.smembers("queues")).to match_array(["default", "default2"]) expect(logger_output.string).to include("Initiated RubyEventStore::Outbox v#{RubyEventStore::Outbox::VERSION}") end specify "returns false if no records" do consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(false) end specify "returns false if didnt aquire lock" do record = create_record("default", "default") consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) clock = TickingClock.new Lock.obtain("default", "some-other-process-uuid", clock: clock) result = consumer.one_loop expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) end specify "already processed should be ignored" do record = create_record("default", "default") record.update!(enqueued_at: Time.now.utc) consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) expect(logger_output.string).to be_empty end specify "other format should be ignored" do record = create_record("default", "default", format: "some_unknown_format") consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) expect(logger_output.string).to be_empty end specify "records from other split keys should be ignored" do record = create_record("other_one", "other_one") consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(false) expect(redis.llen("queue:other_one")).to eq(0) expect(logger_output.string).to be_empty end xspecify "all split keys should be taken if split_keys is nil" do payload = { class: "SomeAsyncHandler", queue: "default", created_at: Time.now.utc, jid: SecureRandom.hex(12), retry: true, args: [{ event_id: 
"83c3187f-84f6-4da7-8206-73af5aca7cc8", event_type: "RubyEventStore::Event", data: "--- {}\n", metadata: "---\n:timestamp: 2019-09-30 00:00:00.000000000 Z\n", }], } record = Record.create!( split_key: "default", created_at: Time.now.utc, format: "sidekiq5", enqueued_at: nil, payload: payload.to_json, ) consumer = Consumer.new(default_configuration.with(split_keys: nil), logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(true) expect(redis.llen("queue:default")).to eq(1) end specify "#run wait if nothing was changed" do consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) expect(consumer).to receive(:one_loop).and_return(false).ordered expect(consumer).to receive(:one_loop).and_raise("End infinite loop").ordered allow(consumer).to receive(:sleep) expect do consumer.run end.to raise_error("End infinite loop") expect(consumer).to have_received(:sleep).with(0.1) end specify "#run doesnt wait if something changed" do consumer = Consumer.new(default_configuration, logger: logger, metrics: metrics) expect(consumer).to receive(:one_loop).and_return(true).ordered expect(consumer).to receive(:one_loop).and_raise("End infinite loop").ordered allow(consumer).to receive(:sleep) expect do consumer.run end.to raise_error("End infinite loop") expect(consumer).not_to have_received(:sleep) end specify "incorrect payload wont cause later messages to schedule" do record1 = create_record("default", "default") record1.update!(payload: "unparsable garbage") record2 = create_record("default", "default") clock = TickingClock.new consumer = Consumer.new(default_configuration, clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(result).to eq(true) expect(record1.reload.enqueued_at).to be_nil expect(record2.reload.enqueued_at).to be_present expect(redis.llen("queue:default")).to eq(1) expect(logger_output.string).to include("JSON::ParserError") end specify "deadlock when obtaining lock just skip that 
attempt" do expect(Lock).to receive(:lock).and_raise(ActiveRecord::Deadlocked) clock = TickingClock.new consumer = Consumer.new(default_configuration.with(split_keys: ["default"]), clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(logger_output.string).to match(/Obtaining lock .* failed \(deadlock\)/) expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) end specify "lock timeout when obtaining lock just skip that attempt" do expect(Lock).to receive(:lock).and_raise(ActiveRecord::LockWaitTimeout) clock = TickingClock.new consumer = Consumer.new(default_configuration.with(split_keys: ["default"]), clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(logger_output.string).to match(/Obtaining lock .* failed \(lock timeout\)/) expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) end specify "obtaining taken lock just skip that attempt" do clock = TickingClock.new Lock.obtain("default", "other-process-uuid", clock: clock) consumer = Consumer.new(default_configuration.with(split_keys: ["default"]), clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(logger_output.string).to match(/Obtaining lock .* unsuccessful \(taken\)/) expect(result).to eq(false) expect(redis.llen("queue:default")).to eq(0) end specify "deadlock when releasing lock doesnt do anything" do record = create_record("default", "default") allow(Lock).to receive(:lock).and_wrap_original do |m, *args| if caller.any? {|l| l.include? 
"`release'"} raise ActiveRecord::Deadlocked else m.call(*args) end end clock = TickingClock.new consumer = Consumer.new(default_configuration.with(split_keys: ["default"]), clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(logger_output.string).to match(/Releasing lock .* failed \(deadlock\)/) expect(result).to eq(true) expect(redis.llen("queue:default")).to eq(1) end specify "lock timeout when releasing lock doesnt do anything" do record = create_record("default", "default") allow(Lock).to receive(:lock).and_wrap_original do |m, *args| if caller.any? {|l| l.include? "`release'"} raise ActiveRecord::LockWaitTimeout else m.call(*args) end end clock = TickingClock.new consumer = Consumer.new(default_configuration.with(split_keys: ["default"]), clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop expect(logger_output.string).to match(/Releasing lock .* failed \(lock timeout\)/) expect(result).to eq(true) expect(redis.llen("queue:default")).to eq(1) end specify "after successful loop, lock is released" do record = create_record("default", "default") clock = TickingClock.new consumer = Consumer.new(default_configuration, clock: clock, logger: logger, metrics: metrics) result = consumer.one_loop lock = Lock.find_by!(split_key: "default") expect(lock.locked_by).to be_nil expect(lock.locked_at).to be_nil end specify "lock disappearing in the meantime, doesnt do anything" do record = create_record("default", "default") clock = TickingClock.new consumer = Consumer.new(default_configuration, clock: clock, logger: logger, metrics: metrics) allow(consumer).to receive(:release_lock_for_process).and_wrap_original do |m, *args| Lock.delete_all m.call(*args) end result = consumer.one_loop expect(logger_output.string).to match(/Releasing lock .* failed \(not taken by this process\)/) expect(result).to eq(true) expect(redis.llen("queue:default")).to eq(1) end specify "lock stolen in the meantime, doesnt do anything" do record = 
create_record("default", "default") clock = TickingClock.new consumer = Consumer.new(default_configuration, clock: clock, logger: logger, metrics: metrics) allow(consumer).to receive(:release_lock_for_process).and_wrap_original do |m, *args| Lock.update_all(locked_by: SecureRandom.uuid) m.call(*args) end result = consumer.one_loop expect(logger_output.string).to match(/Releasing lock .* failed \(not taken by this process\)/) expect(result).to eq(true) expect(redis.llen("queue:default")).to eq(1) end def create_record(queue, split_key, format: "sidekiq5") payload = { class: "SomeAsyncHandler", queue: queue, created_at: Time.now.utc, jid: SecureRandom.hex(12), retry: true, args: [{ event_id: "83c3187f-84f6-4da7-8206-73af5aca7cc8", event_type: "RubyEventStore::Event", data: "--- {}\n", metadata: "---\n:timestamp: 2019-09-30 00:00:00.000000000 Z\n", }], } record = Record.create!( split_key: split_key, created_at: Time.now.utc, format: format, enqueued_at: nil, payload: payload.to_json ) end end end end
Shindo.tests('Storage[:local] | file', [:local]) do pending if Fog.mocking? before do @options = { :local_root => '~/.fog' } end tests('#public_url') do tests('when connection has an endpoint'). returns('http://example.com/files/directory/file.txt') do @options[:endpoint] = 'http://example.com/files' connection = Fog::Storage::Local.new(@options) directory = connection.directories.new(:key => 'directory') file = directory.files.new(:key => 'file.txt') file.public_url end tests('when connection has no endpoint'). returns(nil) do @options[:endpoint] = nil connection = Fog::Storage::Local.new(@options) directory = connection.directories.new(:key => 'directory') file = directory.files.new(:key => 'file.txt') file.public_url end tests('when file path has escapable characters'). returns('http://example.com/files/my%20directory/my%20file.txt') do @options[:endpoint] = 'http://example.com/files' connection = Fog::Storage::Local.new(@options) directory = connection.directories.new(:key => 'my directory') file = directory.files.new(:key => 'my file.txt') file.public_url end end end Retagged tests with strings to be skipped by Shindo Shindo.tests('Storage[:local] | file', ["local"]) do pending if Fog.mocking? before do @options = { :local_root => '~/.fog' } end tests('#public_url') do tests('when connection has an endpoint'). returns('http://example.com/files/directory/file.txt') do @options[:endpoint] = 'http://example.com/files' connection = Fog::Storage::Local.new(@options) directory = connection.directories.new(:key => 'directory') file = directory.files.new(:key => 'file.txt') file.public_url end tests('when connection has no endpoint'). returns(nil) do @options[:endpoint] = nil connection = Fog::Storage::Local.new(@options) directory = connection.directories.new(:key => 'directory') file = directory.files.new(:key => 'file.txt') file.public_url end tests('when file path has escapable characters'). 
returns('http://example.com/files/my%20directory/my%20file.txt') do @options[:endpoint] = 'http://example.com/files' connection = Fog::Storage::Local.new(@options) directory = connection.directories.new(:key => 'my directory') file = directory.files.new(:key => 'my file.txt') file.public_url end end end
created small integration test for user search require 'spec_helper' describe 'full text search' do include PavlovSupport let(:current_user) { create :user } before do ElasticSearch.stub synchronous: true end def create_user name u = User.new( username: name, password: 'password', password_confirmation: 'password', first_name: name, last_name: name) u.approved = true u.agrees_tos = true u.agreed_tos_on = DateTime.now u.email = "#{name}@#{name}.com" u.confirmed_at = DateTime.now u.save or fail "oh noes" end describe 'searching for users' do it 'when nothing is added, nothing is found' do as(current_user) do |pavlov| users = pavlov.interactor :search_user, keywords: 'mark' expect(users).to eq [] end end it 'when a user is added he is found' do as(current_user) do |pavlov| create_user('mark') users = pavlov.interactor :search_user, keywords: 'mark' expect(users.map(&:username)).to eq ['mark'] end end it 'after reindex a user is still found' do as(current_user) do |pavlov| create_user('mark') pavlov.interactor :'full_text_search/reindex' users = pavlov.interactor :search_user, keywords: 'mark' expect(users.map(&:username)).to eq ['mark'] end end end end
# Generated by juwelier # DO NOT EDIT THIS FILE DIRECTLY # Instead, edit Juwelier::Tasks in Rakefile, and run 'rake gemspec' # -*- encoding: utf-8 -*- # stub: nhr-api-ruby-client 0.1.0 ruby lib Gem::Specification.new do |s| s.name = "nhr-api-ruby-client" s.version = "0.1.0" s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.require_paths = ["lib"] s.authors = ["kaogaau"] s.date = "2016-09-20" s.description = "The ruby gem about NHR Device" s.email = "kaogaau@gmail.com" s.extra_rdoc_files = [ "LICENSE.txt", "README.rdoc" ] s.files = [ ".document", "Gemfile", "Gemfile.lock", "LICENSE.txt", "README.rdoc", "Rakefile", "VERSION", "lib/.DS_Store", "lib/nhr-api-ruby-client.rb", "nhr-api-ruby-client.gemspec" ] s.homepage = "http://github.com/kaogaau/nhr-api-ruby-client" s.licenses = ["MIT"] s.rubygems_version = "2.5.1" s.summary = "The ruby gem about NHR Device" if s.respond_to? :specification_version then s.specification_version = 4 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_runtime_dependency(%q<rest-client>, [">= 0"]) s.add_runtime_dependency(%q<json>, ["~> 1.8.1"]) s.add_development_dependency(%q<shoulda>, [">= 0"]) s.add_development_dependency(%q<rdoc>, ["~> 3.12"]) s.add_development_dependency(%q<bundler>, ["~> 1.0"]) s.add_development_dependency(%q<juwelier>, ["~> 2.1.0"]) s.add_development_dependency(%q<simplecov>, [">= 0"]) else s.add_dependency(%q<rest-client>, [">= 0"]) s.add_dependency(%q<json>, ["~> 1.8.1"]) s.add_dependency(%q<shoulda>, [">= 0"]) s.add_dependency(%q<rdoc>, ["~> 3.12"]) s.add_dependency(%q<bundler>, ["~> 1.0"]) s.add_dependency(%q<juwelier>, ["~> 2.1.0"]) s.add_dependency(%q<simplecov>, [">= 0"]) end else s.add_dependency(%q<rest-client>, [">= 0"]) s.add_dependency(%q<json>, ["~> 1.8.1"]) s.add_dependency(%q<shoulda>, [">= 0"]) s.add_dependency(%q<rdoc>, ["~> 3.12"]) s.add_dependency(%q<bundler>, ["~> 1.0"]) s.add_dependency(%q<juwelier>, ["~> 2.1.0"]) 
s.add_dependency(%q<simplecov>, [">= 0"]) end end Regenerate gemspec for version 0.1.1 # Generated by juwelier # DO NOT EDIT THIS FILE DIRECTLY # Instead, edit Juwelier::Tasks in Rakefile, and run 'rake gemspec' # -*- encoding: utf-8 -*- # stub: nhr-api-ruby-client 0.1.1 ruby lib Gem::Specification.new do |s| s.name = "nhr-api-ruby-client" s.version = "0.1.1" s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.require_paths = ["lib"] s.authors = ["kaogaau"] s.date = "2016-11-10" s.description = "The ruby gem about NHR Device" s.email = "kaogaau@gmail.com" s.extra_rdoc_files = [ "LICENSE.txt", "README.rdoc" ] s.files = [ ".document", "Gemfile", "Gemfile.lock", "LICENSE.txt", "README.rdoc", "Rakefile", "VERSION", "lib/.DS_Store", "lib/nhr-api-ruby-client.rb", "nhr-api-ruby-client.gemspec" ] s.homepage = "http://github.com/kaogaau/nhr-api-ruby-client" s.licenses = ["MIT"] s.rubygems_version = "2.5.1" s.summary = "The ruby gem about NHR Device" if s.respond_to? 
:specification_version then s.specification_version = 4 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_runtime_dependency(%q<rest-client>, [">= 0"]) s.add_runtime_dependency(%q<json>, ["~> 1.8.1"]) s.add_development_dependency(%q<shoulda>, [">= 0"]) s.add_development_dependency(%q<rdoc>, ["~> 3.12"]) s.add_development_dependency(%q<bundler>, ["~> 1.0"]) s.add_development_dependency(%q<juwelier>, ["~> 2.1.0"]) s.add_development_dependency(%q<simplecov>, [">= 0"]) else s.add_dependency(%q<rest-client>, [">= 0"]) s.add_dependency(%q<json>, ["~> 1.8.1"]) s.add_dependency(%q<shoulda>, [">= 0"]) s.add_dependency(%q<rdoc>, ["~> 3.12"]) s.add_dependency(%q<bundler>, ["~> 1.0"]) s.add_dependency(%q<juwelier>, ["~> 2.1.0"]) s.add_dependency(%q<simplecov>, [">= 0"]) end else s.add_dependency(%q<rest-client>, [">= 0"]) s.add_dependency(%q<json>, ["~> 1.8.1"]) s.add_dependency(%q<shoulda>, [">= 0"]) s.add_dependency(%q<rdoc>, ["~> 3.12"]) s.add_dependency(%q<bundler>, ["~> 1.0"]) s.add_dependency(%q<juwelier>, ["~> 2.1.0"]) s.add_dependency(%q<simplecov>, [">= 0"]) end end
Pod::Spec.new do |s| s.name = "ReflectedStringConvertible" s.version = "1.2.0" s.summary = "A protocol that allows any class to be printed as if it were a struct or a JSON object." s.homepage = "https://github.com/mattcomi/ReflectedStringConvertible" s.license = { :type => "MIT", :file => "LICENSE" } s.author = { "Matt Comi" => "mattcomi@gmail.com" } s.source = { :git => "https://github.com/mattcomi/ReflectedStringConvertible.git", :tag => "#{s.version}"} s.source_files = "ReflectedStringConvertible/*.{swift}" s.requires_arc = true s.ios.deployment_target = '11.0' s.osx.deployment_target = '10.10' end Updated podspec. Pod::Spec.new do |s| s.name = "ReflectedStringConvertible" s.version = "1.2.0" s.summary = "A protocol that allows any class to be printed as if it were a struct or a JSON object." s.homepage = "https://github.com/mattcomi/ReflectedStringConvertible" s.license = { :type => "MIT", :file => "LICENSE" } s.author = { "Matt Comi" => "mattcomi@gmail.com" } s.source = { :git => "https://github.com/mattcomi/ReflectedStringConvertible.git", :tag => "#{s.version}"} s.source_files = "ReflectedStringConvertible/*.{swift}" s.requires_arc = true s.ios.deployment_target = '11.0' s.osx.deployment_target = '10.13' end
class Homestead def Homestead.configure(config, settings) # Set The VM Provider ENV['VAGRANT_DEFAULT_PROVIDER'] = settings["provider"] ||= "virtualbox" # Configure Local Variable To Access Scripts From Remote Location scriptDir = File.dirname(__FILE__) # Prevent TTY Errors config.ssh.shell = "bash -c 'BASH_ENV=/etc/profile exec bash'" # Configure The Box config.vm.box = "laravel/homestead" config.vm.hostname = settings["hostname"] ||= "homestead" # Configure A Private Network IP config.vm.network :private_network, ip: settings["ip"] ||= "192.168.10.10" # Configure A Few VirtualBox Settings config.vm.provider "virtualbox" do |vb| vb.name = settings["name"] ||= "homestead" vb.customize ["modifyvm", :id, "--memory", settings["memory"] ||= "2048"] vb.customize ["modifyvm", :id, "--cpus", settings["cpus"] ||= "1"] vb.customize ["modifyvm", :id, "--natdnsproxy1", "on"] vb.customize ["modifyvm", :id, "--natdnshostresolver1", "on"] vb.customize ["modifyvm", :id, "--ostype", "Ubuntu_64"] end # Configure A Few VMware Settings ["vmware_fusion", "vmware_workstation"].each do |vmware| config.vm.provider vmware do |v| v.vmx["displayName"] = "homestead" v.vmx["memsize"] = settings["memory"] ||= 2048 v.vmx["numvcpus"] = settings["cpus"] ||= 1 v.vmx["guestOS"] = "ubuntu-64" end end # Standardize Ports Naming Schema if (settings.has_key?("ports")) settings["ports"].each do |port| port["guest"] ||= port["to"] port["host"] ||= port["send"] port["protocol"] ||= "tcp" end else settings["ports"] = [] end # Default Port Forwarding default_ports = { 80 => 8000, 443 => 44300, 3306 => 33060, 5432 => 54320 } # Use Default Port Forwarding Unless Overridden default_ports.each do |guest, host| unless settings["ports"].any? 
{ |mapping| mapping["guest"] == guest } config.vm.network "forwarded_port", guest: guest, host: host end end # Add Custom Ports From Configuration if settings.has_key?("ports") settings["ports"].each do |port| config.vm.network "forwarded_port", guest: port["guest"], host: port["host"], protocol: port["protocol"] end end # Configure The Public Key For SSH Access if settings.include? 'authorize' config.vm.provision "shell" do |s| s.inline = "echo $1 | grep -xq \"$1\" /home/vagrant/.ssh/authorized_keys || echo $1 | tee -a /home/vagrant/.ssh/authorized_keys" s.args = [File.read(File.expand_path(settings["authorize"]))] end end # Copy The SSH Private Keys To The Box if settings.include? 'keys' settings["keys"].each do |key| config.vm.provision "shell" do |s| s.privileged = false s.inline = "echo \"$1\" > /home/vagrant/.ssh/$2 && chmod 600 /home/vagrant/.ssh/$2" s.args = [File.read(File.expand_path(key)), key.split('/').last] end end end # Register All Of The Configured Shared Folders if settings.include? 'folders' settings["folders"].each do |folder| mount_opts = [] if (folder["type"] == "nfs") mount_opts = folder["mount_opts"] ? 
folder["mount_opts"] : ['actimeo=1'] end config.vm.synced_folder folder["map"], folder["to"], type: folder["type"] ||= nil, mount_options: mount_opts end end # Install All The Configured Nginx Sites config.vm.provision "shell" do |s| s.path = scriptDir + "/clear-nginx.sh" end settings["sites"].each do |site| config.vm.provision "shell" do |s| if (site.has_key?("hhvm") && site["hhvm"]) s.path = scriptDir + "/serve-hhvm.sh" s.args = [site["map"], site["to"], site["port"] ||= "80", site["ssl"] ||= "443"] else s.path = scriptDir + "/serve.sh" s.args = [site["map"], site["to"], site["port"] ||= "80", site["ssl"] ||= "443"] end end end # Configure All Of The Configured Databases if settings.has_key?("databases") settings["databases"].each do |db| config.vm.provision "shell" do |s| s.path = scriptDir + "/create-mysql.sh" s.args = [db] end config.vm.provision "shell" do |s| s.path = scriptDir + "/create-postgres.sh" s.args = [db] end end end # Configure All Of The Server Environment Variables config.vm.provision "shell" do |s| s.path = scriptDir + "/clear-variables.sh" end if settings.has_key?("variables") settings["variables"].each do |var| config.vm.provision "shell" do |s| s.inline = "echo \"\nenv[$1] = '$2'\" >> /etc/php5/fpm/php-fpm.conf" s.args = [var["key"], var["value"]] end config.vm.provision "shell" do |s| s.inline = "echo \"\n# Set Homestead Environment Variable\nexport $1=$2\" >> /home/vagrant/.profile" s.args = [var["key"], var["value"]] end end config.vm.provision "shell" do |s| s.inline = "service php5-fpm restart" end end # Update Composer On Every Provision config.vm.provision "shell" do |s| s.inline = "/usr/local/bin/composer self-update" end # Configure Blackfire.io if settings.has_key?("blackfire") config.vm.provision "shell" do |s| s.path = scriptDir + "/blackfire.sh" s.args = [ settings["blackfire"][0]["id"], settings["blackfire"][0]["token"], settings["blackfire"][0]["client-id"], settings["blackfire"][0]["client-token"] ] end end end end Added auto 
correction for port collisions for multiple vagrant vms running concurrently class Homestead def Homestead.configure(config, settings) # Set The VM Provider ENV['VAGRANT_DEFAULT_PROVIDER'] = settings["provider"] ||= "virtualbox" # Configure Local Variable To Access Scripts From Remote Location scriptDir = File.dirname(__FILE__) # Prevent TTY Errors config.ssh.shell = "bash -c 'BASH_ENV=/etc/profile exec bash'" # Configure The Box config.vm.box = "laravel/homestead" config.vm.hostname = settings["hostname"] ||= "homestead" # Configure A Private Network IP config.vm.network :private_network, ip: settings["ip"] ||= "192.168.10.10" # Configure A Few VirtualBox Settings config.vm.provider "virtualbox" do |vb| vb.name = settings["name"] ||= "homestead" vb.customize ["modifyvm", :id, "--memory", settings["memory"] ||= "2048"] vb.customize ["modifyvm", :id, "--cpus", settings["cpus"] ||= "1"] vb.customize ["modifyvm", :id, "--natdnsproxy1", "on"] vb.customize ["modifyvm", :id, "--natdnshostresolver1", "on"] vb.customize ["modifyvm", :id, "--ostype", "Ubuntu_64"] end # Configure A Few VMware Settings ["vmware_fusion", "vmware_workstation"].each do |vmware| config.vm.provider vmware do |v| v.vmx["displayName"] = "homestead" v.vmx["memsize"] = settings["memory"] ||= 2048 v.vmx["numvcpus"] = settings["cpus"] ||= 1 v.vmx["guestOS"] = "ubuntu-64" end end # Standardize Ports Naming Schema if (settings.has_key?("ports")) settings["ports"].each do |port| port["guest"] ||= port["to"] port["host"] ||= port["send"] port["protocol"] ||= "tcp" end else settings["ports"] = [] end # Default Port Forwarding default_ports = { 80 => 8000, 443 => 44300, 3306 => 33060, 5432 => 54320 } # Use Default Port Forwarding Unless Overridden default_ports.each do |guest, host| unless settings["ports"].any? 
{ |mapping| mapping["guest"] == guest } config.vm.network "forwarded_port", guest: guest, host: host, auto_correct: true end end # Add Custom Ports From Configuration if settings.has_key?("ports") settings["ports"].each do |port| config.vm.network "forwarded_port", guest: port["guest"], host: port["host"], protocol: port["protocol"], auto_correct: true end end # Configure The Public Key For SSH Access if settings.include? 'authorize' config.vm.provision "shell" do |s| s.inline = "echo $1 | grep -xq \"$1\" /home/vagrant/.ssh/authorized_keys || echo $1 | tee -a /home/vagrant/.ssh/authorized_keys" s.args = [File.read(File.expand_path(settings["authorize"]))] end end # Copy The SSH Private Keys To The Box if settings.include? 'keys' settings["keys"].each do |key| config.vm.provision "shell" do |s| s.privileged = false s.inline = "echo \"$1\" > /home/vagrant/.ssh/$2 && chmod 600 /home/vagrant/.ssh/$2" s.args = [File.read(File.expand_path(key)), key.split('/').last] end end end # Register All Of The Configured Shared Folders if settings.include? 'folders' settings["folders"].each do |folder| mount_opts = [] if (folder["type"] == "nfs") mount_opts = folder["mount_opts"] ? 
folder["mount_opts"] : ['actimeo=1'] end config.vm.synced_folder folder["map"], folder["to"], type: folder["type"] ||= nil, mount_options: mount_opts end end # Install All The Configured Nginx Sites config.vm.provision "shell" do |s| s.path = scriptDir + "/clear-nginx.sh" end settings["sites"].each do |site| config.vm.provision "shell" do |s| if (site.has_key?("hhvm") && site["hhvm"]) s.path = scriptDir + "/serve-hhvm.sh" s.args = [site["map"], site["to"], site["port"] ||= "80", site["ssl"] ||= "443"] else s.path = scriptDir + "/serve.sh" s.args = [site["map"], site["to"], site["port"] ||= "80", site["ssl"] ||= "443"] end end end # Configure All Of The Configured Databases if settings.has_key?("databases") settings["databases"].each do |db| config.vm.provision "shell" do |s| s.path = scriptDir + "/create-mysql.sh" s.args = [db] end config.vm.provision "shell" do |s| s.path = scriptDir + "/create-postgres.sh" s.args = [db] end end end # Configure All Of The Server Environment Variables config.vm.provision "shell" do |s| s.path = scriptDir + "/clear-variables.sh" end if settings.has_key?("variables") settings["variables"].each do |var| config.vm.provision "shell" do |s| s.inline = "echo \"\nenv[$1] = '$2'\" >> /etc/php5/fpm/php-fpm.conf" s.args = [var["key"], var["value"]] end config.vm.provision "shell" do |s| s.inline = "echo \"\n# Set Homestead Environment Variable\nexport $1=$2\" >> /home/vagrant/.profile" s.args = [var["key"], var["value"]] end end config.vm.provision "shell" do |s| s.inline = "service php5-fpm restart" end end # Update Composer On Every Provision config.vm.provision "shell" do |s| s.inline = "/usr/local/bin/composer self-update" end # Configure Blackfire.io if settings.has_key?("blackfire") config.vm.provision "shell" do |s| s.path = scriptDir + "/blackfire.sh" s.args = [ settings["blackfire"][0]["id"], settings["blackfire"][0]["token"], settings["blackfire"][0]["client-id"], settings["blackfire"][0]["client-token"] ] end end end end
#!/Users/simon/.rvm/rubies/ruby-1.9.2-p180/bin/ruby # RTVE tiler # =============== # ./rtve.rb [environment] [election_id] # # environments: [development, production] # election_id: [election id table primary keys] require 'rubygems' require 'pg' require 'typhoeus' require 'json' require 'fileutils' # sanity check arguments ENVR = ARGV[0] election_id = ARGV[1] if ENVR != 'development' && ENVR != 'production' puts "ruby map_tiles.rb [environment] [electoral process id (optional)]" puts "environments: [development, production]" Process.exit!(true) end # set app settings and boot user = 123 setup = {:development => {:host => 'localhost', :user => 'publicuser', :dbname => "cartodb_dev_user_#{user}_db"}, :production => {:host => '10.211.14.63', :user => 'postgres', :dbname => "cartodb_user_#{user}_db"}} settings = setup[ENVR.to_sym] conn = PGconn.open(settings) pos = conn.exec "SELECT * from procesos_electorales ORDER BY anyo, mes ASC"; # menu screen if election_id == nil begin puts "\nRTVE Tile Generator" puts "===================\n\n" puts "Electoral Processes: \n\n" printf("%-5s %5s %5s \n", "id", "anyo", "mes") puts "-" * 19 pos.each do |p| printf("%-5s %5s %5s \n", p["cartodb_id"], p["anyo"], p["mes"]) end ids = pos.map { |x| x['cartodb_id'] } print "\nChoose a electoral process to render (#{ids.sort.join(", ")}) [q=quit]: " election_id = STDIN.gets.chomp Process.exit if election_id == 'q' raise "invalid id" unless ids.include?(election_id) rescue puts "\n** ERROR: please enter a correct procesos electorales id \n\n" retry end end # version path base_path = "/mnt/www/data/tiles" versions = Dir["#{base_path}/*/"] v_dir = versions.map{|x| x.split("/").last} v_next = v_dir.size == 0 ? 
0 : v_dir.max.to_i+1 version_path = "#{base_path}/#{v_next}" # render each one entered election_ids = election_id.split(",") election_ids.each do |election_id| # Create denomalised version of GADM4 table with votes, and party names puts "Generating map_tiles_data table for election id: #{election_id}..." sql = <<-EOS DROP TABLE IF EXISTS map_tiles_data; CREATE TABLE map_tiles_data AS ( SELECT g.cartodb_id as gid, v.primer_partido_id,pp1.name primer_nombre,v.primer_partido_percent,v.primer_partido_votos, v.segundo_partido_id,pp2.name segundo_nombre,v.segundo_partido_percent,v.segundo_partido_votos, v.tercer_partido_id,pp3.name tercer_nombre,v.tercer_partido_percent,v.tercer_partido_votos, g.the_geom, g.the_geom_webmercator, CASE WHEN pp1.name ='PSOE' THEN CASE WHEN v.primer_partido_percent >= 75 THEN 'red_H' WHEN (v.primer_partido_percent >= 50) AND (v.primer_partido_percent < 75) THEN 'red_M' WHEN (v.primer_partido_percent >= 0) AND (v.primer_partido_percent < 50) THEN 'red_L' END WHEN pp1.name = 'PP' THEN CASE WHEN v.primer_partido_percent >= 75 THEN 'blue_H' WHEN (v.primer_partido_percent >= 50) AND (v.primer_partido_percent < 75) THEN 'blue_M' WHEN (v.primer_partido_percent >= 0) AND (v.primer_partido_percent < 50) THEN 'blue_L' END WHEN pp1.name IN ('CIU', 'AP', 'IU', 'INDEP', 'CDS', 'PAR', 'EAJ-PNV', 'PA', 'BNG', 'PDP', 'ERC-AM', 'ESQUERRA-AM', 'ERC', 'EA', 'HB', 'PRC', 'PR', 'UV', 'EAJ-PNV/EA', 'EH', 'EAJ-ONV', 'PNV', 'EH-A') THEN pp1.name ELSE 'unknown' END as color FROM ine_poly AS g LEFT OUTER JOIN (SELECT * FROM votaciones_por_municipio WHERE proceso_electoral_id=#{election_id}) AS v ON g.ine_muni_int=v.codinemuni AND g.ine_prov_int = v.codineprov LEFT OUTER JOIN partidos_politicos AS pp1 ON pp1.cartodb_id = v.primer_partido_id LEFT OUTER JOIN partidos_politicos AS pp2 ON pp2.cartodb_id = v.segundo_partido_id LEFT OUTER JOIN partidos_politicos AS pp3 ON pp3.cartodb_id = v.tercer_partido_id); ALTER TABLE map_tiles_data ADD PRIMARY KEY (gid); CREATE INDEX 
map_tiles_data_the_geom_webmercator_idx ON map_tiles_data USING gist(the_geom_webmercator); CREATE INDEX map_tiles_data_the_geom_idx ON map_tiles_data USING gist(the_geom); EOS conn.exec sql # there are 2 bounding boxes at each zoom level. one for spain, one for canaries tile_extents = [ {:zoom => 6, :xmin => 30, :ymin => 23, :xmax => 32, :ymax => 25}, {:zoom => 6, :xmin => 28, :ymin => 26, :xmax => 29, :ymax => 27}, {:zoom => 7, :xmin => 60, :ymin => 46, :xmax => 65, :ymax => 50}, {:zoom => 7, :xmin => 57, :ymin => 52, :xmax => 59, :ymax => 54}, {:zoom => 8, :xmin => 120, :ymin => 92, :xmax => 131, :ymax => 101}, {:zoom => 8, :xmin => 114, :ymin => 105, :xmax => 118, :ymax => 108}, {:zoom => 9, :xmin => 241, :ymin => 185, :xmax => 263, :ymax => 203}, {:zoom => 9, :xmin => 229, :ymin => 211, :xmax => 237, :ymax => 216}, {:zoom => 10, :xmin => 482, :ymin => 370, :xmax => 526, :ymax => 407}, {:zoom => 10, :xmin => 458, :ymin => 422, :xmax => 475, :ymax => 433}, {:zoom => 11, :xmin => 964, :ymin => 741, :xmax => 1052, :ymax => 815}, {:zoom => 11, :xmin => 916, :ymin => 844, :xmax => 951, :ymax => 866}, {:zoom => 12, :xmin => 1929, :ymin => 1483, :xmax => 2105, :ymax => 1631}, {:zoom => 12, :xmin => 1832, :ymin => 1688, :xmax => 1902, :ymax => 1732}, ] save_path = "#{version_path}/#{election_id}" tile_url = "http://ec2-50-16-103-51.compute-1.amazonaws.com/tiles" hydra = Typhoeus::Hydra.new(:max_concurrency => 200) time_start = Time.now start_tiles = 0 total_tiles = tile_extents.inject(0) do |sum, extent| sum += ((extent[:xmax] - extent[:xmin] + 1) * (extent[:ymax] - extent[:ymin] + 1)) sum end puts "creating tile path: #{save_path}" FileUtils.mkdir_p save_path puts "Saving tiles for map_tiles_data to #{save_path}..." # tile_extents.each do |extent| # (extent[:xmin]..extent[:xmax]).to_a.each do |x| # (extent[:ymin]..extent[:ymax]).to_a.each do |y| # file_name = "#{x}_#{y}_#{extent[:zoom]}_#{election_id}.png" # if File.exists? 
"#{save_path}/#{file_name}" # total_tiles -= 1 # else # file_url = "#{tile_url}/#{x}/#{y}/#{extent[:zoom]}/users/#{user}/layers/gadm1%7Cmap_tiles_data%7Cine_poly%7Cgadm2%7Cgadm1" # tile_request = Typhoeus::Request.new(file_url) # tile_request.on_complete do |response| # start_tiles += 1 # File.open("#{save_path}/#{file_name}", "w+") do|f| # f.write response.body # #puts file_url # puts "#{start_tiles}/#{total_tiles}: #{save_path}/#{file_name}" # end # end # hydra.queue tile_request # end # end # end # end hydra.run time_end = Time.now secs = time_end - time_start puts "Total time: #{sprintf("%.2f", secs)} seconds (#{sprintf("%.2f", secs/60.0)} mins). #{sprintf("%.2f", total_tiles/secs)} tiles per second." end add file saving step #!/Users/simon/.rvm/rubies/ruby-1.9.2-p180/bin/ruby # RTVE tiler # =============== # ./rtve.rb [environment] [election_id] # # environments: [development, production] # election_id: [election id table primary keys] require 'rubygems' require 'pg' require 'typhoeus' require 'json' require 'fileutils' # sanity check arguments ENVR = ARGV[0] election_id = ARGV[1] if ENVR != 'development' && ENVR != 'production' puts "ruby map_tiles.rb [environment] [electoral process id (optional)]" puts "environments: [development, production]" Process.exit!(true) end # set app settings and boot user = 123 setup = {:development => {:host => 'localhost', :user => 'publicuser', :dbname => "cartodb_dev_user_#{user}_db"}, :production => {:host => '10.211.14.63', :user => 'postgres', :dbname => "cartodb_user_#{user}_db"}} settings = setup[ENVR.to_sym] conn = PGconn.open(settings) pos = conn.exec "SELECT * from procesos_electorales ORDER BY anyo, mes ASC"; # menu screen if election_id == nil begin puts "\nRTVE Tile Generator" puts "===================\n\n" puts "Electoral Processes: \n\n" printf("%-5s %5s %5s \n", "id", "anyo", "mes") puts "-" * 19 pos.each do |p| printf("%-5s %5s %5s \n", p["cartodb_id"], p["anyo"], p["mes"]) end ids = pos.map { |x| 
x['cartodb_id'] } print "\nChoose a electoral process to render (#{ids.sort.join(", ")}) [q=quit]: " election_id = STDIN.gets.chomp Process.exit if election_id == 'q' raise "invalid id" unless ids.include?(election_id) rescue puts "\n** ERROR: please enter a correct procesos electorales id \n\n" retry end end # version path base_path = "/mnt/www/data/tiles" versions = Dir["#{base_path}/*/"] v_dir = versions.map{|x| x.split("/").last} v_next = v_dir.size == 0 ? 0 : v_dir.max.to_i+1 version_path = "#{base_path}/#{v_next}" # render each one entered election_ids = election_id.split(",") election_ids.each do |election_id| # Create denomalised version of GADM4 table with votes, and party names puts "Generating map_tiles_data table for election id: #{election_id}..." sql = <<-EOS DROP TABLE IF EXISTS map_tiles_data; CREATE TABLE map_tiles_data AS ( SELECT g.cartodb_id as gid, v.primer_partido_id,pp1.name primer_nombre,v.primer_partido_percent,v.primer_partido_votos, v.segundo_partido_id,pp2.name segundo_nombre,v.segundo_partido_percent,v.segundo_partido_votos, v.tercer_partido_id,pp3.name tercer_nombre,v.tercer_partido_percent,v.tercer_partido_votos, g.the_geom, g.the_geom_webmercator, CASE WHEN pp1.name ='PSOE' THEN CASE WHEN v.primer_partido_percent >= 75 THEN 'red_H' WHEN (v.primer_partido_percent >= 50) AND (v.primer_partido_percent < 75) THEN 'red_M' WHEN (v.primer_partido_percent >= 0) AND (v.primer_partido_percent < 50) THEN 'red_L' END WHEN pp1.name = 'PP' THEN CASE WHEN v.primer_partido_percent >= 75 THEN 'blue_H' WHEN (v.primer_partido_percent >= 50) AND (v.primer_partido_percent < 75) THEN 'blue_M' WHEN (v.primer_partido_percent >= 0) AND (v.primer_partido_percent < 50) THEN 'blue_L' END WHEN pp1.name IN ('CIU', 'AP', 'IU', 'INDEP', 'CDS', 'PAR', 'EAJ-PNV', 'PA', 'BNG', 'PDP', 'ERC-AM', 'ESQUERRA-AM', 'ERC', 'EA', 'HB', 'PRC', 'PR', 'UV', 'EAJ-PNV/EA', 'EH', 'EAJ-ONV', 'PNV', 'EH-A') THEN pp1.name ELSE 'unknown' END as color FROM ine_poly AS g LEFT OUTER JOIN 
(SELECT * FROM votaciones_por_municipio WHERE proceso_electoral_id=#{election_id}) AS v ON g.ine_muni_int=v.codinemuni AND g.ine_prov_int = v.codineprov LEFT OUTER JOIN partidos_politicos AS pp1 ON pp1.cartodb_id = v.primer_partido_id LEFT OUTER JOIN partidos_politicos AS pp2 ON pp2.cartodb_id = v.segundo_partido_id LEFT OUTER JOIN partidos_politicos AS pp3 ON pp3.cartodb_id = v.tercer_partido_id); ALTER TABLE map_tiles_data ADD PRIMARY KEY (gid); CREATE INDEX map_tiles_data_the_geom_webmercator_idx ON map_tiles_data USING gist(the_geom_webmercator); CREATE INDEX map_tiles_data_the_geom_idx ON map_tiles_data USING gist(the_geom); EOS conn.exec sql # there are 2 bounding boxes at each zoom level. one for spain, one for canaries tile_extents = [ {:zoom => 6, :xmin => 30, :ymin => 23, :xmax => 32, :ymax => 25}, {:zoom => 6, :xmin => 28, :ymin => 26, :xmax => 29, :ymax => 27}, {:zoom => 7, :xmin => 60, :ymin => 46, :xmax => 65, :ymax => 50}, {:zoom => 7, :xmin => 57, :ymin => 52, :xmax => 59, :ymax => 54}, {:zoom => 8, :xmin => 120, :ymin => 92, :xmax => 131, :ymax => 101}, {:zoom => 8, :xmin => 114, :ymin => 105, :xmax => 118, :ymax => 108}, {:zoom => 9, :xmin => 241, :ymin => 185, :xmax => 263, :ymax => 203}, {:zoom => 9, :xmin => 229, :ymin => 211, :xmax => 237, :ymax => 216}, {:zoom => 10, :xmin => 482, :ymin => 370, :xmax => 526, :ymax => 407}, {:zoom => 10, :xmin => 458, :ymin => 422, :xmax => 475, :ymax => 433}, {:zoom => 11, :xmin => 964, :ymin => 741, :xmax => 1052, :ymax => 815}, {:zoom => 11, :xmin => 916, :ymin => 844, :xmax => 951, :ymax => 866}, {:zoom => 12, :xmin => 1929, :ymin => 1483, :xmax => 2105, :ymax => 1631}, {:zoom => 12, :xmin => 1832, :ymin => 1688, :xmax => 1902, :ymax => 1732}, ] save_path = "#{version_path}/#{election_id}" tile_url = "http://ec2-50-16-103-51.compute-1.amazonaws.com/tiles" hydra = Typhoeus::Hydra.new(:max_concurrency => 200) time_start = Time.now start_tiles = 0 total_tiles = tile_extents.inject(0) do |sum, extent| sum += 
((extent[:xmax] - extent[:xmin] + 1) * (extent[:ymax] - extent[:ymin] + 1)) sum end puts "creating tile path: #{save_path}" FileUtils.mkdir_p save_path puts "Saving tiles for map_tiles_data to #{save_path}..." tile_extents.each do |extent| (extent[:xmin]..extent[:xmax]).to_a.each do |x| (extent[:ymin]..extent[:ymax]).to_a.each do |y| file_name = "#{x}_#{y}_#{extent[:zoom]}_#{election_id}.png" if File.exists? "#{save_path}/#{file_name}" total_tiles -= 1 else file_url = "#{tile_url}/#{x}/#{y}/#{extent[:zoom]}/users/#{user}/layers/gadm1%7Cmap_tiles_data%7Cine_poly%7Cgadm2%7Cgadm1" tile_request = Typhoeus::Request.new(file_url) tile_request.on_complete do |response| start_tiles += 1 File.open("#{save_path}/#{file_name}", "w+") do|f| f.write response.body #puts file_url puts "#{start_tiles}/#{total_tiles}: #{save_path}/#{file_name}" end end hydra.queue tile_request end end end end hydra.run time_end = Time.now secs = time_end - time_start puts "Total time: #{sprintf("%.2f", secs)} seconds (#{sprintf("%.2f", secs/60.0)} mins). #{sprintf("%.2f", total_tiles/secs)} tiles per second." end
$:.push File.expand_path("../lib", __FILE__) # Maintain your gem's version: require "oauth-rest-security/version" # Describe your gem and declare its dependencies: Gem::Specification.new do |s| s.name = "oauth-rest-security" s.version = OauthRestSecurity::VERSION s.authors = ["Maximiliano Dello Russo"] s.email = ["maxidr@gmail.com"] #s.homepage = "TODO" s.summary = "TODO: Summary of OauthRestSecurity." #s.description = "TODO: Description of OauthRestSecurity." s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "README.rdoc"] s.add_dependency "rails", "~> 3.1.3" s.add_dependency 'rest-client' s.add_dependency 'oauth' s.add_development_dependency "sqlite3" s.add_development_dependency 'rspec' s.add_development_dependency 'rspec-rails' s.add_development_dependency 'yard' s.add_development_dependency 'rest-client' end Fixed gemspec $:.push File.expand_path("../lib", __FILE__) # Maintain your gem's version: require "oauth-rest-security/version" # Describe your gem and declare its dependencies: Gem::Specification.new do |s| s.name = "oauth-rest-security" s.version = OauthRestSecurity::VERSION s.authors = ["Maximiliano Dello Russo"] s.email = ["maxidr@gmail.com"] #s.homepage = "TODO" s.summary = "Summary of OauthRestSecurity." #s.description = "TODO: Description of OauthRestSecurity." s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "README.rdoc"] s.add_dependency "rails", "~> 3.1.3" s.add_dependency 'rest-client' s.add_dependency 'oauth' s.add_development_dependency "sqlite3" s.add_development_dependency 'rspec' s.add_development_dependency 'rspec-rails' s.add_development_dependency 'yard' s.add_development_dependency 'rest-client' end
lib = File.expand_path('lib', __dir__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'dragonfly_pdf/version' Gem::Specification.new do |spec| spec.name = 'dragonfly_pdf' spec.version = DragonflyPdf::VERSION spec.authors = ['Tomas Celizna'] spec.email = ['tomas.celizna@gmail.com'] spec.summary = 'Dragonfly PDF analysers and processors.' spec.description = 'Dragonfly PDF analysers and processors.' spec.homepage = 'https://github.com/tomasc/dragonfly_pdf' spec.license = 'MIT' spec.files = `git ls-files -z`.split("\x0") spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ['lib'] spec.add_dependency 'dragonfly', '~> 1.0' spec.add_dependency 'dragonfly_libvips', '~> 2.4.0' spec.add_development_dependency 'bundler', '~> 1.12' spec.add_development_dependency 'guard' spec.add_development_dependency 'guard-minitest' spec.add_development_dependency 'minitest', '~> 5.0' spec.add_development_dependency 'minitest-reporters' spec.add_development_dependency 'rake', '~> 10.0' spec.add_development_dependency 'pdf-reader' end Cleanup gemspec lib = File.expand_path('lib', __dir__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'dragonfly_pdf/version' Gem::Specification.new do |spec| spec.name = 'dragonfly_pdf' spec.version = DragonflyPdf::VERSION spec.authors = ['Tomas Celizna'] spec.email = ['tomas.celizna@gmail.com'] spec.summary = 'Dragonfly PDF analysers and processors.' spec.description = 'Dragonfly PDF analysers and processors.' 
spec.homepage = 'https://github.com/tomasc/dragonfly_pdf' spec.license = 'MIT' spec.files = Dir["{lib}/**/*", "CHANGELOG.md", "Rakefile", "README.md"] spec.require_paths = ['lib'] spec.add_dependency 'dragonfly', '~> 1.0' spec.add_dependency 'dragonfly_libvips', '~> 2.4.0' spec.add_development_dependency 'bundler', '~> 1.12' spec.add_development_dependency 'guard' spec.add_development_dependency 'guard-minitest' spec.add_development_dependency 'minitest', '~> 5.0' spec.add_development_dependency 'minitest-reporters' spec.add_development_dependency 'rake', '~> 10.0' spec.add_development_dependency 'pdf-reader' end
# coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'string_to_ipa/version' Gem::Specification.new do |spec| spec.name = "string_to_ipa" spec.version = StringToIpa::VERSION spec.authors = ["Hilary Stohs-Krause"] spec.email = ["hilarysk@gmail.com"] spec.summary = %q{Write a short summary, because Rubygems requires one.} #fill these out spec.description = %q{ Write a longer description or delete this line.} spec.homepage = "http://example.com" spec.license = "MIT" spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) } spec.bindir = "exe" spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) } spec.require_paths = ["lib"] if spec.respond_to?(:metadata) spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com' to prevent pushes to rubygems.org, or delete to allow pushes to any server." end spec.add_dependency "sqlite3" spec.add_development_dependency "bundler", "~> 1.9" spec.add_development_dependency "rake", "~> 10.0" end adds info to gemspec # coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'string_to_ipa/version' Gem::Specification.new do |spec| spec.name = "string_to_ipa" spec.version = StringToIpa::VERSION spec.authors = ["Hilary Stohs-Krause"] spec.email = ["hilarysk@gmail.com"] spec.summary = %q{A minimalist gem that converts a string to the International Phonetic Alphabet.} spec.description = %q{This gem relies on a modified version of the open-source Carnegie Mellon University Pronouncing Dictionary (converting words to IPA instead of Arpabet).} spec.homepage = "http://example.com" spec.license = "MIT" spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) } spec.bindir = "exe" spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) } spec.require_paths = ["lib"] if spec.respond_to?(:metadata) 
spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com' to prevent pushes to rubygems.org, or delete to allow pushes to any server." end spec.add_dependency "sqlite3" spec.add_development_dependency "bundler", "~> 1.9" spec.add_development_dependency "rake", "~> 10.0" end
# # Cookbook Name:: bcpc # Recipe:: powerdns # # Copyright 2013, Bloomberg Finance L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # include_recipe "bcpc::nova-head" make_config('mysql-pdns-user', "pdns") make_config('mysql-pdns-password', secure_password) %w{pdns-server pdns-backend-mysql}.each do |pkg| package pkg do action :upgrade end end template "/etc/powerdns/pdns.conf" do source "pdns.conf.erb" owner "root" group "root" mode 00600 notifies :restart, "service[pdns]", :delayed end ruby_block "powerdns-database-creation" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = \"#{node[:bcpc][:pdns_dbname]}\"' | grep -q \"#{node[:bcpc][:pdns_dbname]}\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} -e "CREATE DATABASE #{node[:bcpc][:pdns_dbname]} CHARACTER SET utf8 COLLATE utf8_general_ci;" mysql -uroot -p#{get_config('mysql-root-password')} -e "GRANT ALL ON #{node[:bcpc][:pdns_dbname]}.* TO '#{get_config('mysql-pdns-user')}'@'%' IDENTIFIED BY '#{get_config('mysql-pdns-password')}';" mysql -uroot -p#{get_config('mysql-root-password')} -e "GRANT ALL ON #{node[:bcpc][:pdns_dbname]}.* TO '#{get_config('mysql-pdns-user')}'@'localhost' IDENTIFIED BY '#{get_config('mysql-pdns-password')}';" mysql -uroot -p#{get_config('mysql-root-password')} -e "GRANT ALL ON #{node[:bcpc][:nova_dbname]}.* TO '#{get_config('mysql-pdns-user')}'@'%' IDENTIFIED BY '#{get_config('mysql-pdns-password')}';" mysql -uroot -p#{get_config('mysql-root-password')} -e "GRANT ALL ON #{node[:bcpc][:nova_dbname]}.* TO '#{get_config('mysql-pdns-user')}'@'localhost' IDENTIFIED BY '#{get_config('mysql-pdns-password')}';" mysql -uroot -p#{get_config('mysql-root-password')} -e "FLUSH PRIVILEGES;" ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-table-domains" do block do reverse_dns_zone = node['bcpc']['floating']['reverse_dns_zone'] || calc_reverse_dns_zone(node['bcpc']['floating']['cidr']) system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"domains_static\"' | grep -q \"domains_static\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH CREATE TABLE IF NOT EXISTS domains_static ( id INT auto_increment, name VARCHAR(255) NOT NULL, master VARCHAR(128) DEFAULT NULL, last_check INT DEFAULT NULL, type VARCHAR(6) NOT NULL, notified_serial INT DEFAULT NULL, account VARCHAR(40) DEFAULT NULL, primary key (id) ); INSERT INTO domains_static (name, type) values ('#{node[:bcpc][:domain_name]}', 'NATIVE'); INSERT INTO domains_static (name, type) values ('#{reverse_dns_zone}', 'NATIVE'); CREATE UNIQUE INDEX dom_name_index ON domains_static(name); ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-table-records" do block do reverse_dns_zone = node['bcpc']['floating']['reverse_dns_zone'] || calc_reverse_dns_zone(node['bcpc']['floating']['cidr']) system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"records_static\"' | grep -q \"records_static\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH CREATE TABLE IF NOT EXISTS records_static ( id INT auto_increment, domain_id INT DEFAULT NULL, name VARCHAR(255) DEFAULT NULL, type VARCHAR(6) DEFAULT NULL, content VARCHAR(255) DEFAULT NULL, ttl INT DEFAULT NULL, prio INT DEFAULT NULL, change_date INT DEFAULT NULL, primary key(id) ); INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains_static WHERE name='#{node[:bcpc][:domain_name]}'),'#{node[:bcpc][:domain_name]}','localhost root@#{node[:bcpc][:domain_name]} 1','SOA',300,NULL); INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains_static WHERE name='#{node[:bcpc][:domain_name]}'),'#{node[:bcpc][:domain_name]}','#{node[:bcpc][:management][:vip]}','NS',300,NULL); INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains_static WHERE name='#{node[:bcpc][:domain_name]}'),'#{node[:bcpc][:domain_name]}','#{node[:bcpc][:management][:vip]}','A',300,NULL); INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains_static WHERE name='#{reverse_dns_zone}'),'#{reverse_dns_zone}', '#{reverse_dns_zone} root@#{node[:bcpc][:domain_name]} 1','SOA',300,NULL); INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains_static WHERE name='#{reverse_dns_zone}'),'#{reverse_dns_zone}','#{node[:bcpc][:management][:vip]}','NS',300,NULL); CREATE INDEX rec_name_index ON records_static(name); CREATE INDEX nametype_index ON records_static(name,type); CREATE INDEX domain_id ON records_static(domain_id); ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-function-dns-name" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT name FROM mysql.proc WHERE name = \"dns_name\" 
AND db = \"#{node[:bcpc][:pdns_dbname]}\";' \"#{node[:bcpc][:pdns_dbname]}\" | grep -q \"dns_name\"" if not $?.success? then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH delimiter // CREATE FUNCTION dns_name (tenant VARCHAR(64) CHARACTER SET latin1) RETURNS VARCHAR(64) COMMENT 'Returns the project name in a DNS acceptable format. Roughly RFC 1035.' DETERMINISTIC BEGIN SELECT LOWER(tenant) INTO tenant; SELECT REPLACE(tenant, '&', 'and') INTO tenant; SELECT REPLACE(tenant, '_', '-') INTO tenant; SELECT REPLACE(tenant, ' ', '-') INTO tenant; RETURN tenant; END// ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-function-ip4_to_ptr_name" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT name FROM mysql.proc WHERE name = \"ip4_to_ptr_name\" AND db = \"#{node[:bcpc][:pdns_dbname]}\";' \"#{node[:bcpc][:pdns_dbname]}\" | grep -q \"ip4_to_ptr_name\"" if not $?.success? then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH delimiter // CREATE FUNCTION ip4_to_ptr_name(ip4 VARCHAR(64) CHARACTER SET latin1) RETURNS VARCHAR(64) COMMENT 'Returns the reversed IP with .in-addr.arpa appended, suitable for use in the name column of PTR records.' 
DETERMINISTIC BEGIN return concat_ws( '.', SUBSTRING_INDEX( SUBSTRING_INDEX(ip4, '.', 4), '.', -1), SUBSTRING_INDEX( SUBSTRING_INDEX(ip4, '.', 3), '.', -1), SUBSTRING_INDEX( SUBSTRING_INDEX(ip4, '.', 2), '.', -1), SUBSTRING_INDEX( SUBSTRING_INDEX(ip4, '.', 1), '.', -1), 'in-addr.arpa'); END// ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-table-domains-view" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"domains\"' | grep -q \"domains\"" if not $?.success? then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH CREATE OR REPLACE VIEW domains AS SELECT id,name,master,last_check,type,notified_serial,account FROM domains_static UNION SELECT # rank each project to create an ID and add the maximum ID from the static table (SELECT COUNT(*) FROM keystone.project WHERE y.id <= id) + (SELECT MAX(id) FROM domains_static) AS id, CONCAT(CONCAT(dns_name(y.name), '.'),'#{node[:bcpc][:domain_name]}') AS name, NULL AS master, NULL AS last_check, 'NATIVE' AS type, NULL AS notified_serial, NULL AS account FROM keystone.project y; ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-table-records_forward-view" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"records_forward\"' | grep -q \"records_forward\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH CREATE OR REPLACE VIEW records_forward AS SELECT id,domain_id,name,type,content,ttl,prio,change_date FROM records_static UNION # assume we only have 500 or less static records SELECT domains.id+500 AS id, domains.id AS domain_id, domains.name AS name, 'NS' AS type, '#{node[:bcpc][:management][:vip]}' AS content, 300 AS ttl, NULL AS prio, NULL AS change_date FROM domains WHERE id > (SELECT MAX(id) FROM domains_static) UNION # assume we only have 250 or less static domains SELECT domains.id+750 AS id, domains.id AS domain_id, domains.name AS name, 'SOA' AS type, 'localhost root@#{node[:bcpc][:domain_name]} 1' AS content, 300 AS ttl, NULL AS prio, NULL AS change_date FROM domains WHERE id > (SELECT MAX(id) FROM domains_static) UNION # again, assume we only have 250 or less static domains SELECT nova.instances.id+10000 AS id, # query the domain ID from the domains view (SELECT id FROM domains WHERE name=CONCAT(CONCAT((SELECT dns_name(name) FROM keystone.project WHERE id = nova.instances.project_id), '.'),'#{node[:bcpc][:domain_name]}')) AS domain_id, # create the FQDN of the record CONCAT(nova.instances.hostname, CONCAT('.', CONCAT((SELECT dns_name(name) FROM keystone.project WHERE id = nova.instances.project_id), CONCAT('.','#{node[:bcpc][:domain_name]}')))) AS name, 'A' AS type, nova.floating_ips.address AS content, 300 AS ttl, NULL AS type, NULL AS change_date FROM nova.instances, nova.fixed_ips, nova.floating_ips WHERE nova.instances.uuid = nova.fixed_ips.instance_uuid AND nova.floating_ips.fixed_ip_id = nova.fixed_ips.id; ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-table-records_reverse-view" do block do reverse_dns_zone = node['bcpc']['floating']['reverse_dns_zone'] || calc_reverse_dns_zone(node['bcpc']['floating']['cidr']) system "mysql -uroot -p#{get_config('mysql-root-password')} -e 
'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"records_reverse\"' | grep -q \"records_reverse\"" if not $?.success? then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH create or replace view records_reverse as select r.id * -1 as id, d.id as domain_id, ip4_to_ptr_name(r.content) as name, 'PTR' as type, r.name as content, r.ttl, r.prio, r.change_date from records_forward r, domains d where r.type='A' and d.name = '#{reverse_dns_zone}' and ip4_to_ptr_name(r.content) like '%.#{reverse_dns_zone}'; ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-table-records-view" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"records\"' | grep -q \"records\"" if not $?.success? then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH create or replace view records as select id, domain_id, name, type, content, ttl, prio, change_date from records_forward union all select id, domain_id, name, type, content, ttl, prio, change_date from records_reverse; ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end get_all_nodes.each do |server| ruby_block "create-dns-entry-#{server['hostname']}" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} -e 'SELECT name FROM records_static' | grep -q \"#{server['hostname']}.#{node[:bcpc][:domain_name]}\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains WHERE name='#{node[:bcpc][:domain_name]}'),'#{server['hostname']}.#{node[:bcpc][:domain_name]}','#{server['bcpc']['management']['ip']}','A',300,NULL); ] end end end end %w{openstack kibana graphite zabbix}.each do |static| ruby_block "create-management-dns-entry-#{static}" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} -e 'SELECT name FROM records_static' | grep -q \"#{static}.#{node[:bcpc][:domain_name]}\"" if not $?.success? then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains WHERE name='#{node[:bcpc][:domain_name]}'),'#{static}.#{node[:bcpc][:domain_name]}','#{node[:bcpc][:management][:vip]}','A',300,NULL); ] end end end end %w{s3}.each do |static| ruby_block "create-floating-dns-entry-#{static}" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} -e 'SELECT name FROM records_static' | grep -q \"#{static}.#{node[:bcpc][:domain_name]}\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains WHERE name='#{node[:bcpc][:domain_name]}'),'#{static}.#{node[:bcpc][:domain_name]}','#{node[:bcpc][:floating][:vip]}','A',300,NULL); ] end end end end template "/etc/powerdns/pdns.d/pdns.local.gmysql" do source "pdns.local.gmysql.erb" owner "pdns" group "root" mode 00640 notifies :restart, "service[pdns]", :immediately end service "pdns" do action [ :enable, :start ] end PowerDNS with optional OpenStack support -- needs to have a node with OpenStack recipe included on first run # # Cookbook Name:: bcpc # Recipe:: powerdns # # Copyright 2013, Bloomberg Finance L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # make_config('mysql-pdns-user', "pdns") make_config('mysql-pdns-password', secure_password) %w{pdns-server pdns-backend-mysql}.each do |pkg| package pkg do action :upgrade end end template "/etc/powerdns/pdns.conf" do source "pdns.conf.erb" owner "root" group "root" mode 00600 notifies :restart, "service[pdns]", :delayed end ruby_block "powerdns-database-creation" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = \"#{node[:bcpc][:pdns_dbname]}\"' | grep -q \"#{node[:bcpc][:pdns_dbname]}\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} -e "CREATE DATABASE #{node[:bcpc][:pdns_dbname]} CHARACTER SET utf8 COLLATE utf8_general_ci;" mysql -uroot -p#{get_config('mysql-root-password')} -e "GRANT ALL ON #{node[:bcpc][:pdns_dbname]}.* TO '#{get_config('mysql-pdns-user')}'@'%' IDENTIFIED BY '#{get_config('mysql-pdns-password')}';" mysql -uroot -p#{get_config('mysql-root-password')} -e "GRANT ALL ON #{node[:bcpc][:pdns_dbname]}.* TO '#{get_config('mysql-pdns-user')}'@'localhost' IDENTIFIED BY '#{get_config('mysql-pdns-password')}';" mysql -uroot -p#{get_config('mysql-root-password')} -e "FLUSH PRIVILEGES;" ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-database-creation-nova-grant" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SHOW GRANTS for \"#{get_config('mysql-pdns-user')}\";' #{node[:bcpc][:nova_dbname]}'" if not $?.success? then %x[ mysql -uroot -p#{get_config('mysql-root-password')} -e "GRANT ALL ON #{node[:bcpc][:nova_dbname]}.* TO '#{get_config('mysql-pdns-user')}'@'%' IDENTIFIED BY '#{get_config('mysql-pdns-password')}';" mysql -uroot -p#{get_config('mysql-root-password')} -e "GRANT ALL ON #{node[:bcpc][:nova_dbname]}.* TO '#{get_config('mysql-pdns-user')}'@'localhost' IDENTIFIED BY '#{get_config('mysql-pdns-password')}';" mysql -uroot -p#{get_config('mysql-root-password')} -e "FLUSH PRIVILEGES;" ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end only_if { get_nodes_for("openstack").length >= 1 } end ruby_block "powerdns-function-dns-name" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT name FROM mysql.proc WHERE name = \"dns_name\" AND db = \"#{node[:bcpc][:pdns_dbname]}\";' \"#{node[:bcpc][:pdns_dbname]}\" | grep -q \"dns_name\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH delimiter // CREATE FUNCTION dns_name (tenant VARCHAR(64) CHARACTER SET latin1) RETURNS VARCHAR(64) COMMENT 'Returns the project name in a DNS acceptable format. Roughly RFC 1035.' DETERMINISTIC BEGIN SELECT LOWER(tenant) INTO tenant; SELECT REPLACE(tenant, '&', 'and') INTO tenant; SELECT REPLACE(tenant, '_', '-') INTO tenant; SELECT REPLACE(tenant, ' ', '-') INTO tenant; RETURN tenant; END// ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end only_if { get_nodes_for("openstack").length >= 1 } end ruby_block "powerdns-table-domains" do block do reverse_dns_zone = node['bcpc']['floating']['reverse_dns_zone'] || calc_reverse_dns_zone(node['bcpc']['floating']['cidr']) system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"domains_static\"' | grep -q \"domains_static\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH CREATE TABLE IF NOT EXISTS domains_static ( id INT auto_increment, name VARCHAR(255) NOT NULL, master VARCHAR(128) DEFAULT NULL, last_check INT DEFAULT NULL, type VARCHAR(6) NOT NULL, notified_serial INT DEFAULT NULL, account VARCHAR(40) DEFAULT NULL, primary key (id) ); INSERT INTO domains_static (name, type) values ('#{node[:bcpc][:domain_name]}', 'NATIVE'); INSERT INTO domains_static (name, type) values ('#{reverse_dns_zone}', 'NATIVE'); CREATE UNIQUE INDEX dom_name_index ON domains_static(name); ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-table-records" do block do reverse_dns_zone = node['bcpc']['floating']['reverse_dns_zone'] || calc_reverse_dns_zone(node['bcpc']['floating']['cidr']) system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"records_static\"' | grep -q \"records_static\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH CREATE TABLE IF NOT EXISTS records_static ( id INT auto_increment, domain_id INT DEFAULT NULL, name VARCHAR(255) DEFAULT NULL, type VARCHAR(6) DEFAULT NULL, content VARCHAR(255) DEFAULT NULL, ttl INT DEFAULT NULL, prio INT DEFAULT NULL, change_date INT DEFAULT NULL, primary key(id) ); INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains_static WHERE name='#{node[:bcpc][:domain_name]}'),'#{node[:bcpc][:domain_name]}','localhost root@#{node[:bcpc][:domain_name]} 1','SOA',300,NULL); INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains_static WHERE name='#{node[:bcpc][:domain_name]}'),'#{node[:bcpc][:domain_name]}','#{node[:bcpc][:management][:vip]}','NS',300,NULL); INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains_static WHERE name='#{node[:bcpc][:domain_name]}'),'#{node[:bcpc][:domain_name]}','#{node[:bcpc][:management][:vip]}','A',300,NULL); INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains_static WHERE name='#{reverse_dns_zone}'),'#{reverse_dns_zone}', '#{reverse_dns_zone} root@#{node[:bcpc][:domain_name]} 1','SOA',300,NULL); INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains_static WHERE name='#{reverse_dns_zone}'),'#{reverse_dns_zone}','#{node[:bcpc][:management][:vip]}','NS',300,NULL); CREATE INDEX rec_name_index ON records_static(name); CREATE INDEX nametype_index ON records_static(name,type); CREATE INDEX domain_id ON records_static(domain_id); ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-function-ip4_to_ptr_name" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT name FROM mysql.proc WHERE name = 
\"ip4_to_ptr_name\" AND db = \"#{node[:bcpc][:pdns_dbname]}\";' \"#{node[:bcpc][:pdns_dbname]}\" | grep -q \"ip4_to_ptr_name\"" if not $?.success? then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH delimiter // CREATE FUNCTION ip4_to_ptr_name(ip4 VARCHAR(64) CHARACTER SET latin1) RETURNS VARCHAR(64) COMMENT 'Returns the reversed IP with .in-addr.arpa appended, suitable for use in the name column of PTR records.' DETERMINISTIC BEGIN return concat_ws( '.', SUBSTRING_INDEX( SUBSTRING_INDEX(ip4, '.', 4), '.', -1), SUBSTRING_INDEX( SUBSTRING_INDEX(ip4, '.', 3), '.', -1), SUBSTRING_INDEX( SUBSTRING_INDEX(ip4, '.', 2), '.', -1), SUBSTRING_INDEX( SUBSTRING_INDEX(ip4, '.', 1), '.', -1), 'in-addr.arpa'); END// ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end openstack_domain_view=<<-OS_DOMAIN_VIEW UNION SELECT # rank each project to create an ID and add the maximum ID from the static table (SELECT COUNT(*) FROM keystone.project WHERE y.id <= id) + (SELECT MAX(id) FROM domains_static) AS id, CONCAT(CONCAT(dns_name(y.name), '.'),'#{node[:bcpc][:domain_name]}') AS name, NULL AS master, NULL AS last_check, 'NATIVE' AS type, NULL AS notified_serial, NULL AS account FROM keystone.project y OS_DOMAIN_VIEW ruby_block "powerdns-table-domains-view" do block do if get_nodes_for("openstack").length < 1 then openstack_domain_view="" end system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"domains\"' | grep -q \"domains\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH CREATE OR REPLACE VIEW domains AS SELECT id,name,master,last_check,type,notified_serial,account FROM domains_static #{openstack_domain_view}; ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end openstack_records_view=<<-OS_RECORDS_VIEW UNION # assume we only have 500 or less static records SELECT domains.id+500 AS id, domains.id AS domain_id, domains.name AS name, 'NS' AS type, '#{node[:bcpc][:management][:vip]}' AS content, 300 AS ttl, NULL AS prio, NULL AS change_date FROM domains WHERE id > (SELECT MAX(id) FROM domains_static) UNION # assume we only have 250 or less static domains SELECT domains.id+750 AS id, domains.id AS domain_id, domains.name AS name, 'SOA' AS type, 'localhost root@#{node[:bcpc][:domain_name]} 1' AS content, 300 AS ttl, NULL AS prio, NULL AS change_date FROM domains WHERE id > (SELECT MAX(id) FROM domains_static) UNION # again, assume we only have 250 or less static domains SELECT nova.instances.id+10000 AS id, # query the domain ID from the domains view (SELECT id FROM domains WHERE name=CONCAT(CONCAT((SELECT dns_name(name) FROM keystone.project WHERE id = nova.instances.project_id), '.'),'#{node[:bcpc][:domain_name]}')) AS domain_id, # create the FQDN of the record CONCAT(nova.instances.hostname, CONCAT('.', CONCAT((SELECT dns_name(name) FROM keystone.project WHERE id = nova.instances.project_id), CONCAT('.','#{node[:bcpc][:domain_name]}')))) AS name, 'A' AS type, nova.floating_ips.address AS content, 300 AS ttl, NULL AS type, NULL AS change_date FROM nova.instances, nova.fixed_ips, nova.floating_ips WHERE nova.instances.uuid = nova.fixed_ips.instance_uuid AND nova.floating_ips.fixed_ip_id = nova.fixed_ips.id OS_RECORDS_VIEW ruby_block "powerdns-table-records_forward-view" do block do if get_nodes_for("openstack").length < 1 then openstack_records_view="" end system "mysql -uroot 
-p#{get_config('mysql-root-password')} -e 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"records_forward\"' | grep -q \"records_forward\"" if not $?.success? then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH CREATE OR REPLACE VIEW records_forward AS SELECT id,domain_id,name,type,content,ttl,prio,change_date FROM records_static #{openstack_records_view}; ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-table-records_reverse-view" do block do reverse_dns_zone = node['bcpc']['floating']['reverse_dns_zone'] || calc_reverse_dns_zone(node['bcpc']['floating']['cidr']) system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"records_reverse\"' | grep -q \"records_reverse\"" if not $?.success? then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH create or replace view records_reverse as select r.id * -1 as id, d.id as domain_id, ip4_to_ptr_name(r.content) as name, 'PTR' as type, r.name as content, r.ttl, r.prio, r.change_date from records_forward r, domains d where r.type='A' and d.name = '#{reverse_dns_zone}' and ip4_to_ptr_name(r.content) like '%.#{reverse_dns_zone}'; ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end ruby_block "powerdns-table-records-view" do block do system "mysql -uroot -p#{get_config('mysql-root-password')} -e 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = \"#{node[:bcpc][:pdns_dbname]}\" AND TABLE_NAME=\"records\"' | grep -q \"records\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH create or replace view records as select id, domain_id, name, type, content, ttl, prio, change_date from records_forward union all select id, domain_id, name, type, content, ttl, prio, change_date from records_reverse; ] self.notifies :restart, "service[pdns]", :delayed self.resolve_notification_references end end end get_all_nodes.each do |server| ruby_block "create-dns-entry-#{server['hostname']}" do block do # check if we have a float address float_host_A_record="" if server['bcpc']['management']['ip'] != server['bcpc']['floating']['ip'] then float_host_A_record="INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains WHERE name='#{node[:bcpc][:domain_name]}'),'#{float_host(server['hostname'])}.#{node[:bcpc][:domain_name]}','#{server['bcpc']['floating']['ip']}','A',300,NULL);" end # check if we have a storage address storage_host_A_record="" if server['bcpc']['management']['ip'] != server['bcpc']['storage']['ip'] then storage_host_A_record="INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains WHERE name='#{node[:bcpc][:domain_name]}'),'#{storage_host(server['hostname'])}.#{node[:bcpc][:domain_name]}','#{server['bcpc']['storage']['ip']}','A',300,NULL);" end system "mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} -e 'SELECT name FROM records_static' | grep -q \"#{server['hostname']}.#{node[:bcpc][:domain_name]}\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains WHERE name='#{node[:bcpc][:domain_name]}'),'#{server['hostname']}.#{node[:bcpc][:domain_name]}','#{server['bcpc']['management']['ip']}','A',300,NULL); #{storage_host_A_record} #{float_host_A_record} ] end end end end %w{openstack kibana graphite zabbix}.each do |static| ruby_block "create-management-dns-entry-#{static}" do block do if get_nodes_for(static).length >= 1 then system "mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} -e 'SELECT name FROM records_static' | grep -q \"#{static}.#{node[:bcpc][:domain_name]}\"" if not $?.success? then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains WHERE name='#{node[:bcpc][:domain_name]}'),'#{static}.#{node[:bcpc][:domain_name]}','#{node[:bcpc][:management][:vip]}','A',300,NULL); ] end end end end end %w{s3}.each do |static| ruby_block "create-floating-dns-entry-#{static}" do block do if get_nodes_for("ceph-rgw").length >= 1 then system "mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} -e 'SELECT name FROM records_static' | grep -q \"#{static}.#{node[:bcpc][:domain_name]}\"" if not $?.success? 
then %x[ mysql -uroot -p#{get_config('mysql-root-password')} #{node[:bcpc][:pdns_dbname]} <<-EOH INSERT INTO records_static (domain_id, name, content, type, ttl, prio) VALUES ((SELECT id FROM domains WHERE name='#{node[:bcpc][:domain_name]}'),'#{static}.#{node[:bcpc][:domain_name]}','#{node[:bcpc][:floating][:vip]}','A',300,NULL); ] end end end end end template "/etc/powerdns/pdns.d/pdns.local.gmysql" do source "pdns.local.gmysql.erb" owner "pdns" group "root" mode 00640 notifies :restart, "service[pdns]", :immediately end service "pdns" do action [ :enable, :start ] end
brew 'awscli' brew 'circleci' brew 'colordiff' brew 'coreutils' brew 'direnv' brew 'exa' brew 'fd' brew 'fish' brew 'fzf' brew 'gh' brew 'ghq' brew 'git' brew 'git-secrets' brew 'github-nippou' brew 'glow' brew 'gnupg' brew 'go' brew 'helm' brew 'heroku' brew 'hub' brew 'ipcalc' brew 'jq' brew 'krew' brew 'kubectl' { directory_name 'kubernetes-cli' } brew 'lua-language-server' brew 'mas' brew 'minikube' brew 'neovim' { head true } brew 'node' brew 'packer' brew 'pinentry-mac' brew 'pstree' brew 'python' { use_cellar_option true } brew 'ripgrep' brew 'ruby' brew 'rustup-init' brew 's3-edit' brew 'stern' brew 'terminal-notifier' brew 'terraform' brew 'terraform-ls' brew 'tflint' brew 'tig' brew 'tmux' brew 'tree' brew 'watch' brew 'whalebrew' brew 'yamlls' brew 'yarn' Install rust-analyzer brew 'awscli' brew 'circleci' brew 'colordiff' brew 'coreutils' brew 'direnv' brew 'exa' brew 'fd' brew 'fish' brew 'fzf' brew 'gh' brew 'ghq' brew 'git' brew 'git-secrets' brew 'github-nippou' brew 'glow' brew 'gnupg' brew 'go' brew 'helm' brew 'heroku' brew 'hub' brew 'ipcalc' brew 'jq' brew 'krew' brew 'kubectl' { directory_name 'kubernetes-cli' } brew 'lua-language-server' brew 'mas' brew 'minikube' brew 'neovim' { head true } brew 'node' brew 'packer' brew 'pinentry-mac' brew 'pstree' brew 'python' { use_cellar_option true } brew 'ripgrep' brew 'ruby' brew 'rust-analyzer' brew 'rustup-init' brew 's3-edit' brew 'stern' brew 'terminal-notifier' brew 'terraform' brew 'terraform-ls' brew 'tflint' brew 'tig' brew 'tmux' brew 'tree' brew 'watch' brew 'whalebrew' brew 'yamlls' brew 'yarn'
# # Cookbook Name:: munin # Recipe:: default # # Copyright 2010, OpenStreetMap Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # include_recipe "networking" package "munin-node" service "munin-node" do if node[:lsb][:release].to_f >= 15.10 provider Chef::Provider::Service::Systemd elsif node[:lsb][:release].to_f >= 14.04 provider Chef::Provider::Service::Upstart end action [:enable, :start] supports :status => true, :restart => true, :reload => true end servers = search(:node, "recipes:munin\\:\\:server") # ~FC010 servers.each do |server| server.interfaces(:role => :external) do |interface| firewall_rule "accept-munin-#{server}" do action :accept family interface[:family] source "#{interface[:zone]}:#{interface[:address]}" dest "fw" proto "tcp:syn" dest_ports "munin" source_ports "1024:" end end end template "/etc/munin/munin-node.conf" do source "munin-node.conf.erb" owner "root" group "root" mode 0o644 variables :servers => servers notifies :restart, "service[munin-node]" end remote_directory "/usr/local/share/munin/plugins" do source "plugins" owner "root" group "root" mode 0o755 files_owner "root" files_group "root" files_mode 0o755 purge true end remote_directory "/etc/munin/plugin-conf.d" do source "plugin-conf.d" owner "root" group "munin" mode 0o750 files_owner "root" files_group "root" files_mode 0o644 purge false notifies :restart, "service[munin-node]" end if Dir.glob("/proc/acpi/thermal_zone/*/temperature").empty? 
munin_plugin "acpi" do action :delete end else munin_plugin "acpi" end # apcpdu_ munin_plugin "cpu" if File.exist?("/sys/devices/system/cpu/cpu0/cpufreq/stats/time_in_state") munin_plugin "cpuspeed" else munin_plugin "cpuspeed" do action :delete end end munin_plugin_conf "df" do template "df.erb" end munin_plugin "df" munin_plugin "df_inode" munin_plugin "diskstats" munin_plugin "entropy" munin_plugin "forks" if node[:kernel][:modules].include?("nf_conntrack") package "conntrack" munin_plugin "fw_conntrack" munin_plugin "fw_forwarded_local" else munin_plugin "fw_conntrack" do action :delete end munin_plugin "fw_forwarded_local" do action :delete end end if File.read("/proc/sys/net/ipv4/ip_forward").chomp == "1" munin_plugin "fw_packets" else munin_plugin "fw_packets" do action :delete end end if File.exist?("/sbin/hpasmcli") munin_plugin "hpasmcli2_temp" do target "hpasmcli2_" end munin_plugin "hpasmcli2_fans" do target "hpasmcli2_" end else munin_plugin "hpasmcli2_temp" do action :delete end munin_plugin "hpasmcli2_fans" do action :delete end end munin_plugin "hpasmcli_temp" do # ~FC005 action :delete end munin_plugin "hpasmcli_fans" do action :delete end munin_plugin "http_loadtime" do action :delete end node[:network][:interfaces].each do |ifname, ifattr| if ifattr[:encapsulation] == "Ethernet" && ifattr[:state] == "up" munin_plugin "if_err_#{ifname}" do target "if_err_" end munin_plugin "if_#{ifname}" do target "if_" end else munin_plugin "if_err_#{ifname}" do action :delete end munin_plugin "if_#{ifname}" do action :delete end end end munin_plugin "interrupts" munin_plugin "iostat" munin_plugin "iostat_ios" if Dir.glob("/dev/ipmi*").empty? 
munin_plugin_conf "ipmi" do action :delete end munin_plugin "ipmi_fans" do action :delete end munin_plugin "ipmi_temp" do action :delete end munin_plugin "ipmi_power" do action :delete only_if { node[:lsb][:release].to_f >= 14.04 } end else munin_plugin_conf "ipmi" do template "ipmi.erb" end munin_plugin "ipmi_fans" do target "ipmi_" end munin_plugin "ipmi_temp" do target "ipmi_" end munin_plugin "ipmi_power" do target "ipmi_" only_if { node[:lsb][:release].to_f >= 14.04 } end end munin_plugin "irqstats" munin_plugin "load" munin_plugin "memory" munin_plugin "netstat" if node[:kernel][:modules].include?("nfsv3") munin_plugin "nfs_client" else munin_plugin "nfs_client" do action :delete end end if node[:kernel][:modules].include?("nfsv4") munin_plugin "nfs4_client" else munin_plugin "nfs4_client" do action :delete end end if node[:kernel][:modules].include?("nfsd") munin_plugin "nfsd" munin_plugin "nfsd4" else munin_plugin "nfsd" do action :delete end munin_plugin "nfsd4" do action :delete end end munin_plugin "open_files" munin_plugin "open_inodes" munin_plugin "postfix_mailqueue" do action :delete end munin_plugin "postfix_mailvolume" do action :delete end munin_plugin "processes" munin_plugin "proc_pri" sensors_fan = false sensors_temp = false sensors_volt = false Dir.glob("/sys/class/hwmon/hwmon*").each do |hwmon| hwmon = "#{hwmon}/device" unless File.exist?("#{hwmon}/name") sensors_fan = true unless Dir.glob("#{hwmon}/fan*_input").empty? sensors_temp = true unless Dir.glob("#{hwmon}/temp*_input").empty? sensors_volt = true unless Dir.glob("#{hwmon}/in*_input").empty? 
end if sensors_fan || sensors_temp || sensors_volt package "lm-sensors" end if sensors_fan munin_plugin "sensors_fan" do target "sensors_" end else munin_plugin "sensors_fan" do action :delete end end if sensors_temp munin_plugin "sensors_temp" do target "sensors_" end else munin_plugin "sensors_temp" do action :delete end end if sensors_volt munin_plugin "sensors_volt" do target "sensors_" conf "sensors_volt.erb" end else munin_plugin "sensors_volt" do action :delete end end munin_plugin "swap" munin_plugin "tcp" munin_plugin "threads" munin_plugin "uptime" munin_plugin "users" munin_plugin "vmstat" Don't include the networking recipe It appears to be unnecessary. The cookbook dependency remains since there are firewall_rule providers used. Refs #81 # # Cookbook Name:: munin # Recipe:: default # # Copyright 2010, OpenStreetMap Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# package "munin-node" service "munin-node" do if node[:lsb][:release].to_f >= 15.10 provider Chef::Provider::Service::Systemd elsif node[:lsb][:release].to_f >= 14.04 provider Chef::Provider::Service::Upstart end action [:enable, :start] supports :status => true, :restart => true, :reload => true end servers = search(:node, "recipes:munin\\:\\:server") # ~FC010 servers.each do |server| server.interfaces(:role => :external) do |interface| firewall_rule "accept-munin-#{server}" do action :accept family interface[:family] source "#{interface[:zone]}:#{interface[:address]}" dest "fw" proto "tcp:syn" dest_ports "munin" source_ports "1024:" end end end template "/etc/munin/munin-node.conf" do source "munin-node.conf.erb" owner "root" group "root" mode 0o644 variables :servers => servers notifies :restart, "service[munin-node]" end remote_directory "/usr/local/share/munin/plugins" do source "plugins" owner "root" group "root" mode 0o755 files_owner "root" files_group "root" files_mode 0o755 purge true end remote_directory "/etc/munin/plugin-conf.d" do source "plugin-conf.d" owner "root" group "munin" mode 0o750 files_owner "root" files_group "root" files_mode 0o644 purge false notifies :restart, "service[munin-node]" end if Dir.glob("/proc/acpi/thermal_zone/*/temperature").empty? 
munin_plugin "acpi" do action :delete end else munin_plugin "acpi" end # apcpdu_ munin_plugin "cpu" if File.exist?("/sys/devices/system/cpu/cpu0/cpufreq/stats/time_in_state") munin_plugin "cpuspeed" else munin_plugin "cpuspeed" do action :delete end end munin_plugin_conf "df" do template "df.erb" end munin_plugin "df" munin_plugin "df_inode" munin_plugin "diskstats" munin_plugin "entropy" munin_plugin "forks" if node[:kernel][:modules].include?("nf_conntrack") package "conntrack" munin_plugin "fw_conntrack" munin_plugin "fw_forwarded_local" else munin_plugin "fw_conntrack" do action :delete end munin_plugin "fw_forwarded_local" do action :delete end end if File.read("/proc/sys/net/ipv4/ip_forward").chomp == "1" munin_plugin "fw_packets" else munin_plugin "fw_packets" do action :delete end end if File.exist?("/sbin/hpasmcli") munin_plugin "hpasmcli2_temp" do target "hpasmcli2_" end munin_plugin "hpasmcli2_fans" do target "hpasmcli2_" end else munin_plugin "hpasmcli2_temp" do action :delete end munin_plugin "hpasmcli2_fans" do action :delete end end munin_plugin "hpasmcli_temp" do # ~FC005 action :delete end munin_plugin "hpasmcli_fans" do action :delete end munin_plugin "http_loadtime" do action :delete end node[:network][:interfaces].each do |ifname, ifattr| if ifattr[:encapsulation] == "Ethernet" && ifattr[:state] == "up" munin_plugin "if_err_#{ifname}" do target "if_err_" end munin_plugin "if_#{ifname}" do target "if_" end else munin_plugin "if_err_#{ifname}" do action :delete end munin_plugin "if_#{ifname}" do action :delete end end end munin_plugin "interrupts" munin_plugin "iostat" munin_plugin "iostat_ios" if Dir.glob("/dev/ipmi*").empty? 
munin_plugin_conf "ipmi" do action :delete end munin_plugin "ipmi_fans" do action :delete end munin_plugin "ipmi_temp" do action :delete end munin_plugin "ipmi_power" do action :delete only_if { node[:lsb][:release].to_f >= 14.04 } end else munin_plugin_conf "ipmi" do template "ipmi.erb" end munin_plugin "ipmi_fans" do target "ipmi_" end munin_plugin "ipmi_temp" do target "ipmi_" end munin_plugin "ipmi_power" do target "ipmi_" only_if { node[:lsb][:release].to_f >= 14.04 } end end munin_plugin "irqstats" munin_plugin "load" munin_plugin "memory" munin_plugin "netstat" if node[:kernel][:modules].include?("nfsv3") munin_plugin "nfs_client" else munin_plugin "nfs_client" do action :delete end end if node[:kernel][:modules].include?("nfsv4") munin_plugin "nfs4_client" else munin_plugin "nfs4_client" do action :delete end end if node[:kernel][:modules].include?("nfsd") munin_plugin "nfsd" munin_plugin "nfsd4" else munin_plugin "nfsd" do action :delete end munin_plugin "nfsd4" do action :delete end end munin_plugin "open_files" munin_plugin "open_inodes" munin_plugin "postfix_mailqueue" do action :delete end munin_plugin "postfix_mailvolume" do action :delete end munin_plugin "processes" munin_plugin "proc_pri" sensors_fan = false sensors_temp = false sensors_volt = false Dir.glob("/sys/class/hwmon/hwmon*").each do |hwmon| hwmon = "#{hwmon}/device" unless File.exist?("#{hwmon}/name") sensors_fan = true unless Dir.glob("#{hwmon}/fan*_input").empty? sensors_temp = true unless Dir.glob("#{hwmon}/temp*_input").empty? sensors_volt = true unless Dir.glob("#{hwmon}/in*_input").empty? 
end if sensors_fan || sensors_temp || sensors_volt package "lm-sensors" end if sensors_fan munin_plugin "sensors_fan" do target "sensors_" end else munin_plugin "sensors_fan" do action :delete end end if sensors_temp munin_plugin "sensors_temp" do target "sensors_" end else munin_plugin "sensors_temp" do action :delete end end if sensors_volt munin_plugin "sensors_volt" do target "sensors_" conf "sensors_volt.erb" end else munin_plugin "sensors_volt" do action :delete end end munin_plugin "swap" munin_plugin "tcp" munin_plugin "threads" munin_plugin "uptime" munin_plugin "users" munin_plugin "vmstat"
# # Cookbook Name:: nginx # Recipe:: default # Author:: AJ Christensen <aj@junglist.gen.nz> # # Copyright 2008, OpsCode, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # package "nginx" directory node[:nginx_log_dir] do mode 0755 owner node[:nginx_user] action :create end directory "/etc/nginx/conf.d" do mode 0755 owner node[:nginx_user] action :create end %w{nxensite nxdissite}.each do |nxscript| template "/usr/sbin/#{nxscript}" do source "#{nxscript}.erb" mode 0755 owner "root" group "root" end end template "nginx.conf" do path "#{node[:nginx_dir]}/nginx.conf" source "nginx.conf.erb" owner "root" group "root" mode 0644 end template "/etc/init.d/nginx" do source "nginx-init.erb" mode 0755 backup false end service "nginx" do supports :status => true, :restart => true, :reload => true action [ :enable, :start ] end delete the default site # # Cookbook Name:: nginx # Recipe:: default # Author:: AJ Christensen <aj@junglist.gen.nz> # # Copyright 2008, OpsCode, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. # package "nginx" directory node[:nginx_log_dir] do mode 0755 owner node[:nginx_user] action :create end directory "/etc/nginx/conf.d" do mode 0755 owner node[:nginx_user] action :create end %w{nxensite nxdissite}.each do |nxscript| template "/usr/sbin/#{nxscript}" do source "#{nxscript}.erb" mode 0755 owner "root" group "root" end end template "nginx.conf" do path "#{node[:nginx_dir]}/nginx.conf" source "nginx.conf.erb" owner "root" group "root" mode 0644 end template "/etc/init.d/nginx" do source "nginx-init.erb" mode 0755 backup false end file "/etc/nginx/sites-enabled/default" do action :delete end service "nginx" do supports :status => true, :restart => true, :reload => true action [ :enable, :start ] end
# -*- mode: ruby -*- # vi: set ft=ruby : # vi: set shiftwidth=2 tabstop=2 expandtab : # # Cookbook Name:: oraxe # Recipe:: default # # Copyright 2013, YOUR_COMPANY_NAME # # All rights reserved - Do Not Redistribute # add oracle-xe rpm package, run /etc/init.d/oracle_xe configure, add sqldeveloper rpm Signed-off-by: Pattrick Hueper <4f21bae5c784a224b2a1c47b9159cc2235a4d5c1@hueper.net> # -*- mode: ruby -*- # vi: set ft=ruby : # vi: set shiftwidth=2 tabstop=2 expandtab : # # Cookbook Name:: oraxe # Recipe:: default # # Copyright 2013, YOUR_COMPANY_NAME # # All rights reserved - Do Not Redistribute # # package "oracle-rdbms-server-11gR2-preinstall" do action :install end rpm_package "oracle-xe" do source "/vagrant/oraxe_install/Disk1/oracle-xe-*.rpm" action :install end file "test" do path "/tmp/patty_test" content <<-EOF ORACLE_HTTP_PORT=8080 ORACLE_LISTENER_PORT=1521 ORACLE_PASSWORD=oracle ORACLE_CONFIRM_PASSWORD=oracle ORACLE_DBENABLE=y EOF end execute "oracle_xe configure" do command "/etc/init.d/oracle-xe configure responseFile=/tmp/patty_test 2>&1 | tee /tmp/patty_test.out" end file "test_delete" do path "/tmp/patty_test" action :delete end rpm_package "sqldeveloper" do source "/vagrant/oraxe_install/sqldeveloper-*.rpm" action :install end
# Copyright (c) 2014-2016 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require_relative 'ciscotest' require_relative '../lib/cisco_node_utils/portchannel_global' # TestX__CLASS_NAME__X - Minitest for X__CLASS_NAME__X node utility class class TestPortchannelGlobal < CiscoTestCase # TESTS DEFAULT_NAME = 'default' def setup super config 'no port-channel load-balance' unless n6k_platform? config 'no port-channel load-balance ethernet' unless n9k_platform? || n7k_platform? end def teardown config 'no port-channel load-balance' unless n6k_platform? config 'no port-channel load-balance ethernet' unless n9k_platform? || n7k_platform? super end def n7k_platform? /N7/ =~ node.product_id end def n9k_platform? /N(3|9)/ =~ node.product_id end def n6k_platform? /N(5|6)/ =~ node.product_id end def create_portchannel_global(name=DEFAULT_NAME) PortChannelGlobal.new(name) end def test_get_hash_distribution skip('Platform does not support this property') if n6k_platform? || n9k_platform? @global = create_portchannel_global @global.hash_distribution = 'fixed' assert_equal('fixed', @global.hash_distribution) @global.hash_distribution = @global.default_hash_distribution assert_equal(@global.default_hash_distribution, @global.hash_distribution) end def test_get_set_load_defer skip('Platform does not support this property') if n6k_platform? || n9k_platform? 
@global = create_portchannel_global @global.load_defer = 1000 assert_equal(1000, @global.load_defer) @global.load_defer = @global.default_load_defer assert_equal(@global.default_load_defer, @global.load_defer) end def test_get_set_resilient skip('Platform does not support this property') if n6k_platform? || n7k_platform? @global = create_portchannel_global @global.resilient = true assert_equal(true, @global.resilient) @global.resilient = @global.default_resilient assert_equal(@global.default_resilient, @global.resilient) end def test_get_set_port_channel_load_balance_sym_concat_rot skip('Platform does not support this property') if n6k_platform? || n7k_platform? @global = create_portchannel_global @global.send(:port_channel_load_balance=, 'src-dst', 'ip-l4port', nil, nil, true, true, 4) assert_equal('src-dst', @global.bundle_select) assert_equal('ip-l4port', @global.bundle_hash) assert_equal(true, @global.symmetry) assert_equal(true, @global.concatenation) assert_equal(4, @global.rotate) @global.send( :port_channel_load_balance=, @global.default_bundle_select, @global.default_bundle_hash, nil, nil, @global.default_symmetry, @global.default_concatenation, @global.default_rotate) assert_equal( @global.default_bundle_select, @global.bundle_select) assert_equal( @global.default_bundle_hash, @global.bundle_hash) assert_equal( @global.default_symmetry, @global.symmetry) assert_equal( @global.default_concatenation, @global.concatenation) assert_equal(@global.default_rotate, @global.rotate) end def test_get_set_port_channel_load_balance_hash_poly skip('Platform does not support this property') if n7k_platform? || n9k_platform? 
@global = create_portchannel_global @global.send(:port_channel_load_balance=, 'src-dst', 'ip-only', 'CRC10c', nil, nil, nil, nil) assert_equal('src-dst', @global.bundle_select) assert_equal('ip-only', @global.bundle_hash) assert_equal('CRC10c', @global.hash_poly) @global.send(:port_channel_load_balance=, 'dst', 'mac', 'CRC10a', nil, nil, nil, nil) assert_equal('dst', @global.bundle_select) assert_equal('mac', @global.bundle_hash) assert_equal('CRC10a', @global.hash_poly) @global.send( :port_channel_load_balance=, @global.default_bundle_select, @global.default_bundle_hash, @global.default_hash_poly, nil, nil, nil, nil) assert_equal( @global.default_bundle_select, @global.bundle_select) assert_equal( @global.default_bundle_hash, @global.bundle_hash) assert_equal(@global.default_hash_poly, @global.hash_poly) end def test_get_set_port_channel_load_balance_asym_rot skip('Platform does not support this property') if n6k_platform? || n9k_platform? @global = create_portchannel_global @global.send(:port_channel_load_balance=, 'src-dst', 'ip-vlan', nil, true, nil, nil, 4) assert_equal('src-dst', @global.bundle_select) assert_equal('ip-vlan', @global.bundle_hash) assert_equal(true, @global.asymmetric) assert_equal(4, @global.rotate) @global.send( :port_channel_load_balance=, @global.default_bundle_select, @global.default_bundle_hash, nil, @global.default_asymmetric, nil, nil, @global.default_rotate) assert_equal( @global.default_bundle_select, @global.bundle_select) assert_equal( @global.default_bundle_hash, @global.bundle_hash) assert_equal( @global.default_asymmetric, @global.asymmetric) assert_equal(@global.default_rotate, @global.rotate) end end Add n3k_in_n3k_mode() platform check # Copyright (c) 2014-2016 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require_relative 'ciscotest' require_relative '../lib/cisco_node_utils/portchannel_global' # TestX__CLASS_NAME__X - Minitest for X__CLASS_NAME__X node utility class class TestPortchannelGlobal < CiscoTestCase # TESTS DEFAULT_NAME = 'default' def setup super config 'no port-channel load-balance' unless n6k_platform? config 'no port-channel load-balance ethernet' unless n9k_platform? || n7k_platform? end def teardown config 'no port-channel load-balance' unless n6k_platform? config 'no port-channel load-balance ethernet' unless n9k_platform? || n7k_platform? super end def n3k_in_n3k_mode? return unless /N3/ =~ node.product_id mode = config('show system switch-mode') # note: an n3k in n9k mode displays: 'system switch-mode n9k' patterns = ['system switch-mode n3k', 'Switch mode configuration is not not applicable'] mode[Regexp.union(patterns)] ? true : false end def n7k_platform? /N7/ =~ node.product_id end def n9k_platform? /N(3|9)/ =~ node.product_id end def n6k_platform? /N(5|6)/ =~ node.product_id end def create_portchannel_global(name=DEFAULT_NAME) PortChannelGlobal.new(name) end def test_get_hash_distribution skip('Platform does not support this property') if n6k_platform? || n9k_platform? @global = create_portchannel_global @global.hash_distribution = 'fixed' assert_equal('fixed', @global.hash_distribution) @global.hash_distribution = @global.default_hash_distribution assert_equal(@global.default_hash_distribution, @global.hash_distribution) end def test_get_set_load_defer skip('Platform does not support this property') if n6k_platform? || n9k_platform? 
@global = create_portchannel_global @global.load_defer = 1000 assert_equal(1000, @global.load_defer) @global.load_defer = @global.default_load_defer assert_equal(@global.default_load_defer, @global.load_defer) end def test_get_set_resilient skip('Platform does not support this property') if n6k_platform? || n7k_platform? @global = create_portchannel_global @global.resilient = true assert_equal(true, @global.resilient) @global.resilient = @global.default_resilient assert_equal(@global.default_resilient, @global.resilient) end def test_get_set_port_channel_load_balance_sym_concat_rot skip('Platform does not support this property') if n6k_platform? || n7k_platform? || n3k_in_n3k_mode? @global = create_portchannel_global @global.send(:port_channel_load_balance=, 'src-dst', 'ip-l4port', nil, nil, true, true, 4) assert_equal('src-dst', @global.bundle_select) assert_equal('ip-l4port', @global.bundle_hash) assert_equal(true, @global.symmetry) assert_equal(true, @global.concatenation) assert_equal(4, @global.rotate) @global.send( :port_channel_load_balance=, @global.default_bundle_select, @global.default_bundle_hash, nil, nil, @global.default_symmetry, @global.default_concatenation, @global.default_rotate) assert_equal( @global.default_bundle_select, @global.bundle_select) assert_equal( @global.default_bundle_hash, @global.bundle_hash) assert_equal( @global.default_symmetry, @global.symmetry) assert_equal( @global.default_concatenation, @global.concatenation) assert_equal(@global.default_rotate, @global.rotate) end def test_get_set_port_channel_load_balance_hash_poly skip('Platform does not support this property') if n7k_platform? || n9k_platform? 
@global = create_portchannel_global @global.send(:port_channel_load_balance=, 'src-dst', 'ip-only', 'CRC10c', nil, nil, nil, nil) assert_equal('src-dst', @global.bundle_select) assert_equal('ip-only', @global.bundle_hash) assert_equal('CRC10c', @global.hash_poly) @global.send(:port_channel_load_balance=, 'dst', 'mac', 'CRC10a', nil, nil, nil, nil) assert_equal('dst', @global.bundle_select) assert_equal('mac', @global.bundle_hash) assert_equal('CRC10a', @global.hash_poly) @global.send( :port_channel_load_balance=, @global.default_bundle_select, @global.default_bundle_hash, @global.default_hash_poly, nil, nil, nil, nil) assert_equal( @global.default_bundle_select, @global.bundle_select) assert_equal( @global.default_bundle_hash, @global.bundle_hash) assert_equal(@global.default_hash_poly, @global.hash_poly) end def test_get_set_port_channel_load_balance_asym_rot skip('Platform does not support this property') if n6k_platform? || n9k_platform? @global = create_portchannel_global @global.send(:port_channel_load_balance=, 'src-dst', 'ip-vlan', nil, true, nil, nil, 4) assert_equal('src-dst', @global.bundle_select) assert_equal('ip-vlan', @global.bundle_hash) assert_equal(true, @global.asymmetric) assert_equal(4, @global.rotate) @global.send( :port_channel_load_balance=, @global.default_bundle_select, @global.default_bundle_hash, nil, @global.default_asymmetric, nil, nil, @global.default_rotate) assert_equal( @global.default_bundle_select, @global.bundle_select) assert_equal( @global.default_bundle_hash, @global.bundle_hash) assert_equal( @global.default_asymmetric, @global.asymmetric) assert_equal(@global.default_rotate, @global.rotate) end end
# coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'omniauth/zooniverse/version' Gem::Specification.new do |spec| spec.name = "omniauth-zooniverse" spec.version = Omniauth::Zooniverse::VERSION spec.authors = ["Stuart"] spec.email = ["stuart@zooniverse.org"] spec.summary = %q{TODO: Write a short summary. Required.} spec.description = %q{TODO: Write a longer description. Optional.} spec.homepage = "" spec.license = "MIT" spec.files = `git ls-files`.split($/) spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ["lib"] spec.add_development_dependency "bundler", "~> 1.5" spec.add_development_dependency "rake" end Simple descriptions # coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'omniauth/zooniverse/version' Gem::Specification.new do |spec| spec.name = "omniauth-zooniverse" spec.version = Omniauth::Zooniverse::VERSION spec.authors = ["Stuart Lynn"] spec.email = ["stuart@zooniverse.org"] spec.summary = %q{Provides an OAuth2.0 strategy for the zooniverse} spec.description = %q{Allows thrid parties to authenticate against and access resources on the zooniverse platform} spec.homepage = "https://github.com/zooniverse/omniauth-zooniverse" spec.license = "MIT" spec.files = `git ls-files`.split($/) spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ["lib"] spec.add_development_dependency "bundler", "~> 1.5" spec.add_development_dependency "rake" end
require_relative 'engine' require_relative '../../parser/instruction' module Tritium::Engines class Debug::Step attr :instruction attr :children attr :debug attr :object attr :sid require_relative 'steps/node' require_relative 'steps/attribute' require_relative 'steps/text' require_relative 'steps/positional' def initialize(instruction, parent = nil) @instruction = instruction @parent = parent if parent @sid = parent.sid.clone @sid << parent.children.size else @sid = [0] end @child_type = eval(instruction.opens) @children = [] @debug = instruction.to_hash.merge({:step_id => @sid, :objects => [], :children => [], :log => []}) end def execute(obj, env = {}) @object = obj @env = env @debug[:env] = @env.clone @child_time = 0 start = Time.now args = (instruction.args || []).collect do |arg| if arg.is_a?(Tritium::Parser::Instruction) if arg.name == "var" arg = @env[arg.args.first] else arg = @parent.send(arg.name, *arg.args).to_s end end arg end self.send(instruction.name, *(args)) @debug[:total_time_cs] = ((Time.now - start) * 10000).to_i (@debug[:args] = args) if args.size > 0 @debug[:child_time_cs] = @child_time @debug[:time_cs] = @debug[:total_time_cs] - @child_time @debug.delete(:children) if @debug[:children].size == 0 return @object end def execute_children_on(obj) return obj if (instruction.children.size == 0) timer = Time.now children_debug = [] children << instruction.children.collect do |child| step = @child_type.new(child, self) obj = step.execute(obj, @env) children_debug << step.debug step end @debug[:children] << {:steps => children_debug, :name => @name} @child_time += ((Time.now - timer) * 10000).to_i obj end def log(message) @debug[:log] << message.to_s end def mark!(obj = nil) @debug[:objects] << (obj || @object) end # Actual Tritium methods def script @object = execute_children_on(@object) end def var(named) @env[named] ||= "" log("Looking up var #{named} and found #{@env[named].inspect}") @env[named] = execute_children_on(@env[named]) end def 
match(value, matcher) log "matching #{value} against #{matcher}" if(value =~ Regexp.new(matcher)) log "Match successful!" @object = execute_children_on(object) else log "Match failed." end end def fetch(selector) node.search(selector).first end # If I'm a NodeStep, this should return @object # Otherwise, go up until I find a node. This is # mostly useful for fetch() def node if self.is_a?(Node) || self.is_a?(Positional) return @object else @parent.node end end private def position_node(target, node) case @env["position"] when "bottom" target.add_child(node) when "top" if target.children.size > 0 target.children.first.add_previous_sibling(node) else target.add_child(node) end when "after" target.add_next_sibling(node) when "before" target.add_previous_sibling(node) end end end end OH NOE, WHATSPACE !!!!! require_relative 'engine' require_relative '../../parser/instruction' module Tritium::Engines class Debug::Step attr :instruction attr :children attr :debug attr :object attr :sid require_relative 'steps/node' require_relative 'steps/attribute' require_relative 'steps/text' require_relative 'steps/positional' def initialize(instruction, parent = nil) @instruction = instruction @parent = parent if parent @sid = parent.sid.clone @sid << parent.children.size else @sid = [0] end @child_type = eval(instruction.opens) @children = [] @debug = instruction.to_hash.merge({:step_id => @sid, :objects => [], :children => [], :log => []}) end def execute(obj, env = {}) @object = obj @env = env @debug[:env] = @env.clone @child_time = 0 start = Time.now args = (instruction.args || []).collect do |arg| if arg.is_a?(Tritium::Parser::Instruction) if arg.name == "var" arg = @env[arg.args.first] else arg = @parent.send(arg.name, *arg.args).to_s end end arg end self.send(instruction.name, *(args)) @debug[:total_time_cs] = ((Time.now - start) * 10000).to_i (@debug[:args] = args) if args.size > 0 @debug[:child_time_cs] = @child_time @debug[:time_cs] = @debug[:total_time_cs] - @child_time 
@debug.delete(:children) if @debug[:children].size == 0 return @object end def execute_children_on(obj) return obj if (instruction.children.size == 0) timer = Time.now children_debug = [] children << instruction.children.collect do |child| step = @child_type.new(child, self) obj = step.execute(obj, @env) children_debug << step.debug step end @debug[:children] << {:steps => children_debug, :name => @name} @child_time += ((Time.now - timer) * 10000).to_i obj end def log(message) @debug[:log] << message.to_s end def mark!(obj = nil) @debug[:objects] << (obj || @object) end # Actual Tritium methods def script @object = execute_children_on(@object) end def var(named) @env[named] ||= "" log("Looking up var #{named} and found #{@env[named].inspect}") @env[named] = execute_children_on(@env[named]) end def match(value, matcher) log "matching #{value} against #{matcher}" if(value =~ Regexp.new(matcher)) log "Match successful!" @object = execute_children_on(object) else log "Match failed." end end def fetch(selector) node.search(selector).first end # If I'm a NodeStep, this should return @object # Otherwise, go up until I find a node. This is # mostly useful for fetch() def node if self.is_a?(Node) || self.is_a?(Positional) return @object else @parent.node end end private def position_node(target, node) case @env["position"] when "bottom" target.add_child(node) when "top" if target.children.size > 0 target.children.first.add_previous_sibling(node) else target.add_child(node) end when "after" target.add_next_sibling(node) when "before" target.add_previous_sibling(node) end end end end
module Comable class Address < ActiveRecord::Base extend Enumerize belongs_to :customer, class_name: Comable::Customer.name, foreign_key: Comable::Customer.table_name.singularize.foreign_key, autosave: false enumerize :assign_key, in: { nothing: nil, bill: 1, ship: 2 }, scope: true end end core: Add validations to address module Comable class Address < ActiveRecord::Base extend Enumerize belongs_to :customer, class_name: Comable::Customer.name, foreign_key: Comable::Customer.table_name.singularize.foreign_key, autosave: false enumerize :assign_key, in: { nothing: nil, bill: 1, ship: 2 }, scope: true validates Comable::Customer.table_name.singularize.foreign_key, presence: true validates :family_name, presence: true, length: { maximum: 255 } validates :first_name, presence: true, length: { maximum: 255 } validates :zip_code, presence: true, length: { maximum: 8 } validates :state_name, presence: true, length: { maximum: 255 } validates :city, presence: true, length: { maximum: 255 } validates :detail, length: { maximum: 255 } validates :phone_number, length: { maximum: 18 } end end
comment response button class AddResponseToComments < ActiveRecord::Migration def change add_column :comments, :response, :text end end
add provider to Identity class AddProviderToIdentity < ActiveRecord::Migration def change add_column :identities, :provider, :string end end
class InsertContactInHeader < ActiveRecord::Migration def change main_menu = Menu.where(name: "main").first link = { controller: "contact", action: "index", title: "Kontakt", preferred_order: 3 } MenuLink.create(link.merge(menu: main_menu)) end end Revert "Add migration that adds kontakt to header" This reverts commit 342e52a66b30ea1d5055141cd50417e303952942.
class CreateFriendlyIdSlugs < ActiveRecord::Migration def change create_table :friendly_id_slugs do |t| t.string :slug, null: false t.integer :sluggable_id, null: false t.string :sluggable_type, limit: 50 t.string :scope t.datetime :created_at end add_index :friendly_id_slugs, :sluggable_id add_index :friendly_id_slugs, [:slug, :sluggable_type], length: { slug: 140, sluggable_type: 50 } add_index :friendly_id_slugs, [:slug, :sluggable_type, :scope], length: { slug: 70, sluggable_type: 50, scope: 70 }, unique: true add_index :friendly_id_slugs, :sluggable_type end end specify migration version class CreateFriendlyIdSlugs < ActiveRecord::Migration[5.0] def change create_table :friendly_id_slugs do |t| t.string :slug, null: false t.integer :sluggable_id, null: false t.string :sluggable_type, limit: 50 t.string :scope t.datetime :created_at end add_index :friendly_id_slugs, :sluggable_id add_index :friendly_id_slugs, [:slug, :sluggable_type], length: { slug: 140, sluggable_type: 50 } add_index :friendly_id_slugs, [:slug, :sluggable_type, :scope], length: { slug: 70, sluggable_type: 50, scope: 70 }, unique: true add_index :friendly_id_slugs, :sluggable_type end end
include T('default/fulldoc/html') module OverrideFileLinks def resolve_links(text) result = '' log.enter_level(Logger::ERROR) { result = super } result end end Template.extra_includes << OverrideFileLinks def init class << options.serializer def serialized_path(object) if CodeObjects::ExtraFileObject === object super.sub(/^file\./, '') else super end end end if options.serializer generate_assets options.delete(:objects) options.files.each {|file| serialize_file(file) } serialize_file(options.readme) end def generate_assets %w( js/jquery.js js/app.js css/style.css css/common.css ).each do |file| asset(file, file(file, true)) end end def serialize_file(file) index = options.files.index(file) outfile = file.name + '.html' options.file = file if file.attributes[:namespace] options.object = Registry.at(file.attributes[:namespace]) end options.object ||= Registry.root if file == options.readme serialize_index(options) else serialize_index(options) if !options.readme && index == 0 Templates::Engine.with_serializer(outfile, options.serializer) do T('layout').run(options) end end options.delete(:file) end Update URL links for readme file in guide template include T('default/fulldoc/html') module OverrideFileLinks def resolve_links(text) result = '' log.enter_level(Logger::ERROR) { result = super } result end def url_for(object, *args) if CodeObjects::ExtraFileObject === object && object == options.readme 'index.html' else super end end end Template.extra_includes << OverrideFileLinks def init class << options.serializer def serialized_path(object) if CodeObjects::ExtraFileObject === object super.sub(/^file\./, '') else super end end end if options.serializer generate_assets options.delete(:objects) options.files.each {|file| serialize_file(file) } serialize_file(options.readme) end def generate_assets %w( js/jquery.js js/app.js css/style.css css/common.css ).each do |file| asset(file, file(file, true)) end end def serialize_file(file) index = options.files.index(file) 
outfile = file.name + '.html' options.file = file if file.attributes[:namespace] options.object = Registry.at(file.attributes[:namespace]) end options.object ||= Registry.root if file == options.readme serialize_index(options) else serialize_index(options) if !options.readme && index == 0 Templates::Engine.with_serializer(outfile, options.serializer) do T('layout').run(options) end end options.delete(:file) end
#!/usr/bin/ruby # json_stats_fetcher.rb - Publish Ostrich stats to Ganglia. # # The latest version is always available at: # http://github.com/twitter/ostrich/blob/master/src/scripts/json_stats_fetcher.rb # require 'rubygems' require 'getoptlong' require 'socket' require 'json' require 'timeout' require 'open-uri' def report_metric(name, value, units) # Ganglia is very intolerant of metric named with non-standard characters, # where non-standard contains most everything other than letters, numbers and # some common symbols. name = name.gsub(/[^A-Za-z0-9_\-\.]/, "_") if $report_to_ganglia system("gmetric -t float -n \"#{$ganglia_prefix}#{name}\" -v \"#{value}\" -u \"#{units}\" -d #{$stat_timeout}") else puts "#{$ganglia_prefix}#{name}=#{value}" end end $ostrich3 = false # guessed ostrich version $report_to_ganglia = true $ganglia_prefix = '' $stat_timeout = 5*60 $pattern = /^x-/ hostname = "localhost" port = 9989 period = 60 use_web = false def usage(port) puts puts "usage: json_stats_fetcher.rb [options]" puts "options:" puts " -n say what I would report, but don't report it" puts " -w use web interface" puts " -h <hostname> connect to another host (default: localhost)" puts " -i <pattern> ignore all stats matching pattern (default: #{$pattern.inspect})" puts " -p <port> connect to another port (default: #{port})" puts " -P <prefix> optional prefix for ganglia names" puts " -t <period> optional latch period (Ostrich-4.5+)" puts end opts = GetoptLong.new( [ '--help', GetoptLong::NO_ARGUMENT ], [ '-n', GetoptLong::NO_ARGUMENT ], [ '-h', GetoptLong::REQUIRED_ARGUMENT ], [ '-i', GetoptLong::REQUIRED_ARGUMENT ], [ '-p', GetoptLong::REQUIRED_ARGUMENT ], [ '-P', GetoptLong::REQUIRED_ARGUMENT ], [ '-t', GetoptLong::REQUIRED_ARGUMENT ], [ '-w', GetoptLong::NO_ARGUMENT ] ) opts.each do |opt, arg| case opt when '--help' usage(port) exit 0 when '-n' $report_to_ganglia = false when '-h' hostname = arg when '-i' $pattern = /#{arg}/ when '-p' port = arg.to_i when '-P' 
$ganglia_prefix = arg when '-t' period = arg.to_i when '-w' port = 9990 use_web = true end end stats_dir = "/tmp/stats-#{port}" singleton_file = "#{stats_dir}/json_stats_fetcher_running" Dir.mkdir(stats_dir) rescue nil if File.exist?(singleton_file) puts "NOT RUNNING -- #{singleton_file} exists." puts "Kill other stranded stats checker processes and kill this file to resume." exit 1 end File.open(singleton_file, "w") { |f| f.write("i am running.\n") } begin Timeout::timeout(55) do data = if use_web # Ostrich 4.5+ are latched on a time period args = "period=#{period}" # Ostrich 3 and 4.0 don't reset counters. if $report_to_gangia then # Ostrich 2 uses reset # Ostrich 4.2 uses namespace for similar functionality args += "&reset=1&namespace=ganglia" end url = "http://#{hostname}:#{port}/stats.json?#{args}" open(url).read else socket = TCPSocket.new(hostname, port) socket.puts("stats/json#{' reset' if $report_to_ganglia}") socket.gets end stats = JSON.parse(data) # Ostrich >3 puts these in the metrics begin report_metric("jvm_threads", stats["jvm"]["thread_count"], "threads") report_metric("jvm_daemon_threads", stats["jvm"]["thread_daemon_count"], "threads") report_metric("jvm_heap_used", stats["jvm"]["heap_used"], "bytes") report_metric("jvm_heap_max", stats["jvm"]["heap_max"], "bytes") report_metric("jvm_uptime", (stats["jvm"]["uptime"].to_i rescue 0), "items") rescue NoMethodError $ostrich3 = true end begin stats["counters"].reject { |name, val| name =~ $pattern }.each do |name, value| report_metric(name, (value.to_i rescue 0), "items") end rescue NoMethodError end begin stats["gauges"].reject { |name, val| name =~ $pattern }.each do |name, value| report_metric(name, value, "value") end rescue NoMethodError end begin metricsKey = ($ostrich3) ? 
"metrics" : "timings" stats[metricsKey].reject { |name, val| name =~ $pattern }.each do |name, timing| report_metric(name, (timing["average"] || 0).to_f / 1000.0, "sec") report_metric("#{name}_stddev", (timing["standard_deviation"] || 0).to_f / 1000.0, "sec") [:p25, :p50, :p75, :p90, :p95, :p99, :p999, :p9999].map(&:to_s).each do |bucket| report_metric("#{name}_#{bucket}", (timing[bucket] || 0).to_f / 1000.0, "sec") if timing[bucket] end end rescue NoMethodError end end ensure File.unlink(singleton_file) end make json_stats_fetcher batch its commands Do one system call containing multiple gmetric calls instead of one gmetric calls per system call. This improves the running time of the script by two orders of magnitude or so (depending on how many stats you have). We were consistently running into the 55-second timeout with enough Ostrich stats, so this should fix that problem for the forseeable future. #!/usr/bin/ruby # json_stats_fetcher.rb - Publish Ostrich stats to Ganglia. # # The latest version is always available at: # http://github.com/twitter/ostrich/blob/master/src/scripts/json_stats_fetcher.rb # require 'rubygems' require 'getoptlong' require 'socket' require 'json' require 'timeout' require 'open-uri' $ostrich3 = false # guessed ostrich version $report_to_ganglia = true $ganglia_prefix = '' $stat_timeout = 5*60 $pattern = /^x-/ hostname = "localhost" port = 9989 period = 60 use_web = false def usage(port) puts puts "usage: json_stats_fetcher.rb [options]" puts "options:" puts " -n say what I would report, but don't report it" puts " -w use web interface" puts " -h <hostname> connect to another host (default: localhost)" puts " -i <pattern> ignore all stats matching pattern (default: #{$pattern.inspect})" puts " -p <port> connect to another port (default: #{port})" puts " -P <prefix> optional prefix for ganglia names" puts " -t <period> optional latch period (Ostrich-4.5+)" puts end opts = GetoptLong.new( [ '--help', GetoptLong::NO_ARGUMENT ], [ '-n', 
GetoptLong::NO_ARGUMENT ], [ '-h', GetoptLong::REQUIRED_ARGUMENT ], [ '-i', GetoptLong::REQUIRED_ARGUMENT ], [ '-p', GetoptLong::REQUIRED_ARGUMENT ], [ '-P', GetoptLong::REQUIRED_ARGUMENT ], [ '-t', GetoptLong::REQUIRED_ARGUMENT ], [ '-w', GetoptLong::NO_ARGUMENT ] ) opts.each do |opt, arg| case opt when '--help' usage(port) exit 0 when '-n' $report_to_ganglia = false when '-h' hostname = arg when '-i' $pattern = /#{arg}/ when '-p' port = arg.to_i when '-P' $ganglia_prefix = arg when '-t' period = arg.to_i when '-w' port = 9990 use_web = true end end stats_dir = "/tmp/stats-#{port}" singleton_file = "#{stats_dir}/json_stats_fetcher_running" Dir.mkdir(stats_dir) rescue nil if File.exist?(singleton_file) puts "NOT RUNNING -- #{singleton_file} exists." puts "Kill other stranded stats checker processes and kill this file to resume." exit 1 end File.open(singleton_file, "w") { |f| f.write("i am running.\n") } ## we will accumulate all our metrics in here metrics = [] begin Timeout::timeout(55) do data = if use_web # Ostrich 4.5+ are latched on a time period args = "period=#{period}" # Ostrich 3 and 4.0 don't reset counters. 
if $report_to_gangia then # Ostrich 2 uses reset # Ostrich 4.2 uses namespace for similar functionality args += "&reset=1&namespace=ganglia" end url = "http://#{hostname}:#{port}/stats.json?#{args}" open(url).read else socket = TCPSocket.new(hostname, port) socket.puts("stats/json#{' reset' if $report_to_ganglia}") socket.gets end stats = JSON.parse(data) # Ostrich >3 puts these in the metrics begin metrics << ["jvm_threads", stats["jvm"]["thread_count"], "threads"] metrics << ["jvm_daemon_threads", stats["jvm"]["thread_daemon_count"], "threads"] metrics << ["jvm_heap_used", stats["jvm"]["heap_used"], "bytes"] metrics << ["jvm_heap_max", stats["jvm"]["heap_max"], "bytes"] metrics << ["jvm_uptime", (stats["jvm"]["uptime"].to_i rescue 0), "items"] rescue NoMethodError $ostrich3 = true end begin stats["counters"].reject { |name, val| name =~ $pattern }.each do |name, value| metrics << [name, (value.to_i rescue 0), "items"] end rescue NoMethodError end begin stats["gauges"].reject { |name, val| name =~ $pattern }.each do |name, value| metrics << [name, value, "value"] end rescue NoMethodError end begin metricsKey = ($ostrich3) ? "metrics" : "timings" stats[metricsKey].reject { |name, val| name =~ $pattern }.each do |name, timing| metrics << [name, (timing["average"] || 0).to_f / 1000.0, "sec"] metrics << ["#{name}_stddev", (timing["standard_deviation"] || 0).to_f / 1000.0, "sec"] [:p25, :p50, :p75, :p90, :p95, :p99, :p999, :p9999].map(&:to_s).each do |bucket| metrics << ["#{name}_#{bucket}", (timing[bucket] || 0).to_f / 1000.0, "sec"] if timing[bucket] end end rescue NoMethodError end end ## do stuff with the metrics we've accumulated ## first, munge metric names metrics = metrics.map do |name, value, units| # Ganglia is very intolerant of metric named with non-standard characters, # where non-standard contains most everything other than letters, numbers and # some common symbols. 
name = name.gsub(/[^A-Za-z0-9_\-\.]/, "_") [name, value, units] end ## now, send to ganglia or print to $stdout if $report_to_ganglia # call gmetric for each metric cmd = metrics.map do |name, value, units| "gmetric -t float -n \"#{$ganglia_prefix}#{name}\" -v \"#{value}\" -u \"#{units}\" -d #{$stat_timeout}" end.join("\n") puts cmd system cmd else # print a report to stdout report = metrics.map do |name, value, units| "#{$ganglia_prefix}#{name}=#{value}" end.join("\n") puts report end ensure File.unlink(singleton_file) end
# Apache 2.0 License # # Copyright (c) 2016 Sebastian Katzer, appPlant GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. module Fifa # Identify planets by regular expressions class Matcher < BasicObject # Link of crumbs CRUMBS_PATTERN = /^[@%]?[^:=]*[:=]?[^@%]+(?:[@%]?[^:=]*[:=]?[^@%]+)*$/.freeze # Single crumb SPLIT_PATTERN = /([@%][^@%]+)/.freeze # Single crumb CRUMB_PATTERN = /^(@|%)?([^:=]*)(.)?(.*)$/.freeze # Initializes the matcher by specified crumbs. # # @param [ String ] matcher The matcher like 'env:prod' # # @return [ Fifa::Matcher ] def initialize(matcher) @string = matcher crumbs = matcher.split(/\s+/) validate(crumbs) @crumbs = crumbs.map { |crumb| Crumbs.new(crumb) } end # Test if the crumbs matches the key:value map. # # @param [ Hash ] map A key:value hash map. # # @return [ Boolean ] def match?(map) @crumbs.all? { |crumb| crumb.match? map } end # The matcher's string representation. 
# # @return [ String ] def to_s @string.to_s end private # Raise an error if any of the crumbs are in wrong format. # # @param [ Array<String> ] crumbs The crumbs like ['env:prod'] # # @return [ Void ] def validate(crumbs) crumbs.each do |crumb| Kernel.raise "invalid matcher: #{crumb}" unless crumb =~ CRUMBS_PATTERN end end # Multiple crumbs combined by 'and' class Crumbs < BasicObject # Initializes all crumbs. # # @param [ String ] crumbs A crumb like 'env=prod+env=int' # # @return [ Fifa::Matcher::Crumbs ] def initialize(crumbs) @crumbs = crumbs.split(SPLIT_PATTERN) .reject(&:empty?) .map { |crumb| Crumb.new(crumb) } end # Test if the crumbs matches the key:value map. # # @param [ Hash ] map A key:value hash map. # # @return [ Boolean ] def match?(map) @crumbs.all? { |crumb| crumb.match? map } end # Single crumb class Crumb < BasicObject # Initializes a crumb. # # @param [ String ] crmb A crumb like '+env:prod' # # @return [ Fifa::Matcher::Crumb ] def initialize(crumb) match = crumb.match(CRUMB_PATTERN) value = match[3] ? match[4] : match[2] value = "^#{value}$" unless match[3] == ':' @not = match[1] == '%' @key = match[3] ? match[2] : 'id' @exp = Regexp.new(value) end # Test if the crumb matches. # # @param [ Hash ] map A key:value hash map. # # @return [ Boolean ] def match?(map) value = value_for_key(map) @not ? (value !~ @exp) : (value =~ @exp) end private # Get the string parsed value for the given key. # # @param [ Hash ] map Any key-value map. 
# # @return [ String ] def value_for_key(map) map[@key].to_s end end end end end Fix compatibility with mruby-head # Apache 2.0 License # # Copyright (c) 2016 Sebastian Katzer, appPlant GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. module Fifa # Identify planets by regular expressions class Matcher < BasicObject # Link of crumbs CRUMBS_PATTERN = /^[@%]?[^:=]*[:=]?[^@%]+(?:[@%]?[^:=]*[:=]?[^@%]+)*$/.freeze # Single crumb SPLIT_PATTERN = /([@%][^@%]+)/.freeze # Single crumb CRUMB_PATTERN = /^(@|%)?([^:=]*)(.)?(.*)$/.freeze # Initializes the matcher by specified crumbs. # # @param [ String ] matcher The matcher like 'env:prod' # # @return [ Fifa::Matcher ] def initialize(matcher) @string = matcher crumbs = matcher.split(/\s+/) validate(crumbs) @crumbs = crumbs.map { |crumb| Crumbs.new(crumb) } end # Test if the crumbs matches the key:value map. # # @param [ Hash ] map A key:value hash map. # # @return [ Boolean ] def match?(map) @crumbs.all? { |crumb| crumb.match? 
map } end # The matcher's string representation. # # @return [ String ] def to_s @string.to_s end private # Raise an error if any of the crumbs are in wrong format. # # @param [ Array<String> ] crumbs The crumbs like ['env:prod'] # # @return [ Void ] def validate(crumbs) crumbs.each do |crumb| Kernel.raise "invalid matcher: #{crumb}" unless CRUMBS_PATTERN =~ crumb end end # Multiple crumbs combined by 'and' class Crumbs < BasicObject # Initializes all crumbs. # # @param [ String ] crumbs A crumb like 'env=prod+env=int' # # @return [ Fifa::Matcher::Crumbs ] def initialize(crumbs) @crumbs = crumbs.split(SPLIT_PATTERN) .reject(&:empty?) .map { |crumb| Crumb.new(crumb) } end # Test if the crumbs matches the key:value map. # # @param [ Hash ] map A key:value hash map. # # @return [ Boolean ] def match?(map) @crumbs.all? { |crumb| crumb.match? map } end # Single crumb class Crumb < BasicObject # Initializes a crumb. # # @param [ String ] crmb A crumb like '+env:prod' # # @return [ Fifa::Matcher::Crumb ] def initialize(crumb) match = CRUMB_PATTERN.match(crumb) value = match[3] ? match[4] : match[2] value = "^#{value}$" unless match[3] == ':' @not = match[1] == '%' @key = match[3] ? match[2] : 'id' @exp = Regexp.new(value) end # Test if the crumb matches. # # @param [ Hash ] map A key:value hash map. # # @return [ Boolean ] def match?(map) value = value_for_key(map) @not ? (@exp !~ value) : (@exp =~ value) end private # Get the string parsed value for the given key. # # @param [ Hash ] map Any key-value map. # # @return [ String ] def value_for_key(map) map[@key].to_s end end end end end
class HttpsClient GET = 'GET ' HTTP_1_1 = ' HTTP/1.1' CRLF = "\r\n" HOST = 'Host: ' CON_CL = 'Connection: close' KV_DELI = ': ' CONTENT_LENGTH = 'Content-Length' CONTENT_LENGTH_DC = CONTENT_LENGTH.downcase TRANSFER_ENCODING = 'Transfer-Encoding' TRANSFER_ENCODING_DC = TRANSFER_ENCODING.downcase CHUNKED = 'chunked' TRAILER = 'Trailer' TRAILER_DC = TRAILER.downcase HEAD = 'HEAD ' POST = 'POST ' FINAL_CHUNK = "0#{CRLF}#{CRLF}" Response = Struct.new(:minor_version, :status, :msg, :headers, :body) def initialize(options = {}) tls_config = options.fetch(:tls_config) do Tls::Config.new end @tls_client = options.fetch(:tls_client) do Tls::Client.new tls_config end @phr = Phr.new @decoder = Phr::ChunkedDecoder.new end def get(url, headers = nil) url = URL.parse(url) buf = nil if headers buf = "#{GET}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}" headers.each do |kv| buf << "#{kv[0]}#{KV_DELI}#{kv[1]}#{CRLF}" end buf << CRLF else buf = "#{GET}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}#{CRLF}" end @tls_client.connect(url.host, url.port) @tls_client.write(buf) buf = @tls_client.read pret = nil response = nil loop do pret = @phr.parse_response(buf) case pret when Fixnum response = Response.new(@phr.minor_version, @phr.status, @phr.msg, @phr.headers) yield response break when :incomplete buf << @tls_client.read when :parser_error return pret end end response.body = String(buf[pret..-1]) headers = @phr.headers.to_h if headers.key? CONTENT_LENGTH_DC cl = Integer(headers[CONTENT_LENGTH_DC]) yield response yielded = response.body.bytesize until yielded == cl response.body = @tls_client.read(32_768) yield response yielded += response.body.bytesize end elsif headers.key?(TRANSFER_ENCODING_DC) && headers[TRANSFER_ENCODING_DC].casecmp(CHUNKED) == 0 unless headers.key? 
TRAILER_DC @decoder.consume_trailer(true) end loop do pret = @decoder.decode_chunked(response.body) do |body| yield response end case pret when Fixnum break when :incomplete response.body = @tls_client.read(32_768) when :parser_error return pret end end else yield response loop do response.body = @tls_client.read(32_768) yield response end end self ensure begin @phr.reset @decoder.reset @tls_client.close rescue end end def head(url, headers = nil) url = URL.parse(url) buf = nil if headers buf = "#{HEAD}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}" headers.each do |kv| buf << "#{kv[0]}#{KV_DELI}#{kv[1]}#{CRLF}" end buf << CRLF else buf = "#{HEAD}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}#{CRLF}" end @tls_client.connect(url.host, url.port) @tls_client.write(buf) buf = @tls_client.read loop do pret = @phr.parse_response(buf) case pret when Fixnum yield Response.new(@phr.minor_version, @phr.status, @phr.msg, @phr.headers) break when :incomplete buf << @tls_client.read when :parser_error return pret end end self ensure begin @phr.reset @decoder.reset @tls_client.close rescue end end def post(url, body, headers = nil) url = URL.parse(url) buf = nil if headers buf = "#{POST}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}" headers.each do |kv| buf << "#{kv[0]}#{KV_DELI}#{kv[1]}#{CRLF}" end else buf = "#{POST}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}" end case body when String buf << "#{CONTENT_LENGTH}#{KV_DELI}#{body.bytesize}#{CRLF}#{CRLF}" @tls_client.connect(url.host, url.port) @tls_client.write(buf) @tls_client.write(body) when Enumerable buf << "#{TRANSFER_ENCODING}#{KV_DELI}#{CHUNKED}#{CRLF}#{CRLF}" @tls_client.connect(url.host, url.port) @tls_client.write(buf) body.each do |chunk| ch = String(chunk) next if ch.bytesize == 0 @tls_client.write("#{ch.bytesize.to_s(16)}#{CRLF}#{ch}#{CRLF}") end @tls_client.write(FINAL_CHUNK) when Fiber buf << 
"#{TRANSFER_ENCODING}#{KV_DELI}#{CHUNKED}#{CRLF}#{CRLF}" @tls_client.connect(url.host, url.port) @tls_client.write(buf) while body.alive? && chunk = body.resume ch = String(chunk) next if ch.bytesize == 0 @tls_client.write("#{ch.bytesize.to_s(16)}#{CRLF}#{ch}#{CRLF}") end @tls_client.write(FINAL_CHUNK) else raise ArgumentError, "Cannot handle #{body.class}" end buf = @tls_client.read pret = nil response = nil loop do pret = @phr.parse_response(buf) case pret when Fixnum response = Response.new(@phr.minor_version, @phr.status, @phr.msg, @phr.headers) yield response break when :incomplete buf << @tls_client.read when :parser_error return pret end end response.body = String(buf[pret..-1]) headers = @phr.headers.to_h if headers.key? CONTENT_LENGTH_DC cl = Integer(headers[CONTENT_LENGTH_DC]) yield response yielded = response.body.bytesize until yielded == cl response.body = @tls_client.read(32_768) yield response yielded += response.body.bytesize end elsif headers.key?(TRANSFER_ENCODING_DC) && headers[TRANSFER_ENCODING_DC].casecmp(CHUNKED) == 0 unless headers.key? 
TRAILER_DC @decoder.consume_trailer(true) end loop do pret = @decoder.decode_chunked(response.body) do |body| yield response end case pret when Fixnum break when :incomplete response.body = @tls_client.read(32_768) when :parser_error return pret end end else yield response loop do response.body = @tls_client.read(32_768) yield response end end self ensure begin @phr.reset @decoder.reset @tls_client.close rescue end end end updates for changes made to mruby-phr class HttpsClient GET = 'GET ' HTTP_1_1 = ' HTTP/1.1' CRLF = "\r\n" HOST = 'Host: ' CON_CL = 'Connection: close' KV_DELI = ': ' CONTENT_LENGTH = 'Content-Length' CONTENT_LENGTH_DC = CONTENT_LENGTH.downcase TRANSFER_ENCODING = 'Transfer-Encoding' TRANSFER_ENCODING_DC = TRANSFER_ENCODING.downcase CHUNKED = 'chunked' TRAILER = 'Trailer' TRAILER_DC = TRAILER.downcase HEAD = 'HEAD ' POST = 'POST ' FINAL_CHUNK = "0#{CRLF}#{CRLF}" Response = Struct.new(:minor_version, :status, :msg, :headers, :body) def initialize(options = {}) tls_config = options.fetch(:tls_config) do Tls::Config.new end @tls_client = options.fetch(:tls_client) do Tls::Client.new tls_config end @phr = Phr.new @decoder = Phr::ChunkedDecoder.new end def get(url, headers = nil) url = URL.parse(url) buf = nil if headers buf = "#{GET}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}" headers.each do |kv| buf << "#{kv[0]}#{KV_DELI}#{kv[1]}#{CRLF}" end buf << CRLF else buf = "#{GET}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}#{CRLF}" end @tls_client.connect(url.host, url.port) @tls_client.write(buf) buf = @tls_client.read pret = nil response = nil loop do pret = @phr.parse_response(buf) case pret when Fixnum response = Response.new(@phr.minor_version, @phr.status, @phr.msg, @phr.headers) yield response break when :incomplete buf << @tls_client.read when :parser_error return pret end end response.body = String(buf[pret..-1]) headers = @phr.headers.to_h if headers.key? 
CONTENT_LENGTH_DC cl = Integer(headers[CONTENT_LENGTH_DC]) yield response yielded = response.body.bytesize until yielded == cl response.body = @tls_client.read(32_768) yield response yielded += response.body.bytesize end elsif headers.key?(TRANSFER_ENCODING_DC) && headers[TRANSFER_ENCODING_DC].casecmp(CHUNKED) == 0 unless headers.key? TRAILER_DC @decoder.consume_trailer(true) end loop do pret = @decoder.decode_chunked(response.body) case pret when Fixnum yield response break when :incomplete yield response response.body = @tls_client.read(32_768) when :parser_error return pret end end else yield response loop do response.body = @tls_client.read(32_768) yield response end end self ensure begin @phr.reset @decoder.reset @tls_client.close rescue end end def head(url, headers = nil) url = URL.parse(url) buf = nil if headers buf = "#{HEAD}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}" headers.each do |kv| buf << "#{kv[0]}#{KV_DELI}#{kv[1]}#{CRLF}" end buf << CRLF else buf = "#{HEAD}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}#{CRLF}" end @tls_client.connect(url.host, url.port) @tls_client.write(buf) buf = @tls_client.read loop do pret = @phr.parse_response(buf) case pret when Fixnum yield Response.new(@phr.minor_version, @phr.status, @phr.msg, @phr.headers) break when :incomplete buf << @tls_client.read when :parser_error return pret end end self ensure begin @phr.reset @decoder.reset @tls_client.close rescue end end def post(url, body, headers = nil) url = URL.parse(url) buf = nil if headers buf = "#{POST}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}" headers.each do |kv| buf << "#{kv[0]}#{KV_DELI}#{kv[1]}#{CRLF}" end else buf = "#{POST}#{url.path}#{HTTP_1_1}#{CRLF}#{HOST}#{url.host}#{CRLF}#{CON_CL}#{CRLF}" end case body when String buf << "#{CONTENT_LENGTH}#{KV_DELI}#{body.bytesize}#{CRLF}#{CRLF}" @tls_client.connect(url.host, url.port) @tls_client.write(buf) @tls_client.write(body) when 
Enumerable buf << "#{TRANSFER_ENCODING}#{KV_DELI}#{CHUNKED}#{CRLF}#{CRLF}" @tls_client.connect(url.host, url.port) @tls_client.write(buf) body.each do |chunk| ch = String(chunk) next if ch.bytesize == 0 @tls_client.write("#{ch.bytesize.to_s(16)}#{CRLF}#{ch}#{CRLF}") end @tls_client.write(FINAL_CHUNK) when Fiber buf << "#{TRANSFER_ENCODING}#{KV_DELI}#{CHUNKED}#{CRLF}#{CRLF}" @tls_client.connect(url.host, url.port) @tls_client.write(buf) while body.alive? && chunk = body.resume ch = String(chunk) next if ch.bytesize == 0 @tls_client.write("#{ch.bytesize.to_s(16)}#{CRLF}#{ch}#{CRLF}") end @tls_client.write(FINAL_CHUNK) else raise ArgumentError, "Cannot handle #{body.class}" end buf = @tls_client.read pret = nil response = nil loop do pret = @phr.parse_response(buf) case pret when Fixnum response = Response.new(@phr.minor_version, @phr.status, @phr.msg, @phr.headers) yield response break when :incomplete buf << @tls_client.read when :parser_error return pret end end response.body = String(buf[pret..-1]) headers = @phr.headers.to_h if headers.key? CONTENT_LENGTH_DC cl = Integer(headers[CONTENT_LENGTH_DC]) yield response yielded = response.body.bytesize until yielded == cl response.body = @tls_client.read(32_768) yield response yielded += response.body.bytesize end elsif headers.key?(TRANSFER_ENCODING_DC) && headers[TRANSFER_ENCODING_DC].casecmp(CHUNKED) == 0 unless headers.key? TRAILER_DC @decoder.consume_trailer(true) end loop do pret = @decoder.decode_chunked(response.body) do |body| yield response end case pret when Fixnum break when :incomplete response.body = @tls_client.read(32_768) when :parser_error return pret end end else yield response loop do response.body = @tls_client.read(32_768) yield response end end self ensure begin @phr.reset @decoder.reset @tls_client.close rescue end end end
add test for nagios::install require_relative '../../../../spec_helper' describe 'nagios::install', :type => :class do it { should contain_package('nagios3') } it { should contain_file('/etc/nagios3/nagios.cfg') } end
# Defines a triangle by referring back to a Mesh and its vertex and index # arrays. class MeshTriangle include Intersectable attr_accessor :p_x, :p_y, :p_z, :n_x, :n_y, :n_z, :mesh # compute triangle spanning vertices # TODO explain beta_gama def initialize(mesh, index) @mesh = mesh facs = mesh.indices[index + 1] verts = mesh.vertices.values_at(facs.x, facs.y, facs.z) norms = mesh.normals.values_at(facs.x, facs.y, facs.z) puts "#{facs} #{verts.map(&:to_s).inspect} #{index}" # spanning triangle points @p_x = verts[0] @p_y = verts[1] @p_z = verts[2] @n_x = norms[0] @n_y = norms[1] @n_z = norms[2] end def intersect(ray) hit_record = nil a_to_b = p_x.s_copy.sub(p_y) a_to_c = p_x.s_copy.sub(p_z) triangle = Matrix3f.new(nil, nil, nil) triangle.set_column_at(1, a_to_b) triangle.set_column_at(2, a_to_c) triangle.set_column_at(3, ray.direction) b = p_x.s_copy.sub(ray.origin) # solve system # beta_gamma_triangle = System.solve3x3System(triangle, b) # TODO please extand functionalitz in oder to work with a # LUP or Cholesky solver # highly unstable under certain circumstances t_inv = triangle.s_copy.invert return nil if t_inv.nil? bgt = t_inv.vectormult(b) return nil if bgt.nil? 
if inside_triangle?(bgt.x, bgt.y) t = bgt.z ray_dir = ray.direction.s_copy intersection_position = ray_dir.scale(t).add(ray.origin) hit_normal = make_normal(bgt) w_in = ray.direction.s_copy.normalize.negate tangent = p_x.s_copy.sub(p_y).scale(0.5) tan_b = p_x.s_copy.sub(p_z).scale(0.5) tangent.add(tan_b).normalize hash = { t: t, position: intersection_position, normal: hit_normal, tangent: tangent, w: w_in, intersectable: self, material: mesh.material, u: 0.0, v: 0.0 } hit_record = HitRecord.new(hash) end hit_record end private def make_normal(bgt) # note that: alpha + beta + gamma = 1 a = n_x.s_copy.scale(1.0 - bgt.x - bgt.y) b = n_y.s_copy.scale(bgt.x) c = n_z.s_copy.scale(bgt.y) a.add(b).add(c).normalize end # use BC coordinates # was triangle intersected def inside_triangle?(beta, gamma) no_triangle_hit = [beta, gamma].any? do |expression| expression >= 0.0 && 1 >= expression end return false if no_triangle_hit value = gamma + beta value >= 0.0 && 1 >= value end end Suppress console spaming when loading an obj mesh # Defines a triangle by referring back to a Mesh and its vertex and index # arrays. 
class MeshTriangle include Intersectable attr_accessor :p_x, :p_y, :p_z, :n_x, :n_y, :n_z, :mesh # compute triangle spanning vertices # TODO explain beta_gama def initialize(mesh, index) @mesh = mesh facs = mesh.indices[index + 1] verts = mesh.vertices.values_at(facs.x, facs.y, facs.z) norms = mesh.normals.values_at(facs.x, facs.y, facs.z) # spanning triangle points @p_x = verts[0] @p_y = verts[1] @p_z = verts[2] @n_x = norms[0] @n_y = norms[1] @n_z = norms[2] end def intersect(ray) hit_record = nil a_to_b = p_x.s_copy.sub(p_y) a_to_c = p_x.s_copy.sub(p_z) triangle = Matrix3f.new(nil, nil, nil) triangle.set_column_at(1, a_to_b) triangle.set_column_at(2, a_to_c) triangle.set_column_at(3, ray.direction) b = p_x.s_copy.sub(ray.origin) # solve system # beta_gamma_triangle = System.solve3x3System(triangle, b) # TODO please extand functionalitz in oder to work with a # LUP or Cholesky solver # highly unstable under certain circumstances t_inv = triangle.s_copy.invert return nil if t_inv.nil? bgt = t_inv.vectormult(b) return nil if bgt.nil? if inside_triangle?(bgt.x, bgt.y) t = bgt.z ray_dir = ray.direction.s_copy intersection_position = ray_dir.scale(t).add(ray.origin) hit_normal = make_normal(bgt) w_in = ray.direction.s_copy.normalize.negate tangent = p_x.s_copy.sub(p_y).scale(0.5) tan_b = p_x.s_copy.sub(p_z).scale(0.5) tangent.add(tan_b).normalize hash = { t: t, position: intersection_position, normal: hit_normal, tangent: tangent, w: w_in, intersectable: self, material: mesh.material, u: 0.0, v: 0.0 } hit_record = HitRecord.new(hash) end hit_record end private def make_normal(bgt) # note that: alpha + beta + gamma = 1 a = n_x.s_copy.scale(1.0 - bgt.x - bgt.y) b = n_y.s_copy.scale(bgt.x) c = n_z.s_copy.scale(bgt.y) a.add(b).add(c).normalize end # use BC coordinates # was triangle intersected def inside_triangle?(beta, gamma) no_triangle_hit = [beta, gamma].any? 
do |expression| expression >= 0.0 && 1 >= expression end return false if no_triangle_hit value = gamma + beta value >= 0.0 && 1 >= value end end
# coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'eatabit_rails/version' Gem::Specification.new do |spec| spec.name = "eatabit_rails" spec.version = EatabitRails::VERSION spec.authors = ["Greg Oleksiak"] spec.email = ["greg.oleksiak@gmail.com"] spec.summary = %q{The official gem for the eatabit.io API} spec.description = %q{Taking orders online is easy. (ok, not that easy) but getting the order into the hands of the restaurant...that's hard.} spec.homepage = "http://www.eatabit.io" spec.license = "MIT" # Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or # delete this section to allow pushing this gem to any host. if spec.respond_to?(:metadata) spec.metadata['allowed_push_host'] = "https://rubygems.org" else raise "RubyGems 2.0 or newer is required to protect against public gem pushes." end spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) } spec.bindir = "exe" spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) } spec.require_paths = ["lib"] spec.add_development_dependency 'bundler', '~> 1.10' spec.add_development_dependency 'rake', '~> 10.0' spec.add_development_dependency 'rspec', '~> 3.3', '>= 3.3.0' spec.add_development_dependency 'vcr', '~> 2.9', '>= 2.9.3' spec.add_development_dependency 'webmock', '~> 1.21', '>= 1.21.0' spec.add_dependency 'rest-client', '~> 1.8', '>= 1.8.0' spec.add_dependency 'json', '~> 1.8', '>= 1.8.3' end Update homepage metadata # coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'eatabit_rails/version' Gem::Specification.new do |spec| spec.name = "eatabit_rails" spec.version = EatabitRails::VERSION spec.authors = ["Greg Oleksiak"] spec.email = ["greg.oleksiak@gmail.com"] spec.summary = %q{The official gem for the eatabit.io API} spec.description = %q{Taking orders online is easy. 
(ok, not that easy) but getting the order into the hands of the restaurant...that's hard.} spec.homepage = "https://github.com/eatabit/eatabit_rails" spec.license = "MIT" # Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or # delete this section to allow pushing this gem to any host. if spec.respond_to?(:metadata) spec.metadata['allowed_push_host'] = "https://rubygems.org" else raise "RubyGems 2.0 or newer is required to protect against public gem pushes." end spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) } spec.bindir = "exe" spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) } spec.require_paths = ["lib"] spec.add_development_dependency 'bundler', '~> 1.10' spec.add_development_dependency 'rake', '~> 10.0' spec.add_development_dependency 'rspec', '~> 3.3', '>= 3.3.0' spec.add_development_dependency 'vcr', '~> 2.9', '>= 2.9.3' spec.add_development_dependency 'webmock', '~> 1.21', '>= 1.21.0' spec.add_dependency 'rest-client', '~> 1.8', '>= 1.8.0' spec.add_dependency 'json', '~> 1.8', '>= 1.8.3' end
# Generated by jeweler # DO NOT EDIT THIS FILE DIRECTLY # Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec' # -*- encoding: utf-8 -*- Gem::Specification.new do |s| s.name = %q{pagseguro_catcher} s.version = "0.0.1" s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.authors = ["Matias H. Leidemer"] s.date = %q{2011-10-03} s.description = %q{This gem provides a simple way to check and parse the PagSeguro transaction notification.} s.email = %q{matiasleidemer@gmail.com} s.extra_rdoc_files = [ "LICENSE.txt", "README.rdoc" ] s.files = [ ".document", ".rspec", "Gemfile", "Gemfile.lock", "LICENSE.txt", "README.rdoc", "Rakefile", "VERSION", "lib/pagseguro_catcher.rb", "lib/pagseguro_catcher/checker.rb", "lib/pagseguro_catcher/core_ext/Hash.rb", "lib/pagseguro_catcher/parser.rb", "lib/pagseguro_catcher/receiver.rb", "spec/checker_spec.rb", "spec/pagseguro_catcher_spec.rb", "spec/parser_spec.rb", "spec/receiver_spec.rb", "spec/spec_helper.rb", "spec/support/return.xml" ] s.homepage = %q{http://github.com/matiasleidemer/pagseguro_catcher} s.licenses = ["MIT"] s.require_paths = ["lib"] s.rubygems_version = %q{1.3.7} s.summary = %q{A simple gem to parse PagSeguro transaction notification.} if s.respond_to? 
:specification_version then current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION s.specification_version = 3 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_runtime_dependency(%q<httparty>, [">= 0"]) s.add_runtime_dependency(%q<activesupport>, [">= 3.0.0"]) s.add_runtime_dependency(%q<i18n>, [">= 0"]) s.add_development_dependency(%q<rspec>, ["~> 2.3.0"]) s.add_development_dependency(%q<bundler>, ["~> 1.0.0"]) s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"]) s.add_development_dependency(%q<rcov>, [">= 0"]) s.add_development_dependency(%q<fakeweb>, [">= 0"]) else s.add_dependency(%q<httparty>, [">= 0"]) s.add_dependency(%q<activesupport>, [">= 3.0.0"]) s.add_dependency(%q<i18n>, [">= 0"]) s.add_dependency(%q<rspec>, ["~> 2.3.0"]) s.add_dependency(%q<bundler>, ["~> 1.0.0"]) s.add_dependency(%q<jeweler>, ["~> 1.6.4"]) s.add_dependency(%q<rcov>, [">= 0"]) s.add_dependency(%q<fakeweb>, [">= 0"]) end else s.add_dependency(%q<httparty>, [">= 0"]) s.add_dependency(%q<activesupport>, [">= 3.0.0"]) s.add_dependency(%q<i18n>, [">= 0"]) s.add_dependency(%q<rspec>, ["~> 2.3.0"]) s.add_dependency(%q<bundler>, ["~> 1.0.0"]) s.add_dependency(%q<jeweler>, ["~> 1.6.4"]) s.add_dependency(%q<rcov>, [">= 0"]) s.add_dependency(%q<fakeweb>, [">= 0"]) end end updated gemspec # Generated by jeweler # DO NOT EDIT THIS FILE DIRECTLY # Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec' # -*- encoding: utf-8 -*- Gem::Specification.new do |s| s.name = %q{pagseguro_catcher} s.version = "0.0.1" s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.authors = ["Matias H. 
Leidemer"] s.date = %q{2011-10-04} s.description = %q{This gem provides a simple way to check and parse the PagSeguro transaction notification.} s.email = %q{matiasleidemer@gmail.com} s.extra_rdoc_files = [ "LICENSE.txt", "README.rdoc" ] s.files = [ ".document", ".rspec", "Gemfile", "Gemfile.lock", "LICENSE.txt", "README.rdoc", "Rakefile", "VERSION", "lib/pagseguro_catcher.rb", "lib/pagseguro_catcher/checker.rb", "lib/pagseguro_catcher/constants.rb", "lib/pagseguro_catcher/core_ext/Hash.rb", "lib/pagseguro_catcher/parser.rb", "lib/pagseguro_catcher/parser/amount.rb", "lib/pagseguro_catcher/parser/base.rb", "lib/pagseguro_catcher/receiver.rb", "pagseguro_catcher.gemspec", "spec/checker_spec.rb", "spec/pagseguro_catcher_spec.rb", "spec/parser/amount_spec.rb", "spec/parser/base_spec.rb", "spec/receiver_spec.rb", "spec/spec_helper.rb", "spec/support/return.xml" ] s.homepage = %q{http://github.com/matiasleidemer/pagseguro_catcher} s.licenses = ["MIT"] s.require_paths = ["lib"] s.rubygems_version = %q{1.3.7} s.summary = %q{A simple gem to parse PagSeguro transaction notification.} if s.respond_to? 
:specification_version then current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION s.specification_version = 3 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_runtime_dependency(%q<httparty>, [">= 0"]) s.add_runtime_dependency(%q<activesupport>, [">= 3.0.0"]) s.add_runtime_dependency(%q<i18n>, [">= 0"]) s.add_development_dependency(%q<rspec>, ["~> 2.3.0"]) s.add_development_dependency(%q<bundler>, ["~> 1.0.0"]) s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"]) s.add_development_dependency(%q<rcov>, [">= 0"]) s.add_development_dependency(%q<fakeweb>, [">= 0"]) else s.add_dependency(%q<httparty>, [">= 0"]) s.add_dependency(%q<activesupport>, [">= 3.0.0"]) s.add_dependency(%q<i18n>, [">= 0"]) s.add_dependency(%q<rspec>, ["~> 2.3.0"]) s.add_dependency(%q<bundler>, ["~> 1.0.0"]) s.add_dependency(%q<jeweler>, ["~> 1.6.4"]) s.add_dependency(%q<rcov>, [">= 0"]) s.add_dependency(%q<fakeweb>, [">= 0"]) end else s.add_dependency(%q<httparty>, [">= 0"]) s.add_dependency(%q<activesupport>, [">= 3.0.0"]) s.add_dependency(%q<i18n>, [">= 0"]) s.add_dependency(%q<rspec>, ["~> 2.3.0"]) s.add_dependency(%q<bundler>, ["~> 1.0.0"]) s.add_dependency(%q<jeweler>, ["~> 1.6.4"]) s.add_dependency(%q<rcov>, [">= 0"]) s.add_dependency(%q<fakeweb>, [">= 0"]) end end
lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'opt_parse_validator/version' Gem::Specification.new do |s| s.name = 'opt_parse_validator' s.version = OptParseValidator::VERSION s.platform = Gem::Platform::RUBY s.required_ruby_version = '>= 2.5' s.authors = ['WPScanTeam'] s.email = ['team@wpscan.org'] s.summary = 'Ruby OptionParser Validators' s.description = 'Implementation of validators for the ruby OptionParser lib. ' \ 'Mainly used in the CMSScanner gem to define the cli options available' s.homepage = 'https://github.com/wpscanteam/OptParseValidator' s.license = 'MIT' s.files = Dir.glob('lib/**/*') + %w[LICENSE README.md] s.test_files = [] s.require_paths = ['lib'] s.add_dependency 'activesupport', '>= 5.2', '< 6.1.0' s.add_dependency 'addressable', '>= 2.5', '< 2.8' s.add_development_dependency 'bundler', '>= 1.6' s.add_development_dependency 'rake', '~> 13.0' s.add_development_dependency 'rspec', '~> 3.9.0' s.add_development_dependency 'rspec-its', '~> 1.3.0' s.add_development_dependency 'rubocop', '~> 0.86.0' s.add_development_dependency 'rubocop-performance', '~> 1.7.0' s.add_development_dependency 'simplecov', '~> 0.18.2' s.add_development_dependency 'simplecov-lcov', '~> 0.8.0' end Update rubocop requirement from ~> 0.86.0 to ~> 0.87.0 Updates the requirements on [rubocop](https://github.com/rubocop-hq/rubocop) to permit the latest version. 
- [Release notes](https://github.com/rubocop-hq/rubocop/releases) - [Changelog](https://github.com/rubocop-hq/rubocop/blob/master/CHANGELOG.md) - [Commits](https://github.com/rubocop-hq/rubocop/compare/v0.86.0...v0.87.0) Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com> lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'opt_parse_validator/version' Gem::Specification.new do |s| s.name = 'opt_parse_validator' s.version = OptParseValidator::VERSION s.platform = Gem::Platform::RUBY s.required_ruby_version = '>= 2.5' s.authors = ['WPScanTeam'] s.email = ['team@wpscan.org'] s.summary = 'Ruby OptionParser Validators' s.description = 'Implementation of validators for the ruby OptionParser lib. ' \ 'Mainly used in the CMSScanner gem to define the cli options available' s.homepage = 'https://github.com/wpscanteam/OptParseValidator' s.license = 'MIT' s.files = Dir.glob('lib/**/*') + %w[LICENSE README.md] s.test_files = [] s.require_paths = ['lib'] s.add_dependency 'activesupport', '>= 5.2', '< 6.1.0' s.add_dependency 'addressable', '>= 2.5', '< 2.8' s.add_development_dependency 'bundler', '>= 1.6' s.add_development_dependency 'rake', '~> 13.0' s.add_development_dependency 'rspec', '~> 3.9.0' s.add_development_dependency 'rspec-its', '~> 1.3.0' s.add_development_dependency 'rubocop', '~> 0.87.0' s.add_development_dependency 'rubocop-performance', '~> 1.7.0' s.add_development_dependency 'simplecov', '~> 0.18.2' s.add_development_dependency 'simplecov-lcov', '~> 0.8.0' end
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'opt_parse_validator/version'

Gem::Specification.new do |s|
  s.name = 'opt_parse_validator'
  s.version = OptParseValidator::VERSION
  s.platform = Gem::Platform::RUBY
  s.required_ruby_version = '>= 2.3'
  s.authors = ['WPScanTeam']
  s.email = ['team@wpscan.org']
  s.summary = 'Ruby OptionParser Validators'
  s.description = 'Implementation of validators for the ruby OptionParser lib. ' \
                  'Mainly used in the CMSScanner gem to define the cli options available'
  s.homepage = 'https://github.com/wpscanteam/OptParseValidator'
  s.license = 'MIT'

  s.files = Dir.glob('lib/**/*') + %w[LICENSE README.md]
  s.test_files = []
  s.require_paths = ['lib']

  s.add_dependency 'activesupport', '~> 5.1.6'
  s.add_dependency 'addressable', '~> 2.5.0'

  s.add_development_dependency 'bundler', '>= 1.6'
  s.add_development_dependency 'coveralls', '~> 0.8.0'
  s.add_development_dependency 'rake', '~> 12.3'
  s.add_development_dependency 'rspec', '~> 3.8.0'
  s.add_development_dependency 'rspec-its', '~> 1.2.0'
  s.add_development_dependency 'rubocop', '~> 0.61.1'
  s.add_development_dependency 'simplecov', '~> 0.16.1'
end

Updates Deps

lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'opt_parse_validator/version'

Gem::Specification.new do |s|
  s.name = 'opt_parse_validator'
  s.version = OptParseValidator::VERSION
  s.platform = Gem::Platform::RUBY
  s.required_ruby_version = '>= 2.3'
  s.authors = ['WPScanTeam']
  s.email = ['team@wpscan.org']
  s.summary = 'Ruby OptionParser Validators'
  s.description = 'Implementation of validators for the ruby OptionParser lib. ' \
                  'Mainly used in the CMSScanner gem to define the cli options available'
  s.homepage = 'https://github.com/wpscanteam/OptParseValidator'
  s.license = 'MIT'

  s.files = Dir.glob('lib/**/*') + %w[LICENSE README.md]
  s.test_files = []
  s.require_paths = ['lib']

  s.add_dependency 'activesupport', '~> 5.2.0'
  s.add_dependency 'addressable', '~> 2.5.0'

  s.add_development_dependency 'bundler', '>= 1.6'
  s.add_development_dependency 'coveralls', '~> 0.8.0'
  s.add_development_dependency 'rake', '~> 12.3'
  s.add_development_dependency 'rspec', '~> 3.8.0'
  s.add_development_dependency 'rspec-its', '~> 1.2.0'
  s.add_development_dependency 'rubocop', '~> 0.62.0'
  s.add_development_dependency 'simplecov', '~> 0.16.1'
end
$LOAD_PATH << '..' require 'musikbot' require 'nokogiri' module CopyPatrolWikiProjects def self.run @mb = MusikBot::Session.new(inspect) # this ID represents the id of the last record we processed in copyright_diffs last_id = @mb.local_storage('CopyPatrol_lastid', 'r').read.to_i un, pw, host, db, port = Auth.copyright_p_credentials @client = Mysql2::Client.new( host: host, username: un, password: pw, database: db, port: port ) # get the all the CopyPatrol records since the last run records = fetch_records(last_id) # loop through and fetch WikiProjects as needed records.each do |record| # don't re-fetch WikiProjects - for now, that is next if wikiprojects?(record['page_title']) wikiprojects = parse_wikiprojects(record['page_title']) # save to database write_wikiprojects(wikiprojects, record) end # update the ID of the last run if records.any? run_file = @mb.local_storage('CopyPatrol_lastid', 'r+') run_file.write(records.last['id']) run_file.close end rescue => e # gets logged to User:MusikBot/CopyPatrolWikiProjects/Error_log @mb.report_error('Fatal error', e) end def self.fetch_records(last_id) query('SELECT id, page_title FROM copyright_diffs WHERE id > ?', last_id).to_a end def self.wikiprojects?(page_title) query('SELECT COUNT(*) AS count FROM wikiprojects WHERE wp_page_title = ?', page_title).to_a.first['count'] > 0 end def self.write_wikiprojects(wikiprojects, record) wikiprojects.each do |wikiproject| # use underscores instead of spaces, to be consistent query('INSERT INTO wikiprojects VALUES(NULL, ?, ?)', record['page_title'], wikiproject.tr(' ', '_') ) end end def self.parse_wikiprojects(page_title) # mw:API:Revisions talk_text = @mb.get("Talk:#{page_title}", rvsection: 0, rvparse: true ) # Uses XML query selectors to identify the WikiProject links, removing any sub-wikiprojects wp_links = Nokogiri::HTML(talk_text).css('.wpb-header a, .mbox-text b a') .collect { |a| a.attributes['href'].value.sub('/wiki/', '') }.uniq .select { |link| !link.include?('/') } 
Nokogiri::HTML(talk_text).css('.wpb-header a').collect(&:content).select { |text| text =~ /^WikiProject/ } end def self.query(sql, *values) puts sql statement = @client.prepare(sql) statement.execute(*values) end end CopyPatrolWikiProjects.run Fix detection of WikiProjects, round two $LOAD_PATH << '..' require 'musikbot' require 'nokogiri' module CopyPatrolWikiProjects def self.run @mb = MusikBot::Session.new(inspect) # this ID represents the id of the last record we processed in copyright_diffs last_id = @mb.local_storage('CopyPatrol_lastid', 'r').read.to_i un, pw, host, db, port = Auth.copyright_p_credentials @client = Mysql2::Client.new( host: host, username: un, password: pw, database: db, port: port ) # get the all the CopyPatrol records since the last run records = fetch_records(last_id) # loop through and fetch WikiProjects as needed records.each do |record| # don't re-fetch WikiProjects - for now, that is next if wikiprojects?(record['page_title']) wikiprojects = parse_wikiprojects(record['page_title']) # save to database write_wikiprojects(wikiprojects, record) end # update the ID of the last run if records.any? 
run_file = @mb.local_storage('CopyPatrol_lastid', 'r+') run_file.write(records.last['id']) run_file.close end rescue => e # gets logged to User:MusikBot/CopyPatrolWikiProjects/Error_log @mb.report_error('Fatal error', e) end def self.fetch_records(last_id) query('SELECT id, page_title FROM copyright_diffs WHERE id > ?', last_id).to_a end def self.wikiprojects?(page_title) query('SELECT COUNT(*) AS count FROM wikiprojects WHERE wp_page_title = ?', page_title).to_a.first['count'] > 0 end def self.write_wikiprojects(wikiprojects, record) wikiprojects.each do |wikiproject| # use underscores instead of spaces, to be consistent query('INSERT INTO wikiprojects VALUES(NULL, ?, ?)', record['page_title'], wikiproject.tr(' ', '_') ) end end def self.parse_wikiprojects(page_title) # mw:API:Revisions talk_text = @mb.get("Talk:#{page_title}", rvsection: 0, rvparse: true ) # Parses the talk page of the given article to find WikiProjects. # Uses XML query selectors to identify the WikiProject links, removing any sub-wikiprojects # and the Wikipedia: prefix Nokogiri::HTML(talk_text).css('.wpb-header a, .mbox-text b a') .collect { |link| link.attributes['href'].value.sub('/wiki/Wikipedia:', '') }.uniq .select { |link| link =~ /^WikiProject/ && !link.include?('/') } end def self.query(sql, *values) puts sql statement = @client.prepare(sql) statement.execute(*values) end end CopyPatrolWikiProjects.run
require 'helper' class TestSlimHtmlStructure < TestSlim def test_simple_render # Keep the trailing space behind "body "! source = %q{ html head title Simple Test Title body p Hello World, meet Slim. } assert_html '<html><head><title>Simple Test Title</title></head><body><p>Hello World, meet Slim.</p></body></html>', source end def test_relaxed_indentation_of_first_line source = %q{ p .content } assert_html "<p><div class=\"content\"></div></p>", source end def test_html_tag_with_text_and_empty_line source = %q{ p Hello p World } assert_html "<p>Hello</p><p>World</p>", source end def test_html_namespaces source = %q{ html:body html:p html:id="test" Text } assert_html '<html:body><html:p html:id="test">Text</html:p></html:body>', source end def test_doctype source = %q{ doctype 1.1 html } assert_html '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"><html></html>', source, format: :xhtml end def test_doctype_new_syntax source = %q{ doctype 5 html } assert_html '<!DOCTYPE html><html></html>', source, format: :xhtml end def test_doctype_new_syntax_html5 source = %q{ doctype html html } assert_html '<!DOCTYPE html><html></html>', source, format: :xhtml end def test_render_with_shortcut_attributes source = %q{ h1#title This is my title #notice.hello.world = hello_world } assert_html '<h1 id="title">This is my title</h1><div class="hello world" id="notice">Hello World from @env</div>', source end def test_render_with_overwritten_default_tag source = %q{ #notice.hello.world = hello_world } assert_html '<section class="hello world" id="notice">Hello World from @env</section>', source, default_tag: 'section' end def test_render_with_custom_shortcut source = %q{ #notice.hello.world@test = hello_world @abc = hello_world } assert_html '<div class="hello world" id="notice" role="test">Hello World from @env</div><section role="abc">Hello World from @env</section>', source, shortcut: {'#' => {attr: 'id'}, '.' 
=> {attr: 'class'}, '@' => {tag: 'section', attr: 'role'}} end def test_render_with_custom_array_shortcut source = %q{ #user@.admin Daniel } assert_html '<div class="admin" id="user" role="admin">Daniel</div>', source, shortcut: {'#' => {attr: 'id'}, '.' => {attr: 'class'}, '@' => {attr: 'role'}, '@.' => {attr: ['class', 'role']}} end def test_render_with_custom_shortcut_and_additional_attrs source = %q{ ^items == "[{'title':'item0'},{'title':'item1'},{'title':'item2'},{'title':'item3'},{'title':'item4'}]" } assert_html '<script data-binding="items" type="application/json">[{\'title\':\'item0\'},{\'title\':\'item1\'},{\'title\':\'item2\'},{\'title\':\'item3\'},{\'title\':\'item4\'}]</script>', source, shortcut: {'^' => {tag: 'script', attr: 'data-binding', additional_attrs: { type: "application/json" }}} end def test_render_with_text_block source = %q{ p | Lorem ipsum dolor sit amet, consectetur adipiscing elit. } assert_html '<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</p>', source end def test_render_with_text_block_with_subsequent_markup source = %q{ p | Lorem ipsum dolor sit amet, consectetur adipiscing elit. p Some more markup } assert_html '<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</p><p>Some more markup</p>', source end def test_render_with_text_block_with_trailing_whitespace source = %q{ ' this is a link to a href="link" page } assert_html "this is\na link to <a href=\"link\">page</a>", source end def test_nested_text source = %q{ p | This is line one. This is line two. This is line three. This is line four. p This is a new paragraph. } assert_html "<p>This is line one.\n This is line two.\n This is line three.\n This is line four.</p><p>This is a new paragraph.</p>", source end def test_nested_text_with_nested_html_one_same_line source = %q{ p | This is line one. This is line two. span.bold This is a bold line in the paragraph. | This is more content. 
} assert_html "<p>This is line one.\n This is line two.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source end def test_nested_text_with_nested_html_one_same_line2 source = %q{ p |This is line one. This is line two. span.bold This is a bold line in the paragraph. | This is more content. } assert_html "<p>This is line one.\n This is line two.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source end def test_nested_text_with_nested_html source = %q{ p | This is line one. This is line two. This is line three. This is line four. span.bold This is a bold line in the paragraph. | This is more content. } assert_html "<p>This is line one.\n This is line two.\n This is line three.\n This is line four.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source end def test_simple_paragraph_with_padding source = %q{ p There will be 3 spaces in front of this line. } assert_html '<p> There will be 3 spaces in front of this line.</p>', source end def test_paragraph_with_nested_text source = %q{ p This is line one. This is line two. } assert_html "<p>This is line one.\n This is line two.</p>", source end def test_paragraph_with_padded_nested_text source = %q{ p This is line one. This is line two. } assert_html "<p> This is line one.\n This is line two.</p>", source end def test_paragraph_with_attributes_and_nested_text source = %q{ p#test class="paragraph" This is line one. This is line two. 
} assert_html "<p class=\"paragraph\" id=\"test\">This is line one.\nThis is line two.</p>", source end def test_relaxed_text_indentation source = %q{ p | text block text line3 } assert_html "<p>text block\ntext\n line3</p>", source end def test_output_code_with_leading_spaces source = %q{ p= hello_world p = hello_world p = hello_world } assert_html '<p>Hello World from @env</p><p>Hello World from @env</p><p>Hello World from @env</p>', source end def test_single_quoted_attributes source = %q{ p class='underscored_class_name' = output_number } assert_html '<p class="underscored_class_name">1337</p>', source end def test_nonstandard_attributes source = %q{ p id="dashed-id" class="underscored_class_name" = output_number } assert_html '<p class="underscored_class_name" id="dashed-id">1337</p>', source end def test_nonstandard_shortcut_attributes source = %q{ p#dashed-id.underscored_class_name = output_number } assert_html '<p class="underscored_class_name" id="dashed-id">1337</p>', source end def test_dashed_attributes source = %q{ p data-info="Illudium Q-36" = output_number } assert_html '<p data-info="Illudium Q-36">1337</p>', source end def test_dashed_attributes_with_shortcuts source = %q{ p#marvin.martian data-info="Illudium Q-36" = output_number } assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source end def test_parens_around_attributes source = %q{ p(id="marvin" class="martian" data-info="Illudium Q-36") = output_number } assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source end def test_square_brackets_around_attributes source = %q{ p[id="marvin" class="martian" data-info="Illudium Q-36"] = output_number } assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source end def test_parens_around_attributes_with_equal_sign_snug_to_right_paren source = %q{ p(id="marvin" class="martian" data-info="Illudium Q-36")= output_number } assert_html '<p class="martian" 
data-info="Illudium Q-36" id="marvin">1337</p>', source end def test_default_attr_delims_option source = %q{ p<id="marvin" class="martian" data-info="Illudium Q-36">= output_number } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', str end end def test_custom_attr_delims_option source = %q{ p { foo="bar" } } assert_html '<p foo="bar"></p>', source assert_html '<p foo="bar"></p>', source, attr_list_delims: {'{' => '}'} assert_html '<p>{ foo="bar" }</p>', source, attr_list_delims: {'(' => ')', '[' => ']'} end def test_closed_tag source = %q{ closed/ } assert_html '<closed />', source, format: :xhtml end def test_custom_attr_list_delims_option source = %q{ p { foo="bar" x=(1+1) } p < x=(1+1) > Hello } assert_html '<p foo="bar" x="2"></p><p>< x=(1+1) > Hello</p>', source assert_html '<p foo="bar" x="2"></p><p>< x=(1+1) > Hello</p>', source, attr_list_delims: {'{' => '}'} assert_html '<p>{ foo="bar" x=(1+1) }</p><p x="2">Hello</p>', source, attr_list_delims: {'<' => '>'}, code_attr_delims: { '(' => ')' } end def test_closed_tag source = %q{ closed/ } assert_html '<closed />', source, format: :xhtml end def test_attributs_with_parens_and_spaces source = %q{label{ for='filter' }= hello_world} assert_html '<label for="filter">Hello World from @env</label>', source end def test_attributs_with_parens_and_spaces2 source = %q{label{ for='filter' } = hello_world} assert_html '<label for="filter">Hello World from @env</label>', source end def test_attributs_with_multiple_spaces source = %q{label for='filter' class="test" = hello_world} assert_html '<label class="test" for="filter">Hello World from @env</label>', source end def test_closed_tag_with_attributes source = %q{ closed id="test" / } assert_html '<closed id="test" />', source, format: :xhtml end def test_closed_tag_with_attributes_and_parens source = %q{ closed(id="test")/ } assert_html '<closed 
id="test" />', source, format: :xhtml end def test_render_with_html_comments source = %q{ p Hello /! This is a comment Another comment p World } assert_html "<p>Hello</p><!--This is a comment\n\nAnother comment--><p>World</p>", source end def test_render_with_html_conditional_and_tag source = %q{ /[ if IE ] p Get a better browser. } assert_html "<!--[if IE]><p>Get a better browser.</p><![endif]-->", source end def test_render_with_html_conditional_and_method_output source = %q{ /[ if IE ] = message 'hello' } assert_html "<!--[if IE]>hello<![endif]-->", source end def test_multiline_attributes_with_method source = %q{ p<id="marvin" class="martian" data-info="Illudium Q-36"> = output_number } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', str end end def test_multiline_attributes_with_text_on_same_line source = %q{ p<id="marvin" class="martian" data-info="Illudium Q-36"> THE space modulator } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">THE space modulator</p>', str end end def test_multiline_attributes_with_nested_text source = %q{ p<id="marvin" class="martian" data-info="Illudium Q-36"> | THE space modulator } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">THE space modulator</p>', str end end def test_multiline_attributes_with_dynamic_attr source = %q{ p<id=id_helper class="martian" data-info="Illudium Q-36"> | THE space modulator } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="notice">THE space modulator</p>', str end end def test_multiline_attributes_with_nested_tag source = %q{ p<id=id_helper class="martian" 
data-info="Illudium Q-36"> span.emphasis THE | space modulator } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="notice"><span class="emphasis">THE</span> space modulator</p>', str end end def test_multiline_attributes_with_nested_text_and_extra_indentation source = %q{ li< id="myid" class="myclass" data-info="myinfo"> a href="link" My Link } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<li class="myclass" data-info="myinfo" id="myid"><a href="link">My Link</a></li>', str end end def test_block_expansion_support source = %q{ ul li.first: a href='a' foo li: a href='b' bar li.last: a href='c' baz } assert_html %{<ul><li class=\"first\"><a href=\"a\">foo</a></li><li><a href=\"b\">bar</a></li><li class=\"last\"><a href=\"c\">baz</a></li></ul>}, source end def test_block_expansion_class_attributes source = %q{ .a: .b: #c d } assert_html %{<div class="a"><div class="b"><div id="c">d</div></div></div>}, source end def test_block_expansion_nesting source = %q{ html: body: .content | Text } assert_html %{<html><body><div class=\"content\">Text</div></body></html>}, source end def test_eval_attributes_once source = %q{ input[value=succ_x] input[value=succ_x] } assert_html %{<input value="1" /><input value="2" />}, source end def test_html_line_indicator source = %q{ <html> head meta name="keywords" content=hello_world - if true <p>#{hello_world}</p> span = hello_world </html> } assert_html '<html><head><meta content="Hello World from @env" name="keywords" /></head><p>Hello World from @env</p><span>Hello World from @env</span></html>', source end end Remove a duplicated test case (#714) require 'helper' class TestSlimHtmlStructure < TestSlim def test_simple_render # Keep the trailing space behind "body "! source = %q{ html head title Simple Test Title body p Hello World, meet Slim. 
} assert_html '<html><head><title>Simple Test Title</title></head><body><p>Hello World, meet Slim.</p></body></html>', source end def test_relaxed_indentation_of_first_line source = %q{ p .content } assert_html "<p><div class=\"content\"></div></p>", source end def test_html_tag_with_text_and_empty_line source = %q{ p Hello p World } assert_html "<p>Hello</p><p>World</p>", source end def test_html_namespaces source = %q{ html:body html:p html:id="test" Text } assert_html '<html:body><html:p html:id="test">Text</html:p></html:body>', source end def test_doctype source = %q{ doctype 1.1 html } assert_html '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"><html></html>', source, format: :xhtml end def test_doctype_new_syntax source = %q{ doctype 5 html } assert_html '<!DOCTYPE html><html></html>', source, format: :xhtml end def test_doctype_new_syntax_html5 source = %q{ doctype html html } assert_html '<!DOCTYPE html><html></html>', source, format: :xhtml end def test_render_with_shortcut_attributes source = %q{ h1#title This is my title #notice.hello.world = hello_world } assert_html '<h1 id="title">This is my title</h1><div class="hello world" id="notice">Hello World from @env</div>', source end def test_render_with_overwritten_default_tag source = %q{ #notice.hello.world = hello_world } assert_html '<section class="hello world" id="notice">Hello World from @env</section>', source, default_tag: 'section' end def test_render_with_custom_shortcut source = %q{ #notice.hello.world@test = hello_world @abc = hello_world } assert_html '<div class="hello world" id="notice" role="test">Hello World from @env</div><section role="abc">Hello World from @env</section>', source, shortcut: {'#' => {attr: 'id'}, '.' 
=> {attr: 'class'}, '@' => {tag: 'section', attr: 'role'}} end def test_render_with_custom_array_shortcut source = %q{ #user@.admin Daniel } assert_html '<div class="admin" id="user" role="admin">Daniel</div>', source, shortcut: {'#' => {attr: 'id'}, '.' => {attr: 'class'}, '@' => {attr: 'role'}, '@.' => {attr: ['class', 'role']}} end def test_render_with_custom_shortcut_and_additional_attrs source = %q{ ^items == "[{'title':'item0'},{'title':'item1'},{'title':'item2'},{'title':'item3'},{'title':'item4'}]" } assert_html '<script data-binding="items" type="application/json">[{\'title\':\'item0\'},{\'title\':\'item1\'},{\'title\':\'item2\'},{\'title\':\'item3\'},{\'title\':\'item4\'}]</script>', source, shortcut: {'^' => {tag: 'script', attr: 'data-binding', additional_attrs: { type: "application/json" }}} end def test_render_with_text_block source = %q{ p | Lorem ipsum dolor sit amet, consectetur adipiscing elit. } assert_html '<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</p>', source end def test_render_with_text_block_with_subsequent_markup source = %q{ p | Lorem ipsum dolor sit amet, consectetur adipiscing elit. p Some more markup } assert_html '<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</p><p>Some more markup</p>', source end def test_render_with_text_block_with_trailing_whitespace source = %q{ ' this is a link to a href="link" page } assert_html "this is\na link to <a href=\"link\">page</a>", source end def test_nested_text source = %q{ p | This is line one. This is line two. This is line three. This is line four. p This is a new paragraph. } assert_html "<p>This is line one.\n This is line two.\n This is line three.\n This is line four.</p><p>This is a new paragraph.</p>", source end def test_nested_text_with_nested_html_one_same_line source = %q{ p | This is line one. This is line two. span.bold This is a bold line in the paragraph. | This is more content. 
} assert_html "<p>This is line one.\n This is line two.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source end def test_nested_text_with_nested_html_one_same_line2 source = %q{ p |This is line one. This is line two. span.bold This is a bold line in the paragraph. | This is more content. } assert_html "<p>This is line one.\n This is line two.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source end def test_nested_text_with_nested_html source = %q{ p | This is line one. This is line two. This is line three. This is line four. span.bold This is a bold line in the paragraph. | This is more content. } assert_html "<p>This is line one.\n This is line two.\n This is line three.\n This is line four.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source end def test_simple_paragraph_with_padding source = %q{ p There will be 3 spaces in front of this line. } assert_html '<p> There will be 3 spaces in front of this line.</p>', source end def test_paragraph_with_nested_text source = %q{ p This is line one. This is line two. } assert_html "<p>This is line one.\n This is line two.</p>", source end def test_paragraph_with_padded_nested_text source = %q{ p This is line one. This is line two. } assert_html "<p> This is line one.\n This is line two.</p>", source end def test_paragraph_with_attributes_and_nested_text source = %q{ p#test class="paragraph" This is line one. This is line two. 
} assert_html "<p class=\"paragraph\" id=\"test\">This is line one.\nThis is line two.</p>", source end def test_relaxed_text_indentation source = %q{ p | text block text line3 } assert_html "<p>text block\ntext\n line3</p>", source end def test_output_code_with_leading_spaces source = %q{ p= hello_world p = hello_world p = hello_world } assert_html '<p>Hello World from @env</p><p>Hello World from @env</p><p>Hello World from @env</p>', source end def test_single_quoted_attributes source = %q{ p class='underscored_class_name' = output_number } assert_html '<p class="underscored_class_name">1337</p>', source end def test_nonstandard_attributes source = %q{ p id="dashed-id" class="underscored_class_name" = output_number } assert_html '<p class="underscored_class_name" id="dashed-id">1337</p>', source end def test_nonstandard_shortcut_attributes source = %q{ p#dashed-id.underscored_class_name = output_number } assert_html '<p class="underscored_class_name" id="dashed-id">1337</p>', source end def test_dashed_attributes source = %q{ p data-info="Illudium Q-36" = output_number } assert_html '<p data-info="Illudium Q-36">1337</p>', source end def test_dashed_attributes_with_shortcuts source = %q{ p#marvin.martian data-info="Illudium Q-36" = output_number } assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source end def test_parens_around_attributes source = %q{ p(id="marvin" class="martian" data-info="Illudium Q-36") = output_number } assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source end def test_square_brackets_around_attributes source = %q{ p[id="marvin" class="martian" data-info="Illudium Q-36"] = output_number } assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source end def test_parens_around_attributes_with_equal_sign_snug_to_right_paren source = %q{ p(id="marvin" class="martian" data-info="Illudium Q-36")= output_number } assert_html '<p class="martian" 
data-info="Illudium Q-36" id="marvin">1337</p>', source end def test_default_attr_delims_option source = %q{ p<id="marvin" class="martian" data-info="Illudium Q-36">= output_number } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', str end end def test_custom_attr_delims_option source = %q{ p { foo="bar" } } assert_html '<p foo="bar"></p>', source assert_html '<p foo="bar"></p>', source, attr_list_delims: {'{' => '}'} assert_html '<p>{ foo="bar" }</p>', source, attr_list_delims: {'(' => ')', '[' => ']'} end def test_closed_tag source = %q{ closed/ } assert_html '<closed />', source, format: :xhtml end def test_custom_attr_list_delims_option source = %q{ p { foo="bar" x=(1+1) } p < x=(1+1) > Hello } assert_html '<p foo="bar" x="2"></p><p>< x=(1+1) > Hello</p>', source assert_html '<p foo="bar" x="2"></p><p>< x=(1+1) > Hello</p>', source, attr_list_delims: {'{' => '}'} assert_html '<p>{ foo="bar" x=(1+1) }</p><p x="2">Hello</p>', source, attr_list_delims: {'<' => '>'}, code_attr_delims: { '(' => ')' } end def test_attributs_with_parens_and_spaces source = %q{label{ for='filter' }= hello_world} assert_html '<label for="filter">Hello World from @env</label>', source end def test_attributs_with_parens_and_spaces2 source = %q{label{ for='filter' } = hello_world} assert_html '<label for="filter">Hello World from @env</label>', source end def test_attributs_with_multiple_spaces source = %q{label for='filter' class="test" = hello_world} assert_html '<label class="test" for="filter">Hello World from @env</label>', source end def test_closed_tag_with_attributes source = %q{ closed id="test" / } assert_html '<closed id="test" />', source, format: :xhtml end def test_closed_tag_with_attributes_and_parens source = %q{ closed(id="test")/ } assert_html '<closed id="test" />', source, format: :xhtml end def test_render_with_html_comments source = %q{ p Hello /! 
This is a comment Another comment p World } assert_html "<p>Hello</p><!--This is a comment\n\nAnother comment--><p>World</p>", source end def test_render_with_html_conditional_and_tag source = %q{ /[ if IE ] p Get a better browser. } assert_html "<!--[if IE]><p>Get a better browser.</p><![endif]-->", source end def test_render_with_html_conditional_and_method_output source = %q{ /[ if IE ] = message 'hello' } assert_html "<!--[if IE]>hello<![endif]-->", source end def test_multiline_attributes_with_method source = %q{ p<id="marvin" class="martian" data-info="Illudium Q-36"> = output_number } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', str end end def test_multiline_attributes_with_text_on_same_line source = %q{ p<id="marvin" class="martian" data-info="Illudium Q-36"> THE space modulator } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">THE space modulator</p>', str end end def test_multiline_attributes_with_nested_text source = %q{ p<id="marvin" class="martian" data-info="Illudium Q-36"> | THE space modulator } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">THE space modulator</p>', str end end def test_multiline_attributes_with_dynamic_attr source = %q{ p<id=id_helper class="martian" data-info="Illudium Q-36"> | THE space modulator } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="notice">THE space modulator</p>', str end end def test_multiline_attributes_with_nested_tag source = %q{ p<id=id_helper class="martian" data-info="Illudium Q-36"> span.emphasis THE | space modulator } 
Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<p class="martian" data-info="Illudium Q-36" id="notice"><span class="emphasis">THE</span> space modulator</p>', str end end def test_multiline_attributes_with_nested_text_and_extra_indentation source = %q{ li< id="myid" class="myclass" data-info="myinfo"> a href="link" My Link } Slim::Parser.options[:attr_list_delims].each do |k,v| str = source.sub('<',k).sub('>',v) assert_html '<li class="myclass" data-info="myinfo" id="myid"><a href="link">My Link</a></li>', str end end def test_block_expansion_support source = %q{ ul li.first: a href='a' foo li: a href='b' bar li.last: a href='c' baz } assert_html %{<ul><li class=\"first\"><a href=\"a\">foo</a></li><li><a href=\"b\">bar</a></li><li class=\"last\"><a href=\"c\">baz</a></li></ul>}, source end def test_block_expansion_class_attributes source = %q{ .a: .b: #c d } assert_html %{<div class="a"><div class="b"><div id="c">d</div></div></div>}, source end def test_block_expansion_nesting source = %q{ html: body: .content | Text } assert_html %{<html><body><div class=\"content\">Text</div></body></html>}, source end def test_eval_attributes_once source = %q{ input[value=succ_x] input[value=succ_x] } assert_html %{<input value="1" /><input value="2" />}, source end def test_html_line_indicator source = %q{ <html> head meta name="keywords" content=hello_world - if true <p>#{hello_world}</p> span = hello_world </html> } assert_html '<html><head><meta content="Hello World from @env" name="keywords" /></head><p>Hello World from @env</p><span>Hello World from @env</span></html>', source end end
require File.expand_path('../boot', __FILE__) require "active_support" require "action_controller" require "action_view" require "sprockets/railtie" Bundler.require(*Rails.groups) require "mountain_view" module Dummy class Application < Rails::Application # Settings in config/environments/* take precedence over those specified here. # Application configuration should go into files in config/initializers # -- all .rb files in that directory are automatically loaded. # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone. # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC. # config.time_zone = 'Central Time (US & Canada)' # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded. # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s] # config.i18n.default_locale = :de # support newer rails without warnings config.active_support.test_order = :random rescue nil # Settings specified here will take precedence over those in config/application.rb. # The test environment is used exclusively to run your application's # test suite. You never need to work with it otherwise. Remember that # your test database is "scratch space" for the test suite and is wiped # and recreated between test runs. Don't rely on the data there! config.cache_classes = true # Do not eager load code on boot. This avoids loading your whole application # just for the purpose of running a single test. If you are using a tool that # preloads Rails for running tests, you may have to set it to true. config.eager_load = false # Configure static asset server for tests with Cache-Control for performance. config.public_file_server.headers = { 'Cache-Control' => 'public, max-age=3600' } config.assets.enabled = true # Show full error reports and disable caching. 
config.consider_all_requests_local = true config.action_controller.perform_caching = false # Raise exceptions instead of rendering exception templates. config.action_dispatch.show_exceptions = false # Disable request forgery protection in test environment. config.action_controller.allow_forgery_protection = false # Print deprecation notices to the stderr. config.active_support.deprecation = :stderr # Raises error for missing translations # config.action_view.raise_on_missing_translations = true end end Actually, just comment it out. require File.expand_path('../boot', __FILE__) require "active_support" require "action_controller" require "action_view" require "sprockets/railtie" Bundler.require(*Rails.groups) require "mountain_view" module Dummy class Application < Rails::Application # Settings in config/environments/* take precedence over those specified here. # Application configuration should go into files in config/initializers # -- all .rb files in that directory are automatically loaded. # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone. # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC. # config.time_zone = 'Central Time (US & Canada)' # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded. # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s] # config.i18n.default_locale = :de # support newer rails without warnings config.active_support.test_order = :random rescue nil # Settings specified here will take precedence over those in config/application.rb. # The test environment is used exclusively to run your application's # test suite. You never need to work with it otherwise. Remember that # your test database is "scratch space" for the test suite and is wiped # and recreated between test runs. Don't rely on the data there! config.cache_classes = true # Do not eager load code on boot. 
This avoids loading your whole application # just for the purpose of running a single test. If you are using a tool that # preloads Rails for running tests, you may have to set it to true. config.eager_load = false # Configure static asset server for tests with Cache-Control for performance. # config.public_file_server.headers = { 'Cache-Control' => 'public, max-age=3600' } config.assets.enabled = true # Show full error reports and disable caching. config.consider_all_requests_local = true config.action_controller.perform_caching = false # Raise exceptions instead of rendering exception templates. config.action_dispatch.show_exceptions = false # Disable request forgery protection in test environment. config.action_controller.allow_forgery_protection = false # Print deprecation notices to the stderr. config.active_support.deprecation = :stderr # Raises error for missing translations # config.action_view.raise_on_missing_translations = true end end
class Question < ActiveRecord::Base has_many :votes, as: :votable belongs_to :user has_many :comments, as: :commentable has_many :answers end change form_for on index page to use short form syntax class Question < ActiveRecord::Base has_many :votes, as: :votable belongs_to :user has_many :comments, as: :commentable has_many :answers # Need to confirm with Alex how the session hash is populated so that I can match user id with the def validate_question_ownership(body) @question.user_id === session[:id] ? true : false end end
module ExtraRoutes def add(map) # omniauth map.match '/auth/:provider/callback', :to => 'sessions#create_omniauth' # sessions map.match '/signin', :to => 'sessions#create' map.match '/signshow', :to => 'sessions#show' map.match '/signout', :to => 'sessions#destroy' end module_function :add end Removed white spaces. module ExtraRoutes def add(map) # omniauth map.match '/auth/:provider/callback', :to => 'sessions#create_omniauth' # sessions map.match '/signin', :to => 'sessions#create' map.match '/signshow', :to => 'sessions#show' map.match '/signout', :to => 'sessions#destroy' end module_function :add end
require 'test_helper' module Tire class SuggestIntegrationTest < Test::Unit::TestCase include Test::Integration context "Suggest" do should "add suggestions field to the results using the term suggester" do # Tire::Configuration.logger STDERR, :level => 'debug' s = Tire.search('articles-test') do suggest :term_suggest1, 'thrree' do term :title end end assert_equal 1, s.results.suggestions.size assert_equal 'three', s.results.suggestions["term_suggest1"].first["options"].first["text"] end end should "add suggestions field to the results using the phrase suggester" do # Tire::Configuration.logger STDERR, :level => 'debug' s = Tire.search('articles-test') do suggest :term_suggest1, 'thrree' do phrase :title end end assert_equal 1, s.results.suggestions.size assert_equal 'three', s.results.suggestions["term_suggest1"].first["options"].first["text"] end end end Suggestions: Working integration test within search require 'test_helper' module Tire class SuggestIntegrationTest < Test::Unit::TestCase include Test::Integration context "Search Suggest" do should "add suggestions field to the results using the term suggester" do # Tire::Configuration.logger STDERR, :level => 'debug' s = Tire.search('articles-test') do suggest :term_suggest1 do text 'thrree' term :title end end assert_equal 1, s.results.suggestions.size assert_equal 'three', s.results.suggestions.texts.first end end should "add suggestions field to the results using the phrase suggester" do # Tire::Configuration.logger STDERR, :level => 'debug' s = Tire.search('articles-test') do suggest :term_suggest1 do text 'thrree' phrase :title end end assert_equal 1, s.results.suggestions.size assert_equal 'three', s.results.suggestions.texts.first end end end
require 'test_helper' require "test_xml/mini_test" require "roxml" require "roar/representer/roxml" require "roar/model/representable" class VariantFunctionalTest < MiniTest::Spec class VariantXmlRepresenter < Roar::Representer::Roxml xml_accessor :size xml_accessor :id xml_accessor :title xml_accessor :price end class Variant def self.model_name "variant" end include Roar::Model accessors :size, :price, :id, :title include Roar::Model::Representable represents "application/xml", :with => VariantXmlRepresenter end class ArticleXmlRepresenter < Roar::Representer::Roxml xml_accessor :id xml_accessor :variant, :as => [Variant] end class Article def self.model_name "article" end include Roar::Model accessors :id, :variant # FIXME: should be variants include Roar::Model::Representable represents "application/xml", :with => ArticleXmlRepresenter end describe "All models in this use-case" do describe "VariantXmlRepresenter" do before do @shirt = Variant.new("size" => "S", "price" => "9.99", "id" => "1", "title" => "China Shirt") end it "be deserializable" do @v = Variant.from("application/xml", "<variant><id>1</id><size>S</size><price>9.99</price><title>China Shirt</title><variant>") assert_model @shirt, @v end it "be serializable" do # assert_xml_match (no ordering) assert_match_xml "<variant><size>S</size><id>1</id><title>China Shirt</title><price>9.99</price><variant>", @shirt.to("application/xml") end end # Article has Variants describe "ArticleXmlRepresenter" do before do @china_s = Variant.new("size" => "S", "price" => "9.99", "id" => "1-s", "title" => "China Shirt/S") @china_m = Variant.new("size" => "M", "price" => "9.99", "id" => "1-m", "title" => "China Shirt/M") @shirt = Article.new("id" => 1, :variant => [@china_s, @china_m]) end it "deserializes" do @a = Article.from("application/xml", "<article> <id>1</id> <variant><size>S</size><id>1-s</id><title>China Shirt</title><price>9.99</price><variant> <variant><size>M</size><id>1-m</id><title>China 
Shirt</title><price>9.99</price><variant> <article>") puts @a.inspect assert_model @shirt, @a end end end end variants are running! yeah! require 'test_helper' require "test_xml/mini_test" require "roxml" require "roar/representer/roxml" require "roar/model/representable" class VariantFunctionalTest < MiniTest::Spec class VariantXmlRepresenter < Roar::Representer::Roxml xml_accessor :size xml_accessor :id xml_accessor :title xml_accessor :price end class Variant def self.model_name "variant" end include Roar::Model accessors :size, :price, :id, :title include Roar::Model::Representable represents "application/xml", :with => VariantXmlRepresenter end class ArticleXmlRepresenter < Roar::Representer::Roxml xml_accessor :id xml_accessor :variants, :as => [Variant], :tag => :variant end class Article def self.model_name "article" end include Roar::Model accessors :id, :variants include Roar::Model::Representable represents "application/xml", :with => ArticleXmlRepresenter end describe "All models in this use-case" do describe "VariantXmlRepresenter" do before do @shirt = Variant.new("size" => "S", "price" => "9.99", "id" => "1", "title" => "China Shirt") end it "be deserializable" do @v = Variant.from("application/xml", "<variant><id>1</id><size>S</size><price>9.99</price><title>China Shirt</title><variant>") assert_model @shirt, @v end it "be serializable" do # assert_xml_match (no ordering) assert_match_xml "<variant><size>S</size><id>1</id><title>China Shirt</title><price>9.99</price><variant>", @shirt.to("application/xml") end end # Article has Variants describe "ArticleXmlRepresenter" do before do @china_s = Variant.new("size" => "S", "price" => "9.99", "id" => "1-s", "title" => "China Shirt-S") @china_m = Variant.new("size" => "M", "price" => "9.99", "id" => "1-m", "title" => "China Shirt-M") @shirt = Article.new("id" => 1, "variants" => [@china_s, @china_m]) end it "deserializes" do @a = Article.from("application/xml", "<article> <id>1</id> 
<variant><size>S</size><id>1-s</id><title>China Shirt-S</title><price>9.99</price></variant> <variant><size>M</size><id>1-m</id><title>China Shirt-M</title><price>9.99</price></variant> <article>") puts @a.inspect assert_model @shirt, @a end end end end
Pod::Spec.new do |s| s.name = "SkyFloatingLabelTextField" s.version = "0.0.1" s.summary = "A beautiful, flexible and customizable textfield. Supports a floating label placeholder / title, error state and iconography." s.homepage = "https://github.com/Skyscanner/SkyFloatingLabelTextField" s.license = { :type => "MIT", :file => "LICENSE.md" } s.authors = "Daniel Langh, Gergely Orosz, Raimon Lapuente" s.ios.deployment_target = "8.0" s.source = { :git => "https://github.com/Skyscanner/SkyFloatingLabelTextField.git" } s.source_files = 'SkyFloatingLabelTextField/SkyFloatingLabelTextField/*.swift' end Updating podspec for 0.1.1 Pod::Spec.new do |s| s.name = "SkyFloatingLabelTextField" s.version = "0.1.1" s.summary = "A beautiful, flexible and customizable textfield. Supports a floating label placeholder / title, error state and iconography." s.homepage = "https://github.com/Skyscanner/SkyFloatingLabelTextField" s.license = { :type => "Apache 2.0", :file => "LICENSE.md" } s.authors = "Daniel Langh, Gergely Orosz, Raimon Lapuente" s.ios.deployment_target = "8.0" s.source = { :git => "https://github.com/Skyscanner/SkyFloatingLabelTextField.git", :tag => "v0.1.1" } s.source_files = 'SkyFloatingLabelTextField/SkyFloatingLabelTextField/*.swift' end
# Generated by jeweler # DO NOT EDIT THIS FILE DIRECTLY # Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command # -*- encoding: utf-8 -*- Gem::Specification.new do |s| s.name = %q{enigmamachine} s.version = "0.3.0" s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.authors = ["dave"] s.date = %q{2010-07-12} s.default_executable = %q{enigmamachine} s.description = %q{A RESTful video encoder which you can use as either a front-end to ffmpeg or headless on a server.} s.email = %q{dave@caprica} s.executables = ["enigmamachine"] s.extra_rdoc_files = [ "LICENSE", "README.rdoc" ] s.files = [ ".document", ".gitignore", "LICENSE", "README.rdoc", "Rakefile", "VERSION", "bin/enigmamachine", "enigmamachine.gemspec", "lib/enigmamachine.rb", "lib/enigmamachine.sqlite3", "lib/enigmamachine/config.ru", "lib/enigmamachine/encoding_queue.rb", "lib/enigmamachine/models/encoder.rb", "lib/enigmamachine/models/encoding_task.rb", "lib/enigmamachine/models/video.rb", "lib/enigmamachine/public/default.css", "lib/enigmamachine/public/images/Enigma-logo.jpg", "lib/enigmamachine/public/images/bg01.jpg", "lib/enigmamachine/public/images/bg02.jpg", "lib/enigmamachine/public/images/bg03.jpg", "lib/enigmamachine/public/images/bg04.jpg", "lib/enigmamachine/public/images/img02.gif", "lib/enigmamachine/public/images/img03.gif", "lib/enigmamachine/public/images/img04.gif", "lib/enigmamachine/public/images/img05.gif", "lib/enigmamachine/public/images/img06.jpg", "lib/enigmamachine/public/images/spacer.gif", "lib/enigmamachine/views/encoders/edit.erb", "lib/enigmamachine/views/encoders/encoder.erb", "lib/enigmamachine/views/encoders/encoding_task.erb", "lib/enigmamachine/views/encoders/form.erb", "lib/enigmamachine/views/encoders/index.erb", "lib/enigmamachine/views/encoders/new.erb", "lib/enigmamachine/views/encoders/show.erb", "lib/enigmamachine/views/encoding_tasks/edit.erb", "lib/enigmamachine/views/encoding_tasks/form.erb", 
"lib/enigmamachine/views/encoding_tasks/new.erb", "lib/enigmamachine/views/index.erb", "lib/enigmamachine/views/layout.erb", "lib/enigmamachine/views/videos/form.erb", "lib/enigmamachine/views/videos/index.erb", "lib/enigmamachine/views/videos/new.erb", "lib/enigmamachine/views/videos/video.erb", "lib/ext/array_ext.rb", "lib/ext/partials.rb", "lib/generators/config.yml", "test/helper.rb", "test/support/afile.mpg", "test/support/blueprints.rb", "test/test_encoder.rb", "test/test_encoding_queue.rb", "test/test_enigmamachine.rb", "test/test_video.rb" ] s.homepage = %q{http://github.com/futurechimp/enigmamachine} s.rdoc_options = ["--charset=UTF-8"] s.require_paths = ["lib"] s.rubygems_version = %q{1.3.7} s.summary = %q{A RESTful video encoder.} s.test_files = [ "test/support/blueprints.rb", "test/test_encoding_queue.rb", "test/helper.rb", "test/test_encoder.rb", "test/test_enigmamachine.rb", "test/test_video.rb" ] if s.respond_to? :specification_version then current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION s.specification_version = 3 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"]) s.add_runtime_dependency(%q<data_mapper>, ["= 1.0.0"]) s.add_runtime_dependency(%q<eventmachine>, ["= 0.12.10"]) s.add_runtime_dependency(%q<dm-sqlite-adapter>, ["= 1.0.0"]) s.add_runtime_dependency(%q<rack-flash>, [">= 0"]) s.add_runtime_dependency(%q<ruby-debug>, [">= 0"]) s.add_runtime_dependency(%q<sinatra>, ["= 1.0.0"]) s.add_runtime_dependency(%q<thin>, [">= 0"]) else s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"]) s.add_dependency(%q<data_mapper>, ["= 1.0.0"]) s.add_dependency(%q<eventmachine>, ["= 0.12.10"]) s.add_dependency(%q<dm-sqlite-adapter>, ["= 1.0.0"]) s.add_dependency(%q<rack-flash>, [">= 0"]) s.add_dependency(%q<ruby-debug>, [">= 0"]) s.add_dependency(%q<sinatra>, ["= 1.0.0"]) s.add_dependency(%q<thin>, [">= 0"]) end else s.add_dependency(%q<thoughtbot-shoulda>, [">= 
0"]) s.add_dependency(%q<data_mapper>, ["= 1.0.0"]) s.add_dependency(%q<eventmachine>, ["= 0.12.10"]) s.add_dependency(%q<dm-sqlite-adapter>, ["= 1.0.0"]) s.add_dependency(%q<rack-flash>, [">= 0"]) s.add_dependency(%q<ruby-debug>, [">= 0"]) s.add_dependency(%q<sinatra>, ["= 1.0.0"]) s.add_dependency(%q<thin>, [">= 0"]) end end Regenerated gemspec for version 0.3.1 # Generated by jeweler # DO NOT EDIT THIS FILE DIRECTLY # Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command # -*- encoding: utf-8 -*- Gem::Specification.new do |s| s.name = %q{enigmamachine} s.version = "0.3.1" s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.authors = ["dave"] s.date = %q{2010-07-12} s.default_executable = %q{enigmamachine} s.description = %q{A RESTful video encoder which you can use as either a front-end to ffmpeg or headless on a server.} s.email = %q{dave@caprica} s.executables = ["enigmamachine"] s.extra_rdoc_files = [ "LICENSE", "README.rdoc" ] s.files = [ ".document", ".gitignore", "LICENSE", "README.rdoc", "Rakefile", "VERSION", "bin/enigmamachine", "enigmamachine.gemspec", "lib/enigmamachine.rb", "lib/enigmamachine.sqlite3", "lib/enigmamachine/config.ru", "lib/enigmamachine/encoding_queue.rb", "lib/enigmamachine/models/encoder.rb", "lib/enigmamachine/models/encoding_task.rb", "lib/enigmamachine/models/video.rb", "lib/enigmamachine/public/default.css", "lib/enigmamachine/public/images/Enigma-logo.jpg", "lib/enigmamachine/public/images/bg01.jpg", "lib/enigmamachine/public/images/bg02.jpg", "lib/enigmamachine/public/images/bg03.jpg", "lib/enigmamachine/public/images/bg04.jpg", "lib/enigmamachine/public/images/img02.gif", "lib/enigmamachine/public/images/img03.gif", "lib/enigmamachine/public/images/img04.gif", "lib/enigmamachine/public/images/img05.gif", "lib/enigmamachine/public/images/img06.jpg", "lib/enigmamachine/public/images/spacer.gif", "lib/enigmamachine/views/encoders/edit.erb", 
"lib/enigmamachine/views/encoders/encoder.erb", "lib/enigmamachine/views/encoders/encoding_task.erb", "lib/enigmamachine/views/encoders/form.erb", "lib/enigmamachine/views/encoders/index.erb", "lib/enigmamachine/views/encoders/new.erb", "lib/enigmamachine/views/encoders/show.erb", "lib/enigmamachine/views/encoding_tasks/edit.erb", "lib/enigmamachine/views/encoding_tasks/form.erb", "lib/enigmamachine/views/encoding_tasks/new.erb", "lib/enigmamachine/views/index.erb", "lib/enigmamachine/views/layout.erb", "lib/enigmamachine/views/videos/form.erb", "lib/enigmamachine/views/videos/index.erb", "lib/enigmamachine/views/videos/new.erb", "lib/enigmamachine/views/videos/video.erb", "lib/ext/array_ext.rb", "lib/ext/partials.rb", "lib/generators/config.yml", "test/helper.rb", "test/support/afile.mpg", "test/support/blueprints.rb", "test/test_encoder.rb", "test/test_encoding_queue.rb", "test/test_enigmamachine.rb", "test/test_video.rb" ] s.homepage = %q{http://github.com/futurechimp/enigmamachine} s.rdoc_options = ["--charset=UTF-8"] s.require_paths = ["lib"] s.rubygems_version = %q{1.3.7} s.summary = %q{A RESTful video encoder.} s.test_files = [ "test/support/blueprints.rb", "test/test_encoding_queue.rb", "test/helper.rb", "test/test_encoder.rb", "test/test_enigmamachine.rb", "test/test_video.rb" ] if s.respond_to? 
:specification_version then current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION s.specification_version = 3 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"]) s.add_runtime_dependency(%q<data_mapper>, ["= 1.0.0"]) s.add_runtime_dependency(%q<eventmachine>, ["= 0.12.10"]) s.add_runtime_dependency(%q<dm-sqlite-adapter>, ["= 1.0.0"]) s.add_runtime_dependency(%q<rack-flash>, [">= 0"]) s.add_runtime_dependency(%q<ruby-debug>, [">= 0"]) s.add_runtime_dependency(%q<sinatra>, ["= 1.0.0"]) s.add_runtime_dependency(%q<thin>, [">= 0"]) else s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"]) s.add_dependency(%q<data_mapper>, ["= 1.0.0"]) s.add_dependency(%q<eventmachine>, ["= 0.12.10"]) s.add_dependency(%q<dm-sqlite-adapter>, ["= 1.0.0"]) s.add_dependency(%q<rack-flash>, [">= 0"]) s.add_dependency(%q<ruby-debug>, [">= 0"]) s.add_dependency(%q<sinatra>, ["= 1.0.0"]) s.add_dependency(%q<thin>, [">= 0"]) end else s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"]) s.add_dependency(%q<data_mapper>, ["= 1.0.0"]) s.add_dependency(%q<eventmachine>, ["= 0.12.10"]) s.add_dependency(%q<dm-sqlite-adapter>, ["= 1.0.0"]) s.add_dependency(%q<rack-flash>, [">= 0"]) s.add_dependency(%q<ruby-debug>, [">= 0"]) s.add_dependency(%q<sinatra>, ["= 1.0.0"]) s.add_dependency(%q<thin>, [">= 0"]) end end
$:.push File.expand_path("../lib", __FILE__) # Maintain your gem's version: require "enju_question/version" # Describe your gem and declare its dependencies: Gem::Specification.new do |s| s.name = "enju_question" s.version = EnjuQuestion::VERSION s.authors = ["Kosuke Tanabe"] s.email = ["tanabe@mwr.mediacom.keio.ac.jp"] s.homepage = "https://github.com/next-l/enju_question" s.summary = "enju_queestion plugin" s.description = "Question and answer management for Next-L Enju" s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "README.rdoc"] s.test_files = Dir["spec/**/*"] - Dir["spec/dummy/log/*"] - Dir["spec/dummy/solr/{data,pids}/*"] s.add_dependency "rails", "~> 3.2" s.add_dependency "simple_form" s.add_dependency "acts-as-taggable-on", "~> 2.3" s.add_dependency "enju_core", "~> 0.1.1.pre4" s.add_development_dependency "sqlite3" s.add_development_dependency "rspec-rails" s.add_development_dependency "factory_girl_rails" s.add_development_dependency "vcr", "~> 2.4" s.add_development_dependency "enju_biblio", "~> 0.1.0.pre26" s.add_development_dependency "enju_ndl", "~> 0.1.0.pre10" s.add_development_dependency "sunspot_solr", "~> 2.0.0" s.add_development_dependency "mobylette" s.add_development_dependency "fakeweb" end updated gemspec $:.push File.expand_path("../lib", __FILE__) # Maintain your gem's version: require "enju_question/version" # Describe your gem and declare its dependencies: Gem::Specification.new do |s| s.name = "enju_question" s.version = EnjuQuestion::VERSION s.authors = ["Kosuke Tanabe"] s.email = ["tanabe@mwr.mediacom.keio.ac.jp"] s.homepage = "https://github.com/next-l/enju_question" s.summary = "enju_queestion plugin" s.description = "Question and answer management for Next-L Enju" s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "README.rdoc"] s.test_files = Dir["spec/**/*"] - Dir["spec/dummy/log/*"] - Dir["spec/dummy/solr/{data,pids}/*"] s.add_dependency "rails", "~> 3.2" s.add_dependency 
"simple_form" s.add_dependency "acts-as-taggable-on", "~> 2.3" s.add_dependency "enju_core", "~> 0.1.1.pre4" s.add_development_dependency "sqlite3" s.add_development_dependency "rspec-rails" s.add_development_dependency "factory_girl_rails" s.add_development_dependency "vcr", "~> 2.4" s.add_development_dependency "enju_biblio", "~> 0.1.0.pre28" s.add_development_dependency "enju_ndl", "~> 0.1.0.pre13" s.add_development_dependency "sunspot_solr", "~> 2.0.0" s.add_development_dependency "mobylette" s.add_development_dependency "fakeweb" end
lib = File.expand_path("#{File.dirname(__FILE__)}/../lib") unit_tests = File.expand_path("#{File.dirname(__FILE__)}/../test") $:.unshift(lib) $:.unshift(unit_tests) require 'test/unit' require 'active_resource' require 'active_resource/http_mock' require 'active_resource_response' require "fixtures/country" class ActiveResourceResponseTest < Test::Unit::TestCase def setup @country = {:country => {:id => 1, :name => "Ukraine", :iso=>"UA"}} @user = {:user => {:id => 1, :name => "Nsme", :password=>"password"}} ActiveResource::HttpMock.respond_to do |mock| mock.get "/countries.json", {}, [@country].to_json, 200, {"X-total"=>'1'} mock.get "/countries/1.json", {}, @country.to_json, 200, {"X-total"=>'1', 'Set-Cookie'=>['path=/; expires=Tue, 20-Jan-2015 15:03:14 GMT, foo=bar, bar=foo']} mock.get "/countries/1/count.json", {}, {:count => 1155}.to_json, 200, {"X-total"=>'1'} mock.post "/countries.json" , {}, @country.to_json, 422, {"X-total"=>'1'} end end def test_methods_appeared countries = Country.all assert countries.respond_to?(:http) assert countries.http.respond_to?(:cookies) assert countries.http.respond_to?(:headers) assert Country.respond_to?(:http_response) end def test_get_headers_from_all countries = Country.all assert_kind_of Country, countries.first assert_equal "UA", countries.first.iso assert_equal countries.http['X-total'].to_i, 1 assert_equal countries.http.headers[:x_total].to_i, 1 end def test_get_headers_from_custom_methods count = Country.find(1).get("count") assert_equal count.to_i, 1155 assert_equal Country.connection.http_response['X-total'].to_i, 1 assert_equal Country.connection.http_response.headers[:x_total].to_i, 1 assert_equal Country.http_response['X-total'].to_i ,1 end def test_get_headers_from_find country = Country.find(1) assert_equal country.http['X-total'].to_i, 1 assert_equal country.http.headers[:x_total].to_i, 1 end def test_get_cookies country = Country.find(1) assert_equal country.http.cookies['foo'] , 'bar' assert_equal 
country.http.cookies['bar'] , 'foo' #from class assert_equal Country.http_response.cookies['foo'] , 'bar' assert_equal Country.http_response.cookies['bar'] , 'foo' end def test_get_headers_after_exception begin country = Country.create(@country[:country]) rescue ActiveResource::ConnectionError => e response = e.response assert_equal response.headers[:x_total].to_i, 1 end assert_equal Country.http_response['X-total'].to_i, 1 end end test fixes lib = File.expand_path("#{File.dirname(__FILE__)}/../lib") unit_tests = File.expand_path("#{File.dirname(__FILE__)}/../test") $:.unshift(lib) $:.unshift(unit_tests) require 'test/unit' require 'active_resource' require 'active_resource/http_mock' require 'active_resource_response' require "fixtures/country" class ActiveResourceResponseTest < Test::Unit::TestCase def setup @country = {:country => {:id => 1, :name => "Ukraine", :iso=>"UA"}} ActiveResource::HttpMock.respond_to do |mock| mock.get "/countries.json", {}, [@country].to_json, 200, {"X-total"=>'1'} mock.get "/countries/1.json", {}, @country.to_json, 200, {"X-total"=>'1', 'Set-Cookie'=>['path=/; expires=Tue, 20-Jan-2015 15:03:14 GMT, foo=bar, bar=foo']} mock.get "/countries/1/count.json", {}, {:count => 1155}.to_json, 200, {"X-total"=>'1'} mock.post "/countries.json" , {}, @country.to_json, 422, {"X-total"=>'1'} end end def test_methods_appeared countries = Country.all assert countries.respond_to?(:http) assert countries.http.respond_to?(:cookies) assert countries.http.respond_to?(:headers) assert Country.respond_to?(:http_response) end def test_get_headers_from_all countries = Country.all assert_kind_of Country, countries.first assert_equal "UA", countries.first.iso assert_equal countries.http['X-total'].to_i, 1 assert_equal countries.http.headers[:x_total].to_i, 1 end def test_get_headers_from_custom_methods count = Country.find(1).get("count") assert_equal count.to_i, 1155 assert_equal Country.connection.http_response['X-total'].to_i, 1 assert_equal 
Country.connection.http_response.headers[:x_total].to_i, 1 assert_equal Country.http_response['X-total'].to_i ,1 end def test_get_headers_from_find country = Country.find(1) assert_equal country.http['X-total'].to_i, 1 assert_equal country.http.headers[:x_total].to_i, 1 end def test_get_cookies country = Country.find(1) assert_equal country.http.cookies['foo'] , 'bar' assert_equal country.http.cookies['bar'] , 'foo' #from class assert_equal Country.http_response.cookies['foo'] , 'bar' assert_equal Country.http_response.cookies['bar'] , 'foo' end def test_get_headers_after_exception exception = nil begin country = Country.create(@country[:country]) rescue ActiveResource::ConnectionError => e exception = e response = e.response assert_equal response.headers[:x_total].to_i, 1 end assert_equal Country.http_response['X-total'].to_i, 1 assert_equal country.http['X-total'].to_i, 1 assert_equal exception.class, ActiveResource::ResourceInvalid end end
# # Be sure to run `pod spec lint SPUncaughtExceptionHandler.podspec' to ensure this is a # valid spec and to remove all comments including this before submitting the spec. # # To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html # To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/ # Pod::Spec.new do |s| # ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # These will help people to find your library, and whilst it # can feel like a chore to fill in it's definitely to your advantage. The # summary should be tweet-length, and the description more in depth. # s.name = "SPUncaughtExceptionHandler" s.version = "0.1.0" s.summary = "APP闪退时,由用户决定是否继续.宝宝再也不用担心APP闪退了." # This description is used to generate tags and improve search results. # * Think: What does it do? Why did you write it? What is the focus? # * Try to keep it short, snappy and to the point. # * Write the description between the DESC delimiters below. # * Finally, don't worry about the indent, CocoaPods strips it! s.description = <<-DESC 应用在iOS上的异常捕获 用Objective-C编写 APP闪退时,由用户决定是否继续.宝宝再也不用担心APP闪退了. DESC s.homepage = "https://github.com/kshipeng/SPUncaughtExceptionHandler" # s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif" # ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Licensing your code is important. See http://choosealicense.com for more info. # CocoaPods will detect a license file if there is a named LICENSE* # Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'. # #s.license = "MIT" s.license = { :type => "MIT", :file => "LICENSE" } # ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Specify the authors of the library, with email addresses. Email addresses # of the authors are extracted from the SCM log. E.g. $ git log. 
CocoaPods also # accepts just a name if you'd rather not provide an email address. # # Specify a social_media_url where others can refer to, for example a twitter # profile URL. # s.author = { "kshipeng" => "1083734730@qq.com" } # Or just: s.author = "kshipeng" # s.authors = { "kshipeng" => "kang_shipeng@126.com" } # s.social_media_url = "http://twitter.com/kshipeng" # ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # If this Pod runs only on iOS or OS X, then specify the platform and # the deployment target. You can optionally include the target after the platform. # # s.platform = :ios s.platform = :ios, "5.0" # When using multiple platforms # s.ios.deployment_target = "5.0" # s.osx.deployment_target = "10.7" # s.watchos.deployment_target = "2.0" # s.tvos.deployment_target = "9.0" # ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Specify the location from where the source should be retrieved. # Supports git, hg, bzr, svn and HTTP. # s.source = { :git => "https://github.com/kshipeng/SPUncaughtExceptionHandler.git", :tag => "0.0.1" } # ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # CocoaPods is smart about how it includes source code. For source files # giving a folder will include any swift, h, m, mm, c & cpp files. # For header files it will include any header in the folder. # Not including the public_header_files will make all headers public. # s.source_files = "SPUncaughtExceptionHandler", "SPUncaughtExceptionHandler/SPUncaughtExceptionHandler/*.{h,m}" # s.public_header_files = "Classes/**/*.h" # ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # A list of resources included with the Pod. These are copied into the # target bundle with a build phase script. Anything else will be cleaned. # You can preserve files from being cleaned, please don't preserve # non-essential files like tests, examples and documentation. 
# # s.resource = "icon.png" # s.resources = "Resources/*.png" # s.preserve_paths = "FilesToSave", "MoreFilesToSave" # ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Link your library with frameworks, or libraries. Libraries do not include # the lib prefix of their name. # # s.framework = "SomeFramework" # s.frameworks = "SomeFramework", "AnotherFramework" # s.library = "iconv" # s.libraries = "iconv", "xml2" # ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # If your library depends on compiler flags you can set them in the xcconfig hash # where they will only apply to your library. If you depend on other Podspecs # you can include multiple dependencies to ensure it works. s.requires_arc = true # s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" } # s.dependency "JSONKit", "~> 1.4" end 0.1.1 # # Be sure to run `pod spec lint SPUncaughtExceptionHandler.podspec' to ensure this is a # valid spec and to remove all comments including this before submitting the spec. # # To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html # To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/ # Pod::Spec.new do |s| # ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # These will help people to find your library, and whilst it # can feel like a chore to fill in it's definitely to your advantage. The # summary should be tweet-length, and the description more in depth. # s.name = "SPUncaughtExceptionHandler" s.version = "0.1.1" s.summary = "APP闪退时,由用户决定是否继续.宝宝再也不用担心APP闪退了." # This description is used to generate tags and improve search results. # * Think: What does it do? Why did you write it? What is the focus? # * Try to keep it short, snappy and to the point. # * Write the description between the DESC delimiters below. # * Finally, don't worry about the indent, CocoaPods strips it! 
s.description = <<-DESC 应用在iOS上的异常捕获 用Objective-C编写 APP闪退时,由用户决定是否继续.宝宝再也不用担心APP闪退了. DESC s.homepage = "https://github.com/kshipeng/SPUncaughtExceptionHandler" # s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif" # ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Licensing your code is important. See http://choosealicense.com for more info. # CocoaPods will detect a license file if there is a named LICENSE* # Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'. # #s.license = "MIT" s.license = { :type => "MIT", :file => "LICENSE" } # ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Specify the authors of the library, with email addresses. Email addresses # of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also # accepts just a name if you'd rather not provide an email address. # # Specify a social_media_url where others can refer to, for example a twitter # profile URL. # s.author = { "kshipeng" => "1083734730@qq.com" } # Or just: s.author = "kshipeng" # s.authors = { "kshipeng" => "kang_shipeng@126.com" } # s.social_media_url = "http://twitter.com/kshipeng" # ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # If this Pod runs only on iOS or OS X, then specify the platform and # the deployment target. You can optionally include the target after the platform. # # s.platform = :ios s.platform = :ios, "5.0" # When using multiple platforms # s.ios.deployment_target = "5.0" # s.osx.deployment_target = "10.7" # s.watchos.deployment_target = "2.0" # s.tvos.deployment_target = "9.0" # ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Specify the location from where the source should be retrieved. # Supports git, hg, bzr, svn and HTTP. 
# s.source = { :git => "https://github.com/kshipeng/SPUncaughtExceptionHandler.git", :tag => "0.0.1" } # ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # CocoaPods is smart about how it includes source code. For source files # giving a folder will include any swift, h, m, mm, c & cpp files. # For header files it will include any header in the folder. # Not including the public_header_files will make all headers public. # s.source_files = "SPUncaughtExceptionHandler", "SPUncaughtExceptionHandler/SPUncaughtExceptionHandler/*.{h,m}" # s.public_header_files = "Classes/**/*.h" # ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # A list of resources included with the Pod. These are copied into the # target bundle with a build phase script. Anything else will be cleaned. # You can preserve files from being cleaned, please don't preserve # non-essential files like tests, examples and documentation. # # s.resource = "icon.png" # s.resources = "Resources/*.png" # s.preserve_paths = "FilesToSave", "MoreFilesToSave" # ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Link your library with frameworks, or libraries. Libraries do not include # the lib prefix of their name. # # s.framework = "SomeFramework" # s.frameworks = "SomeFramework", "AnotherFramework" # s.library = "iconv" # s.libraries = "iconv", "xml2" # ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # If your library depends on compiler flags you can set them in the xcconfig hash # where they will only apply to your library. If you depend on other Podspecs # you can include multiple dependencies to ensure it works. s.requires_arc = true # s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" } # s.dependency "JSONKit", "~> 1.4" end
# Add basic implementations of core classes as_json
require 'json'

# Minimal #as_json implementations for core classes: each scalar type
# serializes as itself; every other object defaults to nil (JSON null).

class Object
  # Fallback: objects with no specific JSON representation map to null.
  def as_json
    nil
  end
end

# NOTE(review): MRI Ruby core defines no `Boolean` class, so on MRI this
# creates a brand-new empty class and true/false would NOT gain #as_json.
# It only patches an existing class in runtimes such as Opal — TODO confirm
# the intended runtime. TrueClass/FalseClass below cover true/false on MRI.
class Boolean
  def as_json
    self
  end
end

class TrueClass
  # true serializes as itself.
  def as_json
    self
  end
end

class FalseClass
  # false serializes as itself.
  def as_json
    self
  end
end

class NilClass
  # nil serializes as itself (JSON null).
  def as_json
    self
  end
end

class Numeric
  # Covers Integer, Float, Rational, ... — numbers serialize as themselves.
  def as_json
    self
  end
end

class String
  # Strings serialize as themselves.
  def as_json
    self
  end
end
# Dataset record: original test file (instantiates top-level ::AptlyLoad),
# the commit message, then the fixed test file (instantiates the class under
# its module, AptlyCli::AptlyLoad). Line structure restored; code preserved
# verbatim. NOTE(review): `opts = {...}` inside the configure() call assigns
# an unused local — kept as recorded.

require_relative 'minitest_helper'
require 'aptly_cli'

class TestAptlyCli < Minitest::Unit::TestCase
  attr_reader :test_aptly_load

  def setup
    @test_aptly_load = ::AptlyLoad.new
  end

  def test_that_it_has_a_version_number
    refute_nil ::AptlyCli::VERSION
  end

  def test_that_config_loads
    assert_equal ({:server => "127.0.0.2", :port => 8083}), @test_aptly_load.configure(opts = {:server => "127.0.0.2", :port => 8083 })
  end

  def test_that_config_loads_from_yaml
    assert_equal ({:server => "127.0.0.1", :port => 8082}), @test_aptly_load.configure_with('test/fixtures/aptly-cli.yaml')
  end

  def test_that_config_loads_defaults_if_bad_yaml
    assert_equal ({:server => "127.0.0.1", :port => 8081}), @test_aptly_load.configure_with('test/fixtures/aptly-cli_invalid.yaml')
  end

  def test_that_config_loads_defaults_if_no_yaml
    assert_equal ({:server => "127.0.0.1", :port => 8081}), @test_aptly_load.configure_with('test/fixtures/aptly-cli_no_yaml.yaml')
  end
end

# --- commit: "apply class under module" ---

require_relative 'minitest_helper'
require 'aptly_cli'

class TestAptlyCli < Minitest::Unit::TestCase
  attr_reader :test_aptly_load

  def setup
    @test_aptly_load = AptlyCli::AptlyLoad.new
  end

  def test_that_it_has_a_version_number
    refute_nil ::AptlyCli::VERSION
  end

  def test_that_config_loads
    assert_equal ({:server => "127.0.0.2", :port => 8083}), @test_aptly_load.configure(opts = {:server => "127.0.0.2", :port => 8083 })
  end

  def test_that_config_loads_from_yaml
    assert_equal ({:server => "127.0.0.1", :port => 8082}), @test_aptly_load.configure_with('test/fixtures/aptly-cli.yaml')
  end

  def test_that_config_loads_defaults_if_bad_yaml
    assert_equal ({:server => "127.0.0.1", :port => 8081}), @test_aptly_load.configure_with('test/fixtures/aptly-cli_invalid.yaml')
  end

  def test_that_config_loads_defaults_if_no_yaml
    assert_equal ({:server => "127.0.0.1", :port => 8081}), @test_aptly_load.configure_with('test/fixtures/aptly-cli_no_yaml.yaml')
  end
end
added stub questionnaire for easier testing survey 'GB', :full_title => 'United Kingdom', :default_mandatory => 'false', :dataset_title => 'dataTitle', :status => 'beta', :description => '<p>This self-assessment questionnaire generates an open data certificate and badge you can publish to tell people all about this open data. We also use your answers to learn how organisations publish open data.</p><p>When you answer these questions it demonstrates your efforts to comply with relevant UK legislation. You should also check which other laws and policies apply to your sector, especially if you’re outside the UK (which these questions don’t cover).</p><p><strong>You do not need to answer all the questions to get a certificate.</strong> Just answer those you can.</p>' do translations :en => :default section_general 'General Information', :description => '', :display_header => false do q_dataTitle 'What\'s this data called?', :help_text => 'People see the name of your open data in a list of similar ones so make this as unambiguous and descriptive as you can in this tiny box so they quickly identify what\'s unique about it.', :required => :required a_1 'Data Title', :string, :placeholder => 'Data Title', :required => :required q_documentationUrl 'Where is it described?', :display_on_certificate => true, :text_as_statement => 'This data is described at', :help_text => 'Give a URL for people to read about the contents of your open data and find more detail. It can be a page within a bigger catalog like data.gov.uk.' 
a_1 'Documentation URL', :string, :input_type => :url, :placeholder => 'Documentation URL', :requirement => ['pilot_1', 'basic_1'] label_pilot_1 'You should have a <strong>web page that offers documentation</strong> about the open data you publish so that people can understand its context, content and utility.', :custom_renderer => '/partials/requirement_pilot', :requirement => 'pilot_1' dependency :rule => 'A and B' condition_A :q_releaseType, '!=', :a_collection condition_B :q_documentationUrl, '==', {:string_value => '', :answer_reference => '1'} label_basic_1 'You must have a <strong>web page that gives documentation</strong> and access to the open data you publish so that people can use it.', :custom_renderer => '/partials/requirement_basic', :requirement => 'basic_1' dependency :rule => 'A and B' condition_A :q_releaseType, '==', :a_collection condition_B :q_documentationUrl, '==', {:string_value => '', :answer_reference => '1'} q_publisher 'Who publishes this data?', :display_on_certificate => true, :text_as_statement => 'This data is published by', :help_text => 'Give the name of the organisation who publishes this data. It’s probably who you work for unless you’re doing this on behalf of someone else.', :required => :required a_1 'Data Publisher', :string, :placeholder => 'Data Publisher', :required => :required q_publisherUrl 'What website is the data published on?', :display_on_certificate => true, :text_as_statement => 'The data is published on', :help_text => 'Give a URL to a website, this helps us to group data from the same organisation even if people give different names.' a_1 'Publisher URL', :string, :input_type => :url, :placeholder => 'Publisher URL' q_releaseType 'What kind of release is this?', :pick => :one, :required => :required a_oneoff 'a one-off release of a single dataset', :help_text => 'This is a single file and you don’t currently plan to publish similar files in the future.' 
a_collection 'a one-off release of a set of related datasets', :help_text => 'This is a collection of related files about the same data and you don’t currently plan to publish similar collections in the future.' a_series 'ongoing release of a series of related datasets', :help_text => 'This is a sequence of datasets with planned periodic updates in the future.' a_service 'a service or API for accessing open data', :help_text => 'This is a live web service that exposes your data to programmers through an interface they can query.' end end
# Dataset record: original podspec, commit message, corrected podspec.
# Line structure restored; code preserved verbatim.

Pod::Spec.new do |s|
  s.name                  = "Detail"
  s.version               = "0.1.0"
  s.summary               = "A short description of Detail."
  s.homepage              = "http://EXAMPLE/Detail"
  s.license               = "MIT"
  s.author                = { "Eli Tsai" => "baicai@in66.com" }
  s.ios.deployment_target = "6.0"
  s.source                = { :git => "https://github.com/EddieTsai/Detail.git", :tag => "0.1.0" }
  # NOTE(review): `spec` is undefined here — the block parameter is `s`.
  # This NameError is the defect the record documents; kept as-is.
  spec.vendored_frameworks = 'DetailFramework.framework'
  #spec.resource = "Resources/HockeySDK.bundle"
  spec.frameworks = 'UIKit'
end

# --- commit: "update" ---

Pod::Spec.new do |s|
  s.name                  = "Detail"
  s.version               = "0.1.0"
  s.summary               = "A short description of Detail."
  s.homepage              = "http://EXAMPLE/Detail"
  s.license               = "MIT"
  s.author                = { "Eli Tsai" => "baicai@in66.com" }
  s.ios.deployment_target = "6.0"
  s.source                = { :git => "https://github.com/EddieTsai/Detail.git", :tag => "0.1.0" }
  s.vendored_frameworks = 'DetailFramework.framework'
  #s.resource = "Resources/HockeySDK.bundle"
  s.frameworks = 'UIKit'
end
# Dataset record: original test (reverse-geocode test built the Mapzen
# lookup), commit message, then the fixed test (uses the Pelias lookup the
# file is actually about). Line structure restored; code preserved verbatim.

# encoding: utf-8
require 'test_helper'

class PeliasTest < GeocoderTestCase
  def setup
    Geocoder.configure(lookup: :pelias, api_key: 'abc123', pelias: {}) # Empty pelias hash only for test (pollution control)
  end

  def test_configure_default_endpoint
    query = Geocoder::Query.new('Madison Square Garden, New York, NY')
    assert_true query.url.start_with?('http://localhost/v1/search'), query.url
  end

  def test_configure_custom_endpoint
    Geocoder.configure(lookup: :pelias, api_key: 'abc123', pelias: {endpoint: 'self.hosted.pelias/proxy'})
    query = Geocoder::Query.new('Madison Square Garden, New York, NY')
    assert_true query.url.start_with?('http://self.hosted.pelias/proxy/v1/search'), query.url
  end

  def test_query_url_defaults_to_one
    query = Geocoder::Query.new('Madison Square Garden, New York, NY')
    assert_match 'size=1', query.url
  end

  def test_query_for_reverse_geocode
    lookup = Geocoder::Lookup::Mapzen.new
    url = lookup.query_url(Geocoder::Query.new([45.423733, -75.676333]))
    assert_match(/point.lat=45.423733&point.lon=-75.676333&size=1/, url)
  end
end

# --- commit: "Use lookup indicated by test file." ---

# encoding: utf-8
require 'test_helper'

class PeliasTest < GeocoderTestCase
  def setup
    Geocoder.configure(lookup: :pelias, api_key: 'abc123', pelias: {}) # Empty pelias hash only for test (pollution control)
  end

  def test_configure_default_endpoint
    query = Geocoder::Query.new('Madison Square Garden, New York, NY')
    assert_true query.url.start_with?('http://localhost/v1/search'), query.url
  end

  def test_configure_custom_endpoint
    Geocoder.configure(lookup: :pelias, api_key: 'abc123', pelias: {endpoint: 'self.hosted.pelias/proxy'})
    query = Geocoder::Query.new('Madison Square Garden, New York, NY')
    assert_true query.url.start_with?('http://self.hosted.pelias/proxy/v1/search'), query.url
  end

  def test_query_url_defaults_to_one
    query = Geocoder::Query.new('Madison Square Garden, New York, NY')
    assert_match 'size=1', query.url
  end

  def test_query_for_reverse_geocode
    lookup = Geocoder::Lookup::Pelias.new
    url = lookup.query_url(Geocoder::Query.new([45.423733, -75.676333]))
    assert_match(/point.lat=45.423733&point.lon=-75.676333&size=1/, url)
  end
end
describe 'sys::autofs' do cached(:chef_run) { ChefSpec::SoloRunner.new.converge(described_recipe) } context 'node.sys.autofs.maps and node.sys.autofs.ldap is empty' do it 'does nothing' do expect(chef_run.run_context.resource_collection).to be_empty end end context 'with basic attributes' do cached(:chef_run) do ChefSpec::SoloRunner.new do |node| node.default['sys']['autofs']['maps'] = { "/mount/point" => { "path" => "config"} } end.converge(described_recipe) end before do allow(File).to receive(:exists?).and_call_original allow(File).to receive(:exists?).with('/mount/point').and_return(false) end it 'installs autofs' do expect(chef_run).to install_package('autofs') end it 'manages /etc/auto.master' do expect(chef_run).to create_template('/etc/auto.master').with_mode("0644").with( :variables => { :maps => { "/mount/point" => { "path" => "config" }} } ) end it 'creates necessary mount-points' do expect(chef_run).to create_directory('/mount/point') end it 'starts the autofs service' do expect(chef_run).to start_service('autofs') end end context 'with ldap attributes' do before do allow(File).to receive(:exist?).and_call_original allow(File).to receive(:exist?).with('/usr/bin/kinit').and_return(true) end cached(:chef_run) do ChefSpec::SoloRunner.new do |node| node.automatic['fqdn'] = 'node.example.com' node.automatic['sys']['autofs']['ldap']['servers'] = [ 'ldap01.example.com', 'ldap02.example.com' ] node.default['sys']['autofs']['maps'] = { "/mount/point" => { "map" => "ldap:ou=autofs.mount,dc=example,dc=com"} } node.default['sys']['autofs']['ldap'] = {:omg => :lol} node.default['sys']['krb5']['realm'] = 'EXAMPLE.COM' node.default['sys']['autofs']['ldap']['searchbase'] = 'dc=example,dc=com' node.automatic['fqdn'] = 'node.example.com' end.converge(described_recipe) end it 'installs autofs-ldap' do expect(chef_run).to install_package('autofs-ldap') end it 'manages /etc/auto.master' do expect(chef_run).to create_template('/etc/auto.master').with_mode("0644") 
expect(chef_run).to render_file('/etc/auto.master') .with_content('') end it 'manages /etc/auto.master.d' do expect(chef_run).to create_directory('/etc/auto.master.d') expect(chef_run).to create_template('/etc/auto.master.d/mount_point.autofs').with_mode("0644").with( :variables => { :path => "/mount/point", :map => { 'map' => "ldap:ou=autofs.mount,dc=example,dc=com" } }) expect(chef_run).to render_file('/etc/auto.master.d/mount_point.autofs').with_content( "/mount/point ldap:ou=autofs.mount,dc=example,dc=com" ) end it 'manages /etc/autofs_ldap_auth.conf' do # actually this template is rather static and should be a cookbook_file expect(chef_run).to create_template('/etc/autofs_ldap_auth.conf') .with_mode("0600") expect(chef_run).to render_file('/etc/autofs_ldap_auth.conf') .with_content('credentialcache="/tmp/krb5cc_autofs"') end it 'manages /etc/default/autofs' do expect(chef_run).to create_template('/etc/default/autofs').with_mode("0644").with( :variables => { :uris => [ 'ldap01.example.com', 'ldap02.example.com' ], :searchbase => 'dc=example,dc=com', :browsemode => 'no', :logging => nil } ) expect(chef_run).to render_file('/etc/default/autofs').with_content( "MASTER_MAP_NAME=/etc/auto.master" ) expect(chef_run).to render_file('/etc/default/autofs').with_content( 'LDAP_URI="ldap://ldap01.example.com/ ldap://ldap02.example.com/' ) end it 'manages /etc/init.d/autofs' do expect(chef_run).to create_cookbook_file('/etc/init.d/autofs').with_mode("0755") end it 'starts the autofs-service' do expect(chef_run).to start_service('autofs') end it 'does reload autofs-service on config-change' do resource = chef_run.template('/etc/auto.master') expect(resource).to notify('service[autofs]').to(:reload).delayed end it 'does restart autofs-service on config-change' do a = chef_run.template('/etc/autofs_ldap_auth.conf') expect(a).to notify('service[autofs]').to(:restart).delayed b = chef_run.template('/etc/default/autofs') expect(b).to 
notify('service[autofs]').to(:restart).delayed c = chef_run.cookbook_file('/etc/init.d/autofs') expect(c).to notify('service[autofs]').to(:restart).delayed end end end Init script deployment only before Stretch describe 'sys::autofs' do cached(:chef_run) { ChefSpec::SoloRunner.new.converge(described_recipe) } context 'node.sys.autofs.maps and node.sys.autofs.ldap is empty' do it 'does nothing' do expect(chef_run.run_context.resource_collection).to be_empty end end context 'with basic attributes' do cached(:chef_run) do ChefSpec::SoloRunner.new do |node| node.default['sys']['autofs']['maps'] = { "/mount/point" => { "path" => "config"} } end.converge(described_recipe) end before do allow(File).to receive(:exists?).and_call_original allow(File).to receive(:exists?).with('/mount/point').and_return(false) end it 'installs autofs' do expect(chef_run).to install_package('autofs') end it 'manages /etc/auto.master' do expect(chef_run).to create_template('/etc/auto.master').with_mode("0644").with( :variables => { :maps => { "/mount/point" => { "path" => "config" }} } ) end it 'creates necessary mount-points' do expect(chef_run).to create_directory('/mount/point') end it 'starts the autofs service' do expect(chef_run).to start_service('autofs') end end context 'with ldap attributes' do before do allow(File).to receive(:exist?).and_call_original allow(File).to receive(:exist?).with('/usr/bin/kinit').and_return(true) end cached(:chef_run) do ChefSpec::SoloRunner.new do |node| node.automatic['fqdn'] = 'node.example.com' node.automatic['sys']['autofs']['ldap']['servers'] = [ 'ldap01.example.com', 'ldap02.example.com' ] node.default['sys']['autofs']['maps'] = { "/mount/point" => { "map" => "ldap:ou=autofs.mount,dc=example,dc=com"} } node.default['sys']['autofs']['ldap'] = {:omg => :lol} node.default['sys']['krb5']['realm'] = 'EXAMPLE.COM' node.default['sys']['autofs']['ldap']['searchbase'] = 'dc=example,dc=com' node.automatic['fqdn'] = 'node.example.com' 
end.converge(described_recipe) end it 'installs autofs-ldap' do expect(chef_run).to install_package('autofs-ldap') end it 'manages /etc/auto.master' do expect(chef_run).to create_template('/etc/auto.master').with_mode("0644") expect(chef_run).to render_file('/etc/auto.master') .with_content('') end it 'manages /etc/auto.master.d' do expect(chef_run).to create_directory('/etc/auto.master.d') expect(chef_run).to create_template('/etc/auto.master.d/mount_point.autofs').with_mode("0644").with( :variables => { :path => "/mount/point", :map => { 'map' => "ldap:ou=autofs.mount,dc=example,dc=com" } }) expect(chef_run).to render_file('/etc/auto.master.d/mount_point.autofs').with_content( "/mount/point ldap:ou=autofs.mount,dc=example,dc=com" ) end it 'manages /etc/autofs_ldap_auth.conf' do # actually this template is rather static and should be a cookbook_file expect(chef_run).to create_template('/etc/autofs_ldap_auth.conf') .with_mode("0600") expect(chef_run).to render_file('/etc/autofs_ldap_auth.conf') .with_content('credentialcache="/tmp/krb5cc_autofs"') end it 'manages /etc/default/autofs' do expect(chef_run).to create_template('/etc/default/autofs').with_mode("0644").with( :variables => { :uris => [ 'ldap01.example.com', 'ldap02.example.com' ], :searchbase => 'dc=example,dc=com', :browsemode => 'no', :logging => nil } ) expect(chef_run).to render_file('/etc/default/autofs').with_content( "MASTER_MAP_NAME=/etc/auto.master" ) expect(chef_run).to render_file('/etc/default/autofs').with_content( 'LDAP_URI="ldap://ldap01.example.com/ ldap://ldap02.example.com/' ) end it 'starts the autofs-service' do expect(chef_run).to start_service('autofs') end it 'does reload autofs-service on config-change' do resource = chef_run.template('/etc/auto.master') expect(resource).to notify('service[autofs]').to(:reload).delayed end it 'does restart autofs-service on config-change' do a = chef_run.template('/etc/autofs_ldap_auth.conf') expect(a).to 
notify('service[autofs]').to(:restart).delayed b = chef_run.template('/etc/default/autofs') expect(b).to notify('service[autofs]').to(:restart).delayed end # Work in progress context "on Jessie" do xit 'manages /etc/init.d/autofs' do # only valid for Jessie, systemd utilized on Stretch and beyond expect(chef_run).to create_cookbook_file('/etc/init.d/autofs') .with_mode("0755") end xit 'does restart autofs-service on config-change' do c = chef_run.cookbook_file('/etc/init.d/autofs') expect(c).to notify('service[autofs]').to(:restart).delayed end end end end
Add unit test for LitleXmlMapper request. =begin Copyright (c) 2012 Litle & Co. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. =end require File.expand_path("../../../lib/LitleOnline",__FILE__) require 'test/unit' require 'mocha/setup' module LitleOnline class TestLitleXmlMapper < Test::Unit::TestCase def test_LitleXmlMapper_request_xml_response_0 hash = { 'merchantId' => '101', 'id' => 'test', 'version'=>'8.8', 'reportGroup'=>'Planets', 'litleTxnId'=>'123456', } Communications.expects(:http_post).returns("<litleOnlineResponse version=\"1.0\" xmlns=\"http://www.litle.com/schema/online\" response=\"0\" message=\"Invalid credentials. 
Contact support@litle.com.\"></litleOnlineResponse>") response = LitleXmlMapper.request("","") assert_equal('0',response.response) end def test_LitleXmlMapper_request_xml_response_1 hash = { 'merchantId' => '101', 'id' => 'test', 'version'=>'8.8', 'reportGroup'=>'Planets', 'litleTxnId'=>'123456', } Communications.expects(:http_post).returns("<litleOnlineResponse version=\"1.0\" xmlns=\"http://www.litle.com/schema/online\" response=\"1\" message=\"Invalid credentials. Contact support@litle.com.\"></litleOnlineResponse>") #Get exceptions exception = assert_raise(RuntimeError){ LitleXmlMapper.request("","") } #Test assert(exception.message =~ /Error with http response, code: 1/) end def test_LitleXmlMapper_request_xml_response_2 hash = { 'merchantId' => '101', 'id' => 'test', 'version'=>'8.8', 'reportGroup'=>'Planets', 'litleTxnId'=>'123456', } Communications.expects(:http_post).returns("<litleOnlineResponse version=\"1.0\" xmlns=\"http://www.litle.com/schema/online\" response=\"2\" message=\"Invalid credentials. Contact support@litle.com.\"></litleOnlineResponse>") #Get exceptions exception = assert_raise(RuntimeError){ LitleXmlMapper.request("","") } #Test assert(exception.message =~ /Error with http response, code: 2/) end def test_LitleXmlMapper_request_xml_response_3 hash = { 'merchantId' => '101', 'id' => 'test', 'version'=>'8.8', 'reportGroup'=>'Planets', 'litleTxnId'=>'123456', } Communications.expects(:http_post).returns("<litleOnlineResponse version=\"1.0\" xmlns=\"http://www.litle.com/schema/online\" response=\"3\" message=\"Invalid credentials. 
Contact support@litle.com.\"></litleOnlineResponse>") #Get exceptions exception = assert_raise(RuntimeError){ LitleXmlMapper.request("","") } #Test assert(exception.message =~ /Error with http response, code: 3/) end def test_LitleXmlMapper_request_xml_response_4 hash = { 'merchantId' => '101', 'id' => 'test', 'version'=>'8.8', 'reportGroup'=>'Planets', 'litleTxnId'=>'123456', } Communications.expects(:http_post).returns("<litleOnlineResponse version=\"1.0\" xmlns=\"http://www.litle.com/schema/online\" response=\"4\" message=\"Invalid credentials. Contact support@litle.com.\"></litleOnlineResponse>") #Get exceptions exception = assert_raise(RuntimeError){ LitleXmlMapper.request("","") } #Test assert(exception.message =~ /Error with http response, code: 4/) end def test_LitleXmlMapper_request_xml_response_5 hash = { 'merchantId' => '101', 'id' => 'test', 'version'=>'8.8', 'reportGroup'=>'Planets', 'litleTxnId'=>'123456', } Communications.expects(:http_post).returns("<litleOnlineResponse version=\"1.0\" xmlns=\"http://www.litle.com/schema/online\" response=\"5\" message=\"Invalid credentials. Contact support@litle.com.\"></litleOnlineResponse>") #Get exceptions exception = assert_raise(RuntimeError){ LitleXmlMapper.request("","") } #Test assert(exception.message =~ /Error with http response, code: 5/) end end end
# Dataset record: original transformation tests, commit message, then the
# fixed file adding a sad-path test for nest_hash. Line structure restored;
# code preserved verbatim. NOTE(review): `assert_raises (Klass)` in the
# after-copy has a space before the argument parens (Ruby warns under -w);
# kept as recorded.

require 'test_helper'
require 'i18n_yaml_editor/transformation'

class TestTransformation < Minitest::Test
  I18N_HASH = { 'da.session.login' => 'Log ind', 'da.session.logout' => 'Log ud', 'en.session.login' => 'Log in', 'en.session.logout' => 'Log out' }.freeze

  def test_flatten_hash
    input = { da: { session: { login: 'Log ind', logout: 'Log ud' } }, en: { session: { login: 'Log in', logout: 'Log out' } } }
    assert_equal I18N_HASH, Transformation.flatten_hash(input)
  end

  def test_nest_hash
    expected = { da: { session: { login: 'Log ind', logout: 'Log ud' } }, en: { session: { login: 'Log in', logout: 'Log out' } } }.with_indifferent_access
    assert_equal expected, Transformation.nest_hash(I18N_HASH)
  end
end

# --- commit: "Add sad path test I18nYamlEditor::Transformation#nest_hash" ---

require 'test_helper'
require 'i18n_yaml_editor/transformation'

class TestTransformation < Minitest::Test
  I18N_HASH = { 'da.session.login' => 'Log ind', 'da.session.logout' => 'Log ud', 'en.session.login' => 'Log in', 'en.session.logout' => 'Log out' }.freeze

  def test_flatten_hash
    input = { da: { session: { login: 'Log ind', logout: 'Log ud' } }, en: { session: { login: 'Log in', logout: 'Log out' } } }
    assert_equal I18N_HASH, Transformation.flatten_hash(input)
  end

  def test_nest_hash
    expected = { da: { session: { login: 'Log ind', logout: 'Log ud' } }, en: { session: { login: 'Log in', logout: 'Log out' } } }.with_indifferent_access
    assert_equal expected, Transformation.nest_hash(I18N_HASH)
  end

  def test_nest_hash_transformation_error
    assert_raises (I18nYamlEditor::TransformationError) do
      I18nYamlEditor::Transformation.nest_hash({:error => 'value'})
    end
  end
end
Pod::Spec.new do |s| s.platform = :ios s.ios.deployment_target = '8.0' s.name = "SwipeNavigationController" s.summary = "Snapchat like 4-way swipe navigation in Swift for iOS." s.requires_arc = true s.version = "1.0" s.license = { :type => "MIT", :file => "LICENSE" } s.author = { "Casey Law" => "casey@lomotif.com" } s.homepage = "http://www.lomotif.com" s.source = { :git => "https://github.com/Lomotif/swipe-navigation.git", :tag => "#{s.version}"} s.framework = 'UIKit' s.source_files = "SwipeNavigationController/SwipeNavigationController/*.{h,swift}" s.module_name = 'SwipeNavigationController' end updated pod spec version Pod::Spec.new do |s| s.platform = :ios s.ios.deployment_target = '8.0' s.name = "SwipeNavigationController" s.summary = "Snapchat like 4-way swipe navigation in Swift for iOS." s.requires_arc = true s.version = "1.0.1" s.license = { :type => "MIT", :file => "LICENSE" } s.author = { "Casey Law" => "casey@lomotif.com" } s.homepage = "http://www.lomotif.com" s.source = { :git => "https://github.com/Lomotif/swipe-navigation.git", :tag => "#{s.version}"} s.framework = 'UIKit' s.source_files = "SwipeNavigationController/SwipeNavigationController/*.{h,swift}" s.module_name = 'SwipeNavigationController' end
require 'rubygems' require 'sinatra' require 'json' $: << File.join(File.dirname(__FILE__), 'lib') require 'seinfeld/models' require 'seinfeld/calendar_helper' $0 = __FILE__ error do e = request.env['sinatra.error'] puts "#{e.class}: #{e.message}\n#{e.backtrace.join("\n ")}" end configure do require File.dirname(__FILE__) + '/config/seinfeld.rb' end before do Time.zone = "UTC" end get '/' do @recent_users = Seinfeld::User.best_current_streak @alltime_users = Seinfeld::User.best_alltime_streak haml :index end get '/~:name' do show_user_calendar end get '/~:name.json' do show_user_json end get '/~:name/:year' do show_user_calendar end get '/~:name/:year.json' do show_user_json end get '/~:name/:year/:month' do show_user_calendar end get '/~:name/:year/:month.json' do show_user_json end get '/group/:names' do show_group_calendar end post '/github' do if params[:token] == Seinfeld::User.creation_token Seinfeld::User.process_new_github_user(params[:subject]) else redirect "/" end end helpers do include Seinfeld::CalendarHelper def page_title "%s's Calendar" % @user.login end def get_user_and_progressions(extra = 0) [:year, :month].each do |key| value = params[key].to_i params[key] = value.zero? ? 
Date.today.send(key) : value end if @user = Seinfeld::User.first(:login => params[:name]) Time.zone = @user.time_zone || "UTC" progressions = Set.new @user.progress_for(params[:year], params[:month], extra) end return progressions || Set.new end def show_user_calendar @progressions = get_user_and_progressions(6) if @user haml :show else redirect "/" end end def show_group_calendar @progressions = Set.new @users = params[:names].split(',') @users.each do |name| params[:name] = name # hack @progressions.merge get_user_and_progressions(6) end haml :group end def show_user_json @progressions = get_user_and_progressions json = {:days => @progressions.map { |p| p.to_s }.sort!, :longest_streak => @user.longest_streak, :current_streak => @user.current_streak}.to_json if params[:callback] "#{params[:callback]}(#{json})" else json end end def link_to_user(user, streak_count = :current_streak) %(<a href="/~#{user.login}">#{user.login} (#{user.send(streak_count)})</a>) end def seinfeld now = Date.new(params[:year], params[:month]) prev_month = now << 1 next_month = now >> 1 calendar :year => now.year, :month => now.month, :previous_month_text => %(<a href="/~#{@user.login}/#{prev_month.year}/#{prev_month.month}">Previous Month</a>), :next_month_text => %(<a href="/~#{@user.login}/#{next_month.year}/#{next_month.month}" class="next">Next Month</a>) do |d| if @progressions.include? d [d.mday, {:class => "progressed"}] else [d.mday, {:class => "slacked"}] end end end def group_seinfeld now = Date.new(params[:year], params[:month]) prev_month = now << 1 next_month = now >> 1 calendar :year => now.year, :month => now.month do |d| if @progressions.include? 
d [d.mday, {:class => "progressed"}] else [d.mday, {:class => "slacked"}] end end end end return statements FTL require 'rubygems' require 'sinatra' require 'json' $: << File.join(File.dirname(__FILE__), 'lib') require 'seinfeld/models' require 'seinfeld/calendar_helper' $0 = __FILE__ error do e = request.env['sinatra.error'] puts "#{e.class}: #{e.message}\n#{e.backtrace.join("\n ")}" end configure do require File.dirname(__FILE__) + '/config/seinfeld.rb' end before do Time.zone = "UTC" end get '/' do @recent_users = Seinfeld::User.best_current_streak @alltime_users = Seinfeld::User.best_alltime_streak haml :index end get '/~:name' do show_user_calendar end get '/~:name.json' do show_user_json end get '/~:name/:year' do show_user_calendar end get '/~:name/:year.json' do show_user_json end get '/~:name/:year/:month' do show_user_calendar end get '/~:name/:year/:month.json' do show_user_json end get '/group/:names' do show_group_calendar end post '/github' do if params[:token] == Seinfeld::User.creation_token Seinfeld::User.process_new_github_user(params[:subject]) else redirect "/" end end helpers do include Seinfeld::CalendarHelper def page_title "%s's Calendar" % @user.login end def get_user_and_progressions(extra = 0) [:year, :month].each do |key| value = params[key].to_i params[key] = value.zero? ? 
Date.today.send(key) : value end if @user = Seinfeld::User.first(:login => params[:name]) Time.zone = @user.time_zone || "UTC" progressions = @user.progress_for(params[:year], params[:month], extra) end Set.new(progressions || []) end def show_user_calendar @progressions = get_user_and_progressions(6) if @user haml :show else redirect "/" end end def show_group_calendar @progressions = Set.new @users = params[:names].split(',') @users.each do |name| params[:name] = name # hack @progressions.merge get_user_and_progressions(6) end haml :group end def show_user_json @progressions = get_user_and_progressions json = {:days => @progressions.map { |p| p.to_s }.sort!, :longest_streak => @user.longest_streak, :current_streak => @user.current_streak}.to_json if params[:callback] "#{params[:callback]}(#{json})" else json end end def link_to_user(user, streak_count = :current_streak) %(<a href="/~#{user.login}">#{user.login} (#{user.send(streak_count)})</a>) end def seinfeld now = Date.new(params[:year], params[:month]) prev_month = now << 1 next_month = now >> 1 calendar :year => now.year, :month => now.month, :previous_month_text => %(<a href="/~#{@user.login}/#{prev_month.year}/#{prev_month.month}">Previous Month</a>), :next_month_text => %(<a href="/~#{@user.login}/#{next_month.year}/#{next_month.month}" class="next">Next Month</a>) do |d| if @progressions.include? d [d.mday, {:class => "progressed"}] else [d.mday, {:class => "slacked"}] end end end def group_seinfeld now = Date.new(params[:year], params[:month]) prev_month = now << 1 next_month = now >> 1 calendar :year => now.year, :month => now.month do |d| if @progressions.include? d [d.mday, {:class => "progressed"}] else [d.mday, {:class => "slacked"}] end end end end
# Generated by jeweler # DO NOT EDIT THIS FILE DIRECTLY # Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec' # -*- encoding: utf-8 -*- Gem::Specification.new do |s| s.name = %q{self-control} s.version = "0.1.1" s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.authors = ["hexorx"] s.date = %q{2011-06-20} s.description = %q{State machines are awesome but sometimes you need a little more. Like who should do what in order for it to move on? How many steps are left? How can I restfully trigger the next event? Self Control adds route helpers, controller actions and a dsl to turn your existing state machines into full workflows. It is designed to use rails 3 with ActiveModel and should work with any state machine with just a few extra methods.} s.email = %q{hexorx@gmail.com} s.extra_rdoc_files = [ "LICENSE", "README.rdoc" ] s.files = [ ".document", "LICENSE", "README.rdoc", "Rakefile", "VERSION", "lib/self-control.rb", "lib/self-control/controller_additions.rb", "lib/self-control/exceptions.rb", "lib/self-control/flow.rb", "lib/self-control/flow_builder.rb", "lib/self-control/railtie.rb", "lib/self-control/route_helpers.rb", "lib/self-control/step.rb", "lib/self-control/step_adapter.rb", "lib/self-control/step_builder.rb", "self-control.gemspec", "spec/self-control_spec.rb", "spec/spec_helper.rb" ] s.homepage = %q{http://github.com/hexorx/self-control} s.require_paths = ["lib"] s.rubygems_version = %q{1.6.2} s.summary = %q{Self-Control builds on you existing state machine to create full work flows.} if s.respond_to? 
:specification_version then s.specification_version = 3 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_development_dependency(%q<rspec>, [">= 0"]) s.add_development_dependency(%q<yard>, [">= 0"]) else s.add_dependency(%q<rspec>, [">= 0"]) s.add_dependency(%q<yard>, [">= 0"]) end else s.add_dependency(%q<rspec>, [">= 0"]) s.add_dependency(%q<yard>, [">= 0"]) end end Regenerate gemspec for version 0.2.0 # Generated by jeweler # DO NOT EDIT THIS FILE DIRECTLY # Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec' # -*- encoding: utf-8 -*- Gem::Specification.new do |s| s.name = %q{self-control} s.version = "0.2.0" s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.authors = ["hexorx"] s.date = %q{2011-06-22} s.description = %q{State machines are awesome but sometimes you need a little more. Like who should do what in order for it to move on? How many steps are left? How can I restfully trigger the next event? Self Control adds route helpers, controller actions and a dsl to turn your existing state machines into full workflows. 
It is designed to use rails 3 with ActiveModel and should work with any state machine with just a few extra methods.} s.email = %q{hexorx@gmail.com} s.extra_rdoc_files = [ "LICENSE", "README.rdoc" ] s.files = [ ".document", "LICENSE", "README.rdoc", "Rakefile", "VERSION", "lib/self-control.rb", "lib/self-control/action.rb", "lib/self-control/builder.rb", "lib/self-control/controller_extensions.rb", "lib/self-control/exceptions.rb", "lib/self-control/flow.rb", "lib/self-control/flow_builder.rb", "lib/self-control/helper.rb", "lib/self-control/railtie.rb", "lib/self-control/route_helpers.rb", "lib/self-control/step.rb", "lib/self-control/step_adapter.rb", "lib/self-control/step_builder.rb", "self-control.gemspec", "spec/self-control_spec.rb", "spec/spec_helper.rb" ] s.homepage = %q{http://github.com/hexorx/self-control} s.require_paths = ["lib"] s.rubygems_version = %q{1.6.2} s.summary = %q{Self-Control builds on you existing state machine to create full work flows.} if s.respond_to? :specification_version then s.specification_version = 3 if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then s.add_development_dependency(%q<rspec>, [">= 0"]) s.add_development_dependency(%q<yard>, [">= 0"]) else s.add_dependency(%q<rspec>, [">= 0"]) s.add_dependency(%q<yard>, [">= 0"]) end else s.add_dependency(%q<rspec>, [">= 0"]) s.add_dependency(%q<yard>, [">= 0"]) end end
class TestMeter < Test::Unit::TestCase def setup Yao.default_client.pool["metering"] = Yao::Client.gen_client("https://example.com:12345") # notice: admin_pool を指定するあたりでハマったので注意 Yao.default_client.admin_pool["identity"] = Yao::Client.gen_client("https://example.com:12345") end def test_meter # https://docs.openstack.org/ceilometer/pike/webapi/v2.html params = { "meter_id" => "YmQ5NDMxYzEtOGQ2OS00YWQzLTgwM2EtOGQ0YTZiODlmZDM2K2luc3RhbmNl", "name" => "instance", "project_id" => "35b17138-b364-4e6a-a131-8f3099c5be68", "resource_id" => "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36", "source" => "openstack", "type" => "gauge", "unit" => "instance", "user_id" => "efd87807-12d2-4b38-9c70-5f5c2ac427ff" } meter = Yao::Meter.new(params) # friendly_attributes assert_equal(meter.meter_id, "YmQ5NDMxYzEtOGQ2OS00YWQzLTgwM2EtOGQ0YTZiODlmZDM2K2luc3RhbmNl") assert_equal(meter.meter_id, meter.id) assert_equal(meter.name, "instance") assert_equal(meter.project_id, "35b17138-b364-4e6a-a131-8f3099c5be68") assert_equal(meter.resource_id, "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36") assert_equal(meter.source, "openstack") assert_equal(meter.type, "gauge") assert_equal(meter.unit, "instance") assert_equal(meter.user_id, "efd87807-12d2-4b38-9c70-5f5c2ac427ff") end def test_resource # https://docs.openstack.org/ceilometer/pike/webapi/v2.html stub_request(:get, "https://example.com:12345/v2/resources/00000000-0000-0000-0000-000000000000") .to_return( status: 200, body: <<-JSON, { "resource_id": "00000000-0000-0000-0000-000000000000" } JSON headers: {'Content-Type' => 'application/json'} ) params = { "resource_id" => "00000000-0000-0000-0000-000000000000", } meter = Yao::Meter.new(params) resource = meter.resource assert { resource.instance_of?(Yao::Resource) } assert_equal(resource.resource_id, "00000000-0000-0000-0000-000000000000") end def test_tenant stub_request(:get, "https://example.com:12345/tenants/00000000-0000-0000-0000-000000000000") .to_return( status: 200, body: <<-JSON, { "tenant": { "id": 
"00000000-0000-0000-0000-000000000000" } } JSON headers: {'Content-Type' => 'application/json'} ) params = { "project_id" => "00000000-0000-0000-0000-000000000000", } meter = Yao::Meter.new(params) assert { meter.tenant.instance_of?(Yao::Tenant) } assert_equal(meter.tenant.id, "00000000-0000-0000-0000-000000000000") end end add test Yao::Meter#user class TestMeter < Test::Unit::TestCase def setup Yao.default_client.pool["metering"] = Yao::Client.gen_client("https://example.com:12345") # notice: admin_pool を指定するあたりでハマったので注意 Yao.default_client.admin_pool["identity"] = Yao::Client.gen_client("https://example.com:12345") end def test_meter # https://docs.openstack.org/ceilometer/pike/webapi/v2.html params = { "meter_id" => "YmQ5NDMxYzEtOGQ2OS00YWQzLTgwM2EtOGQ0YTZiODlmZDM2K2luc3RhbmNl", "name" => "instance", "project_id" => "35b17138-b364-4e6a-a131-8f3099c5be68", "resource_id" => "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36", "source" => "openstack", "type" => "gauge", "unit" => "instance", "user_id" => "efd87807-12d2-4b38-9c70-5f5c2ac427ff" } meter = Yao::Meter.new(params) # friendly_attributes assert_equal(meter.meter_id, "YmQ5NDMxYzEtOGQ2OS00YWQzLTgwM2EtOGQ0YTZiODlmZDM2K2luc3RhbmNl") assert_equal(meter.meter_id, meter.id) assert_equal(meter.name, "instance") assert_equal(meter.project_id, "35b17138-b364-4e6a-a131-8f3099c5be68") assert_equal(meter.resource_id, "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36") assert_equal(meter.source, "openstack") assert_equal(meter.type, "gauge") assert_equal(meter.unit, "instance") assert_equal(meter.user_id, "efd87807-12d2-4b38-9c70-5f5c2ac427ff") end def test_resource # https://docs.openstack.org/ceilometer/pike/webapi/v2.html stub_request(:get, "https://example.com:12345/v2/resources/00000000-0000-0000-0000-000000000000") .to_return( status: 200, body: <<-JSON, { "resource_id": "00000000-0000-0000-0000-000000000000" } JSON headers: {'Content-Type' => 'application/json'} ) params = { "resource_id" => "00000000-0000-0000-0000-000000000000", } 
meter = Yao::Meter.new(params) resource = meter.resource assert { resource.instance_of?(Yao::Resource) } assert_equal(resource.resource_id, "00000000-0000-0000-0000-000000000000") end def test_tenant stub_request(:get, "https://example.com:12345/tenants/00000000-0000-0000-0000-000000000000") .to_return( status: 200, body: <<-JSON, { "tenant": { "id": "00000000-0000-0000-0000-000000000000" } } JSON headers: {'Content-Type' => 'application/json'} ) params = { "project_id" => "00000000-0000-0000-0000-000000000000", } meter = Yao::Meter.new(params) assert { meter.tenant.instance_of?(Yao::Tenant) } assert_equal(meter.tenant.id, "00000000-0000-0000-0000-000000000000") end def test_user stub_request(:get, "https://example.com:12345/users/00000000-0000-0000-0000-000000000000") .to_return( status: 200, body: <<-JSON, { "user": { "id": "00000000-0000-0000-0000-000000000000" } } JSON headers: {'Content-Type' => 'application/json'} ) params = { "user_id" => "00000000-0000-0000-0000-000000000000", } meter = Yao::Meter.new(params) assert { meter.user.instance_of?(Yao::User) } assert_equal(meter.user.id, "00000000-0000-0000-0000-000000000000") end end
require File.expand_path('../gir_ffi_test_helper.rb', File.dirname(__FILE__)) require 'tempfile' GirFFI.setup :Regress describe "Pretty-printing" do def assert_syntax_ok str tmp = Tempfile.new "gir_ffi" # TODO: Make #pretty_print add this preamble. tmp.write "# coding: utf-8\n" tmp.write str tmp.flush is_ok = `ruby -c #{tmp.path} 2>&1` assert_equal "Syntax OK\n", is_ok end describe "for the Regress module" do it "runs without throwing an exception" do Regress._builder.pretty_print end it "results in valid Ruby" do assert_syntax_ok Regress._builder.pretty_print end end describe "for the GLib module" do it "results in valid Ruby" do assert_syntax_ok GLib._builder.pretty_print end end end Allow for slightly different Syntax OK message from JRuby. require File.expand_path('../gir_ffi_test_helper.rb', File.dirname(__FILE__)) require 'tempfile' GirFFI.setup :Regress describe "Pretty-printing" do def assert_syntax_ok str tmp = Tempfile.new "gir_ffi" # TODO: Make #pretty_print add this preamble. tmp.write "# coding: utf-8\n" tmp.write str tmp.flush is_ok = `ruby -c #{tmp.path} 2>&1` is_ok.must_match /^Syntax OK/ end describe "for the Regress module" do it "runs without throwing an exception" do Regress._builder.pretty_print end it "results in valid Ruby" do assert_syntax_ok Regress._builder.pretty_print end end describe "for the GLib module" do it "results in valid Ruby" do assert_syntax_ok GLib._builder.pretty_print end end end
require 'test_helper' class VolunteerMailerTest < ActionMailer::TestCase def setup @volunteer = create :volunteer, education: 'Bogus Education', woman: true @email_template = create :email_template end test 'volunteer welcome mail with confirmation data is sent correctly' do # FIXME: Problem with umlauts, but only in tests. Test doesn't recognize # email encoded utf-8 umlauts @volunteer.contact.city = 'Zuerich' @volunteer.save mailer = VolunteerMailer.welcome_email(@volunteer, @email_template).deliver assert_equal @email_template.subject, mailer.subject assert_equal [@volunteer.contact.primary_email], mailer.to assert_equal ['freiwillige@aoz.ch'], mailer.from mail_body = mailer.body.encoded assert_match @volunteer.contact.first_name, mail_body assert_match @volunteer.contact.last_name, mail_body assert_match @volunteer.contact.postal_code, mail_body assert_match @volunteer.contact.city, mail_body assert_match @volunteer.birth_year.year.to_s, mail_body assert_match 'Bogus Education', mail_body assert_match 'Frau', mail_body end test 'trial_period_mailer' do _, _, group_assignments = create_group_offer_entity( nil, 7.weeks.ago, nil, create(:volunteer_with_user), create(:volunteer_with_user) ) assignment = make_assignment(start_date: 7.weeks.ago) mailing = create_probation_mailing(*group_assignments, assignment) mailing.reminder_mailing_volunteers.each do |rmv| mailer = VolunteerMailer.trial_period_reminder(rmv).deliver assert_equal rmv.process_template[:subject], mailer.subject assert mailer.to.include? 
rmv.volunteer.contact.primary_email assert_match rmv.process_template[:body], mailer.body.encoded end end test 'half_year_mailer' do _, _, group_assignments = create_group_offer_entity( nil, 8.months.ago, nil, create(:volunteer_with_user), create(:volunteer_with_user) ) assignment = make_assignment(start_date: 8.months.ago) mailing = create_half_year_mailing(*group_assignments, assignment) mailing.reminder_mailing_volunteers.each do |rmv| mailer = VolunteerMailer.half_year_reminder(rmv).deliver assert_equal rmv.process_template[:subject], mailer.subject assert mailer.to.include? rmv.volunteer.contact.primary_email assert_match rmv.process_template[:body], mailer.body.encoded end end test 'volunteer termination with confirmation data is sent correctly' do assignment = make_assignment(start_date: 8.months.ago, end_date: 2.days.ago) mailing = create_termination_mailing(assignment) mailing.reminder_mailing_volunteers do |rmv| mailer = VolunteerMailer.termination_email(rmv).deliver assert_equal rmv.process_template[:subject], mailer.subject assert mailer.to.include? rmv.volunteer.contact.primary_email assert_match rmv.process_template[:body], mailer.body.encoded end end test 'send_group_assignment_termination_email_works_correctly' do group_assignment = create :group_assignment, period_start: 2.months.ago, period_end: 2.days.ago, period_end_set_by: @superadmin termination_reminder = create :reminder_mailing, kind: :termination, reminder_mailing_volunteers: [group_assignment], body: '%{Anrede} %{Name} %{FeedbackLink} %{Einsatz} %{EinsatzTitel} %{EmailAbsender} '\ '%{EinsatzStart}' mailing_volunteer = termination_reminder.reminder_mailing_volunteers.first mailer = VolunteerMailer.termination_email(mailing_volunteer).deliver assert_equal mailing_volunteer.process_template[:subject], mailer.subject assert mailer.to.include? 
mailing_volunteer.volunteer.contact.primary_email assert_match mailing_volunteer.volunteer.contact.natural_name, mailer.body.encoded assert mailing_volunteer.email_sent, "email not marked sent on ReminderMailingVolunteer.id: #{mailing_volunteer.id}" end end drop redundant signup mail test require 'test_helper' class VolunteerMailerTest < ActionMailer::TestCase def setup @volunteer = create :volunteer, education: 'Bogus Education', woman: true @email_template = create :email_template end test 'trial_period_mailer' do _, _, group_assignments = create_group_offer_entity( nil, 7.weeks.ago, nil, create(:volunteer_with_user), create(:volunteer_with_user) ) assignment = make_assignment(start_date: 7.weeks.ago) mailing = create_probation_mailing(*group_assignments, assignment) mailing.reminder_mailing_volunteers.each do |rmv| mailer = VolunteerMailer.trial_period_reminder(rmv).deliver assert_equal rmv.process_template[:subject], mailer.subject assert mailer.to.include? rmv.volunteer.contact.primary_email assert_match rmv.process_template[:body], mailer.body.encoded end end test 'half_year_mailer' do _, _, group_assignments = create_group_offer_entity( nil, 8.months.ago, nil, create(:volunteer_with_user), create(:volunteer_with_user) ) assignment = make_assignment(start_date: 8.months.ago) mailing = create_half_year_mailing(*group_assignments, assignment) mailing.reminder_mailing_volunteers.each do |rmv| mailer = VolunteerMailer.half_year_reminder(rmv).deliver assert_equal rmv.process_template[:subject], mailer.subject assert mailer.to.include? 
rmv.volunteer.contact.primary_email assert_match rmv.process_template[:body], mailer.body.encoded end end test 'volunteer termination with confirmation data is sent correctly' do assignment = make_assignment(start_date: 8.months.ago, end_date: 2.days.ago) mailing = create_termination_mailing(assignment) mailing.reminder_mailing_volunteers do |rmv| mailer = VolunteerMailer.termination_email(rmv).deliver assert_equal rmv.process_template[:subject], mailer.subject assert mailer.to.include? rmv.volunteer.contact.primary_email assert_match rmv.process_template[:body], mailer.body.encoded end end test 'send_group_assignment_termination_email_works_correctly' do group_assignment = create :group_assignment, period_start: 2.months.ago, period_end: 2.days.ago, period_end_set_by: @superadmin termination_reminder = create :reminder_mailing, kind: :termination, reminder_mailing_volunteers: [group_assignment], body: '%{Anrede} %{Name} %{FeedbackLink} %{Einsatz} %{EinsatzTitel} %{EmailAbsender} '\ '%{EinsatzStart}' mailing_volunteer = termination_reminder.reminder_mailing_volunteers.first mailer = VolunteerMailer.termination_email(mailing_volunteer).deliver assert_equal mailing_volunteer.process_template[:subject], mailer.subject assert mailer.to.include? mailing_volunteer.volunteer.contact.primary_email assert_match mailing_volunteer.volunteer.contact.natural_name, mailer.body.encoded assert mailing_volunteer.email_sent, "email not marked sent on ReminderMailingVolunteer.id: #{mailing_volunteer.id}" end end
require 'helper' require 'date' require 'fluent/test/helpers' require 'json' require 'fluent/test/driver/output' require 'flexmock/test_unit' class ElasticsearchOutput < Test::Unit::TestCase include FlexMock::TestCase include Fluent::Test::Helpers attr_accessor :index_cmds, :index_command_counts def setup Fluent::Test.setup require 'fluent/plugin/out_elasticsearch' @driver = nil log = Fluent::Engine.log log.out.logs.slice!(0, log.out.logs.length) end def driver(conf='', es_version=5, client_version="\"5.0\"") # For request stub to detect compatibility. @es_version ||= es_version @client_version ||= client_version if @es_version Fluent::Plugin::ElasticsearchOutput.module_eval(<<-CODE) def detect_es_major_version #{@es_version} end CODE end Fluent::Plugin::ElasticsearchOutput.module_eval(<<-CODE) def client_library_version #{@client_version} end CODE @driver ||= Fluent::Test::Driver::Output.new(Fluent::Plugin::ElasticsearchOutput) { # v0.12's test driver assume format definition. This simulates ObjectBufferedOutput format if !defined?(Fluent::Plugin::Output) def format(tag, time, record) [time, record].to_msgpack end end }.configure(conf) end def default_type_name Fluent::Plugin::ElasticsearchOutput::DEFAULT_TYPE_NAME end def sample_record(content={}) {'age' => 26, 'request_id' => '42', 'parent_id' => 'parent', 'routing_id' => 'routing'}.merge(content) end def nested_sample_record {'nested' => {'age' => 26, 'parent_id' => 'parent', 'routing_id' => 'routing', 'request_id' => '42'} } end def stub_elastic_info(url="http://localhost:9200/", version="6.4.2") body ="{\"version\":{\"number\":\"#{version}\"}}" stub_request(:get, url).to_return({:status => 200, :body => body, :headers => { 'Content-Type' => 'json' } }) end def stub_elastic(url="http://localhost:9200/_bulk") stub_request(:post, url).with do |req| @index_cmds = req.body.split("\n").map {|r| JSON.parse(r) } end end def stub_elastic_unavailable(url="http://localhost:9200/_bulk") stub_request(:post, 
url).to_return(:status => [503, "Service Unavailable"]) end def stub_elastic_timeout(url="http://localhost:9200/_bulk") stub_request(:post, url).to_timeout end def stub_elastic_with_store_index_command_counts(url="http://localhost:9200/_bulk") if @index_command_counts == nil @index_command_counts = {} @index_command_counts.default = 0 end stub_request(:post, url).with do |req| index_cmds = req.body.split("\n").map {|r| JSON.parse(r) } @index_command_counts[url] += index_cmds.size end end def make_response_body(req, error_el = nil, error_status = nil, error = nil) req_index_cmds = req.body.split("\n").map { |r| JSON.parse(r) } items = [] count = 0 ids = 1 op = nil index = nil type = nil id = nil req_index_cmds.each do |cmd| if count.even? op = cmd.keys[0] index = cmd[op]['_index'] type = cmd[op]['_type'] if cmd[op].has_key?('_id') id = cmd[op]['_id'] else # Note: this appears to be an undocumented feature of Elasticsearch # https://www.elastic.co/guide/en/elasticsearch/reference/2.4/docs-bulk.html # When you submit an "index" write_operation, with no "_id" field in the # metadata header, Elasticsearch will turn this into a "create" # operation in the response. if "index" == op op = "create" end id = ids ids += 1 end else item = { op => { '_index' => index, '_type' => type, '_id' => id, '_version' => 1, '_shards' => { 'total' => 1, 'successful' => 1, 'failed' => 0 }, 'status' => op == 'create' ? 201 : 200 } } items.push(item) end count += 1 end if !error_el.nil? && !error_status.nil? && !error.nil? 
op = items[error_el].keys[0] items[error_el][op].delete('_version') items[error_el][op].delete('_shards') items[error_el][op]['error'] = error items[error_el][op]['status'] = error_status errors = true else errors = false end @index_cmds = items body = { 'took' => 6, 'errors' => errors, 'items' => items } return body.to_json end def stub_elastic_bad_argument(url="http://localhost:9200/_bulk") error = { "type" => "mapper_parsing_exception", "reason" => "failed to parse [...]", "caused_by" => { "type" => "illegal_argument_exception", "reason" => "Invalid format: \"...\"" } } stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 400, error), :headers => { 'Content-Type' => 'json' } } }) end def stub_elastic_bulk_error(url="http://localhost:9200/_bulk") error = { "type" => "some-unrecognized-error", "reason" => "some message printed here ...", } stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 500, error), :headers => { 'Content-Type' => 'json' } } }) end def stub_elastic_bulk_rejected(url="http://localhost:9200/_bulk") error = { "status" => 500, "type" => "es_rejected_execution_exception", "reason" => "rejected execution of org.elasticsearch.transport.TransportService$4@1a34d37a on EsThreadPoolExecutor[bulk, queue capacity = 50, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@312a2162[Running, pool size = 32, active threads = 32, queued tasks = 50, completed tasks = 327053]]" } stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 429, error), :headers => { 'Content-Type' => 'json' } } }) end def stub_elastic_out_of_memory(url="http://localhost:9200/_bulk") error = { "status" => 500, "type" => "out_of_memory_error", "reason" => "Java heap space" } stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 500, error), :headers => { 'Content-Type' => 'json' } 
} }) end def stub_elastic_unexpected_response_op(url="http://localhost:9200/_bulk") error = { "category" => "some-other-type", "reason" => "some-other-reason" } stub_request(:post, url).to_return(lambda { |req| bodystr = make_response_body(req, 0, 500, error); body = JSON.parse(bodystr); body['items'][0]['unknown'] = body['items'][0].delete('create'); { :status => 200, :body => body.to_json, :headers => { 'Content-Type' => 'json' } } }) end def assert_logs_include(logs, msg, exp_matches=1) matches = logs.grep /#{msg}/ assert_equal(exp_matches, matches.length, "Logs do not contain '#{msg}' '#{logs}'") end def assert_logs_include_compare_size(exp_matches=1, operator="<=", logs="", msg="") matches = logs.grep /#{msg}/ assert_compare(exp_matches, operator, matches.length, "Logs do not contain '#{msg}' '#{logs}'") end def test_configure config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe } instance = driver(config).instance assert_equal 'logs.google.com', instance.host assert_equal 777, instance.port assert_equal :https, instance.scheme assert_equal '/es/', instance.path assert_equal 'john', instance.user assert_equal 'doe', instance.password assert_equal :TLSv1, instance.ssl_version assert_nil instance.client_key assert_nil instance.client_cert assert_nil instance.client_key_pass assert_false instance.with_transporter_log assert_equal :"application/json", instance.content_type assert_equal "fluentd", default_type_name assert_equal :excon, instance.http_backend assert_false instance.prefer_oj_serializer assert_equal ["out_of_memory_error", "es_rejected_execution_exception"], instance.unrecoverable_error_types assert_true instance.verify_es_version_at_startup assert_equal Fluent::Plugin::ElasticsearchOutput::DEFAULT_ELASTICSEARCH_VERSION, instance.default_elasticsearch_version assert_false instance.log_es_400_reason end test 'configure Content-Type' do config = %{ content_type application/x-ndjson } instance = driver(config).instance 
assert_equal :"application/x-ndjson", instance.content_type end test 'invalid Content-Type' do config = %{ content_type nonexistent/invalid } assert_raise(Fluent::ConfigError) { instance = driver(config).instance } end test 'invalid specification of times of retrying template installation' do config = %{ max_retry_putting_template -3 } assert_raise(Fluent::ConfigError) { instance = driver(config).instance } end test 'Detected Elasticsearch 7' do config = %{ type_name changed } instance = driver(config, 7).instance assert_equal '_doc', instance.type_name end test 'Detected Elasticsearch 6 and insecure security' do config = %{ ssl_version TLSv1_1 @log_level warn scheme https } instance = driver(config, 6).instance logs = driver.logs assert_logs_include(logs, /Detected ES 6.x or above and enabled insecure security/, 1) end test 'Detected Elasticsearch 7 and secure security' do config = %{ ssl_version TLSv1_2 @log_level warn scheme https } instance = driver(config, 7).instance logs = driver.logs assert_logs_include(logs, /Detected ES 6.x or above and enabled insecure security/, 0) end test 'Pass Elasticsearch and client library are same' do config = %{ @log_level warn validate_client_version true } assert_nothing_raised do driver(config, 6, "\"6.1.0\"").instance end end test 'Detected Elasticsearch and client library mismatch' do config = %{ @log_level warn validate_client_version true } assert_raise_message(/Detected ES 7 but you use ES client 5.0/) do driver(config, 7, "\"5.0.5\"").instance end end test 'lack of tag in chunk_keys' do assert_raise_message(/'tag' in chunk_keys is required./) do driver(Fluent::Config::Element.new( 'ROOT', '', { '@type' => 'elasticsearch', 'host' => 'log.google.com', 'port' => 777, 'scheme' => 'https', 'path' => '/es/', 'user' => 'john', 'pasword' => 'doe', }, [ Fluent::Config::Element.new('buffer', 'mykey', { 'chunk_keys' => 'mykey' }, []) ] )) end end sub_test_case 'connection exceptions' do test 'default connection exception' do 
driver(Fluent::Config::Element.new( 'ROOT', '', { '@type' => 'elasticsearch', 'host' => 'log.google.com', 'port' => 777, 'scheme' => 'https', 'path' => '/es/', 'user' => 'john', 'pasword' => 'doe', }, [ Fluent::Config::Element.new('buffer', 'tag', { }, []) ] )) logs = driver.logs assert_logs_include(logs, /you should specify 2 or more 'flush_thread_count'/, 1) end end def test_retry_get_es_version config = %{ host logs.google.com port 778 scheme https path /es/ user john password doe max_retry_get_es_version 3 } connection_resets = 0 stub_request(:get, "https://john:doe@logs.google.com:778/es//").with do |req| connection_resets += 1 raise Timeout::Error, "Test message" end assert_raise(Fluent::Plugin::ElasticsearchError::RetryableOperationExhaustedFailure) do driver(config, nil) end assert_equal(connection_resets, 4) end def test_template_already_present config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name logstash template_file /abc123 } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash"). to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_not_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash") end def test_template_create cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name logstash template_file #{template_file} } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash"). 
to_return(:status => 404, :body => "", :headers => {}) # creation stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash"). to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash", times: 1) end def test_custom_template_create cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_alias_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name myapp_alias_template template_file #{template_file} customize_template {"--appid--": "myapp-logs","--index_prefix--":"mylogs"} } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template"). to_return(:status => 404, :body => "", :headers => {}) # creation stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template"). to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template", times: 1) end def test_custom_template_with_rollover_index_create cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_alias_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name myapp_alias_template template_file #{template_file} customize_template {"--appid--": "myapp-logs","--index_prefix--":"mylogs"} rollover_index true index_date_pattern now/w{xxxx.ww} deflector_alias myapp_deflector index_prefix mylogs application_name myapp } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). 
to_return(:status => 200, :body => "", :headers => {})
    # check if template exists
    stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template").
      to_return(:status => 404, :body => "", :headers => {})
    # creation
    stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template").
      to_return(:status => 200, :body => "", :headers => {})
    # creation of index which can rollover
    stub_request(:put, "https://john:doe@logs.google.com:777/es//%3Cmylogs-myapp-%7Bnow%2Fw%7Bxxxx.ww%7D%7D-000001%3E").
      to_return(:status => 200, :body => "", :headers => {})
    # check if alias exists
    stub_request(:head, "https://john:doe@logs.google.com:777/es//_alias/myapp_deflector").
      to_return(:status => 404, :body => "", :headers => {})
    # put the alias for the index
    stub_request(:put, "https://john:doe@logs.google.com:777/es//%3Cmylogs-myapp-%7Bnow%2Fw%7Bxxxx.ww%7D%7D-000001%3E/_alias/myapp_deflector").
      to_return(:status => 200, :body => "", :headers => {})

    driver(config)

    assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template", times: 1)
  end

  # With template_overwrite the template is PUT even though GET reports it
  # already exists.
  def test_template_overwrite
    cwd = File.dirname(__FILE__)
    template_file = File.join(cwd, 'test_template.json')

    config = %{
      host     logs.google.com
      port     777
      scheme   https
      path     /es/
      user     john
      password doe
      template_name logstash
      template_file #{template_file}
      template_overwrite true
    }

    # connection start
    stub_request(:head, "https://john:doe@logs.google.com:777/es//").
      to_return(:status => 200, :body => "", :headers => {})
    # check if template exists
    stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash").
      to_return(:status => 200, :body => "", :headers => {})
    # creation
    stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash").
      to_return(:status => 200, :body => "", :headers => {})

    driver(config)

    assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash", times: 1)
  end

  # Overwrite combined with customize_template: the existing template is
  # replaced exactly once.
  def test_custom_template_overwrite
    cwd = File.dirname(__FILE__)
    template_file = File.join(cwd, 'test_template.json')

    config = %{
      host     logs.google.com
      port     777
      scheme   https
      path     /es/
      user     john
      password doe
      template_name myapp_alias_template
      template_file #{template_file}
      template_overwrite true
      customize_template {"--appid--": "myapp-logs","--index_prefix--":"mylogs"}
    }

    # connection start
    stub_request(:head, "https://john:doe@logs.google.com:777/es//").
      to_return(:status => 200, :body => "", :headers => {})
    # check if template exists
    stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template").
      to_return(:status => 200, :body => "", :headers => {})
    # creation
    stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template").
      to_return(:status => 200, :body => "", :headers => {})

    driver(config)

    assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template", times: 1)
  end

  # Overwrite plus rollover: template is replaced and the first rollover
  # index (default date pattern now/d) plus deflector alias are created.
  def test_custom_template_with_rollover_index_overwrite
    cwd = File.dirname(__FILE__)
    template_file = File.join(cwd, 'test_template.json')

    config = %{
      host     logs.google.com
      port     777
      scheme   https
      path     /es/
      user     john
      password doe
      template_name myapp_alias_template
      template_file #{template_file}
      template_overwrite true
      customize_template {"--appid--": "myapp-logs","--index_prefix--":"mylogs"}
      deflector_alias myapp_deflector
      rollover_index true
      index_prefix mylogs
      application_name myapp
    }

    # connection start
    stub_request(:head, "https://john:doe@logs.google.com:777/es//").
      to_return(:status => 200, :body => "", :headers => {})
    # check if template exists
    stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template").
      to_return(:status => 200, :body => "", :headers => {})
    # creation
    stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template").
      to_return(:status => 200, :body => "", :headers => {})
    # creation of index which can rollover
    stub_request(:put, "https://john:doe@logs.google.com:777/es//%3Cmylogs-myapp-%7Bnow%2Fd%7D-000001%3E").
      to_return(:status => 200, :body => "", :headers => {})
    # check if alias exists
    stub_request(:head, "https://john:doe@logs.google.com:777/es//_alias/myapp_deflector").
      to_return(:status => 404, :body => "", :headers => {})
    # put the alias for the index
    stub_request(:put, "https://john:doe@logs.google.com:777/es//%3Cmylogs-myapp-%7Bnow%2Fd%7D-000001%3E/_alias/myapp_deflector").
      to_return(:status => 200, :body => "", :headers => {})

    driver(config)

    assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template", times: 1)
  end

  # A template_file that does not exist must abort startup.
  def test_template_create_invalid_filename
    config = %{
      host     logs.google.com
      port     777
      scheme   https
      path     /es/
      user     john
      password doe
      template_name logstash
      template_file /abc123
    }

    # connection start
    stub_request(:head, "https://john:doe@logs.google.com:777/es//").
      to_return(:status => 200, :body => "", :headers => {})
    # check if template exists
    stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash").
to_return(:status => 404, :body => "", :headers => {}) assert_raise(RuntimeError) { driver(config) } end def test_template_retry_install cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 778 scheme https path /es/ user john password doe template_name logstash template_file #{template_file} max_retry_putting_template 3 } connection_resets = 0 # check if template exists stub_request(:get, "https://john:doe@logs.google.com:778/es//_template/logstash").with do |req| connection_resets += 1 raise Faraday::ConnectionFailed, "Test message" end assert_raise(Fluent::Plugin::ElasticsearchError::RetryableOperationExhaustedFailure) do driver(config) end assert_equal(connection_resets, 4) end def test_templates_create cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe templates {"logstash1":"#{template_file}", "logstash2":"#{template_file}","logstash3":"#{template_file}" } } stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash1"). to_return(:status => 404, :body => "", :headers => {}) stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash2"). to_return(:status => 404, :body => "", :headers => {}) stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash3"). to_return(:status => 200, :body => "", :headers => {}) #exists stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash3"). 
to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested( :put, "https://john:doe@logs.google.com:777/es//_template/logstash1", times: 1) assert_requested( :put, "https://john:doe@logs.google.com:777/es//_template/logstash2", times: 1) assert_not_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash3") #exists end def test_templates_overwrite cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe templates {"logstash1":"#{template_file}", "logstash2":"#{template_file}","logstash3":"#{template_file}" } template_overwrite true } stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash1"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash2"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash3"). to_return(:status => 200, :body => "", :headers => {}) #exists stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash3"). 
to_return(:status => 200, :body => "", :headers => {})

    driver(config)

    assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1", times: 1)
    assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2", times: 1)
    assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash3", times: 1)
  end

  # When template_name/template_file are given, the 'templates' hash must be
  # ignored entirely.
  def test_templates_not_used
    cwd = File.dirname(__FILE__)
    template_file = File.join(cwd, 'test_template.json')

    config = %{
      host     logs.google.com
      port     777
      scheme   https
      path     /es/
      user     john
      password doe
      template_name logstash
      template_file #{template_file}
      templates {"logstash1":"#{template_file}", "logstash2":"#{template_file}" }
    }
    # connection start
    stub_request(:head, "https://john:doe@logs.google.com:777/es//").
      to_return(:status => 200, :body => "", :headers => {})
    # check if template exists
    stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash").
      to_return(:status => 404, :body => "", :headers => {})
    stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash1").
      to_return(:status => 404, :body => "", :headers => {})
    stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash2").
      to_return(:status => 404, :body => "", :headers => {})
    #creation
    stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash").
      to_return(:status => 200, :body => "", :headers => {})
    stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1").
      to_return(:status => 200, :body => "", :headers => {})
    stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2").
      to_return(:status => 200, :body => "", :headers => {})

    driver(config)

    assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash", times: 1)
    assert_not_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1")
    assert_not_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2")
  end

  # If one entry in 'templates' is broken, templates processed before the
  # failure are still installed; the broken one is never PUT and startup
  # fails.
  def test_templates_can_be_partially_created_if_error_occurs
    cwd = File.dirname(__FILE__)
    template_file = File.join(cwd, 'test_template.json')

    config = %{
      host     logs.google.com
      port     777
      scheme   https
      path     /es/
      user     john
      password doe
      templates {"logstash1":"#{template_file}", "logstash2":"/abc" }
    }
    stub_request(:head, "https://john:doe@logs.google.com:777/es//").
      to_return(:status => 200, :body => "", :headers => {})
    # check if template exists
    stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash1").
      to_return(:status => 404, :body => "", :headers => {})
    stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash2").
      to_return(:status => 404, :body => "", :headers => {})

    stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1").
      to_return(:status => 200, :body => "", :headers => {})
    stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2").
      to_return(:status => 200, :body => "", :headers => {})

    assert_raise(RuntimeError) {
      driver(config)
    }

    assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1", times: 1)
    assert_not_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2")
  end

  # Legacy 'hosts' syntax (host:port pairs) inherits scheme/path/port from
  # the top-level settings where unspecified.
  def test_legacy_hosts_list
    config = %{
      hosts    host1:50,host2:100,host3
      scheme   https
      path     /es/
      port     123
    }
    instance = driver(config).instance

    assert_equal 3, instance.get_connection_options[:hosts].length
    host1, host2, host3 = instance.get_connection_options[:hosts]

    assert_equal 'host1', host1[:host]
    assert_equal 50, host1[:port]
    assert_equal 'https', host1[:scheme]
    assert_equal '/es/', host2[:path]
    assert_equal 'host3', host3[:host]
    assert_equal 123, host3[:port]
    assert_equal 'https', host3[:scheme]
    assert_equal '/es/', host3[:path]
  end

  # Full-URL 'hosts' syntax: per-host credentials/path win; top-level values
  # fill the gaps.
  def test_hosts_list
    config = %{
      hosts    https://john:password@host1:443/elastic/,http://host2
      path     /default_path
      user     default_user
      password default_password
    }
    instance = driver(config).instance

    assert_equal 2, instance.get_connection_options[:hosts].length
    host1, host2 = instance.get_connection_options[:hosts]

    assert_equal 'host1', host1[:host]
    assert_equal 443, host1[:port]
    assert_equal 'https', host1[:scheme]
    assert_equal 'john', host1[:user]
    assert_equal 'password', host1[:password]
    assert_equal '/elastic/', host1[:path]

    assert_equal 'host2', host2[:host]
    assert_equal 'http', host2[:scheme]
    assert_equal 'default_user', host2[:user]
    assert_equal 'default_password', host2[:password]
    assert_equal '/default_path', host2[:path]
  end

  # Special characters in embedded credentials must be %-escaped by the
  # plugin before being handed to the client.
  def test_hosts_list_with_escape_placeholders
    config = %{
      hosts    https://%{j+hn}:%{passw@rd}@host1:443/elastic/,http://host2
      path     /default_path
      user     default_user
      password default_password
    }
    instance = driver(config).instance

    assert_equal 2, instance.get_connection_options[:hosts].length
    host1, host2 = instance.get_connection_options[:hosts]

    assert_equal 'host1', host1[:host]
    assert_equal 443, host1[:port]
    assert_equal
'https', host1[:scheme] assert_equal 'j%2Bhn', host1[:user] assert_equal 'passw%40rd', host1[:password] assert_equal '/elastic/', host1[:path] assert_equal 'host2', host2[:host] assert_equal 'http', host2[:scheme] assert_equal 'default_user', host2[:user] assert_equal 'default_password', host2[:password] assert_equal '/default_path', host2[:path] end def test_single_host_params_and_defaults config = %{ host logs.google.com user john password doe } instance = driver(config).instance assert_equal 1, instance.get_connection_options[:hosts].length host1 = instance.get_connection_options[:hosts][0] assert_equal 'logs.google.com', host1[:host] assert_equal 9200, host1[:port] assert_equal 'http', host1[:scheme] assert_equal 'john', host1[:user] assert_equal 'doe', host1[:password] assert_equal nil, host1[:path] end def test_single_host_params_and_defaults_with_escape_placeholders config = %{ host logs.google.com user %{j+hn} password %{d@e} } instance = driver(config).instance assert_equal 1, instance.get_connection_options[:hosts].length host1 = instance.get_connection_options[:hosts][0] assert_equal 'logs.google.com', host1[:host] assert_equal 9200, host1[:port] assert_equal 'http', host1[:scheme] assert_equal 'j%2Bhn', host1[:user] assert_equal 'd%40e', host1[:password] assert_equal nil, host1[:path] end def test_content_type_header stub_request(:head, "http://localhost:9200/"). to_return(:status => 200, :body => "", :headers => {}) if Elasticsearch::VERSION >= "6.0.2" elastic_request = stub_request(:post, "http://localhost:9200/_bulk"). with(headers: { "Content-Type" => "application/x-ndjson" }) else elastic_request = stub_request(:post, "http://localhost:9200/_bulk"). 
with(headers: { "Content-Type" => "application/json" }) end driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_requested(elastic_request) end def test_write_message_with_bad_chunk driver.configure("target_index_key bad_value\n@log_level debug\n") stub_elastic driver.run(default_tag: 'test') do driver.feed({'bad_value'=>"\255"}) end error_log = driver.error_events.map {|e| e.last.message } assert_logs_include(error_log, /(input string invalid)|(invalid byte sequence in UTF-8)/) end def test_writes_to_default_index stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal('fluentd', index_cmds.first['index']['_index']) end def test_writes_to_default_type stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(default_type_name, index_cmds.first['index']['_type']) end def test_writes_to_speficied_index driver.configure("index_name myindex\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal('myindex', index_cmds.first['index']['_index']) end class IndexNamePlaceholdersTest < self def test_writes_to_speficied_index_with_tag_placeholder driver.configure("index_name myindex.${tag}\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal('myindex.test', index_cmds.first['index']['_index']) end def test_writes_to_speficied_index_with_time_placeholder driver.configure(Fluent::Config::Element.new( 'ROOT', '', { '@type' => 'elasticsearch', 'index_name' => 'myindex.%Y.%m.%d', }, [ Fluent::Config::Element.new('buffer', 'tag,time', { 'chunk_keys' => ['tag', 'time'], 'timekey' => 3600, }, []) ] )) stub_elastic time = Time.parse Date.today.iso8601 driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record) end assert_equal("myindex.#{time.utc.strftime("%Y.%m.%d")}", index_cmds.first['index']['_index']) end def test_writes_to_speficied_index_with_custom_key_placeholder 
driver.configure(Fluent::Config::Element.new(
                         'ROOT', '', {
                           '@type' => 'elasticsearch',
                           'index_name' => 'myindex.${pipeline_id}',
                         }, [
                           Fluent::Config::Element.new('buffer', 'tag,pipeline_id', {}, [])
                         ]
                       ))
      time = Time.parse Date.today.iso8601
      pipeline_id = "mypipeline"
      logstash_index = "myindex.#{pipeline_id}"
      stub_elastic
      driver.run(default_tag: 'test') do
        driver.feed(time.to_i, sample_record.merge({"pipeline_id" => pipeline_id}))
      end
      assert_equal(logstash_index, index_cmds.first['index']['_index'])
    end
  end

  def test_writes_to_speficied_index_uppercase
    driver.configure("index_name MyIndex\n")
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
    end
    # Allthough index_name has upper-case characters,
    # it should be set as lower-case when sent to elasticsearch.
    assert_equal('myindex', index_cmds.first['index']['_index'])
  end

  # target_index_key: the key is consumed (removed from the record) and its
  # value overrides the configured index.
  def test_writes_to_target_index_key
    driver.configure("target_index_key @target_index\n")
    stub_elastic
    record = sample_record.clone
    driver.run(default_tag: 'test') do
      driver.feed(sample_record.merge('@target_index' => 'local-override'))
    end
    assert_equal('local-override', index_cmds.first['index']['_index'])
    assert_nil(index_cmds[1]['@target_index'])
  end

  # target_index_key wins even when logstash_format would normally name the
  # index.
  def test_writes_to_target_index_key_logstash
    driver.configure("target_index_key @target_index
                      logstash_format true")
    time = Time.parse Date.today.iso8601
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(time.to_i, sample_record.merge('@target_index' => 'local-override'))
    end
    assert_equal('local-override', index_cmds.first['index']['_index'])
  end

  def test_writes_to_target_index_key_logstash_uppercase
    driver.configure("target_index_key @target_index
                      logstash_format true")
    time = Time.parse Date.today.iso8601
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(time.to_i, sample_record.merge('@target_index' => 'local-override'))
    end
    # Allthough @target_index has upper-case characters,
    # it should be set as lower-case when sent to elasticsearch.
    # NOTE(review): the record above feeds a lower-case value, so the
    # downcasing path is not actually exercised here — consider feeding
    # e.g. 'Local-Override'; confirm against upstream intent.
    assert_equal('local-override', index_cmds.first['index']['_index'])
  end

  def test_writes_to_default_index_with_pipeline
    pipeline = "fluentd"
    driver.configure("pipeline #{pipeline}")
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
    end
    assert_equal(pipeline, index_cmds.first['index']['pipeline'])
  end

  # Without the key present in the record, the default index is used.
  def test_writes_to_target_index_key_fallack
    driver.configure("target_index_key @target_index\n")
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
    end
    assert_equal('fluentd', index_cmds.first['index']['_index'])
  end

  # Without the key present, logstash_format naming takes over.
  def test_writes_to_target_index_key_fallack_logstash
    driver.configure("target_index_key @target_index\n
                      logstash_format true")
    time = Time.parse Date.today.iso8601
    logstash_index = "logstash-#{time.getutc.strftime("%Y.%m.%d")}"
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(time.to_i, sample_record)
    end
    assert_equal(logstash_index, index_cmds.first['index']['_index'])
  end

  # ES 6 keeps the configured type name; ES 7 forces '_doc'.
  data("border"         => {"es_version" => 6, "_type" => "mytype"},
       "fixed_behavior" => {"es_version" => 7, "_type" => "_doc"},
      )
  def test_writes_to_speficied_type(data)
    driver('', data["es_version"]).configure("type_name mytype\n")
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
    end
    assert_equal(data['_type'], index_cmds.first['index']['_type'])
  end

  data("border"         => {"es_version" => 6, "_type" => "mytype.test"},
       "fixed_behavior" => {"es_version" => 7, "_type" => "_doc"},
      )
  def test_writes_to_speficied_type_with_placeholders(data)
    driver('', data["es_version"]).configure("type_name mytype.${tag}\n")
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
    end
    assert_equal(data['_type'], index_cmds.first['index']['_type'])
  end

  # target_type_key behaviour by ES version: honoured through ES 5, falls
  # back to the default type on ES 6, forced to '_doc' on ES 7.
  data("old"            => {"es_version" => 2, "_type" => "local-override"},
       "old_behavior"   => {"es_version" => 5, "_type" => "local-override"},
       "border"         => {"es_version" => 6, "_type" => "fluentd"},
       "fixed_behavior" => {"es_version" => 7, "_type" => "_doc"},
      )
  def test_writes_to_target_type_key(data)
    driver('', data["es_version"]).configure("target_type_key @target_type\n")
    stub_elastic
    record = sample_record.clone
    driver.run(default_tag: 'test') do
      driver.feed(sample_record.merge('@target_type' => 'local-override'))
    end
    assert_equal(data["_type"], index_cmds.first['index']['_type'])
    assert_nil(index_cmds[1]['@target_type'])
  end

  def test_writes_to_target_type_key_fallack_to_default
    driver.configure("target_type_key @target_type\n")
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
    end
    assert_equal(default_type_name, index_cmds.first['index']['_type'])
  end

  def test_writes_to_target_type_key_fallack_to_type_name
    driver.configure("target_type_key @target_type
                      type_name mytype")
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
    end
    assert_equal('mytype', index_cmds.first['index']['_type'])
  end

  # Same version matrix as test_writes_to_target_type_key, but the key is a
  # dotted path into a nested hash; the nested key must also be consumed.
  data("old"            => {"es_version" => 2, "_type" => "local-override"},
       "old_behavior"   => {"es_version" => 5, "_type" => "local-override"},
       "border"         => {"es_version" => 6, "_type" => "fluentd"},
       "fixed_behavior" => {"es_version" => 7, "_type" => "_doc"},
      )
  def test_writes_to_target_type_key_nested(data)
    driver('', data["es_version"]).configure("target_type_key kubernetes.labels.log_type\n")
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record.merge('kubernetes' => {
                                        'labels' => {
                                          'log_type' => 'local-override'
                                        }
                                      }))
    end
    assert_equal(data["_type"], index_cmds.first['index']['_type'])
    assert_nil(index_cmds[1]['kubernetes']['labels']['log_type'])
  end

  def test_writes_to_target_type_key_fallack_to_default_nested
    driver.configure("target_type_key kubernetes.labels.log_type\n")
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record.merge('kubernetes' => {
                                        'labels' => {
                                          'other_labels' => 'test'
                                        }
                                      }))
    end
    assert_equal(default_type_name, index_cmds.first['index']['_type'])
  end

  def test_writes_to_speficied_host
    driver.configure("host 192.168.33.50\n")
    elastic_request =
stub_elastic("http://192.168.33.50:9200/_bulk")
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
    end
    assert_requested(elastic_request)
  end

  def test_writes_to_speficied_port
    driver.configure("port 9201\n")
    elastic_request = stub_elastic("http://localhost:9201/_bulk")
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
    end
    assert_requested(elastic_request)
  end

  # 1000 records fed across three stubbed hosts; each record produces two
  # bulk lines, so the stubs must have seen 2000 lines in total.
  def test_writes_to_multi_hosts
    hosts = [['192.168.33.50', 9201], ['192.168.33.51', 9201], ['192.168.33.52', 9201]]
    hosts_string = hosts.map {|x| "#{x[0]}:#{x[1]}"}.compact.join(',')

    driver.configure("hosts #{hosts_string}")

    hosts.each do |host_info|
      host, port = host_info
      stub_elastic_with_store_index_command_counts("http://#{host}:#{port}/_bulk")
    end

    driver.run(default_tag: 'test') do
      1000.times do
        driver.feed(sample_record.merge('age'=>rand(100)))
      end
    end

    # @note: we cannot make multi chunks with options (flush_interval, buffer_chunk_limit)
    # it's Fluentd test driver's constraint
    # so @index_command_counts.size is always 1
    assert(@index_command_counts.size > 0, "not working with hosts options")

    total = 0
    @index_command_counts.each do |url, count|
      total += count
    end
    assert_equal(2000, total)
  end

  # flatten_hashes joins nested hash keys with the separator, but leaves
  # arrays of hashes untouched.
  def test_nested_record_with_flattening_on
    driver.configure("flatten_hashes true
                      flatten_hashes_separator |")

    original_hash = {"foo" => {"bar" => "baz"}, "people" => [
      {"age" => "25", "height" => "1ft"},
      {"age" => "30", "height" => "2ft"}
    ]}

    expected_output = {"foo|bar"=>"baz", "people" => [
      {"age" => "25", "height" => "1ft"},
      {"age" => "30", "height" => "2ft"}
    ]}

    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(original_hash)
    end
    assert_equal expected_output, index_cmds[1]
  end

  def test_nested_record_with_flattening_off
    # flattening off by default
    original_hash = {"foo" => {"bar" => "baz"}}
    expected_output = {"foo" => {"bar" => "baz"}}

    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(original_hash)
    end
    assert_equal expected_output, index_cmds[1]
  end

  # Each record contributes a metadata line and a source line to the bulk
  # body, so two records yield four commands.
  def test_makes_bulk_request
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
      driver.feed(sample_record.merge('age' => 27))
    end
    assert_equal(4, index_cmds.count)
  end

  def test_all_records_are_preserved_in_bulk
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
      driver.feed(sample_record.merge('age' => 27))
    end
    assert_equal(26, index_cmds[1]['age'])
    assert_equal(27, index_cmds[3]['age'])
  end

  def test_writes_to_logstash_index
    driver.configure("logstash_format true\n")
    #
    # This is 1 second past midnight in BST, so the UTC index should be the day before
    dt = DateTime.new(2015, 6, 1, 0, 0, 1, "+01:00")
    logstash_index = "logstash-2015.05.31"
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(dt.to_time.to_i, sample_record)
    end
    assert_equal(logstash_index, index_cmds.first['index']['_index'])
  end

  def test_writes_to_logstash_non_utc_index
    driver.configure("logstash_format true
                      utc_index false")
    # When using `utc_index false` the index time will be the local day of
    # ingestion time
    time = Date.today.to_time
    index = "logstash-#{time.strftime("%Y.%m.%d")}"
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(time.to_i, sample_record)
    end
    assert_equal(index, index_cmds.first['index']['_index'])
  end

  def test_writes_to_logstash_index_with_specified_prefix
    driver.configure("logstash_format true
                      logstash_prefix myprefix")
    time = Time.parse Date.today.iso8601
    logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m.%d")}"
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(time.to_i, sample_record)
    end
    assert_equal(logstash_index, index_cmds.first['index']['_index'])
  end

  def test_writes_to_logstash_index_with_specified_prefix_and_separator
    separator = '_'
    driver.configure("logstash_format true
                      logstash_prefix_separator #{separator}
                      logstash_prefix myprefix")
    time = Time.parse Date.today.iso8601
    logstash_index = "myprefix#{separator}#{time.getutc.strftime("%Y.%m.%d")}"
    stub_elastic
    driver.run(default_tag: 'test') do
driver.feed(time.to_i, sample_record)
    end
    assert_equal(logstash_index, index_cmds.first['index']['_index'])
  end

  class LogStashPrefixPlaceholdersTest < self
    def test_writes_to_logstash_index_with_specified_prefix_and_tag_placeholder
      driver.configure("logstash_format true
                        logstash_prefix myprefix-${tag}")
      time = Time.parse Date.today.iso8601
      logstash_index = "myprefix-test-#{time.getutc.strftime("%Y.%m.%d")}"
      stub_elastic
      driver.run(default_tag: 'test') do
        driver.feed(time.to_i, sample_record)
      end
      assert_equal(logstash_index, index_cmds.first['index']['_index'])
    end

    def test_writes_to_logstash_index_with_specified_prefix_and_time_placeholder
      driver.configure(Fluent::Config::Element.new(
                         'ROOT', '', {
                           '@type' => 'elasticsearch',
                           'logstash_format' => true,
                           'logstash_prefix' => 'myprefix-%H',
                         }, [
                           Fluent::Config::Element.new('buffer', 'tag,time', {
                                                         'chunk_keys' => ['tag', 'time'],
                                                         'timekey' => 3600,
                                                       }, [])
                         ]
                       ))
      time = Time.parse Date.today.iso8601
      logstash_index = "myprefix-#{time.getutc.strftime("%H")}-#{time.getutc.strftime("%Y.%m.%d")}"
      stub_elastic
      driver.run(default_tag: 'test') do
        driver.feed(time.to_i, sample_record)
      end
      assert_equal(logstash_index, index_cmds.first['index']['_index'])
    end

    def test_writes_to_logstash_index_with_specified_prefix_and_custom_key_placeholder
      driver.configure(Fluent::Config::Element.new(
                         'ROOT', '', {
                           '@type' => 'elasticsearch',
                           'logstash_format' => true,
                           'logstash_prefix' => 'myprefix-${pipeline_id}',
                         }, [
                           Fluent::Config::Element.new('buffer', 'tag,pipeline_id', {}, [])
                         ]
                       ))
      time = Time.parse Date.today.iso8601
      pipeline_id = "mypipeline"
      logstash_index = "myprefix-#{pipeline_id}-#{time.getutc.strftime("%Y.%m.%d")}"
      stub_elastic
      driver.run(default_tag: 'test') do
        driver.feed(time.to_i, sample_record.merge({"pipeline_id" => pipeline_id}))
      end
      assert_equal(logstash_index, index_cmds.first['index']['_index'])
    end
  end

  def test_writes_to_logstash_index_with_specified_prefix_uppercase
    driver.configure("logstash_format true
                      logstash_prefix MyPrefix")
    time = Time.parse Date.today.iso8601
    logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m.%d")}"
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(time.to_i, sample_record)
    end
    # Allthough logstash_prefix has upper-case characters,
    # it should be set as lower-case when sent to elasticsearch.
    assert_equal(logstash_index, index_cmds.first['index']['_index'])
  end

  def test_writes_to_logstash_index_with_specified_dateformat
    driver.configure("logstash_format true
                      logstash_dateformat %Y.%m")
    time = Time.parse Date.today.iso8601
    logstash_index = "logstash-#{time.getutc.strftime("%Y.%m")}"
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(time.to_i, sample_record)
    end
    assert_equal(logstash_index, index_cmds.first['index']['_index'])
  end

  def test_writes_to_logstash_index_with_specified_prefix_and_dateformat
    driver.configure("logstash_format true
                      logstash_prefix myprefix
                      logstash_dateformat %Y.%m")
    time = Time.parse Date.today.iso8601
    logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m")}"
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(time.to_i, sample_record)
    end
    assert_equal(logstash_index, index_cmds.first['index']['_index'])
  end

  # A <buffer> section without 'tag' among its chunk keys is a config error.
  def test_error_if_tag_not_in_chunk_keys
    assert_raise(Fluent::ConfigError) {
      config = %{
        <buffer foo>
        </buffer>
      }
      driver.configure(config)
    }
  end

  # Custom chunk keys are fine as long as 'tag' is one of them.
  def test_can_use_custom_chunk_along_with_tag
    config = %{
      <buffer tag, foo>
      </buffer>
    }
    driver.configure(config)
  end

  def test_doesnt_add_logstash_timestamp_by_default
    stub_elastic
    driver.run(default_tag: 'test') do
      driver.feed(sample_record)
    end
    assert_nil(index_cmds[1]['@timestamp'])
  end

  # logstash_format injects @timestamp from the event time at nanosecond
  # precision.
  def test_adds_timestamp_when_logstash
    driver.configure("logstash_format true\n")
    stub_elastic
    ts = DateTime.now
    time = Fluent::EventTime.from_time(ts.to_time)
    driver.run(default_tag: 'test') do
      driver.feed(time, sample_record)
    end
    assert(index_cmds[1].has_key? '@timestamp')
    assert_equal(index_cmds[1]['@timestamp'], ts.iso8601(9))
  end

  # include_timestamp adds @timestamp even without logstash_format.
  def test_adds_timestamp_when_include_timestamp
    driver.configure("include_timestamp true\n")
    stub_elastic
    ts = DateTime.now
    time = Fluent::EventTime.from_time(ts.to_time)
    driver.run(default_tag: 'test') do
      driver.feed(time, sample_record)
    end
    assert(index_cmds[1].has_key? '@timestamp')
    assert_equal(index_cmds[1]['@timestamp'], ts.iso8601(9))
  end

  # A record-supplied @timestamp takes priority over the event time.
  # NOTE(review): merge! mutates the hash returned by sample_record; if that
  # helper memoizes its hash this could leak state across tests — confirm.
  def test_uses_custom_timestamp_when_included_in_record
    driver.configure("logstash_format true\n")
    stub_elastic
    ts = DateTime.new(2001,2,3).iso8601
    driver.run(default_tag: 'test') do
      driver.feed(sample_record.merge!('@timestamp' => ts))
    end
    assert(index_cmds[1].has_key? '@timestamp')
    assert_equal(index_cmds[1]['@timestamp'], ts)
  end

  def test_uses_custom_timestamp_when_included_in_record_without_logstash
    driver.configure("include_timestamp true\n")
    stub_elastic
    ts = DateTime.new(2001,2,3).iso8601
    driver.run(default_tag: 'test') do
      driver.feed(sample_record.merge!('@timestamp' => ts))
    end
    assert(index_cmds[1].has_key? '@timestamp')
    assert_equal(index_cmds[1]['@timestamp'], ts)
  end

  # time_key redirects the timestamp source to an arbitrary record field.
  def test_uses_custom_time_key
    driver.configure("logstash_format true
                      time_key vtm\n")
    stub_elastic
    ts = DateTime.new(2001,2,3).iso8601(9)
    driver.run(default_tag: 'test') do
      driver.feed(sample_record.merge!('vtm' => ts))
    end
    assert(index_cmds[1].has_key? '@timestamp')
    assert_equal(index_cmds[1]['@timestamp'], ts)
  end

  # time_key_format parses the custom time field with a strftime pattern.
  def test_uses_custom_time_key_with_format
    driver.configure("logstash_format true
                      time_key_format %Y-%m-%d %H:%M:%S.%N%z
                      time_key vtm\n")
    stub_elastic
    ts = "2001-02-03 13:14:01.673+02:00"
    driver.run(default_tag: 'test') do
      driver.feed(sample_record.merge!('vtm' => ts))
    end
    assert(index_cmds[1].has_key?
'@timestamp') assert_equal(index_cmds[1]['@timestamp'], DateTime.parse(ts).iso8601(9)) assert_equal("logstash-2001.02.03", index_cmds[0]['index']['_index']) end def test_uses_custom_time_key_with_format_without_logstash driver.configure("include_timestamp true index_name test time_key_format %Y-%m-%d %H:%M:%S.%N%z time_key vtm\n") stub_elastic ts = "2001-02-03 13:14:01.673+02:00" driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('vtm' => ts)) end assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], DateTime.parse(ts).iso8601(9)) assert_equal("test", index_cmds[0]['index']['_index']) end def test_uses_custom_time_key_exclude_timekey driver.configure("logstash_format true time_key vtm time_key_exclude_timestamp true\n") stub_elastic ts = DateTime.new(2001,2,3).iso8601 driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('vtm' => ts)) end assert(!index_cmds[1].key?('@timestamp'), '@timestamp should be messing') end def test_uses_custom_time_key_format driver.configure("logstash_format true time_key_format %Y-%m-%dT%H:%M:%S.%N%z\n") stub_elastic ts = "2001-02-03T13:14:01.673+02:00" driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('@timestamp' => ts)) end assert_equal("logstash-2001.02.03", index_cmds[0]['index']['_index']) assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts) end def test_uses_custom_time_key_format_without_logstash driver.configure("include_timestamp true index_name test time_key_format %Y-%m-%dT%H:%M:%S.%N%z\n") stub_elastic ts = "2001-02-03T13:14:01.673+02:00" driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('@timestamp' => ts)) end assert_equal("test", index_cmds[0]['index']['_index']) assert(index_cmds[1].has_key? 
'@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts) end data(:default => nil, :custom_tag => 'es_plugin.output.time.error') def test_uses_custom_time_key_format_logs_an_error(tag_for_error) tag_config = tag_for_error ? "time_parse_error_tag #{tag_for_error}" : '' tag_for_error = 'Fluent::ElasticsearchOutput::TimeParser.error' if tag_for_error.nil? driver.configure("logstash_format true time_key_format %Y-%m-%dT%H:%M:%S.%N%z\n#{tag_config}\n") stub_elastic ts = "2001/02/03 13:14:01,673+02:00" index = "logstash-#{Date.today.strftime("%Y.%m.%d")}" flexmock(driver.instance.router).should_receive(:emit_error_event) .with(tag_for_error, Fluent::EventTime, Hash, ArgumentError).once driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('@timestamp' => ts)) end assert_equal(index, index_cmds[0]['index']['_index']) assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts) end def test_uses_custom_time_key_format_obscure_format driver.configure("logstash_format true time_key_format %a %b %d %H:%M:%S %Z %Y\n") stub_elastic ts = "Thu Nov 29 14:33:20 GMT 2001" driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('@timestamp' => ts)) end assert_equal("logstash-2001.11.29", index_cmds[0]['index']['_index']) assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts) end def test_uses_nanosecond_precision_by_default driver.configure("logstash_format true\n") stub_elastic time = Fluent::EventTime.new(Time.now.to_i, 123456789) driver.run(default_tag: 'test') do driver.feed(time, sample_record) end assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], Time.at(time).iso8601(9)) end def test_uses_subsecond_precision_when_configured driver.configure("logstash_format true time_precision 3\n") stub_elastic time = Fluent::EventTime.new(Time.now.to_i, 123456789) driver.run(default_tag: 'test') do driver.feed(time, sample_record) end assert(index_cmds[1].has_key? 
'@timestamp') assert_equal(index_cmds[1]['@timestamp'], Time.at(time).iso8601(3)) end def test_doesnt_add_tag_key_by_default stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_nil(index_cmds[1]['tag']) end def test_adds_tag_key_when_configured driver.configure("include_tag_key true\n") stub_elastic driver.run(default_tag: 'mytag') do driver.feed(sample_record) end assert(index_cmds[1].has_key?('tag')) assert_equal(index_cmds[1]['tag'], 'mytag') end def test_adds_id_key_when_configured driver.configure("id_key request_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(index_cmds[0]['index']['_id'], '42') end class NestedIdKeyTest < self def test_adds_nested_id_key_with_dot driver.configure("id_key nested.request_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_id'], '42') end def test_adds_nested_id_key_with_dollar_dot driver.configure("id_key $.nested.request_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_id'], '42') end def test_adds_nested_id_key_with_bracket driver.configure("id_key $['nested']['request_id']\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_id'], '42') end end def test_doesnt_add_id_key_if_missing_when_configured driver.configure("id_key another_request_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_id')) end def test_adds_id_key_when_not_configured stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_id')) end def test_adds_parent_key_when_configured driver.configure("parent_key parent_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end 
assert_equal(index_cmds[0]['index']['_parent'], 'parent') end class NestedParentKeyTest < self def test_adds_nested_parent_key_with_dot driver.configure("parent_key nested.parent_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_parent'], 'parent') end def test_adds_nested_parent_key_with_dollar_dot driver.configure("parent_key $.nested.parent_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_parent'], 'parent') end def test_adds_nested_parent_key_with_bracket driver.configure("parent_key $['nested']['parent_id']\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_parent'], 'parent') end end def test_doesnt_add_parent_key_if_missing_when_configured driver.configure("parent_key another_parent_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_parent')) end def test_adds_parent_key_when_not_configured stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_parent')) end class AddsRoutingKeyWhenConfiguredTest < self def test_es6 driver('', 6).configure("routing_key routing_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(index_cmds[0]['index']['_routing'], 'routing') end def test_es7 driver('', 7).configure("routing_key routing_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(index_cmds[0]['index']['routing'], 'routing') end end class NestedRoutingKeyTest < self def test_adds_nested_routing_key_with_dot driver.configure("routing_key nested.routing_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_routing'], 'routing') 
end def test_adds_nested_routing_key_with_dollar_dot driver.configure("routing_key $.nested.routing_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_routing'], 'routing') end def test_adds_nested_routing_key_with_bracket driver.configure("routing_key $['nested']['routing_id']\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_routing'], 'routing') end end def test_doesnt_add_routing_key_if_missing_when_configured driver.configure("routing_key another_routing_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_routing')) end def test_adds_routing_key_when_not_configured stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_routing')) end def test_remove_one_key driver.configure("remove_keys key1\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record.merge('key1' => 'v1', 'key2' => 'v2')) end assert(!index_cmds[1].has_key?('key1')) assert(index_cmds[1].has_key?('key2')) end def test_remove_multi_keys driver.configure("remove_keys key1, key2\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record.merge('key1' => 'v1', 'key2' => 'v2')) end assert(!index_cmds[1].has_key?('key1')) assert(!index_cmds[1].has_key?('key2')) end def test_request_error stub_elastic_unavailable assert_raise(Fluent::Plugin::ElasticsearchOutput::RecoverableRequestFailure) { driver.run(default_tag: 'test', shutdown: false) do driver.feed(sample_record) end } end def test_request_forever omit("retry_forever test is unstable.") if ENV["CI"] stub_elastic driver.configure(Fluent::Config::Element.new( 'ROOT', '', { '@type' => 'elasticsearch', }, [ Fluent::Config::Element.new('buffer', '', { 'retry_forever' => true }, []) ] )) stub_elastic_timeout 
assert_raise(Timeout::Error) {
  driver.run(default_tag: 'test', timeout: 10, force_flush_retry: true) do
    driver.feed(sample_record)
  end
}
end

def test_connection_failed
  connection_resets = 0
  # Count transport-level attempts; every request fails before a response.
  stub_request(:post, "http://localhost:9200/_bulk").with do |req|
    connection_resets += 1
    raise Faraday::ConnectionFailed, "Test message"
  end
  assert_raise(Fluent::Plugin::ElasticsearchOutput::RecoverableRequestFailure) {
    driver.run(default_tag: 'test', shutdown: false) do
      driver.feed(sample_record)
    end
  }
  # Only one attempt should have been made per flush.
  assert_equal(connection_resets, 1)
end

def test_reconnect_on_error_enabled
  connection_resets = 0
  stub_request(:post, "http://localhost:9200/_bulk").with do |req|
    connection_resets += 1
    raise ZeroDivisionError, "any not host_unreachable_exceptions exception"
  end
  driver.configure("reconnect_on_error true\n")
  assert_raise(Fluent::Plugin::ElasticsearchOutput::RecoverableRequestFailure) {
    driver.run(default_tag: 'test', shutdown: false) do
      driver.feed(sample_record)
    end
  }
  assert_raise(Timeout::Error) {
    driver.run(default_tag: 'test', shutdown: false) do
      driver.feed(sample_record)
    end
  }
  # FIXME: Consider keywords arguments in #run and how to test this later.
  # Because v0.14 test driver does not have 1 to 1 correspondence between #run and #flush in tests.
assert_equal(connection_resets, 1) end def test_reconnect_on_error_disabled connection_resets = 0 stub_request(:post, "http://localhost:9200/_bulk").with do |req| connection_resets += 1 raise ZeroDivisionError, "any not host_unreachable_exceptions exception" end driver.configure("reconnect_on_error false\n") assert_raise(Fluent::Plugin::ElasticsearchOutput::RecoverableRequestFailure) { driver.run(default_tag: 'test', shutdown: false) do driver.feed(sample_record) end } assert_raise(Timeout::Error) { driver.run(default_tag: 'test', shutdown: false) do driver.feed(sample_record) end } assert_equal(connection_resets, 1) end def test_bulk_error_retags_when_configured driver.configure("retry_tag retry\n") stub_request(:post, 'http://localhost:9200/_bulk') .to_return(lambda do |req| { :status => 200, :headers => { 'Content-Type' => 'json' }, :body => %({ "took" : 1, "errors" : true, "items" : [ { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc", "status" : 500, "error" : { "type" : "some unrecognized type", "reason":"some error to cause version mismatch" } } } ] }) } end) driver.run(default_tag: 'test') do driver.feed(1, sample_record) end assert_equal [['retry', 1, sample_record]], driver.events end def test_create_should_write_records_with_ids_and_skip_those_without driver.configure("write_operation create\nid_key my_id\n@log_level debug") stub_request(:post, 'http://localhost:9200/_bulk') .to_return(lambda do |req| { :status => 200, :headers => { 'Content-Type' => 'json' }, :body => %({ "took" : 1, "errors" : true, "items" : [ { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc" } }, { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "xyz", "status" : 500, "error" : { "type" : "some unrecognized type", "reason":"some error to cause version mismatch" } } } ] }) } end) sample_record1 = sample_record('my_id' => 'abc') sample_record4 = sample_record('my_id' => 'xyz') driver.run(default_tag: 'test') do driver.feed(1, sample_record1) 
driver.feed(2, sample_record) driver.feed(3, sample_record) driver.feed(4, sample_record4) end logs = driver.logs # one record succeeded while the other should be 'retried' assert_equal [['test', 4, sample_record4]], driver.events assert_logs_include(logs, /(Dropping record)/, 2) end def test_create_should_write_records_with_ids_and_emit_those_without driver.configure("write_operation create\nid_key my_id\nemit_error_for_missing_id true\n@log_level debug") stub_request(:post, 'http://localhost:9200/_bulk') .to_return(lambda do |req| { :status => 200, :headers => { 'Content-Type' => 'json' }, :body => %({ "took" : 1, "errors" : true, "items" : [ { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc" } }, { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "xyz", "status" : 500, "error" : { "type" : "some unrecognized type", "reason":"some error to cause version mismatch" } } } ] }) } end) sample_record1 = sample_record('my_id' => 'abc') sample_record4 = sample_record('my_id' => 'xyz') driver.run(default_tag: 'test') do driver.feed(1, sample_record1) driver.feed(2, sample_record) driver.feed(3, sample_record) driver.feed(4, sample_record4) end error_log = driver.error_events.map {|e| e.last.message } # one record succeeded while the other should be 'retried' assert_equal [['test', 4, sample_record4]], driver.events assert_logs_include(error_log, /(Missing '_id' field)/, 2) end def test_bulk_error stub_request(:post, 'http://localhost:9200/_bulk') .to_return(lambda do |req| { :status => 200, :headers => { 'Content-Type' => 'json' }, :body => %({ "took" : 1, "errors" : true, "items" : [ { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc", "status" : 500, "error" : { "type" : "some unrecognized type", "reason":"some error to cause version mismatch" } } }, { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc", "status" : 201 } }, { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc", "status" : 500, "error" : { "type" : 
"some unrecognized type", "reason":"some error to cause version mismatch" } } }, { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc", "_id" : "abc", "status" : 409 } } ] }) } end) driver.run(default_tag: 'test') do driver.feed(1, sample_record) driver.feed(2, sample_record) driver.feed(3, sample_record) driver.feed(4, sample_record) end expect = [['test', 1, sample_record], ['test', 3, sample_record]] assert_equal expect, driver.events end def test_update_should_not_write_if_theres_no_id driver.configure("write_operation update\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_nil(index_cmds) end def test_upsert_should_not_write_if_theres_no_id driver.configure("write_operation upsert\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_nil(index_cmds) end def test_create_should_not_write_if_theres_no_id driver.configure("write_operation create\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_nil(index_cmds) end def test_update_should_write_update_op_and_doc_as_upsert_is_false driver.configure("write_operation update id_key request_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[0].has_key?("update")) assert(!index_cmds[1]["doc_as_upsert"]) assert(!index_cmds[1]["upsert"]) end def test_update_should_remove_keys_from_doc_when_keys_are_skipped driver.configure("write_operation update id_key request_id remove_keys_on_update parent_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[1]["doc"]) assert(!index_cmds[1]["doc"]["parent_id"]) end def test_upsert_should_write_update_op_and_doc_as_upsert_is_true driver.configure("write_operation upsert id_key request_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[0].has_key?("update")) assert(index_cmds[1]["doc_as_upsert"]) 
assert(!index_cmds[1]["upsert"]) end def test_upsert_should_write_update_op_upsert_and_doc_when_keys_are_skipped driver.configure("write_operation upsert id_key request_id remove_keys_on_update parent_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[0].has_key?("update")) assert(!index_cmds[1]["doc_as_upsert"]) assert(index_cmds[1]["upsert"]) assert(index_cmds[1]["doc"]) end def test_upsert_should_remove_keys_from_doc_when_keys_are_skipped driver.configure("write_operation upsert id_key request_id remove_keys_on_update parent_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[1]["upsert"] != index_cmds[1]["doc"]) assert(!index_cmds[1]["doc"]["parent_id"]) assert(index_cmds[1]["upsert"]["parent_id"]) end def test_upsert_should_remove_multiple_keys_when_keys_are_skipped driver.configure("write_operation upsert id_key id remove_keys_on_update foo,baz") stub_elastic driver.run(default_tag: 'test') do driver.feed("id" => 1, "foo" => "bar", "baz" => "quix", "zip" => "zam") end assert( index_cmds[1]["doc"] == { "id" => 1, "zip" => "zam", } ) assert( index_cmds[1]["upsert"] == { "id" => 1, "foo" => "bar", "baz" => "quix", "zip" => "zam", } ) end def test_upsert_should_remove_keys_from_when_the_keys_are_in_the_record driver.configure("write_operation upsert id_key id remove_keys_on_update_key keys_to_skip") stub_elastic driver.run(default_tag: 'test') do driver.feed("id" => 1, "foo" => "bar", "baz" => "quix", "keys_to_skip" => ["baz"]) end assert( index_cmds[1]["doc"] == { "id" => 1, "foo" => "bar", } ) assert( index_cmds[1]["upsert"] == { "id" => 1, "foo" => "bar", "baz" => "quix", } ) end def test_upsert_should_remove_keys_from_key_on_record_has_higher_presedence_than_config driver.configure("write_operation upsert id_key id remove_keys_on_update foo,bar remove_keys_on_update_key keys_to_skip") stub_elastic driver.run(default_tag: 'test') do driver.feed("id" => 1, "foo" 
=> "bar", "baz" => "quix", "keys_to_skip" => ["baz"]) end assert( index_cmds[1]["doc"] == { "id" => 1, # we only expect baz to be stripped here, if the config was more important # foo would be stripped too. "foo" => "bar", } ) assert( index_cmds[1]["upsert"] == { "id" => 1, "foo" => "bar", "baz" => "quix", } ) end def test_create_should_write_create_op driver.configure("write_operation create id_key request_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[0].has_key?("create")) end def test_include_index_in_url stub_elastic('http://localhost:9200/logstash-2018.01.01/_bulk') driver.configure("index_name logstash-2018.01.01 include_index_in_url true") driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(index_cmds.length, 2) assert_equal(index_cmds.first['index']['_index'], nil) end def test_use_simple_sniffer require 'fluent/plugin/elasticsearch_simple_sniffer' stub_elastic_info stub_elastic config = %[ sniffer_class_name Fluent::Plugin::ElasticsearchSimpleSniffer log_level debug with_transporter_log true reload_connections true reload_after 1 ] driver(config, nil) driver.run(default_tag: 'test') do driver.feed(sample_record) end log = driver.logs # 2 or 3 - one for the ping, one for the _bulk, (and client.info) assert_logs_include_compare_size(3, ">", log, /In Fluent::Plugin::ElasticsearchSimpleSniffer hosts/) assert_logs_include_compare_size(1, "<=", log, /In Fluent::Plugin::ElasticsearchSimpleSniffer hosts/) end end Fix testcase for retryable #detect_es_major_version require 'helper' require 'date' require 'fluent/test/helpers' require 'json' require 'fluent/test/driver/output' require 'flexmock/test_unit' class ElasticsearchOutput < Test::Unit::TestCase include FlexMock::TestCase include Fluent::Test::Helpers attr_accessor :index_cmds, :index_command_counts def setup Fluent::Test.setup require 'fluent/plugin/out_elasticsearch' @driver = nil log = Fluent::Engine.log 
log.out.logs.slice!(0, log.out.logs.length)
end

# Builds (and memoizes) the output-plugin test driver.
# es_version / client_version patch version detection onto the plugin via
# module_eval so tests run without a live Elasticsearch; pass nil for
# es_version to keep the plugin's real #detect_es_major_version.
def driver(conf='', es_version=5, client_version="\"5.0\"")
  # For request stub to detect compatibility.
  @es_version ||= es_version
  @client_version ||= client_version
  if @es_version
    Fluent::Plugin::ElasticsearchOutput.module_eval(<<-CODE)
      def detect_es_major_version
        #{@es_version}
      end
    CODE
  end
  Fluent::Plugin::ElasticsearchOutput.module_eval(<<-CODE)
    def client_library_version
      #{@client_version}
    end
  CODE
  @driver ||= Fluent::Test::Driver::Output.new(Fluent::Plugin::ElasticsearchOutput) {
    # v0.12's test driver assume format definition. This simulates ObjectBufferedOutput format
    if !defined?(Fluent::Plugin::Output)
      def format(tag, time, record)
        [time, record].to_msgpack
      end
    end
  }.configure(conf)
end

def default_type_name
  Fluent::Plugin::ElasticsearchOutput::DEFAULT_TYPE_NAME
end

# Canonical event used by most tests; +content+ overrides/extends fields.
def sample_record(content={})
  {'age' => 26, 'request_id' => '42', 'parent_id' => 'parent', 'routing_id' => 'routing'}.merge(content)
end

def nested_sample_record
  {'nested' =>
    {'age' => 26, 'parent_id' => 'parent', 'routing_id' => 'routing', 'request_id' => '42'}
  }
end

# Stubs GET / so version probing sees the given ES version number.
def stub_elastic_info(url="http://localhost:9200/", version="6.4.2")
  body ="{\"version\":{\"number\":\"#{version}\"}}"
  stub_request(:get, url).to_return({:status => 200, :body => body, :headers => { 'Content-Type' => 'json' } })
end

# Captures each bulk request body into @index_cmds as parsed JSON lines.
def stub_elastic(url="http://localhost:9200/_bulk")
  stub_request(:post, url).with do |req|
    @index_cmds = req.body.split("\n").map {|r| JSON.parse(r) }
  end
end

def stub_elastic_unavailable(url="http://localhost:9200/_bulk")
  stub_request(:post, url).to_return(:status => [503, "Service Unavailable"])
end

def stub_elastic_timeout(url="http://localhost:9200/_bulk")
  stub_request(:post, url).to_timeout
end

# Tallies how many bulk command lines were posted per URL.
def stub_elastic_with_store_index_command_counts(url="http://localhost:9200/_bulk")
  if @index_command_counts == nil
    @index_command_counts = {}
    @index_command_counts.default = 0
  end
  stub_request(:post, url).with do |req|
    index_cmds = req.body.split("\n").map
{|r| JSON.parse(r) }
    @index_command_counts[url] += index_cmds.size
  end
end

# Builds a fake Elasticsearch _bulk response body (JSON string) for the
# given request. When error_el/error_status/error are all given, the item
# at index +error_el+ is marked failed so error-handling paths can be
# exercised; otherwise every item reports success.
def make_response_body(req, error_el = nil, error_status = nil, error = nil)
  req_index_cmds = req.body.split("\n").map { |r| JSON.parse(r) }
  items = []
  count = 0
  ids = 1
  op = nil
  index = nil
  type = nil
  id = nil
  req_index_cmds.each do |cmd|
    if count.even?
      # Even lines are the bulk op metadata headers.
      op = cmd.keys[0]
      index = cmd[op]['_index']
      type = cmd[op]['_type']
      if cmd[op].has_key?('_id')
        id = cmd[op]['_id']
      else
        # Note: this appears to be an undocumented feature of Elasticsearch
        # https://www.elastic.co/guide/en/elasticsearch/reference/2.4/docs-bulk.html
        # When you submit an "index" write_operation, with no "_id" field in the
        # metadata header, Elasticsearch will turn this into a "create"
        # operation in the response.
        if "index" == op
          op = "create"
        end
        id = ids
        ids += 1
      end
    else
      # Odd lines are document sources; emit one success item per document.
      item = {
        op => {
          '_index' => index,
          '_type' => type,
          '_id' => id,
          '_version' => 1,
          '_shards' => {
            'total' => 1,
            'successful' => 1,
            'failed' => 0
          },
          'status' => op == 'create' ? 201 : 200
        }
      }
      items.push(item)
    end
    count += 1
  end
  # Inject the requested failure only when all three error args are present.
  if !error_el.nil? && !error_status.nil? && !error.nil?
op = items[error_el].keys[0] items[error_el][op].delete('_version') items[error_el][op].delete('_shards') items[error_el][op]['error'] = error items[error_el][op]['status'] = error_status errors = true else errors = false end @index_cmds = items body = { 'took' => 6, 'errors' => errors, 'items' => items } return body.to_json end def stub_elastic_bad_argument(url="http://localhost:9200/_bulk") error = { "type" => "mapper_parsing_exception", "reason" => "failed to parse [...]", "caused_by" => { "type" => "illegal_argument_exception", "reason" => "Invalid format: \"...\"" } } stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 400, error), :headers => { 'Content-Type' => 'json' } } }) end def stub_elastic_bulk_error(url="http://localhost:9200/_bulk") error = { "type" => "some-unrecognized-error", "reason" => "some message printed here ...", } stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 500, error), :headers => { 'Content-Type' => 'json' } } }) end def stub_elastic_bulk_rejected(url="http://localhost:9200/_bulk") error = { "status" => 500, "type" => "es_rejected_execution_exception", "reason" => "rejected execution of org.elasticsearch.transport.TransportService$4@1a34d37a on EsThreadPoolExecutor[bulk, queue capacity = 50, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@312a2162[Running, pool size = 32, active threads = 32, queued tasks = 50, completed tasks = 327053]]" } stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 429, error), :headers => { 'Content-Type' => 'json' } } }) end def stub_elastic_out_of_memory(url="http://localhost:9200/_bulk") error = { "status" => 500, "type" => "out_of_memory_error", "reason" => "Java heap space" } stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 500, error), :headers => { 'Content-Type' => 'json' } 
} }) end def stub_elastic_unexpected_response_op(url="http://localhost:9200/_bulk") error = { "category" => "some-other-type", "reason" => "some-other-reason" } stub_request(:post, url).to_return(lambda { |req| bodystr = make_response_body(req, 0, 500, error); body = JSON.parse(bodystr); body['items'][0]['unknown'] = body['items'][0].delete('create'); { :status => 200, :body => body.to_json, :headers => { 'Content-Type' => 'json' } } }) end def assert_logs_include(logs, msg, exp_matches=1) matches = logs.grep /#{msg}/ assert_equal(exp_matches, matches.length, "Logs do not contain '#{msg}' '#{logs}'") end def assert_logs_include_compare_size(exp_matches=1, operator="<=", logs="", msg="") matches = logs.grep /#{msg}/ assert_compare(exp_matches, operator, matches.length, "Logs do not contain '#{msg}' '#{logs}'") end def test_configure config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe } instance = driver(config).instance assert_equal 'logs.google.com', instance.host assert_equal 777, instance.port assert_equal :https, instance.scheme assert_equal '/es/', instance.path assert_equal 'john', instance.user assert_equal 'doe', instance.password assert_equal :TLSv1, instance.ssl_version assert_nil instance.client_key assert_nil instance.client_cert assert_nil instance.client_key_pass assert_false instance.with_transporter_log assert_equal :"application/json", instance.content_type assert_equal "fluentd", default_type_name assert_equal :excon, instance.http_backend assert_false instance.prefer_oj_serializer assert_equal ["out_of_memory_error", "es_rejected_execution_exception"], instance.unrecoverable_error_types assert_true instance.verify_es_version_at_startup assert_equal Fluent::Plugin::ElasticsearchOutput::DEFAULT_ELASTICSEARCH_VERSION, instance.default_elasticsearch_version assert_false instance.log_es_400_reason end test 'configure Content-Type' do config = %{ content_type application/x-ndjson } instance = driver(config).instance 
assert_equal :"application/x-ndjson", instance.content_type end test 'invalid Content-Type' do config = %{ content_type nonexistent/invalid } assert_raise(Fluent::ConfigError) { instance = driver(config).instance } end test 'invalid specification of times of retrying template installation' do config = %{ max_retry_putting_template -3 } assert_raise(Fluent::ConfigError) { instance = driver(config).instance } end test 'Detected Elasticsearch 7' do config = %{ type_name changed } instance = driver(config, 7).instance assert_equal '_doc', instance.type_name end test 'Detected Elasticsearch 6 and insecure security' do config = %{ ssl_version TLSv1_1 @log_level warn scheme https } instance = driver(config, 6).instance logs = driver.logs assert_logs_include(logs, /Detected ES 6.x or above and enabled insecure security/, 1) end test 'Detected Elasticsearch 7 and secure security' do config = %{ ssl_version TLSv1_2 @log_level warn scheme https } instance = driver(config, 7).instance logs = driver.logs assert_logs_include(logs, /Detected ES 6.x or above and enabled insecure security/, 0) end test 'Pass Elasticsearch and client library are same' do config = %{ @log_level warn validate_client_version true } assert_nothing_raised do driver(config, 6, "\"6.1.0\"").instance end end test 'Detected Elasticsearch and client library mismatch' do config = %{ @log_level warn validate_client_version true } assert_raise_message(/Detected ES 7 but you use ES client 5.0/) do driver(config, 7, "\"5.0.5\"").instance end end test 'lack of tag in chunk_keys' do assert_raise_message(/'tag' in chunk_keys is required./) do driver(Fluent::Config::Element.new( 'ROOT', '', { '@type' => 'elasticsearch', 'host' => 'log.google.com', 'port' => 777, 'scheme' => 'https', 'path' => '/es/', 'user' => 'john', 'pasword' => 'doe', }, [ Fluent::Config::Element.new('buffer', 'mykey', { 'chunk_keys' => 'mykey' }, []) ] )) end end sub_test_case 'connection exceptions' do test 'default connection exception' do 
driver(Fluent::Config::Element.new( 'ROOT', '', { '@type' => 'elasticsearch', 'host' => 'log.google.com', 'port' => 777, 'scheme' => 'https', 'path' => '/es/', 'user' => 'john', 'pasword' => 'doe', }, [ Fluent::Config::Element.new('buffer', 'tag', { }, []) ] )) logs = driver.logs assert_logs_include(logs, /you should specify 2 or more 'flush_thread_count'/, 1) end end class GetElasticsearchVersionTest < self def create_driver(conf='', client_version="\"5.0\"") # For request stub to detect compatibility. @client_version ||= client_version # Ensure original implementation existence. Fluent::Plugin::ElasticsearchOutput.module_eval(<<-CODE) def detect_es_major_version @_es_info ||= client.info @_es_info["version"]["number"].to_i end CODE Fluent::Plugin::ElasticsearchOutput.module_eval(<<-CODE) def client_library_version #{@client_version} end CODE Fluent::Test::Driver::Output.new(Fluent::Plugin::ElasticsearchOutput).configure(conf) end def test_retry_get_es_version config = %{ host logs.google.com port 778 scheme https path /es/ user john password doe verify_es_version_at_startup true max_retry_get_es_version 3 } connection_resets = 0 stub_request(:get, "https://john:doe@logs.google.com:778/es//").with do |req| connection_resets += 1 raise Faraday::ConnectionFailed, "Test message" end assert_raise(Fluent::Plugin::ElasticsearchError::RetryableOperationExhaustedFailure) do create_driver(config) end assert_equal(connection_resets, 4) end end def test_template_already_present config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name logstash template_file /abc123 } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash"). 
to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_not_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash") end def test_template_create cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name logstash template_file #{template_file} } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash"). to_return(:status => 404, :body => "", :headers => {}) # creation stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash"). to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash", times: 1) end def test_custom_template_create cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_alias_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name myapp_alias_template template_file #{template_file} customize_template {"--appid--": "myapp-logs","--index_prefix--":"mylogs"} } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template"). to_return(:status => 404, :body => "", :headers => {}) # creation stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template"). 
to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template", times: 1) end def test_custom_template_with_rollover_index_create cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_alias_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name myapp_alias_template template_file #{template_file} customize_template {"--appid--": "myapp-logs","--index_prefix--":"mylogs"} rollover_index true index_date_pattern now/w{xxxx.ww} deflector_alias myapp_deflector index_prefix mylogs application_name myapp } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template"). to_return(:status => 404, :body => "", :headers => {}) # creation stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template"). to_return(:status => 200, :body => "", :headers => {}) # creation of index which can rollover stub_request(:put, "https://john:doe@logs.google.com:777/es//%3Cmylogs-myapp-%7Bnow%2Fw%7Bxxxx.ww%7D%7D-000001%3E"). to_return(:status => 200, :body => "", :headers => {}) # check if alias exists stub_request(:head, "https://john:doe@logs.google.com:777/es//_alias/myapp_deflector"). to_return(:status => 404, :body => "", :headers => {}) # put the alias for the index stub_request(:put, "https://john:doe@logs.google.com:777/es//%3Cmylogs-myapp-%7Bnow%2Fw%7Bxxxx.ww%7D%7D-000001%3E/_alias/myapp_deflector"). 
to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template", times: 1) end def test_template_overwrite cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name logstash template_file #{template_file} template_overwrite true } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash"). to_return(:status => 200, :body => "", :headers => {}) # creation stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash"). to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash", times: 1) end def test_custom_template_overwrite cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name myapp_alias_template template_file #{template_file} template_overwrite true customize_template {"--appid--": "myapp-logs","--index_prefix--":"mylogs"} } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template"). to_return(:status => 200, :body => "", :headers => {}) # creation stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template"). 
to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template", times: 1) end def test_custom_template_with_rollover_index_overwrite cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name myapp_alias_template template_file #{template_file} template_overwrite true customize_template {"--appid--": "myapp-logs","--index_prefix--":"mylogs"} deflector_alias myapp_deflector rollover_index true index_prefix mylogs application_name myapp } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template"). to_return(:status => 200, :body => "", :headers => {}) # creation stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template"). to_return(:status => 200, :body => "", :headers => {}) # creation of index which can rollover stub_request(:put, "https://john:doe@logs.google.com:777/es//%3Cmylogs-myapp-%7Bnow%2Fd%7D-000001%3E"). to_return(:status => 200, :body => "", :headers => {}) # check if alias exists stub_request(:head, "https://john:doe@logs.google.com:777/es//_alias/myapp_deflector"). to_return(:status => 404, :body => "", :headers => {}) # put the alias for the index stub_request(:put, "https://john:doe@logs.google.com:777/es//%3Cmylogs-myapp-%7Bnow%2Fd%7D-000001%3E/_alias/myapp_deflector"). 
to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/myapp_alias_template", times: 1) end def test_template_create_invalid_filename config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name logstash template_file /abc123 } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash"). to_return(:status => 404, :body => "", :headers => {}) assert_raise(RuntimeError) { driver(config) } end def test_template_retry_install cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 778 scheme https path /es/ user john password doe template_name logstash template_file #{template_file} max_retry_putting_template 3 } connection_resets = 0 # check if template exists stub_request(:get, "https://john:doe@logs.google.com:778/es//_template/logstash").with do |req| connection_resets += 1 raise Faraday::ConnectionFailed, "Test message" end assert_raise(Fluent::Plugin::ElasticsearchError::RetryableOperationExhaustedFailure) do driver(config) end assert_equal(connection_resets, 4) end def test_templates_create cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe templates {"logstash1":"#{template_file}", "logstash2":"#{template_file}","logstash3":"#{template_file}" } } stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash1"). 
to_return(:status => 404, :body => "", :headers => {}) stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash2"). to_return(:status => 404, :body => "", :headers => {}) stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash3"). to_return(:status => 200, :body => "", :headers => {}) #exists stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash3"). to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested( :put, "https://john:doe@logs.google.com:777/es//_template/logstash1", times: 1) assert_requested( :put, "https://john:doe@logs.google.com:777/es//_template/logstash2", times: 1) assert_not_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash3") #exists end def test_templates_overwrite cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe templates {"logstash1":"#{template_file}", "logstash2":"#{template_file}","logstash3":"#{template_file}" } template_overwrite true } stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash1"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash2"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash3"). 
to_return(:status => 200, :body => "", :headers => {}) #exists stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash3"). to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1", times: 1) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2", times: 1) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash3", times: 1) end def test_templates_not_used cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe template_name logstash template_file #{template_file} templates {"logstash1":"#{template_file}", "logstash2":"#{template_file}" } } # connection start stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash"). to_return(:status => 404, :body => "", :headers => {}) stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash1"). to_return(:status => 404, :body => "", :headers => {}) stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash2"). to_return(:status => 404, :body => "", :headers => {}) #creation stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1"). 
to_return(:status => 200, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2"). to_return(:status => 200, :body => "", :headers => {}) driver(config) assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash", times: 1) assert_not_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1") assert_not_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2") end def test_templates_can_be_partially_created_if_error_occurs cwd = File.dirname(__FILE__) template_file = File.join(cwd, 'test_template.json') config = %{ host logs.google.com port 777 scheme https path /es/ user john password doe templates {"logstash1":"#{template_file}", "logstash2":"/abc" } } stub_request(:head, "https://john:doe@logs.google.com:777/es//"). to_return(:status => 200, :body => "", :headers => {}) # check if template exists stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash1"). to_return(:status => 404, :body => "", :headers => {}) stub_request(:get, "https://john:doe@logs.google.com:777/es//_template/logstash2"). to_return(:status => 404, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1"). to_return(:status => 200, :body => "", :headers => {}) stub_request(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2"). 
to_return(:status => 200, :body => "", :headers => {}) assert_raise(RuntimeError) { driver(config) } assert_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash1", times: 1) assert_not_requested(:put, "https://john:doe@logs.google.com:777/es//_template/logstash2") end def test_legacy_hosts_list config = %{ hosts host1:50,host2:100,host3 scheme https path /es/ port 123 } instance = driver(config).instance assert_equal 3, instance.get_connection_options[:hosts].length host1, host2, host3 = instance.get_connection_options[:hosts] assert_equal 'host1', host1[:host] assert_equal 50, host1[:port] assert_equal 'https', host1[:scheme] assert_equal '/es/', host2[:path] assert_equal 'host3', host3[:host] assert_equal 123, host3[:port] assert_equal 'https', host3[:scheme] assert_equal '/es/', host3[:path] end def test_hosts_list config = %{ hosts https://john:password@host1:443/elastic/,http://host2 path /default_path user default_user password default_password } instance = driver(config).instance assert_equal 2, instance.get_connection_options[:hosts].length host1, host2 = instance.get_connection_options[:hosts] assert_equal 'host1', host1[:host] assert_equal 443, host1[:port] assert_equal 'https', host1[:scheme] assert_equal 'john', host1[:user] assert_equal 'password', host1[:password] assert_equal '/elastic/', host1[:path] assert_equal 'host2', host2[:host] assert_equal 'http', host2[:scheme] assert_equal 'default_user', host2[:user] assert_equal 'default_password', host2[:password] assert_equal '/default_path', host2[:path] end def test_hosts_list_with_escape_placeholders config = %{ hosts https://%{j+hn}:%{passw@rd}@host1:443/elastic/,http://host2 path /default_path user default_user password default_password } instance = driver(config).instance assert_equal 2, instance.get_connection_options[:hosts].length host1, host2 = instance.get_connection_options[:hosts] assert_equal 'host1', host1[:host] assert_equal 443, host1[:port] assert_equal 
'https', host1[:scheme] assert_equal 'j%2Bhn', host1[:user] assert_equal 'passw%40rd', host1[:password] assert_equal '/elastic/', host1[:path] assert_equal 'host2', host2[:host] assert_equal 'http', host2[:scheme] assert_equal 'default_user', host2[:user] assert_equal 'default_password', host2[:password] assert_equal '/default_path', host2[:path] end def test_single_host_params_and_defaults config = %{ host logs.google.com user john password doe } instance = driver(config).instance assert_equal 1, instance.get_connection_options[:hosts].length host1 = instance.get_connection_options[:hosts][0] assert_equal 'logs.google.com', host1[:host] assert_equal 9200, host1[:port] assert_equal 'http', host1[:scheme] assert_equal 'john', host1[:user] assert_equal 'doe', host1[:password] assert_equal nil, host1[:path] end def test_single_host_params_and_defaults_with_escape_placeholders config = %{ host logs.google.com user %{j+hn} password %{d@e} } instance = driver(config).instance assert_equal 1, instance.get_connection_options[:hosts].length host1 = instance.get_connection_options[:hosts][0] assert_equal 'logs.google.com', host1[:host] assert_equal 9200, host1[:port] assert_equal 'http', host1[:scheme] assert_equal 'j%2Bhn', host1[:user] assert_equal 'd%40e', host1[:password] assert_equal nil, host1[:path] end def test_content_type_header stub_request(:head, "http://localhost:9200/"). to_return(:status => 200, :body => "", :headers => {}) if Elasticsearch::VERSION >= "6.0.2" elastic_request = stub_request(:post, "http://localhost:9200/_bulk"). with(headers: { "Content-Type" => "application/x-ndjson" }) else elastic_request = stub_request(:post, "http://localhost:9200/_bulk"). 
with(headers: { "Content-Type" => "application/json" }) end driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_requested(elastic_request) end def test_write_message_with_bad_chunk driver.configure("target_index_key bad_value\n@log_level debug\n") stub_elastic driver.run(default_tag: 'test') do driver.feed({'bad_value'=>"\255"}) end error_log = driver.error_events.map {|e| e.last.message } assert_logs_include(error_log, /(input string invalid)|(invalid byte sequence in UTF-8)/) end def test_writes_to_default_index stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal('fluentd', index_cmds.first['index']['_index']) end def test_writes_to_default_type stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(default_type_name, index_cmds.first['index']['_type']) end def test_writes_to_speficied_index driver.configure("index_name myindex\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal('myindex', index_cmds.first['index']['_index']) end class IndexNamePlaceholdersTest < self def test_writes_to_speficied_index_with_tag_placeholder driver.configure("index_name myindex.${tag}\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal('myindex.test', index_cmds.first['index']['_index']) end def test_writes_to_speficied_index_with_time_placeholder driver.configure(Fluent::Config::Element.new( 'ROOT', '', { '@type' => 'elasticsearch', 'index_name' => 'myindex.%Y.%m.%d', }, [ Fluent::Config::Element.new('buffer', 'tag,time', { 'chunk_keys' => ['tag', 'time'], 'timekey' => 3600, }, []) ] )) stub_elastic time = Time.parse Date.today.iso8601 driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record) end assert_equal("myindex.#{time.utc.strftime("%Y.%m.%d")}", index_cmds.first['index']['_index']) end def test_writes_to_speficied_index_with_custom_key_placeholder 
driver.configure(Fluent::Config::Element.new( 'ROOT', '', { '@type' => 'elasticsearch', 'index_name' => 'myindex.${pipeline_id}', }, [ Fluent::Config::Element.new('buffer', 'tag,pipeline_id', {}, []) ] )) time = Time.parse Date.today.iso8601 pipeline_id = "mypipeline" logstash_index = "myindex.#{pipeline_id}" stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record.merge({"pipeline_id" => pipeline_id})) end assert_equal(logstash_index, index_cmds.first['index']['_index']) end end def test_writes_to_speficied_index_uppercase driver.configure("index_name MyIndex\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end # Allthough index_name has upper-case characters, # it should be set as lower-case when sent to elasticsearch. assert_equal('myindex', index_cmds.first['index']['_index']) end def test_writes_to_target_index_key driver.configure("target_index_key @target_index\n") stub_elastic record = sample_record.clone driver.run(default_tag: 'test') do driver.feed(sample_record.merge('@target_index' => 'local-override')) end assert_equal('local-override', index_cmds.first['index']['_index']) assert_nil(index_cmds[1]['@target_index']) end def test_writes_to_target_index_key_logstash driver.configure("target_index_key @target_index logstash_format true") time = Time.parse Date.today.iso8601 stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record.merge('@target_index' => 'local-override')) end assert_equal('local-override', index_cmds.first['index']['_index']) end def test_writes_to_target_index_key_logstash_uppercase driver.configure("target_index_key @target_index logstash_format true") time = Time.parse Date.today.iso8601 stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record.merge('@target_index' => 'local-override')) end # Allthough @target_index has upper-case characters, # it should be set as lower-case when sent to elasticsearch. 
assert_equal('local-override', index_cmds.first['index']['_index']) end def test_writes_to_default_index_with_pipeline pipeline = "fluentd" driver.configure("pipeline #{pipeline}") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(pipeline, index_cmds.first['index']['pipeline']) end def test_writes_to_target_index_key_fallack driver.configure("target_index_key @target_index\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal('fluentd', index_cmds.first['index']['_index']) end def test_writes_to_target_index_key_fallack_logstash driver.configure("target_index_key @target_index\n logstash_format true") time = Time.parse Date.today.iso8601 logstash_index = "logstash-#{time.getutc.strftime("%Y.%m.%d")}" stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record) end assert_equal(logstash_index, index_cmds.first['index']['_index']) end data("border" => {"es_version" => 6, "_type" => "mytype"}, "fixed_behavior"=> {"es_version" => 7, "_type" => "_doc"}, ) def test_writes_to_speficied_type(data) driver('', data["es_version"]).configure("type_name mytype\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(data['_type'], index_cmds.first['index']['_type']) end data("border" => {"es_version" => 6, "_type" => "mytype.test"}, "fixed_behavior"=> {"es_version" => 7, "_type" => "_doc"}, ) def test_writes_to_speficied_type_with_placeholders(data) driver('', data["es_version"]).configure("type_name mytype.${tag}\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(data['_type'], index_cmds.first['index']['_type']) end data("old" => {"es_version" => 2, "_type" => "local-override"}, "old_behavior" => {"es_version" => 5, "_type" => "local-override"}, "border" => {"es_version" => 6, "_type" => "fluentd"}, "fixed_behavior"=> {"es_version" => 7, "_type" => "_doc"}, ) def 
test_writes_to_target_type_key(data) driver('', data["es_version"]).configure("target_type_key @target_type\n") stub_elastic record = sample_record.clone driver.run(default_tag: 'test') do driver.feed(sample_record.merge('@target_type' => 'local-override')) end assert_equal(data["_type"], index_cmds.first['index']['_type']) assert_nil(index_cmds[1]['@target_type']) end def test_writes_to_target_type_key_fallack_to_default driver.configure("target_type_key @target_type\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(default_type_name, index_cmds.first['index']['_type']) end def test_writes_to_target_type_key_fallack_to_type_name driver.configure("target_type_key @target_type type_name mytype") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal('mytype', index_cmds.first['index']['_type']) end data("old" => {"es_version" => 2, "_type" => "local-override"}, "old_behavior" => {"es_version" => 5, "_type" => "local-override"}, "border" => {"es_version" => 6, "_type" => "fluentd"}, "fixed_behavior"=> {"es_version" => 7, "_type" => "_doc"}, ) def test_writes_to_target_type_key_nested(data) driver('', data["es_version"]).configure("target_type_key kubernetes.labels.log_type\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record.merge('kubernetes' => { 'labels' => { 'log_type' => 'local-override' } })) end assert_equal(data["_type"], index_cmds.first['index']['_type']) assert_nil(index_cmds[1]['kubernetes']['labels']['log_type']) end def test_writes_to_target_type_key_fallack_to_default_nested driver.configure("target_type_key kubernetes.labels.log_type\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record.merge('kubernetes' => { 'labels' => { 'other_labels' => 'test' } })) end assert_equal(default_type_name, index_cmds.first['index']['_type']) end def test_writes_to_speficied_host driver.configure("host 192.168.33.50\n") elastic_request = 
stub_elastic("http://192.168.33.50:9200/_bulk") driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_requested(elastic_request) end def test_writes_to_speficied_port driver.configure("port 9201\n") elastic_request = stub_elastic("http://localhost:9201/_bulk") driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_requested(elastic_request) end def test_writes_to_multi_hosts hosts = [['192.168.33.50', 9201], ['192.168.33.51', 9201], ['192.168.33.52', 9201]] hosts_string = hosts.map {|x| "#{x[0]}:#{x[1]}"}.compact.join(',') driver.configure("hosts #{hosts_string}") hosts.each do |host_info| host, port = host_info stub_elastic_with_store_index_command_counts("http://#{host}:#{port}/_bulk") end driver.run(default_tag: 'test') do 1000.times do driver.feed(sample_record.merge('age'=>rand(100))) end end # @note: we cannot make multi chunks with options (flush_interval, buffer_chunk_limit) # it's Fluentd test driver's constraint # so @index_command_counts.size is always 1 assert(@index_command_counts.size > 0, "not working with hosts options") total = 0 @index_command_counts.each do |url, count| total += count end assert_equal(2000, total) end def test_nested_record_with_flattening_on driver.configure("flatten_hashes true flatten_hashes_separator |") original_hash = {"foo" => {"bar" => "baz"}, "people" => [ {"age" => "25", "height" => "1ft"}, {"age" => "30", "height" => "2ft"} ]} expected_output = {"foo|bar"=>"baz", "people" => [ {"age" => "25", "height" => "1ft"}, {"age" => "30", "height" => "2ft"} ]} stub_elastic driver.run(default_tag: 'test') do driver.feed(original_hash) end assert_equal expected_output, index_cmds[1] end def test_nested_record_with_flattening_off # flattening off by default original_hash = {"foo" => {"bar" => "baz"}} expected_output = {"foo" => {"bar" => "baz"}} stub_elastic driver.run(default_tag: 'test') do driver.feed(original_hash) end assert_equal expected_output, index_cmds[1] end def 
test_makes_bulk_request stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) driver.feed(sample_record.merge('age' => 27)) end assert_equal(4, index_cmds.count) end def test_all_records_are_preserved_in_bulk stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) driver.feed(sample_record.merge('age' => 27)) end assert_equal(26, index_cmds[1]['age']) assert_equal(27, index_cmds[3]['age']) end def test_writes_to_logstash_index driver.configure("logstash_format true\n") # # This is 1 second past midnight in BST, so the UTC index should be the day before dt = DateTime.new(2015, 6, 1, 0, 0, 1, "+01:00") logstash_index = "logstash-2015.05.31" stub_elastic driver.run(default_tag: 'test') do driver.feed(dt.to_time.to_i, sample_record) end assert_equal(logstash_index, index_cmds.first['index']['_index']) end def test_writes_to_logstash_non_utc_index driver.configure("logstash_format true utc_index false") # When using `utc_index false` the index time will be the local day of # ingestion time time = Date.today.to_time index = "logstash-#{time.strftime("%Y.%m.%d")}" stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record) end assert_equal(index, index_cmds.first['index']['_index']) end def test_writes_to_logstash_index_with_specified_prefix driver.configure("logstash_format true logstash_prefix myprefix") time = Time.parse Date.today.iso8601 logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m.%d")}" stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record) end assert_equal(logstash_index, index_cmds.first['index']['_index']) end def test_writes_to_logstash_index_with_specified_prefix_and_separator separator = '_' driver.configure("logstash_format true logstash_prefix_separator #{separator} logstash_prefix myprefix") time = Time.parse Date.today.iso8601 logstash_index = "myprefix#{separator}#{time.getutc.strftime("%Y.%m.%d")}" stub_elastic driver.run(default_tag: 'test') do 
driver.feed(time.to_i, sample_record) end assert_equal(logstash_index, index_cmds.first['index']['_index']) end class LogStashPrefixPlaceholdersTest < self def test_writes_to_logstash_index_with_specified_prefix_and_tag_placeholder driver.configure("logstash_format true logstash_prefix myprefix-${tag}") time = Time.parse Date.today.iso8601 logstash_index = "myprefix-test-#{time.getutc.strftime("%Y.%m.%d")}" stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record) end assert_equal(logstash_index, index_cmds.first['index']['_index']) end def test_writes_to_logstash_index_with_specified_prefix_and_time_placeholder driver.configure(Fluent::Config::Element.new( 'ROOT', '', { '@type' => 'elasticsearch', 'logstash_format' => true, 'logstash_prefix' => 'myprefix-%H', }, [ Fluent::Config::Element.new('buffer', 'tag,time', { 'chunk_keys' => ['tag', 'time'], 'timekey' => 3600, }, []) ] )) time = Time.parse Date.today.iso8601 logstash_index = "myprefix-#{time.getutc.strftime("%H")}-#{time.getutc.strftime("%Y.%m.%d")}" stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record) end assert_equal(logstash_index, index_cmds.first['index']['_index']) end def test_writes_to_logstash_index_with_specified_prefix_and_custom_key_placeholder driver.configure(Fluent::Config::Element.new( 'ROOT', '', { '@type' => 'elasticsearch', 'logstash_format' => true, 'logstash_prefix' => 'myprefix-${pipeline_id}', }, [ Fluent::Config::Element.new('buffer', 'tag,pipeline_id', {}, []) ] )) time = Time.parse Date.today.iso8601 pipeline_id = "mypipeline" logstash_index = "myprefix-#{pipeline_id}-#{time.getutc.strftime("%Y.%m.%d")}" stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record.merge({"pipeline_id" => pipeline_id})) end assert_equal(logstash_index, index_cmds.first['index']['_index']) end end def test_writes_to_logstash_index_with_specified_prefix_uppercase driver.configure("logstash_format true logstash_prefix 
MyPrefix") time = Time.parse Date.today.iso8601 logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m.%d")}" stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record) end # Allthough logstash_prefix has upper-case characters, # it should be set as lower-case when sent to elasticsearch. assert_equal(logstash_index, index_cmds.first['index']['_index']) end def test_writes_to_logstash_index_with_specified_dateformat driver.configure("logstash_format true logstash_dateformat %Y.%m") time = Time.parse Date.today.iso8601 logstash_index = "logstash-#{time.getutc.strftime("%Y.%m")}" stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record) end assert_equal(logstash_index, index_cmds.first['index']['_index']) end def test_writes_to_logstash_index_with_specified_prefix_and_dateformat driver.configure("logstash_format true logstash_prefix myprefix logstash_dateformat %Y.%m") time = Time.parse Date.today.iso8601 logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m")}" stub_elastic driver.run(default_tag: 'test') do driver.feed(time.to_i, sample_record) end assert_equal(logstash_index, index_cmds.first['index']['_index']) end def test_error_if_tag_not_in_chunk_keys assert_raise(Fluent::ConfigError) { config = %{ <buffer foo> </buffer> } driver.configure(config) } end def test_can_use_custom_chunk_along_with_tag config = %{ <buffer tag, foo> </buffer> } driver.configure(config) end def test_doesnt_add_logstash_timestamp_by_default stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_nil(index_cmds[1]['@timestamp']) end def test_adds_timestamp_when_logstash driver.configure("logstash_format true\n") stub_elastic ts = DateTime.now time = Fluent::EventTime.from_time(ts.to_time) driver.run(default_tag: 'test') do driver.feed(time, sample_record) end assert(index_cmds[1].has_key? 
'@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts.iso8601(9)) end def test_adds_timestamp_when_include_timestamp driver.configure("include_timestamp true\n") stub_elastic ts = DateTime.now time = Fluent::EventTime.from_time(ts.to_time) driver.run(default_tag: 'test') do driver.feed(time, sample_record) end assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts.iso8601(9)) end def test_uses_custom_timestamp_when_included_in_record driver.configure("logstash_format true\n") stub_elastic ts = DateTime.new(2001,2,3).iso8601 driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('@timestamp' => ts)) end assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts) end def test_uses_custom_timestamp_when_included_in_record_without_logstash driver.configure("include_timestamp true\n") stub_elastic ts = DateTime.new(2001,2,3).iso8601 driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('@timestamp' => ts)) end assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts) end def test_uses_custom_time_key driver.configure("logstash_format true time_key vtm\n") stub_elastic ts = DateTime.new(2001,2,3).iso8601(9) driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('vtm' => ts)) end assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts) end def test_uses_custom_time_key_with_format driver.configure("logstash_format true time_key_format %Y-%m-%d %H:%M:%S.%N%z time_key vtm\n") stub_elastic ts = "2001-02-03 13:14:01.673+02:00" driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('vtm' => ts)) end assert(index_cmds[1].has_key? 
'@timestamp') assert_equal(index_cmds[1]['@timestamp'], DateTime.parse(ts).iso8601(9)) assert_equal("logstash-2001.02.03", index_cmds[0]['index']['_index']) end def test_uses_custom_time_key_with_format_without_logstash driver.configure("include_timestamp true index_name test time_key_format %Y-%m-%d %H:%M:%S.%N%z time_key vtm\n") stub_elastic ts = "2001-02-03 13:14:01.673+02:00" driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('vtm' => ts)) end assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], DateTime.parse(ts).iso8601(9)) assert_equal("test", index_cmds[0]['index']['_index']) end def test_uses_custom_time_key_exclude_timekey driver.configure("logstash_format true time_key vtm time_key_exclude_timestamp true\n") stub_elastic ts = DateTime.new(2001,2,3).iso8601 driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('vtm' => ts)) end assert(!index_cmds[1].key?('@timestamp'), '@timestamp should be messing') end def test_uses_custom_time_key_format driver.configure("logstash_format true time_key_format %Y-%m-%dT%H:%M:%S.%N%z\n") stub_elastic ts = "2001-02-03T13:14:01.673+02:00" driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('@timestamp' => ts)) end assert_equal("logstash-2001.02.03", index_cmds[0]['index']['_index']) assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts) end def test_uses_custom_time_key_format_without_logstash driver.configure("include_timestamp true index_name test time_key_format %Y-%m-%dT%H:%M:%S.%N%z\n") stub_elastic ts = "2001-02-03T13:14:01.673+02:00" driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('@timestamp' => ts)) end assert_equal("test", index_cmds[0]['index']['_index']) assert(index_cmds[1].has_key? 
'@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts) end data(:default => nil, :custom_tag => 'es_plugin.output.time.error') def test_uses_custom_time_key_format_logs_an_error(tag_for_error) tag_config = tag_for_error ? "time_parse_error_tag #{tag_for_error}" : '' tag_for_error = 'Fluent::ElasticsearchOutput::TimeParser.error' if tag_for_error.nil? driver.configure("logstash_format true time_key_format %Y-%m-%dT%H:%M:%S.%N%z\n#{tag_config}\n") stub_elastic ts = "2001/02/03 13:14:01,673+02:00" index = "logstash-#{Date.today.strftime("%Y.%m.%d")}" flexmock(driver.instance.router).should_receive(:emit_error_event) .with(tag_for_error, Fluent::EventTime, Hash, ArgumentError).once driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('@timestamp' => ts)) end assert_equal(index, index_cmds[0]['index']['_index']) assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts) end def test_uses_custom_time_key_format_obscure_format driver.configure("logstash_format true time_key_format %a %b %d %H:%M:%S %Z %Y\n") stub_elastic ts = "Thu Nov 29 14:33:20 GMT 2001" driver.run(default_tag: 'test') do driver.feed(sample_record.merge!('@timestamp' => ts)) end assert_equal("logstash-2001.11.29", index_cmds[0]['index']['_index']) assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], ts) end def test_uses_nanosecond_precision_by_default driver.configure("logstash_format true\n") stub_elastic time = Fluent::EventTime.new(Time.now.to_i, 123456789) driver.run(default_tag: 'test') do driver.feed(time, sample_record) end assert(index_cmds[1].has_key? '@timestamp') assert_equal(index_cmds[1]['@timestamp'], Time.at(time).iso8601(9)) end def test_uses_subsecond_precision_when_configured driver.configure("logstash_format true time_precision 3\n") stub_elastic time = Fluent::EventTime.new(Time.now.to_i, 123456789) driver.run(default_tag: 'test') do driver.feed(time, sample_record) end assert(index_cmds[1].has_key? 
'@timestamp') assert_equal(index_cmds[1]['@timestamp'], Time.at(time).iso8601(3)) end def test_doesnt_add_tag_key_by_default stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_nil(index_cmds[1]['tag']) end def test_adds_tag_key_when_configured driver.configure("include_tag_key true\n") stub_elastic driver.run(default_tag: 'mytag') do driver.feed(sample_record) end assert(index_cmds[1].has_key?('tag')) assert_equal(index_cmds[1]['tag'], 'mytag') end def test_adds_id_key_when_configured driver.configure("id_key request_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(index_cmds[0]['index']['_id'], '42') end class NestedIdKeyTest < self def test_adds_nested_id_key_with_dot driver.configure("id_key nested.request_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_id'], '42') end def test_adds_nested_id_key_with_dollar_dot driver.configure("id_key $.nested.request_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_id'], '42') end def test_adds_nested_id_key_with_bracket driver.configure("id_key $['nested']['request_id']\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_id'], '42') end end def test_doesnt_add_id_key_if_missing_when_configured driver.configure("id_key another_request_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_id')) end def test_adds_id_key_when_not_configured stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_id')) end def test_adds_parent_key_when_configured driver.configure("parent_key parent_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end 
assert_equal(index_cmds[0]['index']['_parent'], 'parent') end class NestedParentKeyTest < self def test_adds_nested_parent_key_with_dot driver.configure("parent_key nested.parent_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_parent'], 'parent') end def test_adds_nested_parent_key_with_dollar_dot driver.configure("parent_key $.nested.parent_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_parent'], 'parent') end def test_adds_nested_parent_key_with_bracket driver.configure("parent_key $['nested']['parent_id']\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_parent'], 'parent') end end def test_doesnt_add_parent_key_if_missing_when_configured driver.configure("parent_key another_parent_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_parent')) end def test_adds_parent_key_when_not_configured stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_parent')) end class AddsRoutingKeyWhenConfiguredTest < self def test_es6 driver('', 6).configure("routing_key routing_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(index_cmds[0]['index']['_routing'], 'routing') end def test_es7 driver('', 7).configure("routing_key routing_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(index_cmds[0]['index']['routing'], 'routing') end end class NestedRoutingKeyTest < self def test_adds_nested_routing_key_with_dot driver.configure("routing_key nested.routing_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_routing'], 'routing') 
end def test_adds_nested_routing_key_with_dollar_dot driver.configure("routing_key $.nested.routing_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_routing'], 'routing') end def test_adds_nested_routing_key_with_bracket driver.configure("routing_key $['nested']['routing_id']\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(nested_sample_record) end assert_equal(index_cmds[0]['index']['_routing'], 'routing') end end def test_doesnt_add_routing_key_if_missing_when_configured driver.configure("routing_key another_routing_id\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_routing')) end def test_adds_routing_key_when_not_configured stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(!index_cmds[0]['index'].has_key?('_routing')) end def test_remove_one_key driver.configure("remove_keys key1\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record.merge('key1' => 'v1', 'key2' => 'v2')) end assert(!index_cmds[1].has_key?('key1')) assert(index_cmds[1].has_key?('key2')) end def test_remove_multi_keys driver.configure("remove_keys key1, key2\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record.merge('key1' => 'v1', 'key2' => 'v2')) end assert(!index_cmds[1].has_key?('key1')) assert(!index_cmds[1].has_key?('key2')) end def test_request_error stub_elastic_unavailable assert_raise(Fluent::Plugin::ElasticsearchOutput::RecoverableRequestFailure) { driver.run(default_tag: 'test', shutdown: false) do driver.feed(sample_record) end } end def test_request_forever omit("retry_forever test is unstable.") if ENV["CI"] stub_elastic driver.configure(Fluent::Config::Element.new( 'ROOT', '', { '@type' => 'elasticsearch', }, [ Fluent::Config::Element.new('buffer', '', { 'retry_forever' => true }, []) ] )) stub_elastic_timeout 
assert_raise(Timeout::Error) { driver.run(default_tag: 'test', timeout: 10, force_flush_retry: true) do driver.feed(sample_record) end } end def test_connection_failed connection_resets = 0 stub_request(:post, "http://localhost:9200/_bulk").with do |req| connection_resets += 1 raise Faraday::ConnectionFailed, "Test message" end assert_raise(Fluent::Plugin::ElasticsearchOutput::RecoverableRequestFailure) { driver.run(default_tag: 'test', shutdown: false) do driver.feed(sample_record) end } assert_equal(connection_resets, 1) end def test_reconnect_on_error_enabled connection_resets = 0 stub_request(:post, "http://localhost:9200/_bulk").with do |req| connection_resets += 1 raise ZeroDivisionError, "any not host_unreachable_exceptions exception" end driver.configure("reconnect_on_error true\n") assert_raise(Fluent::Plugin::ElasticsearchOutput::RecoverableRequestFailure) { driver.run(default_tag: 'test', shutdown: false) do driver.feed(sample_record) end } assert_raise(Timeout::Error) { driver.run(default_tag: 'test', shutdown: false) do driver.feed(sample_record) end } # FIXME: Consider keywords arguments in #run and how to test this later. # Because v0.14 test driver does not have 1 to 1 correspondence between #run and #flush in tests. 
assert_equal(connection_resets, 1) end def test_reconnect_on_error_disabled connection_resets = 0 stub_request(:post, "http://localhost:9200/_bulk").with do |req| connection_resets += 1 raise ZeroDivisionError, "any not host_unreachable_exceptions exception" end driver.configure("reconnect_on_error false\n") assert_raise(Fluent::Plugin::ElasticsearchOutput::RecoverableRequestFailure) { driver.run(default_tag: 'test', shutdown: false) do driver.feed(sample_record) end } assert_raise(Timeout::Error) { driver.run(default_tag: 'test', shutdown: false) do driver.feed(sample_record) end } assert_equal(connection_resets, 1) end def test_bulk_error_retags_when_configured driver.configure("retry_tag retry\n") stub_request(:post, 'http://localhost:9200/_bulk') .to_return(lambda do |req| { :status => 200, :headers => { 'Content-Type' => 'json' }, :body => %({ "took" : 1, "errors" : true, "items" : [ { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc", "status" : 500, "error" : { "type" : "some unrecognized type", "reason":"some error to cause version mismatch" } } } ] }) } end) driver.run(default_tag: 'test') do driver.feed(1, sample_record) end assert_equal [['retry', 1, sample_record]], driver.events end def test_create_should_write_records_with_ids_and_skip_those_without driver.configure("write_operation create\nid_key my_id\n@log_level debug") stub_request(:post, 'http://localhost:9200/_bulk') .to_return(lambda do |req| { :status => 200, :headers => { 'Content-Type' => 'json' }, :body => %({ "took" : 1, "errors" : true, "items" : [ { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc" } }, { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "xyz", "status" : 500, "error" : { "type" : "some unrecognized type", "reason":"some error to cause version mismatch" } } } ] }) } end) sample_record1 = sample_record('my_id' => 'abc') sample_record4 = sample_record('my_id' => 'xyz') driver.run(default_tag: 'test') do driver.feed(1, sample_record1) 
driver.feed(2, sample_record) driver.feed(3, sample_record) driver.feed(4, sample_record4) end logs = driver.logs # one record succeeded while the other should be 'retried' assert_equal [['test', 4, sample_record4]], driver.events assert_logs_include(logs, /(Dropping record)/, 2) end def test_create_should_write_records_with_ids_and_emit_those_without driver.configure("write_operation create\nid_key my_id\nemit_error_for_missing_id true\n@log_level debug") stub_request(:post, 'http://localhost:9200/_bulk') .to_return(lambda do |req| { :status => 200, :headers => { 'Content-Type' => 'json' }, :body => %({ "took" : 1, "errors" : true, "items" : [ { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc" } }, { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "xyz", "status" : 500, "error" : { "type" : "some unrecognized type", "reason":"some error to cause version mismatch" } } } ] }) } end) sample_record1 = sample_record('my_id' => 'abc') sample_record4 = sample_record('my_id' => 'xyz') driver.run(default_tag: 'test') do driver.feed(1, sample_record1) driver.feed(2, sample_record) driver.feed(3, sample_record) driver.feed(4, sample_record4) end error_log = driver.error_events.map {|e| e.last.message } # one record succeeded while the other should be 'retried' assert_equal [['test', 4, sample_record4]], driver.events assert_logs_include(error_log, /(Missing '_id' field)/, 2) end def test_bulk_error stub_request(:post, 'http://localhost:9200/_bulk') .to_return(lambda do |req| { :status => 200, :headers => { 'Content-Type' => 'json' }, :body => %({ "took" : 1, "errors" : true, "items" : [ { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc", "status" : 500, "error" : { "type" : "some unrecognized type", "reason":"some error to cause version mismatch" } } }, { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc", "status" : 201 } }, { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc", "status" : 500, "error" : { "type" : 
"some unrecognized type", "reason":"some error to cause version mismatch" } } }, { "create" : { "_index" : "foo", "_type" : "bar", "_id" : "abc", "_id" : "abc", "status" : 409 } } ] }) } end) driver.run(default_tag: 'test') do driver.feed(1, sample_record) driver.feed(2, sample_record) driver.feed(3, sample_record) driver.feed(4, sample_record) end expect = [['test', 1, sample_record], ['test', 3, sample_record]] assert_equal expect, driver.events end def test_update_should_not_write_if_theres_no_id driver.configure("write_operation update\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_nil(index_cmds) end def test_upsert_should_not_write_if_theres_no_id driver.configure("write_operation upsert\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_nil(index_cmds) end def test_create_should_not_write_if_theres_no_id driver.configure("write_operation create\n") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_nil(index_cmds) end def test_update_should_write_update_op_and_doc_as_upsert_is_false driver.configure("write_operation update id_key request_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[0].has_key?("update")) assert(!index_cmds[1]["doc_as_upsert"]) assert(!index_cmds[1]["upsert"]) end def test_update_should_remove_keys_from_doc_when_keys_are_skipped driver.configure("write_operation update id_key request_id remove_keys_on_update parent_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[1]["doc"]) assert(!index_cmds[1]["doc"]["parent_id"]) end def test_upsert_should_write_update_op_and_doc_as_upsert_is_true driver.configure("write_operation upsert id_key request_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[0].has_key?("update")) assert(index_cmds[1]["doc_as_upsert"]) 
assert(!index_cmds[1]["upsert"]) end def test_upsert_should_write_update_op_upsert_and_doc_when_keys_are_skipped driver.configure("write_operation upsert id_key request_id remove_keys_on_update parent_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[0].has_key?("update")) assert(!index_cmds[1]["doc_as_upsert"]) assert(index_cmds[1]["upsert"]) assert(index_cmds[1]["doc"]) end def test_upsert_should_remove_keys_from_doc_when_keys_are_skipped driver.configure("write_operation upsert id_key request_id remove_keys_on_update parent_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[1]["upsert"] != index_cmds[1]["doc"]) assert(!index_cmds[1]["doc"]["parent_id"]) assert(index_cmds[1]["upsert"]["parent_id"]) end def test_upsert_should_remove_multiple_keys_when_keys_are_skipped driver.configure("write_operation upsert id_key id remove_keys_on_update foo,baz") stub_elastic driver.run(default_tag: 'test') do driver.feed("id" => 1, "foo" => "bar", "baz" => "quix", "zip" => "zam") end assert( index_cmds[1]["doc"] == { "id" => 1, "zip" => "zam", } ) assert( index_cmds[1]["upsert"] == { "id" => 1, "foo" => "bar", "baz" => "quix", "zip" => "zam", } ) end def test_upsert_should_remove_keys_from_when_the_keys_are_in_the_record driver.configure("write_operation upsert id_key id remove_keys_on_update_key keys_to_skip") stub_elastic driver.run(default_tag: 'test') do driver.feed("id" => 1, "foo" => "bar", "baz" => "quix", "keys_to_skip" => ["baz"]) end assert( index_cmds[1]["doc"] == { "id" => 1, "foo" => "bar", } ) assert( index_cmds[1]["upsert"] == { "id" => 1, "foo" => "bar", "baz" => "quix", } ) end def test_upsert_should_remove_keys_from_key_on_record_has_higher_presedence_than_config driver.configure("write_operation upsert id_key id remove_keys_on_update foo,bar remove_keys_on_update_key keys_to_skip") stub_elastic driver.run(default_tag: 'test') do driver.feed("id" => 1, "foo" 
=> "bar", "baz" => "quix", "keys_to_skip" => ["baz"]) end assert( index_cmds[1]["doc"] == { "id" => 1, # we only expect baz to be stripped here, if the config was more important # foo would be stripped too. "foo" => "bar", } ) assert( index_cmds[1]["upsert"] == { "id" => 1, "foo" => "bar", "baz" => "quix", } ) end def test_create_should_write_create_op driver.configure("write_operation create id_key request_id") stub_elastic driver.run(default_tag: 'test') do driver.feed(sample_record) end assert(index_cmds[0].has_key?("create")) end def test_include_index_in_url stub_elastic('http://localhost:9200/logstash-2018.01.01/_bulk') driver.configure("index_name logstash-2018.01.01 include_index_in_url true") driver.run(default_tag: 'test') do driver.feed(sample_record) end assert_equal(index_cmds.length, 2) assert_equal(index_cmds.first['index']['_index'], nil) end def test_use_simple_sniffer require 'fluent/plugin/elasticsearch_simple_sniffer' stub_elastic_info stub_elastic config = %[ sniffer_class_name Fluent::Plugin::ElasticsearchSimpleSniffer log_level debug with_transporter_log true reload_connections true reload_after 1 ] driver(config, nil) driver.run(default_tag: 'test') do driver.feed(sample_record) end log = driver.logs # 2 or 3 - one for the ping, one for the _bulk, (and client.info) assert_logs_include_compare_size(3, ">", log, /In Fluent::Plugin::ElasticsearchSimpleSniffer hosts/) assert_logs_include_compare_size(1, "<=", log, /In Fluent::Plugin::ElasticsearchSimpleSniffer hosts/) end end
Gem::Specification.new do |s| s.name = 'virtual_merchant' s.version = '0.0.2' s.date = '2012-10-07' s.summary = "Virtual Merchant API" s.description = "Makes it easy to charge credit cards with the VirtualMerchant API." s.authors = ["Lee Quarella"] s.email = 'lee@lucidfrog.com' s.files = ["lib/virtual_merchant.rb", "lib/virtual_merchant/amount.rb", "lib/virtual_merchant/credentials.rb", "lib/virtual_merchant/credit_card.rb", "lib/virtual_merchant/response.rb"] s.homepage = 'https://github.com/leequarella/VirtualMerchant-Ruby' end up to version 0.0.3 Gem::Specification.new do |s| s.name = 'virtual_merchant' s.version = '0.0.3' s.date = '2012-10-07' s.summary = "Virtual Merchant API" s.description = "Makes it easy to charge credit cards with the VirtualMerchant API." s.authors = ["Lee Quarella"] s.email = 'lee@lucidfrog.com' s.files = ["lib/virtual_merchant.rb", "lib/virtual_merchant/amount.rb", "lib/virtual_merchant/credentials.rb", "lib/virtual_merchant/credit_card.rb", "lib/virtual_merchant/response.rb"] s.homepage = 'https://github.com/leequarella/VirtualMerchant-Ruby' end
require 'common' require 'net/ssh/transport/cipher_factory' module Transport class TestCipherFactory < Test::Unit::TestCase def self.if_supported?(name) yield if Net::SSH::Transport::CipherFactory.supported?(name) end def test_lengths_for_none assert_equal [0,0], factory.get_lengths("none") assert_equal [0,0], factory.get_lengths("bogus") end def test_lengths_for_blowfish_cbc assert_equal [16,8], factory.get_lengths("blowfish-cbc") end if_supported?("idea-cbc") do def test_lengths_for_idea_cbc assert_equal [16,8], factory.get_lengths("idea-cbc") end end def test_lengths_for_rijndael_cbc assert_equal [32,16], factory.get_lengths("rijndael-cbc@lysator.liu.se") end def test_lengths_for_cast128_cbc assert_equal [16,8], factory.get_lengths("cast128-cbc") end def test_lengths_for_3des_cbc assert_equal [24,8], factory.get_lengths("3des-cbc") end def test_lengths_for_aes192_cbc assert_equal [24,16], factory.get_lengths("aes192-cbc") end def test_lengths_for_aes128_cbc assert_equal [16,16], factory.get_lengths("aes128-cbc") end def test_lengths_for_aes256_cbc assert_equal [32,16], factory.get_lengths("aes256-cbc") end BLOWFISH = "\210\021\200\315\240_\026$\352\204g\233\244\242x\332e\370\001\327\224Nv@9_\323\037\252kb\037\036\237\375]\343/y\037\237\312Q\f7]\347Y\005\275%\377\0010$G\272\250B\265Nd\375\342\372\025r6}+Y\213y\n\237\267\\\374^\346BdJ$\353\220Ik\023<\236&H\277=\225" def test_blowfish_cbc_for_encryption assert_equal BLOWFISH, encrypt("blowfish-cbc") end def test_blowfish_cbc_for_decryption assert_equal TEXT, decrypt("blowfish-cbc", BLOWFISH) end if_supported?("idea-cbc") do IDEA = "W\234\017G\231\b\357\370H\b\256U]\343M\031k\233]~\023C\363\263\177\262-\261\341$\022\376mv\217\322\b\2763\270H\306\035\343z\313\312\3531\351\t\201\302U\022\360\300\354ul7$z\320O]\360g\024\305\005`V\005\335A\351\312\270c\320D\232\eQH1\340\265\2118\031g*\303v" def test_idea_cbc_for_encryption assert_equal IDEA, encrypt("idea-cbc") end def test_idea_cbc_for_decryption assert_equal TEXT, 
decrypt("idea-cbc", IDEA) end end RIJNDAEL = "$\253\271\255\005Z\354\336&\312\324\221\233\307Mj\315\360\310Fk\241EfN\037\231\213\361{'\310\204\347I\343\271\005\240`\325;\034\346uM>#\241\231C`\374\261\vo\226;Z\302:\b\250\366T\330\\#V\330\340\226\363\374!\bm\266\232\207!\232\347\340\t\307\370\356z\236\343=v\210\206y" def test_rijndael_cbc_for_encryption assert_equal RIJNDAEL, encrypt("rijndael-cbc@lysator.liu.se") end def test_rijndael_cbc_for_decryption assert_equal TEXT, decrypt("rijndael-cbc@lysator.liu.se", RIJNDAEL) end CAST128 = "qW\302\331\333P\223t[9 ~(sg\322\271\227\272\022I\223\373p\255>k\326\314\260\2003\236C_W\211\227\373\205>\351\334\322\227\223\e\236\202Ii\032!P\214\035:\017\360h7D\371v\210\264\317\236a\262w1\2772\023\036\331\227\240:\f/X\351\324I\t[x\350\323E\2301\016m" def test_cast128_cbc_for_encryption assert_equal CAST128, encrypt("cast128-cbc") end def test_cast128_cbc_for_decryption assert_equal TEXT, decrypt("cast128-cbc", CAST128) end TRIPLE_DES = "\322\252\216D\303Q\375gg\367A{\177\313\3436\272\353%\223K?\257\206|\r&\353/%\340\336 \203E8rY\206\234\004\274\267\031\233T/{\"\227/B!i?[qGaw\306T\206\223\213n \212\032\244%]@\355\250\334\312\265E\251\017\361\270\357\230\274KP&^\031r+r%\370" def test_3des_cbc_for_encryption assert_equal TRIPLE_DES, encrypt("3des-cbc") end def test_3des_cbc_for_decryption assert_equal TEXT, decrypt("3des-cbc", TRIPLE_DES) end AES128 = "k\026\350B\366-k\224\313\3277}B\035\004\200\035\r\233\024$\205\261\231Q\2214r\245\250\360\315\237\266hg\262C&+\321\346Pf\267v\376I\215P\327\345-\232&HK\375\326_\030<\a\276\212\303g\342C\242O\233\260\006\001a&V\345`\\T\e\236.\207\223l\233ri^\v\252\363\245" def test_aes128_cbc_for_encryption assert_equal AES128, encrypt("aes128-cbc") end def test_aes128_cbc_for_decryption assert_equal TEXT, decrypt("aes128-cbc", AES128) end AES192 = 
"\256\017)x\270\213\336\303L\003f\235'jQ\3231k9\225\267\242\364C4\370\224\201\302~\217I\202\374\2167='\272\037\225\223\177Y\r\212\376(\275\n\3553\377\177\252C\254\236\016MA\274Z@H\331<\rL\317\205\323[\305X8\376\237=\374\352bH9\244\0231\353\204\352p\226\326~J\242" def test_aes192_cbc_for_encryption assert_equal AES192, encrypt("aes192-cbc") end def test_aes192_cbc_for_decryption assert_equal TEXT, decrypt("aes192-cbc", AES192) end AES256 = "$\253\271\255\005Z\354\336&\312\324\221\233\307Mj\315\360\310Fk\241EfN\037\231\213\361{'\310\204\347I\343\271\005\240`\325;\034\346uM>#\241\231C`\374\261\vo\226;Z\302:\b\250\366T\330\\#V\330\340\226\363\374!\bm\266\232\207!\232\347\340\t\307\370\356z\236\343=v\210\206y" def test_aes256_cbc_for_encryption assert_equal AES256, encrypt("aes256-cbc") end def test_aes256_cbc_for_decryption assert_equal TEXT, decrypt("aes256-cbc", AES256) end def test_none_for_encryption assert_equal TEXT, encrypt("none").strip end def test_none_for_decryption assert_equal TEXT, decrypt("none", TEXT) end private TEXT = "But soft! What light through yonder window breaks? It is the east, and Juliet is the sun!" 
OPTIONS = { :iv => "ABC", :key => "abc", :digester => OpenSSL::Digest::MD5, :shared => "1234567890123456780", :hash => '!@#$%#$^%$&^&%#$@$' } def factory Net::SSH::Transport::CipherFactory end def encrypt(type) cipher = factory.get(type, OPTIONS.merge(:encrypt => true)) padding = TEXT.length % cipher.block_size result = cipher.update(TEXT.dup) result << cipher.update(" " * (cipher.block_size - padding)) if padding > 0 result << cipher.final end def decrypt(type, data) cipher = factory.get(type, OPTIONS.merge(:decrypt => true)) result = cipher.update(data.dup) result << cipher.final result.strip end end end Added tests for arcfour lengths require 'common' require 'net/ssh/transport/cipher_factory' module Transport class TestCipherFactory < Test::Unit::TestCase def self.if_supported?(name) yield if Net::SSH::Transport::CipherFactory.supported?(name) end def test_lengths_for_none assert_equal [0,0], factory.get_lengths("none") assert_equal [0,0], factory.get_lengths("bogus") end def test_lengths_for_blowfish_cbc assert_equal [16,8], factory.get_lengths("blowfish-cbc") end if_supported?("idea-cbc") do def test_lengths_for_idea_cbc assert_equal [16,8], factory.get_lengths("idea-cbc") end end def test_lengths_for_rijndael_cbc assert_equal [32,16], factory.get_lengths("rijndael-cbc@lysator.liu.se") end def test_lengths_for_cast128_cbc assert_equal [16,8], factory.get_lengths("cast128-cbc") end def test_lengths_for_3des_cbc assert_equal [24,8], factory.get_lengths("3des-cbc") end def test_lengths_for_aes192_cbc assert_equal [24,16], factory.get_lengths("aes192-cbc") end def test_lengths_for_aes128_cbc assert_equal [16,16], factory.get_lengths("aes128-cbc") end def test_lengths_for_aes256_cbc assert_equal [32,16], factory.get_lengths("aes256-cbc") end def test_lengths_for_arcfour128 assert_equal [16,8], factory.get_lengths("arcfour128") end def test_lengths_for_arcfour256 assert_equal [32,8], factory.get_lengths("arcfour256") end def test_lengths_for_arcfour512 assert_equal 
[64,8], factory.get_lengths("arcfour512") end BLOWFISH = "\210\021\200\315\240_\026$\352\204g\233\244\242x\332e\370\001\327\224Nv@9_\323\037\252kb\037\036\237\375]\343/y\037\237\312Q\f7]\347Y\005\275%\377\0010$G\272\250B\265Nd\375\342\372\025r6}+Y\213y\n\237\267\\\374^\346BdJ$\353\220Ik\023<\236&H\277=\225" def test_blowfish_cbc_for_encryption assert_equal BLOWFISH, encrypt("blowfish-cbc") end def test_blowfish_cbc_for_decryption assert_equal TEXT, decrypt("blowfish-cbc", BLOWFISH) end if_supported?("idea-cbc") do IDEA = "W\234\017G\231\b\357\370H\b\256U]\343M\031k\233]~\023C\363\263\177\262-\261\341$\022\376mv\217\322\b\2763\270H\306\035\343z\313\312\3531\351\t\201\302U\022\360\300\354ul7$z\320O]\360g\024\305\005`V\005\335A\351\312\270c\320D\232\eQH1\340\265\2118\031g*\303v" def test_idea_cbc_for_encryption assert_equal IDEA, encrypt("idea-cbc") end def test_idea_cbc_for_decryption assert_equal TEXT, decrypt("idea-cbc", IDEA) end end RIJNDAEL = "$\253\271\255\005Z\354\336&\312\324\221\233\307Mj\315\360\310Fk\241EfN\037\231\213\361{'\310\204\347I\343\271\005\240`\325;\034\346uM>#\241\231C`\374\261\vo\226;Z\302:\b\250\366T\330\\#V\330\340\226\363\374!\bm\266\232\207!\232\347\340\t\307\370\356z\236\343=v\210\206y" def test_rijndael_cbc_for_encryption assert_equal RIJNDAEL, encrypt("rijndael-cbc@lysator.liu.se") end def test_rijndael_cbc_for_decryption assert_equal TEXT, decrypt("rijndael-cbc@lysator.liu.se", RIJNDAEL) end CAST128 = "qW\302\331\333P\223t[9 ~(sg\322\271\227\272\022I\223\373p\255>k\326\314\260\2003\236C_W\211\227\373\205>\351\334\322\227\223\e\236\202Ii\032!P\214\035:\017\360h7D\371v\210\264\317\236a\262w1\2772\023\036\331\227\240:\f/X\351\324I\t[x\350\323E\2301\016m" def test_cast128_cbc_for_encryption assert_equal CAST128, encrypt("cast128-cbc") end def test_cast128_cbc_for_decryption assert_equal TEXT, decrypt("cast128-cbc", CAST128) end TRIPLE_DES = "\322\252\216D\303Q\375gg\367A{\177\313\3436\272\353%\223K?\257\206|\r&\353/%\340\336 
\203E8rY\206\234\004\274\267\031\233T/{\"\227/B!i?[qGaw\306T\206\223\213n \212\032\244%]@\355\250\334\312\265E\251\017\361\270\357\230\274KP&^\031r+r%\370" def test_3des_cbc_for_encryption assert_equal TRIPLE_DES, encrypt("3des-cbc") end def test_3des_cbc_for_decryption assert_equal TEXT, decrypt("3des-cbc", TRIPLE_DES) end AES128 = "k\026\350B\366-k\224\313\3277}B\035\004\200\035\r\233\024$\205\261\231Q\2214r\245\250\360\315\237\266hg\262C&+\321\346Pf\267v\376I\215P\327\345-\232&HK\375\326_\030<\a\276\212\303g\342C\242O\233\260\006\001a&V\345`\\T\e\236.\207\223l\233ri^\v\252\363\245" def test_aes128_cbc_for_encryption assert_equal AES128, encrypt("aes128-cbc") end def test_aes128_cbc_for_decryption assert_equal TEXT, decrypt("aes128-cbc", AES128) end AES192 = "\256\017)x\270\213\336\303L\003f\235'jQ\3231k9\225\267\242\364C4\370\224\201\302~\217I\202\374\2167='\272\037\225\223\177Y\r\212\376(\275\n\3553\377\177\252C\254\236\016MA\274Z@H\331<\rL\317\205\323[\305X8\376\237=\374\352bH9\244\0231\353\204\352p\226\326~J\242" def test_aes192_cbc_for_encryption assert_equal AES192, encrypt("aes192-cbc") end def test_aes192_cbc_for_decryption assert_equal TEXT, decrypt("aes192-cbc", AES192) end AES256 = "$\253\271\255\005Z\354\336&\312\324\221\233\307Mj\315\360\310Fk\241EfN\037\231\213\361{'\310\204\347I\343\271\005\240`\325;\034\346uM>#\241\231C`\374\261\vo\226;Z\302:\b\250\366T\330\\#V\330\340\226\363\374!\bm\266\232\207!\232\347\340\t\307\370\356z\236\343=v\210\206y" def test_aes256_cbc_for_encryption assert_equal AES256, encrypt("aes256-cbc") end def test_aes256_cbc_for_decryption assert_equal TEXT, decrypt("aes256-cbc", AES256) end def test_none_for_encryption assert_equal TEXT, encrypt("none").strip end def test_none_for_decryption assert_equal TEXT, decrypt("none", TEXT) end private TEXT = "But soft! What light through yonder window breaks? It is the east, and Juliet is the sun!" 
OPTIONS = { :iv => "ABC", :key => "abc", :digester => OpenSSL::Digest::MD5, :shared => "1234567890123456780", :hash => '!@#$%#$^%$&^&%#$@$' } def factory Net::SSH::Transport::CipherFactory end def encrypt(type) cipher = factory.get(type, OPTIONS.merge(:encrypt => true)) padding = TEXT.length % cipher.block_size result = cipher.update(TEXT.dup) result << cipher.update(" " * (cipher.block_size - padding)) if padding > 0 result << cipher.final end def decrypt(type, data) cipher = factory.get(type, OPTIONS.merge(:decrypt => true)) result = cipher.update(data.dup) result << cipher.final result.strip end end end
require File.expand_path("../../base", __FILE__) require "json" require "pathname" require "tempfile" require "tmpdir" require "vagrant/util/file_mode" describe Vagrant::Environment do include_context "unit" let(:env) do isolated_environment.tap do |e| e.box2("base", :virtualbox) e.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vm.box = "base" end VF end end let(:instance) { env.create_vagrant_env } describe "active machines" do it "should be empty if the machines folder doesn't exist" do folder = instance.local_data_path.join("machines") folder.should_not be_exist instance.active_machines.should be_empty end it "should return the name and provider of active machines" do machines = instance.local_data_path.join("machines") # Valid machine, with "foo" and virtualbox machine_foo = machines.join("foo/virtualbox") machine_foo.mkpath machine_foo.join("id").open("w+") { |f| f.write("") } # Invalid machine (no ID) machine_bar = machines.join("bar/virtualbox") machine_bar.mkpath instance.active_machines.should == [[:foo, :virtualbox]] end end describe "batching" do let(:batch) do double("batch") do |b| b.stub(:run) end end context "without the disabling env var" do it "should run without disabling parallelization" do with_temp_env("VAGRANT_NO_PARALLEL" => nil) do Vagrant::BatchAction.should_receive(:new).with(false).and_return(batch) batch.should_receive(:run) instance.batch {} end end end context "with the disabling env var" do it "should run with disabling parallelization" do with_temp_env("VAGRANT_NO_PARALLEL" => "yes") do Vagrant::BatchAction.should_receive(:new).with(true).and_return(batch) batch.should_receive(:run) instance.batch {} end end end end describe "current working directory" do it "is the cwd by default" do Dir.mktmpdir do |temp_dir| Dir.chdir(temp_dir) do with_temp_env("VAGRANT_CWD" => nil) do described_class.new.cwd.should == Pathname.new(Dir.pwd) end end end end it "is set to the cwd given" do Dir.mktmpdir do |directory| instance = 
described_class.new(:cwd => directory) instance.cwd.should == Pathname.new(directory) end end it "is set to the environmental variable VAGRANT_CWD" do Dir.mktmpdir do |directory| instance = with_temp_env("VAGRANT_CWD" => directory) do described_class.new end instance.cwd.should == Pathname.new(directory) end end it "raises an exception if the CWD doesn't exist" do expect { described_class.new(:cwd => "doesntexist") }. to raise_error(Vagrant::Errors::EnvironmentNonExistentCWD) end end describe "home path" do it "is set to the home path given" do Dir.mktmpdir do |dir| instance = described_class.new(:home_path => dir) instance.home_path.should == Pathname.new(dir) end end it "is set to the environmental variable VAGRANT_HOME" do Dir.mktmpdir do |dir| instance = with_temp_env("VAGRANT_HOME" => dir) do described_class.new end instance.home_path.should == Pathname.new(dir) end end it "is set to the DEFAULT_HOME by default" do expected = Pathname.new(File.expand_path(described_class::DEFAULT_HOME)) described_class.new.home_path.should == expected end it "throws an exception if inaccessible" do expect { described_class.new(:home_path => "/") }.to raise_error(Vagrant::Errors::HomeDirectoryNotAccessible) end end describe "local data path" do it "is set to the proper default" do default = instance.root_path.join(described_class::DEFAULT_LOCAL_DATA) instance.local_data_path.should == default end it "is expanded relative to the cwd" do instance = described_class.new(:local_data_path => "foo") instance.local_data_path.should == instance.cwd.join("foo") end it "is set to the given value" do Dir.mktmpdir do |dir| instance = described_class.new(:local_data_path => dir) instance.local_data_path.to_s.should == dir end end describe "upgrading V1 dotfiles" do let(:v1_dotfile_tempfile) { Tempfile.new("vagrant") } let(:v1_dotfile) { Pathname.new(v1_dotfile_tempfile.path) } let(:local_data_path) { v1_dotfile_tempfile.path } let(:instance) { described_class.new(:local_data_path => 
local_data_path) } it "should be fine if dotfile is empty" do v1_dotfile.open("w+") do |f| f.write("") end expect { instance }.to_not raise_error Pathname.new(local_data_path).should be_directory end it "should upgrade all active VMs" do active_vms = { "foo" => "foo_id", "bar" => "bar_id" } v1_dotfile.open("w+") do |f| f.write(JSON.dump({ "active" => active_vms })) end expect { instance }.to_not raise_error local_data_pathname = Pathname.new(local_data_path) foo_id_file = local_data_pathname.join("machines/foo/virtualbox/id") foo_id_file.should be_file foo_id_file.read.should == "foo_id" bar_id_file = local_data_pathname.join("machines/bar/virtualbox/id") bar_id_file.should be_file bar_id_file.read.should == "bar_id" end it "should raise an error if invalid JSON" do v1_dotfile.open("w+") do |f| f.write("bad") end expect { instance }. to raise_error(Vagrant::Errors::DotfileUpgradeJSONError) end end end describe "default provider" do it "should return virtualbox" do instance.default_provider.should == :virtualbox end end describe "copying the private SSH key" do it "copies the SSH key into the home directory" do env = isolated_environment instance = described_class.new(:home_path => env.homedir) pk = env.homedir.join("insecure_private_key") pk.should be_exist Vagrant::Util::FileMode.from_octal(pk.stat.mode).should == "600" end end it "has a box collection pointed to the proper directory" do collection = instance.boxes collection.should be_kind_of(Vagrant::BoxCollection) collection.directory.should == instance.boxes_path end describe "action runner" do it "has an action runner" do instance.action_runner.should be_kind_of(Vagrant::Action::Runner) end it "has a `ui` in the globals" do result = nil callable = lambda { |env| result = env[:ui] } instance.action_runner.run(callable) result.should eql(instance.ui) end end describe "#hook" do it "should call the action runner with the proper hook" do hook_name = :foo instance.action_runner.should_receive(:run).with do 
|callable, env| env[:action_name].should == hook_name end instance.hook(hook_name) end it "should return the result of the action runner run" do instance.action_runner.should_receive(:run).and_return(:foo) instance.hook(:bar).should == :foo end end describe "primary machine name" do it "should be the only machine if not a multi-machine environment" do instance.primary_machine_name.should == instance.machine_names.first end it "should be the machine marked as the primary" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define :foo config.vm.define :bar, :primary => true end VF env.box2("base", :virtualbox) end env = environment.create_vagrant_env env.primary_machine_name.should == :bar end it "should be nil if no primary is specified in a multi-machine environment" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define :foo config.vm.define :bar end VF env.box2("base", :virtualbox) end env = environment.create_vagrant_env env.primary_machine_name.should be_nil end end describe "loading configuration" do it "should load global configuration" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env env.config_global.ssh.port.should == 200 end it "should load from a custom Vagrantfile" do environment = isolated_environment do |env| env.file("non_standard_name", <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env(:vagrantfile_name => "non_standard_name") env.config_global.ssh.port.should == 200 end it "should load from a custom Vagrantfile specified by env var" do environment = isolated_environment do |env| env.file("some_other_name", <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 400 end VF end env = 
with_temp_env("VAGRANT_VAGRANTFILE" => "some_other_name") do environment.create_vagrant_env end env.config_global.ssh.port.should == 400 end end describe "ui" do it "should be a silent UI by default" do described_class.new.ui.should be_kind_of(Vagrant::UI::Silent) end it "should be a UI given in the constructor" do # Create a custom UI for our test class CustomUI < Vagrant::UI::Interface; end instance = described_class.new(:ui_class => CustomUI) instance.ui.should be_kind_of(CustomUI) end end describe "#unload" do it "should run the unload hook" do instance.should_receive(:hook).with(:environment_unload).once instance.unload end end describe "getting a machine" do # A helper to register a provider for use in tests. def register_provider(name, config_class=nil) provider_cls = Class.new(Vagrant.plugin("2", :provider)) register_plugin("2") do |p| p.provider(name) { provider_cls } if config_class p.config(name, :provider) { config_class } end end provider_cls end it "should return a machine object with the correct provider" do # Create a provider foo_provider = register_provider("foo") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define "foo" end VF e.box2("base", :foo) end # Verify that we can get the machine env = isolated_env.create_vagrant_env machine = env.machine(:foo, :foo) machine.should be_kind_of(Vagrant::Machine) machine.name.should == :foo machine.provider.should be_kind_of(foo_provider) machine.provider_config.should be_nil end it "should return a machine object with the machine configuration" do # Create a provider foo_config = Class.new(Vagrant.plugin("2", :config)) do attr_accessor :value end foo_provider = register_provider("foo", foo_config) # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define "foo" config.vm.provider :foo do 
|fooconfig| fooconfig.value = 100 end end VF e.box2("base", :foo) end # Verify that we can get the machine env = isolated_env.create_vagrant_env machine = env.machine(:foo, :foo) machine.should be_kind_of(Vagrant::Machine) machine.name.should == :foo machine.provider.should be_kind_of(foo_provider) machine.provider_config.value.should == 100 end it "should cache the machine objects by name and provider" do # Create a provider foo_provider = register_provider("foo") bar_provider = register_provider("bar") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define "vm1" config.vm.define "vm2" end VF e.box2("base", :foo) e.box2("base", :bar) end env = isolated_env.create_vagrant_env vm1_foo = env.machine(:vm1, :foo) vm1_bar = env.machine(:vm1, :bar) vm2_foo = env.machine(:vm2, :foo) vm1_foo.should eql(env.machine(:vm1, :foo)) vm1_bar.should eql(env.machine(:vm1, :bar)) vm1_foo.should_not eql(vm1_bar) vm2_foo.should eql(env.machine(:vm2, :foo)) end it "should load a machine without a box" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "i-dont-exist" end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) machine.box.should be_nil end it "should load the machine configuration" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.ssh.port = 1 config.vm.box = "base" config.vm.define "vm1" do |inner| inner.ssh.port = 100 end end VF env.box2("base", :foo) end env = environment.create_vagrant_env machine = env.machine(:vm1, :foo) machine.config.ssh.port.should == 100 machine.config.vm.box.should == "base" end it "should load the box configuration for a V2 box" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) 
Vagrant.configure("2") do |config| config.vm.box = "base" end VF env.box2("base", :foo, :vagrantfile => <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 100 end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) machine.config.ssh.port.should == 100 end it "should reload the cache if refresh is set" do # Create a provider foo_provider = register_provider("foo") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF e.box2("base", :foo) end env = isolated_env.create_vagrant_env vm1 = env.machine(:default, :foo) vm2 = env.machine(:default, :foo, true) vm3 = env.machine(:default, :foo) vm1.should_not eql(vm2) vm2.should eql(vm3) end it "should raise an error if the VM is not found" do expect { instance.machine("i-definitely-dont-exist", :virtualbox) }. to raise_error(Vagrant::Errors::MachineNotFound) end it "should raise an error if the provider is not found" do expect { instance.machine(:default, :lol_no) }. 
to raise_error(Vagrant::Errors::ProviderNotFound) end end describe "getting machine names" do it "should return the default machine if no multi-VM is used" do # Create the config isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| end VF end env = isolated_env.create_vagrant_env env.machine_names.should == [:default] end it "should return the machine names in order" do # Create the config isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.define "foo" config.vm.define "bar" end VF end env = isolated_env.create_vagrant_env env.machine_names.should == [:foo, :bar] end end end Tests for GH-1478 require File.expand_path("../../base", __FILE__) require "json" require "pathname" require "tempfile" require "tmpdir" require "vagrant/util/file_mode" describe Vagrant::Environment do include_context "unit" let(:env) do isolated_environment.tap do |e| e.box2("base", :virtualbox) e.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vm.box = "base" end VF end end let(:instance) { env.create_vagrant_env } subject { instance } describe "active machines" do it "should be empty if the machines folder doesn't exist" do folder = instance.local_data_path.join("machines") folder.should_not be_exist instance.active_machines.should be_empty end it "should return the name and provider of active machines" do machines = instance.local_data_path.join("machines") # Valid machine, with "foo" and virtualbox machine_foo = machines.join("foo/virtualbox") machine_foo.mkpath machine_foo.join("id").open("w+") { |f| f.write("") } # Invalid machine (no ID) machine_bar = machines.join("bar/virtualbox") machine_bar.mkpath instance.active_machines.should == [[:foo, :virtualbox]] end end describe "batching" do let(:batch) do double("batch") do |b| b.stub(:run) end end context "without the disabling env var" do it "should run without disabling parallelization" do 
with_temp_env("VAGRANT_NO_PARALLEL" => nil) do Vagrant::BatchAction.should_receive(:new).with(false).and_return(batch) batch.should_receive(:run) instance.batch {} end end end context "with the disabling env var" do it "should run with disabling parallelization" do with_temp_env("VAGRANT_NO_PARALLEL" => "yes") do Vagrant::BatchAction.should_receive(:new).with(true).and_return(batch) batch.should_receive(:run) instance.batch {} end end end end describe "current working directory" do it "is the cwd by default" do Dir.mktmpdir do |temp_dir| Dir.chdir(temp_dir) do with_temp_env("VAGRANT_CWD" => nil) do described_class.new.cwd.should == Pathname.new(Dir.pwd) end end end end it "is set to the cwd given" do Dir.mktmpdir do |directory| instance = described_class.new(:cwd => directory) instance.cwd.should == Pathname.new(directory) end end it "is set to the environmental variable VAGRANT_CWD" do Dir.mktmpdir do |directory| instance = with_temp_env("VAGRANT_CWD" => directory) do described_class.new end instance.cwd.should == Pathname.new(directory) end end it "raises an exception if the CWD doesn't exist" do expect { described_class.new(:cwd => "doesntexist") }. 
to raise_error(Vagrant::Errors::EnvironmentNonExistentCWD) end end describe "default provider" do it "is virtualbox without any environmental variable" do with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do subject.default_provider.should == :virtualbox end end it "is whatever the environmental variable is if set" do with_temp_env("VAGRANT_DEFAULT_PROVIDER" => "foo") do subject.default_provider.should == :foo end end end describe "home path" do it "is set to the home path given" do Dir.mktmpdir do |dir| instance = described_class.new(:home_path => dir) instance.home_path.should == Pathname.new(dir) end end it "is set to the environmental variable VAGRANT_HOME" do Dir.mktmpdir do |dir| instance = with_temp_env("VAGRANT_HOME" => dir) do described_class.new end instance.home_path.should == Pathname.new(dir) end end it "is set to the DEFAULT_HOME by default" do expected = Pathname.new(File.expand_path(described_class::DEFAULT_HOME)) described_class.new.home_path.should == expected end it "throws an exception if inaccessible" do expect { described_class.new(:home_path => "/") }.to raise_error(Vagrant::Errors::HomeDirectoryNotAccessible) end end describe "local data path" do it "is set to the proper default" do default = instance.root_path.join(described_class::DEFAULT_LOCAL_DATA) instance.local_data_path.should == default end it "is expanded relative to the cwd" do instance = described_class.new(:local_data_path => "foo") instance.local_data_path.should == instance.cwd.join("foo") end it "is set to the given value" do Dir.mktmpdir do |dir| instance = described_class.new(:local_data_path => dir) instance.local_data_path.to_s.should == dir end end describe "upgrading V1 dotfiles" do let(:v1_dotfile_tempfile) { Tempfile.new("vagrant") } let(:v1_dotfile) { Pathname.new(v1_dotfile_tempfile.path) } let(:local_data_path) { v1_dotfile_tempfile.path } let(:instance) { described_class.new(:local_data_path => local_data_path) } it "should be fine if dotfile is empty" do 
v1_dotfile.open("w+") do |f| f.write("") end expect { instance }.to_not raise_error Pathname.new(local_data_path).should be_directory end it "should upgrade all active VMs" do active_vms = { "foo" => "foo_id", "bar" => "bar_id" } v1_dotfile.open("w+") do |f| f.write(JSON.dump({ "active" => active_vms })) end expect { instance }.to_not raise_error local_data_pathname = Pathname.new(local_data_path) foo_id_file = local_data_pathname.join("machines/foo/virtualbox/id") foo_id_file.should be_file foo_id_file.read.should == "foo_id" bar_id_file = local_data_pathname.join("machines/bar/virtualbox/id") bar_id_file.should be_file bar_id_file.read.should == "bar_id" end it "should raise an error if invalid JSON" do v1_dotfile.open("w+") do |f| f.write("bad") end expect { instance }. to raise_error(Vagrant::Errors::DotfileUpgradeJSONError) end end end describe "default provider" do it "should return virtualbox" do instance.default_provider.should == :virtualbox end end describe "copying the private SSH key" do it "copies the SSH key into the home directory" do env = isolated_environment instance = described_class.new(:home_path => env.homedir) pk = env.homedir.join("insecure_private_key") pk.should be_exist Vagrant::Util::FileMode.from_octal(pk.stat.mode).should == "600" end end it "has a box collection pointed to the proper directory" do collection = instance.boxes collection.should be_kind_of(Vagrant::BoxCollection) collection.directory.should == instance.boxes_path end describe "action runner" do it "has an action runner" do instance.action_runner.should be_kind_of(Vagrant::Action::Runner) end it "has a `ui` in the globals" do result = nil callable = lambda { |env| result = env[:ui] } instance.action_runner.run(callable) result.should eql(instance.ui) end end describe "#hook" do it "should call the action runner with the proper hook" do hook_name = :foo instance.action_runner.should_receive(:run).with do |callable, env| env[:action_name].should == hook_name end 
instance.hook(hook_name) end it "should return the result of the action runner run" do instance.action_runner.should_receive(:run).and_return(:foo) instance.hook(:bar).should == :foo end end describe "primary machine name" do it "should be the only machine if not a multi-machine environment" do instance.primary_machine_name.should == instance.machine_names.first end it "should be the machine marked as the primary" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define :foo config.vm.define :bar, :primary => true end VF env.box2("base", :virtualbox) end env = environment.create_vagrant_env env.primary_machine_name.should == :bar end it "should be nil if no primary is specified in a multi-machine environment" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define :foo config.vm.define :bar end VF env.box2("base", :virtualbox) end env = environment.create_vagrant_env env.primary_machine_name.should be_nil end end describe "loading configuration" do it "should load global configuration" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env env.config_global.ssh.port.should == 200 end it "should load from a custom Vagrantfile" do environment = isolated_environment do |env| env.file("non_standard_name", <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env(:vagrantfile_name => "non_standard_name") env.config_global.ssh.port.should == 200 end it "should load from a custom Vagrantfile specified by env var" do environment = isolated_environment do |env| env.file("some_other_name", <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 400 end VF end env = with_temp_env("VAGRANT_VAGRANTFILE" => "some_other_name") do 
environment.create_vagrant_env end env.config_global.ssh.port.should == 400 end end describe "ui" do it "should be a silent UI by default" do described_class.new.ui.should be_kind_of(Vagrant::UI::Silent) end it "should be a UI given in the constructor" do # Create a custom UI for our test class CustomUI < Vagrant::UI::Interface; end instance = described_class.new(:ui_class => CustomUI) instance.ui.should be_kind_of(CustomUI) end end describe "#unload" do it "should run the unload hook" do instance.should_receive(:hook).with(:environment_unload).once instance.unload end end describe "getting a machine" do # A helper to register a provider for use in tests. def register_provider(name, config_class=nil) provider_cls = Class.new(Vagrant.plugin("2", :provider)) register_plugin("2") do |p| p.provider(name) { provider_cls } if config_class p.config(name, :provider) { config_class } end end provider_cls end it "should return a machine object with the correct provider" do # Create a provider foo_provider = register_provider("foo") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define "foo" end VF e.box2("base", :foo) end # Verify that we can get the machine env = isolated_env.create_vagrant_env machine = env.machine(:foo, :foo) machine.should be_kind_of(Vagrant::Machine) machine.name.should == :foo machine.provider.should be_kind_of(foo_provider) machine.provider_config.should be_nil end it "should return a machine object with the machine configuration" do # Create a provider foo_config = Class.new(Vagrant.plugin("2", :config)) do attr_accessor :value end foo_provider = register_provider("foo", foo_config) # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define "foo" config.vm.provider :foo do |fooconfig| fooconfig.value = 100 end end VF e.box2("base", 
:foo) end # Verify that we can get the machine env = isolated_env.create_vagrant_env machine = env.machine(:foo, :foo) machine.should be_kind_of(Vagrant::Machine) machine.name.should == :foo machine.provider.should be_kind_of(foo_provider) machine.provider_config.value.should == 100 end it "should cache the machine objects by name and provider" do # Create a provider foo_provider = register_provider("foo") bar_provider = register_provider("bar") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define "vm1" config.vm.define "vm2" end VF e.box2("base", :foo) e.box2("base", :bar) end env = isolated_env.create_vagrant_env vm1_foo = env.machine(:vm1, :foo) vm1_bar = env.machine(:vm1, :bar) vm2_foo = env.machine(:vm2, :foo) vm1_foo.should eql(env.machine(:vm1, :foo)) vm1_bar.should eql(env.machine(:vm1, :bar)) vm1_foo.should_not eql(vm1_bar) vm2_foo.should eql(env.machine(:vm2, :foo)) end it "should load a machine without a box" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "i-dont-exist" end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) machine.box.should be_nil end it "should load the machine configuration" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.ssh.port = 1 config.vm.box = "base" config.vm.define "vm1" do |inner| inner.ssh.port = 100 end end VF env.box2("base", :foo) end env = environment.create_vagrant_env machine = env.machine(:vm1, :foo) machine.config.ssh.port.should == 100 machine.config.vm.box.should == "base" end it "should load the box configuration for a V2 box" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF 
env.box2("base", :foo, :vagrantfile => <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 100 end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) machine.config.ssh.port.should == 100 end it "should reload the cache if refresh is set" do # Create a provider foo_provider = register_provider("foo") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF e.box2("base", :foo) end env = isolated_env.create_vagrant_env vm1 = env.machine(:default, :foo) vm2 = env.machine(:default, :foo, true) vm3 = env.machine(:default, :foo) vm1.should_not eql(vm2) vm2.should eql(vm3) end it "should raise an error if the VM is not found" do expect { instance.machine("i-definitely-dont-exist", :virtualbox) }. to raise_error(Vagrant::Errors::MachineNotFound) end it "should raise an error if the provider is not found" do expect { instance.machine(:default, :lol_no) }. to raise_error(Vagrant::Errors::ProviderNotFound) end end describe "getting machine names" do it "should return the default machine if no multi-VM is used" do # Create the config isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| end VF end env = isolated_env.create_vagrant_env env.machine_names.should == [:default] end it "should return the machine names in order" do # Create the config isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.define "foo" config.vm.define "bar" end VF end env = isolated_env.create_vagrant_env env.machine_names.should == [:foo, :bar] end end end
require File.expand_path("../../base", __FILE__) require "json" require "pathname" require "tempfile" require "tmpdir" require "vagrant/util/file_mode" require "vagrant/util/platform" describe Vagrant::Environment do include_context "unit" include_context "capability_helpers" let(:env) do isolated_environment.tap do |e| e.box3("base", "1.0", :virtualbox) e.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vm.box = "base" end VF end end let(:instance) { env.create_vagrant_env } subject { instance } describe "#home_path" do it "is set to the home path given" do Dir.mktmpdir do |dir| instance = described_class.new(home_path: dir) expect(instance.home_path).to eq(Pathname.new(dir)) end end it "is set to the environmental variable VAGRANT_HOME" do Dir.mktmpdir do |dir| instance = with_temp_env("VAGRANT_HOME" => dir) do described_class.new end expect(instance.home_path).to eq(Pathname.new(dir)) end end it "throws an exception if inaccessible", skip_windows: true do expect { described_class.new(home_path: "/") }.to raise_error(Vagrant::Errors::HomeDirectoryNotAccessible) end context "with setup version file" do it "creates a setup version flie" do path = subject.home_path.join("setup_version") expect(path).to be_file expect(path.read).to eq(Vagrant::Environment::CURRENT_SETUP_VERSION) end it "is okay if it has the current version" do Dir.mktmpdir do |dir| Pathname.new(dir).join("setup_version").open("w") do |f| f.write(Vagrant::Environment::CURRENT_SETUP_VERSION) end instance = described_class.new(home_path: dir) path = instance.home_path.join("setup_version") expect(path).to be_file expect(path.read).to eq(Vagrant::Environment::CURRENT_SETUP_VERSION) end end it "raises an exception if the version is newer than ours" do Dir.mktmpdir do |dir| Pathname.new(dir).join("setup_version").open("w") do |f| f.write("100.5") end expect { described_class.new(home_path: dir) }. 
to raise_error(Vagrant::Errors::HomeDirectoryLaterVersion) end end it "raises an exception if there is an unknown home directory version" do Dir.mktmpdir do |dir| Pathname.new(dir).join("setup_version").open("w") do |f| f.write("0.7") end expect { described_class.new(home_path: dir) }. to raise_error(Vagrant::Errors::HomeDirectoryUnknownVersion) end end end context "upgrading a v1.1 directory structure" do let(:env) { isolated_environment } before do env.homedir.join("setup_version").open("w") do |f| f.write("1.1") end allow_any_instance_of(Vagrant::UI::Silent). to receive(:ask) end it "replaces the setup version with the new version" do expect(subject.home_path.join("setup_version").read). to eq(Vagrant::Environment::CURRENT_SETUP_VERSION) end it "moves the boxes into the new directory structure" do # Kind of hacky but avoids two instantiations of BoxCollection Vagrant::Environment.any_instance.stub(boxes: double("boxes")) collection = double("collection") expect(Vagrant::BoxCollection).to receive(:new).with( env.homedir.join("boxes"), anything).and_return(collection) expect(collection).to receive(:upgrade_v1_1_v1_5).once subject end end end describe "#host" do let(:plugin_hosts) { {} } let(:plugin_host_caps) { {} } before do m = Vagrant.plugin("2").manager m.stub(hosts: plugin_hosts) m.stub(host_capabilities: plugin_host_caps) end it "should default to some host even if there are none" do env.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vagrant.host = nil end VF expect(subject.host).to be end it "should attempt to detect a host if no host is set" do env.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vagrant.host = nil end VF plugin_hosts[:foo] = [detect_class(true), nil] plugin_host_caps[:foo] = { bar: Class } result = subject.host expect(result.capability?(:bar)).to be_true end it "should attempt to detect a host if host is :detect" do env.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vagrant.host = :detect end VF 
plugin_hosts[:foo] = [detect_class(true), nil] plugin_host_caps[:foo] = { bar: Class } result = subject.host expect(result.capability?(:bar)).to be_true end it "should use an exact host if specified" do env.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vagrant.host = "foo" end VF plugin_hosts[:foo] = [detect_class(false), nil] plugin_hosts[:bar] = [detect_class(true), nil] plugin_host_caps[:foo] = { bar: Class } result = subject.host expect(result.capability?(:bar)).to be_true end it "should raise an error if an exact match was specified but not found" do env.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vagrant.host = "bar" end VF expect { subject.host }. to raise_error(Vagrant::Errors::HostExplicitNotDetected) end end describe "#lock" do def lock_count subject.data_dir. children. find_all { |c| c.to_s.end_with?("lock") }. length end it "does nothing if no block is given" do subject.lock end it "locks the environment" do another = env.create_vagrant_env raised = false subject.lock do begin another.lock {} rescue Vagrant::Errors::EnvironmentLockedError raised = true end end expect(raised).to be_true end it "allows nested locks on the same environment" do success = false subject.lock do subject.lock do success = true end end expect(success).to be_true end it "cleans up all lock files" do inner_count = nil expect(lock_count).to eq(0) subject.lock do inner_count = lock_count end expect(inner_count).to_not be_nil expect(inner_count).to eq(2) expect(lock_count).to eq(1) end end describe "#machine" do # A helper to register a provider for use in tests. 
def register_provider(name, config_class=nil, options=nil) provider_cls = Class.new(Vagrant.plugin("2", :provider)) register_plugin("2") do |p| p.provider(name, options) { provider_cls } if config_class p.config(name, :provider) { config_class } end end provider_cls end it "should return a machine object with the correct provider" do # Create a provider foo_provider = register_provider("foo") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define "foo" end VF e.box3("base", "1.0", :foo) end # Verify that we can get the machine env = isolated_env.create_vagrant_env machine = env.machine(:foo, :foo) expect(machine).to be_kind_of(Vagrant::Machine) expect(machine.name).to eq(:foo) expect(machine.provider).to be_kind_of(foo_provider) expect(machine.provider_config).to be_nil end it "should return a machine object with the machine configuration" do # Create a provider foo_config = Class.new(Vagrant.plugin("2", :config)) do attr_accessor :value end foo_provider = register_provider("foo", foo_config) # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define "foo" config.vm.provider :foo do |fooconfig| fooconfig.value = 100 end end VF e.box3("base", "1.0", :foo) end # Verify that we can get the machine env = isolated_env.create_vagrant_env machine = env.machine(:foo, :foo) expect(machine).to be_kind_of(Vagrant::Machine) expect(machine.name).to eq(:foo) expect(machine.provider).to be_kind_of(foo_provider) expect(machine.provider_config.value).to eq(100) end it "should cache the machine objects by name and provider" do # Create a provider foo_provider = register_provider("foo") bar_provider = register_provider("bar") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = 
"base" config.vm.define "vm1" config.vm.define "vm2" end VF e.box3("base", "1.0", :foo) e.box3("base", "1.0", :bar) end env = isolated_env.create_vagrant_env vm1_foo = env.machine(:vm1, :foo) vm1_bar = env.machine(:vm1, :bar) vm2_foo = env.machine(:vm2, :foo) expect(vm1_foo).to eql(env.machine(:vm1, :foo)) expect(vm1_bar).to eql(env.machine(:vm1, :bar)) expect(vm1_foo).not_to eql(vm1_bar) expect(vm2_foo).to eql(env.machine(:vm2, :foo)) end it "should load a machine without a box" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "i-dont-exist" end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) expect(machine.box).to be_nil end it "should load the machine configuration" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.ssh.port = 1 config.vm.box = "base" config.vm.define "vm1" do |inner| inner.ssh.port = 100 end end VF env.box3("base", "1.0", :foo) end env = environment.create_vagrant_env machine = env.machine(:vm1, :foo) expect(machine.config.ssh.port).to eq(100) expect(machine.config.vm.box).to eq("base") end it "should load the box configuration for a box" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF env.box3("base", "1.0", :foo, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 100 end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) expect(machine.config.ssh.port).to eq(100) end it "should load the box configuration for a box and custom Vagrantfile name" do register_provider("foo") environment = isolated_environment do |env| env.file("some_other_name", <<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF env.box3("base", "1.0", :foo, vagrantfile: <<-VF) 
Vagrant.configure("2") do |config| config.ssh.port = 100 end VF end env = with_temp_env("VAGRANT_VAGRANTFILE" => "some_other_name") do environment.create_vagrant_env end machine = env.machine(:default, :foo) expect(machine.config.ssh.port).to eq(100) end it "should load the box configuration for other formats for a box" do register_provider("foo", nil, box_format: "bar") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF env.box3("base", "1.0", :bar, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 100 end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) expect(machine.config.ssh.port).to eq(100) end it "prefer sooner formats when multiple box formats are available" do register_provider("foo", nil, box_format: ["fA", "fB"]) environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF env.box3("base", "1.0", :fA, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 100 end VF env.box3("base", "1.0", :fB, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) expect(machine.config.ssh.port).to eq(100) end it "should load the proper version of a box" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.box_version = "~> 1.2" end VF env.box3("base", "1.0", :foo, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 100 end VF env.box3("base", "1.5", :foo, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) expect(machine.config.ssh.port).to eq(200) end it "should load the provider override if set" do 
register_provider("bar") register_provider("foo") isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "foo" config.vm.provider :foo do |_, c| c.vm.box = "bar" end end VF end env = isolated_env.create_vagrant_env foo_vm = env.machine(:default, :foo) bar_vm = env.machine(:default, :bar) expect(foo_vm.config.vm.box).to eq("bar") expect(bar_vm.config.vm.box).to eq("foo") end it "should reload the cache if refresh is set" do # Create a provider foo_provider = register_provider("foo") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF e.box3("base", "1.0", :foo) end env = isolated_env.create_vagrant_env vm1 = env.machine(:default, :foo) vm2 = env.machine(:default, :foo, true) vm3 = env.machine(:default, :foo) expect(vm1).not_to eql(vm2) expect(vm2).to eql(vm3) end it "should raise an error if the VM is not found" do expect { instance.machine("i-definitely-dont-exist", :virtualbox) }. to raise_error(Vagrant::Errors::MachineNotFound) end it "should raise an error if the provider is not found" do expect { instance.machine(:default, :lol_no) }. 
to raise_error(Vagrant::Errors::ProviderNotFound) end end describe "#machine_index" do it "returns a machine index" do expect(subject.machine_index).to be_kind_of(Vagrant::MachineIndex) end it "caches the result" do result = subject.machine_index expect(subject.machine_index).to equal(result) end it "uses a directory within the home directory by default" do klass = double("machine_index") stub_const("Vagrant::MachineIndex", klass) klass.should_receive(:new).with do |path| expect(path.to_s.start_with?(subject.home_path.to_s)).to be_true true end subject.machine_index end end describe "active machines" do it "should be empty if there is no root path" do Dir.mktmpdir do |temp_dir| instance = described_class.new(cwd: temp_dir) expect(instance.active_machines).to be_empty end end it "should be empty if the machines folder doesn't exist" do folder = instance.local_data_path.join("machines") expect(folder).not_to be_exist expect(instance.active_machines).to be_empty end it "should return the name and provider of active machines" do machines = instance.local_data_path.join("machines") # Valid machine, with "foo" and virtualbox machine_foo = machines.join("foo/virtualbox") machine_foo.mkpath machine_foo.join("id").open("w+") { |f| f.write("") } # Invalid machine (no ID) machine_bar = machines.join("bar/virtualbox") machine_bar.mkpath expect(instance.active_machines).to eq([[:foo, :virtualbox]]) end end describe "batching" do let(:batch) do double("batch") do |b| allow(b).to receive(:run) end end context "without the disabling env var" do it "should run without disabling parallelization" do with_temp_env("VAGRANT_NO_PARALLEL" => nil) do expect(Vagrant::BatchAction).to receive(:new).with(true).and_return(batch) expect(batch).to receive(:run) instance.batch {} end end it "should run with disabling parallelization if explicit" do with_temp_env("VAGRANT_NO_PARALLEL" => nil) do expect(Vagrant::BatchAction).to receive(:new).with(false).and_return(batch) expect(batch).to 
receive(:run) instance.batch(false) {} end end end context "with the disabling env var" do it "should run with disabling parallelization" do with_temp_env("VAGRANT_NO_PARALLEL" => "yes") do expect(Vagrant::BatchAction).to receive(:new).with(false).and_return(batch) expect(batch).to receive(:run) instance.batch {} end end end end describe "current working directory" do it "is the cwd by default" do Dir.mktmpdir do |temp_dir| Dir.chdir(temp_dir) do with_temp_env("VAGRANT_CWD" => nil) do expect(described_class.new.cwd).to eq(Pathname.new(Dir.pwd)) end end end end it "is set to the cwd given" do Dir.mktmpdir do |directory| instance = described_class.new(cwd: directory) expect(instance.cwd).to eq(Pathname.new(directory)) end end it "is set to the environmental variable VAGRANT_CWD" do Dir.mktmpdir do |directory| instance = with_temp_env("VAGRANT_CWD" => directory) do described_class.new end expect(instance.cwd).to eq(Pathname.new(directory)) end end it "raises an exception if the CWD doesn't exist" do expect { described_class.new(cwd: "doesntexist") }. 
to raise_error(Vagrant::Errors::EnvironmentNonExistentCWD) end end describe "default provider" do let(:plugin_providers) { {} } before do m = Vagrant.plugin("2").manager m.stub(providers: plugin_providers) end it "is the highest matching usable provider" do plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider).to eq(:bar) end end it "is the highest matching usable provider that is defaultable" do plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [ provider_usable_class(true), { defaultable: false, priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider).to eq(:foo) end end it "is the highest matching usable provider that isn't excluded" do plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider(exclude: [:bar, :foo])).to eq(:boom) end end it "is the default provider set if usable" do plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => "baz") do expect(subject.default_provider).to eq(:baz) end end it "is 
the default provider set even if unusable" do plugin_providers[:baz] = [provider_usable_class(false), { priority: 5 }] plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => "baz") do expect(subject.default_provider).to eq(:baz) end end it "is the usable despite default if not forced" do plugin_providers[:baz] = [provider_usable_class(false), { priority: 5 }] plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => "baz") do expect(subject.default_provider(force_default: false)).to eq(:bar) end end it "prefers the default even if not forced" do plugin_providers[:baz] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => "baz") do expect(subject.default_provider(force_default: false)).to eq(:baz) end end it "uses the first usable provider that isn't the default if excluded" do plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 8 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => "baz") do expect(subject.default_provider( exclude: [:baz], force_default: false)).to eq(:bar) end end it "is VirtualBox if nothing else is usable" do plugin_providers[:foo] = [provider_usable_class(false), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(false), { priority: 5 }] plugin_providers[:baz] = [provider_usable_class(false), { priority: 5 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider).to eq(:virtualbox) end end it "is the provider in 
the Vagrantfile that is usable" do subject.vagrantfile.config.vm.provider "foo" subject.vagrantfile.config.vm.provider "bar" subject.vagrantfile.config.vm.finalize! plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider).to eq(:foo) end end it "is the highest usable provider outside the Vagrantfile" do subject.vagrantfile.config.vm.provider "foo" subject.vagrantfile.config.vm.finalize! plugin_providers[:foo] = [provider_usable_class(false), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider).to eq(:bar) end end it "is the provider in the Vagrantfile that is usable for a machine" do subject.vagrantfile.config.vm.provider "foo" subject.vagrantfile.config.vm.define "sub" do |v| v.vm.provider "bar" end subject.vagrantfile.config.vm.finalize! 
plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider(machine: :sub)).to eq(:bar) end end end describe "local data path" do it "is set to the proper default" do default = instance.root_path.join(described_class::DEFAULT_LOCAL_DATA) expect(instance.local_data_path).to eq(default) end it "is expanded relative to the cwd" do instance = described_class.new(local_data_path: "foo") expect(instance.local_data_path).to eq(instance.cwd.join("foo")) end it "is set to the given value" do Dir.mktmpdir do |dir| instance = described_class.new(local_data_path: dir) expect(instance.local_data_path.to_s).to eq(dir) end end describe "upgrading V1 dotfiles" do let(:v1_dotfile_tempfile) do Tempfile.new("vagrant").tap do |f| f.close end end let(:v1_dotfile) { Pathname.new(v1_dotfile_tempfile.path) } let(:local_data_path) { v1_dotfile_tempfile.path } let(:instance) { described_class.new(local_data_path: local_data_path) } it "should be fine if dotfile is empty" do v1_dotfile.open("w+") do |f| f.write("") end expect { instance }.to_not raise_error expect(Pathname.new(local_data_path)).to be_directory end it "should upgrade all active VMs" do active_vms = { "foo" => "foo_id", "bar" => "bar_id" } v1_dotfile.open("w+") do |f| f.write(JSON.dump({ "active" => active_vms })) end expect { instance }.to_not raise_error local_data_pathname = Pathname.new(local_data_path) foo_id_file = local_data_pathname.join("machines/foo/virtualbox/id") expect(foo_id_file).to be_file expect(foo_id_file.read).to eq("foo_id") bar_id_file = local_data_pathname.join("machines/bar/virtualbox/id") expect(bar_id_file).to be_file expect(bar_id_file.read).to eq("bar_id") end it "should raise an error if 
invalid JSON" do v1_dotfile.open("w+") do |f| f.write("bad") end expect { instance }. to raise_error(Vagrant::Errors::DotfileUpgradeJSONError) end end end describe "copying the private SSH key" do it "copies the SSH key into the home directory" do env = isolated_environment instance = described_class.new(home_path: env.homedir) pk = env.homedir.join("insecure_private_key") expect(pk).to be_exist if !Vagrant::Util::Platform.windows? expect(Vagrant::Util::FileMode.from_octal(pk.stat.mode)).to eq("600") end end end it "has a box collection pointed to the proper directory" do collection = instance.boxes expect(collection).to be_kind_of(Vagrant::BoxCollection) expect(collection.directory).to eq(instance.boxes_path) # Reach into some internal state here but not sure how else # to test this at the moment. expect(collection.instance_variable_get(:@hook)). to eq(instance.method(:hook)) end describe "action runner" do it "has an action runner" do expect(instance.action_runner).to be_kind_of(Vagrant::Action::Runner) end it "has a `ui` in the globals" do result = nil callable = lambda { |env| result = env[:ui] } instance.action_runner.run(callable) expect(result).to eql(instance.ui) end end describe "#hook" do it "should call the action runner with the proper hook" do hook_name = :foo expect(instance.action_runner).to receive(:run).with { |callable, env| expect(env[:action_name]).to eq(hook_name) } instance.hook(hook_name) end it "should return the result of the action runner run" do expect(instance.action_runner).to receive(:run).and_return(:foo) expect(instance.hook(:bar)).to eq(:foo) end it "should allow passing in a custom action runner" do expect(instance.action_runner).not_to receive(:run) other_runner = double("runner") expect(other_runner).to receive(:run).and_return(:foo) expect(instance.hook(:bar, runner: other_runner)).to eq(:foo) end it "should allow passing in custom data" do expect(instance.action_runner).to receive(:run).with { |callable, env| 
expect(env[:foo]).to eq(:bar) } instance.hook(:foo, foo: :bar) end it "should allow passing a custom callable" do expect(instance.action_runner).to receive(:run).with { |callable, env| expect(callable).to eq(:what) } instance.hook(:foo, callable: :what) end end describe "primary machine name" do it "should be the only machine if not a multi-machine environment" do expect(instance.primary_machine_name).to eq(instance.machine_names.first) end it "should be the machine marked as the primary" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define :foo config.vm.define :bar, primary: true end VF env.box3("base", "1.0", :virtualbox) end env = environment.create_vagrant_env expect(env.primary_machine_name).to eq(:bar) end it "should be nil if no primary is specified in a multi-machine environment" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define :foo config.vm.define :bar end VF env.box3("base", "1.0", :virtualbox) end env = environment.create_vagrant_env expect(env.primary_machine_name).to be_nil end end describe "loading configuration" do it "should load global configuration" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env expect(env.vagrantfile.config.ssh.port).to eq(200) end it "should load from a custom Vagrantfile" do environment = isolated_environment do |env| env.file("non_standard_name", <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env(vagrantfile_name: "non_standard_name") expect(env.vagrantfile.config.ssh.port).to eq(200) end it "should load from a custom Vagrantfile specified by env var" do environment = isolated_environment do |env| env.file("some_other_name", <<-VF) Vagrant.configure("2") 
do |config| config.ssh.port = 400 end VF end env = with_temp_env("VAGRANT_VAGRANTFILE" => "some_other_name") do environment.create_vagrant_env end expect(env.vagrantfile.config.ssh.port).to eq(400) end end describe "ui" do it "should be a silent UI by default" do expect(described_class.new.ui).to be_kind_of(Vagrant::UI::Silent) end it "should be a UI given in the constructor" do # Create a custom UI for our test class CustomUI < Vagrant::UI::Interface; end instance = described_class.new(ui_class: CustomUI) expect(instance.ui).to be_kind_of(CustomUI) end end describe "#unload" do it "should run the unload hook" do expect(instance).to receive(:hook).with(:environment_unload).once instance.unload end end describe "getting machine names" do it "should return the default machine if no multi-VM is used" do # Create the config isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| end VF end env = isolated_env.create_vagrant_env expect(env.machine_names).to eq([:default]) end it "should return the machine names in order" do # Create the config isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.define "foo" config.vm.define "bar" end VF end env = isolated_env.create_vagrant_env expect(env.machine_names).to eq([:foo, :bar]) end end end test: fix test for default provider require File.expand_path("../../base", __FILE__) require "json" require "pathname" require "tempfile" require "tmpdir" require "vagrant/util/file_mode" require "vagrant/util/platform" describe Vagrant::Environment do include_context "unit" include_context "capability_helpers" let(:env) do isolated_environment.tap do |e| e.box3("base", "1.0", :virtualbox) e.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vm.box = "base" end VF end end let(:instance) { env.create_vagrant_env } subject { instance } describe "#home_path" do it "is set to the home path given" do Dir.mktmpdir do |dir| instance = 
described_class.new(home_path: dir) expect(instance.home_path).to eq(Pathname.new(dir)) end end it "is set to the environmental variable VAGRANT_HOME" do Dir.mktmpdir do |dir| instance = with_temp_env("VAGRANT_HOME" => dir) do described_class.new end expect(instance.home_path).to eq(Pathname.new(dir)) end end it "throws an exception if inaccessible", skip_windows: true do expect { described_class.new(home_path: "/") }.to raise_error(Vagrant::Errors::HomeDirectoryNotAccessible) end context "with setup version file" do it "creates a setup version flie" do path = subject.home_path.join("setup_version") expect(path).to be_file expect(path.read).to eq(Vagrant::Environment::CURRENT_SETUP_VERSION) end it "is okay if it has the current version" do Dir.mktmpdir do |dir| Pathname.new(dir).join("setup_version").open("w") do |f| f.write(Vagrant::Environment::CURRENT_SETUP_VERSION) end instance = described_class.new(home_path: dir) path = instance.home_path.join("setup_version") expect(path).to be_file expect(path.read).to eq(Vagrant::Environment::CURRENT_SETUP_VERSION) end end it "raises an exception if the version is newer than ours" do Dir.mktmpdir do |dir| Pathname.new(dir).join("setup_version").open("w") do |f| f.write("100.5") end expect { described_class.new(home_path: dir) }. to raise_error(Vagrant::Errors::HomeDirectoryLaterVersion) end end it "raises an exception if there is an unknown home directory version" do Dir.mktmpdir do |dir| Pathname.new(dir).join("setup_version").open("w") do |f| f.write("0.7") end expect { described_class.new(home_path: dir) }. to raise_error(Vagrant::Errors::HomeDirectoryUnknownVersion) end end end context "upgrading a v1.1 directory structure" do let(:env) { isolated_environment } before do env.homedir.join("setup_version").open("w") do |f| f.write("1.1") end allow_any_instance_of(Vagrant::UI::Silent). to receive(:ask) end it "replaces the setup version with the new version" do expect(subject.home_path.join("setup_version").read). 
to eq(Vagrant::Environment::CURRENT_SETUP_VERSION) end it "moves the boxes into the new directory structure" do # Kind of hacky but avoids two instantiations of BoxCollection Vagrant::Environment.any_instance.stub(boxes: double("boxes")) collection = double("collection") expect(Vagrant::BoxCollection).to receive(:new).with( env.homedir.join("boxes"), anything).and_return(collection) expect(collection).to receive(:upgrade_v1_1_v1_5).once subject end end end describe "#host" do let(:plugin_hosts) { {} } let(:plugin_host_caps) { {} } before do m = Vagrant.plugin("2").manager m.stub(hosts: plugin_hosts) m.stub(host_capabilities: plugin_host_caps) end it "should default to some host even if there are none" do env.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vagrant.host = nil end VF expect(subject.host).to be end it "should attempt to detect a host if no host is set" do env.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vagrant.host = nil end VF plugin_hosts[:foo] = [detect_class(true), nil] plugin_host_caps[:foo] = { bar: Class } result = subject.host expect(result.capability?(:bar)).to be_true end it "should attempt to detect a host if host is :detect" do env.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vagrant.host = :detect end VF plugin_hosts[:foo] = [detect_class(true), nil] plugin_host_caps[:foo] = { bar: Class } result = subject.host expect(result.capability?(:bar)).to be_true end it "should use an exact host if specified" do env.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vagrant.host = "foo" end VF plugin_hosts[:foo] = [detect_class(false), nil] plugin_hosts[:bar] = [detect_class(true), nil] plugin_host_caps[:foo] = { bar: Class } result = subject.host expect(result.capability?(:bar)).to be_true end it "should raise an error if an exact match was specified but not found" do env.vagrantfile <<-VF Vagrant.configure("2") do |config| config.vagrant.host = "bar" end VF expect { subject.host }. 
to raise_error(Vagrant::Errors::HostExplicitNotDetected) end end describe "#lock" do def lock_count subject.data_dir. children. find_all { |c| c.to_s.end_with?("lock") }. length end it "does nothing if no block is given" do subject.lock end it "locks the environment" do another = env.create_vagrant_env raised = false subject.lock do begin another.lock {} rescue Vagrant::Errors::EnvironmentLockedError raised = true end end expect(raised).to be_true end it "allows nested locks on the same environment" do success = false subject.lock do subject.lock do success = true end end expect(success).to be_true end it "cleans up all lock files" do inner_count = nil expect(lock_count).to eq(0) subject.lock do inner_count = lock_count end expect(inner_count).to_not be_nil expect(inner_count).to eq(2) expect(lock_count).to eq(1) end end describe "#machine" do # A helper to register a provider for use in tests. def register_provider(name, config_class=nil, options=nil) provider_cls = Class.new(Vagrant.plugin("2", :provider)) register_plugin("2") do |p| p.provider(name, options) { provider_cls } if config_class p.config(name, :provider) { config_class } end end provider_cls end it "should return a machine object with the correct provider" do # Create a provider foo_provider = register_provider("foo") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define "foo" end VF e.box3("base", "1.0", :foo) end # Verify that we can get the machine env = isolated_env.create_vagrant_env machine = env.machine(:foo, :foo) expect(machine).to be_kind_of(Vagrant::Machine) expect(machine.name).to eq(:foo) expect(machine.provider).to be_kind_of(foo_provider) expect(machine.provider_config).to be_nil end it "should return a machine object with the machine configuration" do # Create a provider foo_config = Class.new(Vagrant.plugin("2", :config)) do attr_accessor :value end foo_provider = 
register_provider("foo", foo_config) # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define "foo" config.vm.provider :foo do |fooconfig| fooconfig.value = 100 end end VF e.box3("base", "1.0", :foo) end # Verify that we can get the machine env = isolated_env.create_vagrant_env machine = env.machine(:foo, :foo) expect(machine).to be_kind_of(Vagrant::Machine) expect(machine.name).to eq(:foo) expect(machine.provider).to be_kind_of(foo_provider) expect(machine.provider_config.value).to eq(100) end it "should cache the machine objects by name and provider" do # Create a provider foo_provider = register_provider("foo") bar_provider = register_provider("bar") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define "vm1" config.vm.define "vm2" end VF e.box3("base", "1.0", :foo) e.box3("base", "1.0", :bar) end env = isolated_env.create_vagrant_env vm1_foo = env.machine(:vm1, :foo) vm1_bar = env.machine(:vm1, :bar) vm2_foo = env.machine(:vm2, :foo) expect(vm1_foo).to eql(env.machine(:vm1, :foo)) expect(vm1_bar).to eql(env.machine(:vm1, :bar)) expect(vm1_foo).not_to eql(vm1_bar) expect(vm2_foo).to eql(env.machine(:vm2, :foo)) end it "should load a machine without a box" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "i-dont-exist" end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) expect(machine.box).to be_nil end it "should load the machine configuration" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.ssh.port = 1 config.vm.box = "base" config.vm.define "vm1" do |inner| inner.ssh.port = 100 end end VF env.box3("base", "1.0", :foo) end env 
= environment.create_vagrant_env machine = env.machine(:vm1, :foo) expect(machine.config.ssh.port).to eq(100) expect(machine.config.vm.box).to eq("base") end it "should load the box configuration for a box" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF env.box3("base", "1.0", :foo, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 100 end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) expect(machine.config.ssh.port).to eq(100) end it "should load the box configuration for a box and custom Vagrantfile name" do register_provider("foo") environment = isolated_environment do |env| env.file("some_other_name", <<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF env.box3("base", "1.0", :foo, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 100 end VF end env = with_temp_env("VAGRANT_VAGRANTFILE" => "some_other_name") do environment.create_vagrant_env end machine = env.machine(:default, :foo) expect(machine.config.ssh.port).to eq(100) end it "should load the box configuration for other formats for a box" do register_provider("foo", nil, box_format: "bar") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF env.box3("base", "1.0", :bar, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 100 end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) expect(machine.config.ssh.port).to eq(100) end it "prefer sooner formats when multiple box formats are available" do register_provider("foo", nil, box_format: ["fA", "fB"]) environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF env.box3("base", "1.0", :fA, vagrantfile: <<-VF) Vagrant.configure("2") do |config| 
config.ssh.port = 100 end VF env.box3("base", "1.0", :fB, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) expect(machine.config.ssh.port).to eq(100) end it "should load the proper version of a box" do register_provider("foo") environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.box_version = "~> 1.2" end VF env.box3("base", "1.0", :foo, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 100 end VF env.box3("base", "1.5", :foo, vagrantfile: <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env machine = env.machine(:default, :foo) expect(machine.config.ssh.port).to eq(200) end it "should load the provider override if set" do register_provider("bar") register_provider("foo") isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "foo" config.vm.provider :foo do |_, c| c.vm.box = "bar" end end VF end env = isolated_env.create_vagrant_env foo_vm = env.machine(:default, :foo) bar_vm = env.machine(:default, :bar) expect(foo_vm.config.vm.box).to eq("bar") expect(bar_vm.config.vm.box).to eq("foo") end it "should reload the cache if refresh is set" do # Create a provider foo_provider = register_provider("foo") # Create the configuration isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" end VF e.box3("base", "1.0", :foo) end env = isolated_env.create_vagrant_env vm1 = env.machine(:default, :foo) vm2 = env.machine(:default, :foo, true) vm3 = env.machine(:default, :foo) expect(vm1).not_to eql(vm2) expect(vm2).to eql(vm3) end it "should raise an error if the VM is not found" do expect { instance.machine("i-definitely-dont-exist", :virtualbox) }. 
to raise_error(Vagrant::Errors::MachineNotFound) end it "should raise an error if the provider is not found" do expect { instance.machine(:default, :lol_no) }. to raise_error(Vagrant::Errors::ProviderNotFound) end end describe "#machine_index" do it "returns a machine index" do expect(subject.machine_index).to be_kind_of(Vagrant::MachineIndex) end it "caches the result" do result = subject.machine_index expect(subject.machine_index).to equal(result) end it "uses a directory within the home directory by default" do klass = double("machine_index") stub_const("Vagrant::MachineIndex", klass) klass.should_receive(:new).with do |path| expect(path.to_s.start_with?(subject.home_path.to_s)).to be_true true end subject.machine_index end end describe "active machines" do it "should be empty if there is no root path" do Dir.mktmpdir do |temp_dir| instance = described_class.new(cwd: temp_dir) expect(instance.active_machines).to be_empty end end it "should be empty if the machines folder doesn't exist" do folder = instance.local_data_path.join("machines") expect(folder).not_to be_exist expect(instance.active_machines).to be_empty end it "should return the name and provider of active machines" do machines = instance.local_data_path.join("machines") # Valid machine, with "foo" and virtualbox machine_foo = machines.join("foo/virtualbox") machine_foo.mkpath machine_foo.join("id").open("w+") { |f| f.write("") } # Invalid machine (no ID) machine_bar = machines.join("bar/virtualbox") machine_bar.mkpath expect(instance.active_machines).to eq([[:foo, :virtualbox]]) end end describe "batching" do let(:batch) do double("batch") do |b| allow(b).to receive(:run) end end context "without the disabling env var" do it "should run without disabling parallelization" do with_temp_env("VAGRANT_NO_PARALLEL" => nil) do expect(Vagrant::BatchAction).to receive(:new).with(true).and_return(batch) expect(batch).to receive(:run) instance.batch {} end end it "should run with disabling parallelization if 
explicit" do with_temp_env("VAGRANT_NO_PARALLEL" => nil) do expect(Vagrant::BatchAction).to receive(:new).with(false).and_return(batch) expect(batch).to receive(:run) instance.batch(false) {} end end end context "with the disabling env var" do it "should run with disabling parallelization" do with_temp_env("VAGRANT_NO_PARALLEL" => "yes") do expect(Vagrant::BatchAction).to receive(:new).with(false).and_return(batch) expect(batch).to receive(:run) instance.batch {} end end end end describe "current working directory" do it "is the cwd by default" do Dir.mktmpdir do |temp_dir| Dir.chdir(temp_dir) do with_temp_env("VAGRANT_CWD" => nil) do expect(described_class.new.cwd).to eq(Pathname.new(Dir.pwd)) end end end end it "is set to the cwd given" do Dir.mktmpdir do |directory| instance = described_class.new(cwd: directory) expect(instance.cwd).to eq(Pathname.new(directory)) end end it "is set to the environmental variable VAGRANT_CWD" do Dir.mktmpdir do |directory| instance = with_temp_env("VAGRANT_CWD" => directory) do described_class.new end expect(instance.cwd).to eq(Pathname.new(directory)) end end it "raises an exception if the CWD doesn't exist" do expect { described_class.new(cwd: "doesntexist") }. 
to raise_error(Vagrant::Errors::EnvironmentNonExistentCWD) end end describe "default provider" do let(:plugin_providers) { {} } before do m = Vagrant.plugin("2").manager m.stub(providers: plugin_providers) end it "is the highest matching usable provider" do plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider).to eq(:bar) end end it "is the highest matching usable provider that is defaultable" do plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [ provider_usable_class(true), { defaultable: false, priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider).to eq(:foo) end end it "is the highest matching usable provider that isn't excluded" do plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider(exclude: [:bar, :foo])).to eq(:boom) end end it "is the default provider set if usable" do plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => "baz") do expect(subject.default_provider).to eq(:baz) end end it "is 
the default provider set even if unusable" do plugin_providers[:baz] = [provider_usable_class(false), { priority: 5 }] plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => "baz") do expect(subject.default_provider).to eq(:baz) end end it "is the usable despite default if not forced" do plugin_providers[:baz] = [provider_usable_class(false), { priority: 5 }] plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => "baz") do expect(subject.default_provider(force_default: false)).to eq(:bar) end end it "prefers the default even if not forced" do plugin_providers[:baz] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => "baz") do expect(subject.default_provider(force_default: false)).to eq(:baz) end end it "uses the first usable provider that isn't the default if excluded" do plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 8 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => "baz") do expect(subject.default_provider( exclude: [:baz], force_default: false)).to eq(:bar) end end it "raise an error if nothing else is usable" do plugin_providers[:foo] = [provider_usable_class(false), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(false), { priority: 5 }] plugin_providers[:baz] = [provider_usable_class(false), { priority: 5 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect { subject.default_provider }.to raise_error( 
Vagrant::Errors::NoDefaultProvider) end end it "is the provider in the Vagrantfile that is usable" do subject.vagrantfile.config.vm.provider "foo" subject.vagrantfile.config.vm.provider "bar" subject.vagrantfile.config.vm.finalize! plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider).to eq(:foo) end end it "is the highest usable provider outside the Vagrantfile" do subject.vagrantfile.config.vm.provider "foo" subject.vagrantfile.config.vm.finalize! plugin_providers[:foo] = [provider_usable_class(false), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider).to eq(:bar) end end it "is the provider in the Vagrantfile that is usable for a machine" do subject.vagrantfile.config.vm.provider "foo" subject.vagrantfile.config.vm.define "sub" do |v| v.vm.provider "bar" end subject.vagrantfile.config.vm.finalize! 
plugin_providers[:foo] = [provider_usable_class(true), { priority: 5 }] plugin_providers[:bar] = [provider_usable_class(true), { priority: 7 }] plugin_providers[:baz] = [provider_usable_class(true), { priority: 2 }] plugin_providers[:boom] = [provider_usable_class(true), { priority: 3 }] with_temp_env("VAGRANT_DEFAULT_PROVIDER" => nil) do expect(subject.default_provider(machine: :sub)).to eq(:bar) end end end describe "local data path" do it "is set to the proper default" do default = instance.root_path.join(described_class::DEFAULT_LOCAL_DATA) expect(instance.local_data_path).to eq(default) end it "is expanded relative to the cwd" do instance = described_class.new(local_data_path: "foo") expect(instance.local_data_path).to eq(instance.cwd.join("foo")) end it "is set to the given value" do Dir.mktmpdir do |dir| instance = described_class.new(local_data_path: dir) expect(instance.local_data_path.to_s).to eq(dir) end end describe "upgrading V1 dotfiles" do let(:v1_dotfile_tempfile) do Tempfile.new("vagrant").tap do |f| f.close end end let(:v1_dotfile) { Pathname.new(v1_dotfile_tempfile.path) } let(:local_data_path) { v1_dotfile_tempfile.path } let(:instance) { described_class.new(local_data_path: local_data_path) } it "should be fine if dotfile is empty" do v1_dotfile.open("w+") do |f| f.write("") end expect { instance }.to_not raise_error expect(Pathname.new(local_data_path)).to be_directory end it "should upgrade all active VMs" do active_vms = { "foo" => "foo_id", "bar" => "bar_id" } v1_dotfile.open("w+") do |f| f.write(JSON.dump({ "active" => active_vms })) end expect { instance }.to_not raise_error local_data_pathname = Pathname.new(local_data_path) foo_id_file = local_data_pathname.join("machines/foo/virtualbox/id") expect(foo_id_file).to be_file expect(foo_id_file.read).to eq("foo_id") bar_id_file = local_data_pathname.join("machines/bar/virtualbox/id") expect(bar_id_file).to be_file expect(bar_id_file.read).to eq("bar_id") end it "should raise an error if 
invalid JSON" do v1_dotfile.open("w+") do |f| f.write("bad") end expect { instance }. to raise_error(Vagrant::Errors::DotfileUpgradeJSONError) end end end describe "copying the private SSH key" do it "copies the SSH key into the home directory" do env = isolated_environment instance = described_class.new(home_path: env.homedir) pk = env.homedir.join("insecure_private_key") expect(pk).to be_exist if !Vagrant::Util::Platform.windows? expect(Vagrant::Util::FileMode.from_octal(pk.stat.mode)).to eq("600") end end end it "has a box collection pointed to the proper directory" do collection = instance.boxes expect(collection).to be_kind_of(Vagrant::BoxCollection) expect(collection.directory).to eq(instance.boxes_path) # Reach into some internal state here but not sure how else # to test this at the moment. expect(collection.instance_variable_get(:@hook)). to eq(instance.method(:hook)) end describe "action runner" do it "has an action runner" do expect(instance.action_runner).to be_kind_of(Vagrant::Action::Runner) end it "has a `ui` in the globals" do result = nil callable = lambda { |env| result = env[:ui] } instance.action_runner.run(callable) expect(result).to eql(instance.ui) end end describe "#hook" do it "should call the action runner with the proper hook" do hook_name = :foo expect(instance.action_runner).to receive(:run).with { |callable, env| expect(env[:action_name]).to eq(hook_name) } instance.hook(hook_name) end it "should return the result of the action runner run" do expect(instance.action_runner).to receive(:run).and_return(:foo) expect(instance.hook(:bar)).to eq(:foo) end it "should allow passing in a custom action runner" do expect(instance.action_runner).not_to receive(:run) other_runner = double("runner") expect(other_runner).to receive(:run).and_return(:foo) expect(instance.hook(:bar, runner: other_runner)).to eq(:foo) end it "should allow passing in custom data" do expect(instance.action_runner).to receive(:run).with { |callable, env| 
expect(env[:foo]).to eq(:bar) } instance.hook(:foo, foo: :bar) end it "should allow passing a custom callable" do expect(instance.action_runner).to receive(:run).with { |callable, env| expect(callable).to eq(:what) } instance.hook(:foo, callable: :what) end end describe "primary machine name" do it "should be the only machine if not a multi-machine environment" do expect(instance.primary_machine_name).to eq(instance.machine_names.first) end it "should be the machine marked as the primary" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define :foo config.vm.define :bar, primary: true end VF env.box3("base", "1.0", :virtualbox) end env = environment.create_vagrant_env expect(env.primary_machine_name).to eq(:bar) end it "should be nil if no primary is specified in a multi-machine environment" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.box = "base" config.vm.define :foo config.vm.define :bar end VF env.box3("base", "1.0", :virtualbox) end env = environment.create_vagrant_env expect(env.primary_machine_name).to be_nil end end describe "loading configuration" do it "should load global configuration" do environment = isolated_environment do |env| env.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env expect(env.vagrantfile.config.ssh.port).to eq(200) end it "should load from a custom Vagrantfile" do environment = isolated_environment do |env| env.file("non_standard_name", <<-VF) Vagrant.configure("2") do |config| config.ssh.port = 200 end VF end env = environment.create_vagrant_env(vagrantfile_name: "non_standard_name") expect(env.vagrantfile.config.ssh.port).to eq(200) end it "should load from a custom Vagrantfile specified by env var" do environment = isolated_environment do |env| env.file("some_other_name", <<-VF) Vagrant.configure("2") 
do |config| config.ssh.port = 400 end VF end env = with_temp_env("VAGRANT_VAGRANTFILE" => "some_other_name") do environment.create_vagrant_env end expect(env.vagrantfile.config.ssh.port).to eq(400) end end describe "ui" do it "should be a silent UI by default" do expect(described_class.new.ui).to be_kind_of(Vagrant::UI::Silent) end it "should be a UI given in the constructor" do # Create a custom UI for our test class CustomUI < Vagrant::UI::Interface; end instance = described_class.new(ui_class: CustomUI) expect(instance.ui).to be_kind_of(CustomUI) end end describe "#unload" do it "should run the unload hook" do expect(instance).to receive(:hook).with(:environment_unload).once instance.unload end end describe "getting machine names" do it "should return the default machine if no multi-VM is used" do # Create the config isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| end VF end env = isolated_env.create_vagrant_env expect(env.machine_names).to eq([:default]) end it "should return the machine names in order" do # Create the config isolated_env = isolated_environment do |e| e.vagrantfile(<<-VF) Vagrant.configure("2") do |config| config.vm.define "foo" config.vm.define "bar" end VF end env = isolated_env.create_vagrant_env expect(env.machine_names).to eq([:foo, :bar]) end end end
# Copyright (c) 2010 Yves Adler <yves.adler@googlemail.com> # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without # restriction, including without limitation the rights to use, # copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following # conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. require 'evosynth' module Examples # This example is EvoSynth's interpretation of DeJong and Potter's CCGA presented in # "A Cooperative Coevolutionary Approach to Function Optimization" (1994) module CCGAExample def CCGAExample.fitness_function(xs) EvoSynth::Problems::FloatBenchmarkFuntions.rosenbrock(xs) end class CCGABenchmarkEvaluator < EvoSynth::Evaluator def initialize(populations, dimensions) super() @populations, @dimensions = populations, dimensions end def decode(individual) values = (0 .. @dimensions - 1).to_a values.map! 
do |index| if index == individual.population_index EvoSynth::Decoder.binary_to_real(individual.genome, -5.12, 5.12) else EvoSynth::Decoder.binary_to_real(@populations[index].best.genome, -5.12, 5.12) end end end def calculate_fitness(individual) CCGAExample.fitness_function(decode(individual)) end end class CCGAIndividual < EvoSynth::MinimizingIndividual attr_accessor :population_index def initialize(population_index, *args) super(*args) @population_index = population_index end end BITS = 16 DIMENSIONS = 5 MAX_EVALUATIONS = 25000 POP_SIZE = 25 # create a population for each dimension: populations = (0 .. DIMENSIONS - 1).to_a populations.map! do |dim| EvoSynth::Population.new(POP_SIZE) do CCGAIndividual.new(dim, EvoSynth::ArrayGenome.new(BITS) { EvoSynth.rand_bool }) end end # create the configuration: configuration = EvoSynth::Configuration.new do |c| c.mutation = EvoSynth::Mutations::BinaryMutation.new(EvoSynth::Mutations::Functions::FLIP_BOOLEAN) c.parent_selection = EvoSynth::Selections::FitnessProportionalSelection.new c.recombination = EvoSynth::Recombinations::KPointCrossover.new(2) c.recombination_probability = 0.6 c.populations = populations c.evaluator = CCGABenchmarkEvaluator.new(populations, populations.size) end # create a logger, this one has to do something special ;-) logger = EvoSynth::Logger.create(50, true, :gen) do |log| log.add_column("fitness", Proc.new { |evolver| best_genome = evolver.best_solution?.map { |individual| EvoSynth::Decoder.binary_to_real(individual.genome, -5.12, 5.12) } CCGAExample.fitness_function(best_genome) }) end # lets roll... evolver = EvoSynth::Evolvers::RoundRobinCoevolutionary.new(configuration) evolver.add_observer(logger) result = evolver.run_while { configuration.evaluator.called < MAX_EVALUATIONS } # print some results... 
puts "\nbest 'combined' individual:" best = result.map { |pop| puts "\t#{pop.best}"; pop.best } best_genome = best.map { |individual| EvoSynth::Decoder.binary_to_real(individual.genome, -5.12, 5.12) } puts "\nbest 'combined' genome:" puts "\t#{best_genome.inspect}" puts "\n\tfitness = #{CCGAExample.fitness_function(best_genome)}" end end rename var # Copyright (c) 2010 Yves Adler <yves.adler@googlemail.com> # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without # restriction, including without limitation the rights to use, # copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following # conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. require 'evosynth' module Examples # This example is EvoSynth's interpretation of DeJong and Potter's CCGA presented in # "A Cooperative Coevolutionary Approach to Function Optimization" (1994) module CCGAExample def CCGAExample.fitness_function(xs) EvoSynth::Problems::FloatBenchmarkFuntions.rosenbrock(xs) end class CCGABenchmarkEvaluator < EvoSynth::Evaluator def initialize(populations, dimensions) super() @populations, @dimensions = populations, dimensions end def decode(individual) values = (0 .. 
@dimensions - 1).to_a values.map! do |index| if index == individual.population_index EvoSynth::Decoder.binary_to_real(individual.genome, -5.12, 5.12) else EvoSynth::Decoder.binary_to_real(@populations[index].best.genome, -5.12, 5.12) end end end def calculate_fitness(individual) CCGAExample.fitness_function(decode(individual)) end end class CCGAIndividual < EvoSynth::MinimizingIndividual attr_accessor :population_index def initialize(population_index, *args) super(*args) @population_index = population_index end end BITS = 16 DIMENSIONS = 5 MAX_EVALUATIONS = 25000 POP_SIZE = 25 # create a population for each dimension: populations = (0 .. DIMENSIONS - 1).to_a populations.map! do |dim| EvoSynth::Population.new(POP_SIZE) do CCGAIndividual.new(dim, EvoSynth::ArrayGenome.new(BITS) { EvoSynth.rand_bool }) end end # create the configuration: configuration = EvoSynth::Configuration.new do |c| c.mutation = EvoSynth::Mutations::BinaryMutation.new(EvoSynth::Mutations::Functions::FLIP_BOOLEAN) c.parent_selection = EvoSynth::Selections::FitnessProportionalSelection.new c.recombination = EvoSynth::Recombinations::KPointCrossover.new(2) c.recombination_probability = 0.6 c.populations = populations c.evaluator = CCGABenchmarkEvaluator.new(populations, populations.size) end # create a logger, this one has to do something special ;-) logger = EvoSynth::Logger.create(50, true, :gen) do |log| log.add_column("fitness", Proc.new { |evolver| best_phenotype = evolver.best_solution?.map { |individual| EvoSynth::Decoder.binary_to_real(individual.genome, -5.12, 5.12) } CCGAExample.fitness_function(best_phenotype) }) end # lets roll... evolver = EvoSynth::Evolvers::RoundRobinCoevolutionary.new(configuration) evolver.add_observer(logger) result = evolver.run_while { configuration.evaluator.called < MAX_EVALUATIONS } # print some results... 
puts "\nbest 'combined' individual:" best = result.map { |pop| puts "\t#{pop.best}"; pop.best } best_genome = best.map { |individual| EvoSynth::Decoder.binary_to_real(individual.genome, -5.12, 5.12) } puts "\nbest 'combined' genome:" puts "\t#{best_genome.inspect}" puts "\n\tfitness = #{CCGAExample.fitness_function(best_genome)}" end end
Pod::Spec.new do |s| s.name = "ABPadLockScreen" s.version = "3.3.0" s.summary = "A simple, stylish keypad lock screen for your iPhone or iPad App" s.description = <<-DESC A simple, stylish keypad lock screen for your iPhone or iPad App ABPadLockScreen aims to provide a universal solution to providing a secure keypad/pin lock screen to your iPhone or ipad app. With just a few lines you can have a pin screen ready to go. The screen is entirely customisable through UIAppearance. You can make it suit the style of your application with just a few more lines. DESC s.homepage = "https://github.com/abury/ABPadLockScreen" s.screenshots = "http://aronbury.com/assets/images/abpadlockscreen/fb-blue.png", "http://aronbury.com/assets/images/abpadlockscreen/yellow-ipad.png" s.license = 'MIT' s.license = { :type => 'MIT', :file => 'LICENSE' } s.author = { "Aron Bury" => "aron.bury@gmail.com" } s.platform = :ios, '5.0' s.ios.deployment_target = '5.0' s.source_files = 'ABPadLockScreen', 'ABPadLockScreen/**/*.{h,m}' s.resource_bundles = { 'ABPadLockScreenBundle' => 'ABPadLockScreen/*.{lproj,strings}' } s.requires_arc = true end no message Pod::Spec.new do |s| s.name = "ABPadLockScreen" s.version = "3.3.0" s.summary = "A simple, stylish keypad lock screen for your iPhone or iPad App" s.description = <<-DESC A simple, stylish keypad lock screen for your iPhone or iPad App ABPadLockScreen aims to provide a universal solution to providing a secure keypad/pin lock screen to your iPhone or ipad app. With just a few lines you can have a pin screen ready to go. The screen is entirely customisable through UIAppearance. You can make it suit the style of your application with just a few more lines. 
DESC s.homepage = "https://github.com/abury/ABPadLockScreen" s.screenshots = "http://aronbury.com/assets/images/abpadlockscreen/fb-blue.png", "http://aronbury.com/assets/images/abpadlockscreen/yellow-ipad.png" s.license = 'MIT' s.license = { :type => 'MIT', :file => 'LICENSE' } s.author = { "Aron Bury" => "aron.bury@gmail.com" } s.platform = :ios, '5.0' s.ios.deployment_target = '5.0' s.source = { :git => "https://github.com/sujrd/ABPadLockScreen.git", :tag => s.version.to_s } s.source_files = 'ABPadLockScreen', 'ABPadLockScreen/**/*.{h,m}' s.resource_bundles = { 'ABPadLockScreenBundle' => 'ABPadLockScreen/*.{lproj,strings}' } s.requires_arc = true end
# # Be sure to run `pod lib lint AlamofireRouter.podspec' to ensure this is a # valid spec before submitting. # # Any lines starting with a # are optional, but their use is encouraged # To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html # Pod::Spec.new do |s| s.name = 'AlamofireRouter' s.version = '0.1.0' s.summary = 'Simple Alamofire router.' # This description is used to generate tags and improve search results. # * Think: What does it do? Why did you write it? What is the focus? # * Try to keep it short, snappy and to the point. # * Write the description between the DESC delimiters below. # * Finally, don't worry about the indent, CocoaPods strips it! s.description = <<-DESC TODO: Add long description of the pod here. DESC s.homepage = 'https://github.com/CorlaOnline/AlamofireRouter' # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2' s.license = { :type => 'MIT', :file => 'LICENSE' } s.author = { 'Alex Corlatti' => 'alex.corlatti@gmail.com' } s.source = { :git => 'https://github.com/CorlaOnline/AlamofireRouter.git', :tag => s.version.to_s } # s.social_media_url = 'https://twitter.com/corlaonline' s.ios.deployment_target = '8.0' s.source_files = 'AlamofireRouter/Classes/**/*' # s.resource_bundles = { # 'AlamofireRouter' => ['AlamofireRouter/Assets/*.png'] # } # s.public_header_files = 'Pod/Classes/**/*.h' s.frameworks = 'UIKit' s.dependency 'Alamofire', '~> 3.2' end changed version # # Be sure to run `pod lib lint AlamofireRouter.podspec' to ensure this is a # valid spec before submitting. # # Any lines starting with a # are optional, but their use is encouraged # To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html # Pod::Spec.new do |s| s.name = 'AlamofireRouter' s.version = '0.0.1' s.summary = 'Simple Alamofire router.' # This description is used to generate tags and improve search results. # * Think: What does it do? Why did you write it? What is the focus? 
# * Try to keep it short, snappy and to the point. # * Write the description between the DESC delimiters below. # * Finally, don't worry about the indent, CocoaPods strips it! s.description = <<-DESC TODO: Add long description of the pod here. DESC s.homepage = 'https://github.com/CorlaOnline/AlamofireRouter' # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2' s.license = { :type => 'MIT', :file => 'LICENSE' } s.author = { 'Alex Corlatti' => 'alex.corlatti@gmail.com' } s.source = { :git => 'https://github.com/CorlaOnline/AlamofireRouter.git', :tag => s.version.to_s } # s.social_media_url = 'https://twitter.com/corlaonline' s.ios.deployment_target = '8.0' s.source_files = 'AlamofireRouter/Classes/**/*' # s.resource_bundles = { # 'AlamofireRouter' => ['AlamofireRouter/Assets/*.png'] # } # s.public_header_files = 'Pod/Classes/**/*.h' s.frameworks = 'UIKit' s.dependency 'Alamofire', '~> 3.2' end
add git output driver require 'fromcvs' require 'enumerator' # We are outputting a git-fast-import stream class GitDestRepo def initialize(gitroot, status=lambda{|s|}) @status = status @gitroot = gitroot if not File.stat(@gitroot).directory? or not File.stat(File.join(@gitroot, '.git')).directory? raise Errono::ENOENT, "dest dir `#@gitroot' is no git repo" end @deleted = [] @modified = [] @mark = 0 @from = nil @branchcache = {} @files = Hash.new{|h,k| h[k] = {}} end def last_date log = _command(*%w{git-cat-file -p HEAD}) log.split("\n").each do |line| break if line.empty? line = line.split return Time.at(line[-2].to_i) if line[0] == "committer" end if log.empty? return Time.at(0) end raise RuntimeError, "Invalid output from git" end def filelist(tag) if tag == :complete # XXX to be implemented else tag ||= 'master' return @files[tag].keys if @files.has_key? tag files = _command(*(%w{git-ls-tree --name-only --full-name -r -z} + ["refs/heads/#{tag}"])).split("\0") files.collect! do |f| _unquote(f) end @files[tag] = Hash[*files.map{|f| [f, true]}.flatten] files end end def start @packname = "import-#{Time.now.to_i}" @gfi = IO.popen('-', 'w') if not @gfi # child Dir.chdir(@gitroot) exec('git-fast-import', ".git/#@packname") $stderr.puts "could not spawn git-fast-import" exit 1 end _command(*%w{git-ls-remote -h .}).split("\n").each do |line| sha, branch = line.split branch[/^.*\//] = "" @branchcache[branch] = sha end @pickupbranches = @branchcache.dup end def flush end def has_branch?(branch) @branchcache.has_key?(branch) end # This requires that no commits happen to the parent before # we don't commit to the new branch def create_branch(branch, parent, vendor_p, date) if @branchcache.has_key?(branch) raise RuntimeError, "creating existant branch" end @gfi.puts "reset refs/heads/#{branch}" if not vendor_p parent ||= 'master' @gfi.puts "from #{@branchcache[parent]}" end @gfi.puts @branchcache[branch] = @branchcache[parent] end def select_branch(branch) @curbranch = 
_quote(branch || 'master') end def remove(file) @deleted << _quote(file) @files[@curbranch].delete(file) end def update(file, data, mode, uid, gid) @mark += 1 @gfi.print <<-END blob mark :#@mark data #{data.size} #{data} END # Fix up mode for git if mode & 0111 != 0 mode |= 0111 end mode &= ~022 mode |= 0644 @modified << [_quote(file), mode, @mark] @files[@curbranch][file] = true end def commit(author, date, msg) _commit(author, date, msg) end def merge(branch, author, date, msg) _commit(author, date, msg, branch) end def finish @gfi.close_write if $?.success? File.rename(File.join(@gitroot, '.git', "#@packname.pack"), File.join(@gitroot, '.git', 'objects', 'pack', "#@packname.pack")) File.rename(File.join(@gitroot, '.git', "#@packname.idx"), File.join(@gitroot, '.git', 'objects', 'pack', "#@packname.idx")) else begin File.unlink(File.join(@gitroot, '.git', "#@packname.pack")) File.unlink(File.join(@gitroot, '.git', "#@packname.idx")) rescue Errno::ENOENT end raise RuntimeError, "git-fast-import did not succeed" end end private def _commit(author, date, msg, branch=nil) @mark += 1 @gfi.print <<-END commit refs/heads/#@curbranch mark :#@mark committer #{author} <#{author}> #{date.to_i} +0000 data #{msg.size} #{msg} END if @pickupbranches.has_key? 
@curbranch @pickupbranches.delete(@curbranch) # fix incremental runs, force gfi to pick up @gfi.puts "from refs/heads/#@curbranch^0" end if branch @gfi.puts "merge :#{branch}" end @deleted.each do |f| @gfi.puts "D #{f}" end @modified.each do |f, mode, mark| @gfi.puts "M #{mode.to_s(8)} :#{mark} #{f}" end @gfi.puts @branchcache[@curbranch] = ":#@mark" @deleted = [] @modified = [] @from = nil @mark end def _command(*args) IO.popen('-', 'r') do |io| if not io # child Dir.chdir(@gitroot) exec(*args) end io.read end end def _quote(str) if str =~ /[\\\n]/ '"'+str.gsub(/[\\\n]/) {|chr| "\\"+chr[0].to_s(8)}+'"' else str end end def _unquote(str) if str =~ /^".*"$/ str[1..-2].gsub(/\\\d\d\d/) {|str| str[1..-1].to_i(8).chr} else str end end end if $0 == __FILE__ status = lambda do |str| $stderr.puts str end if ARGV.length != 3 puts "call: togit <cvsroot> <module> <gitdir>" exit 1 end cvsdir, modul, gitdir = ARGV cvsrepo = Repo.new(cvsdir, modul, status) gitrepo = GitDestRepo.new(gitdir, status) cvsrepo.convert(gitrepo) end
require 'debugger' require 'colorize' require 'highline' require 'selenium-webdriver' require_relative 'selenium_adapter' NUM_EDITS = 500 ################################################################################ # JS Helpers ################################################################################ def execute_js(driver, src, args = nil) driver.switch_to.default_content result = driver.execute_script(src, *args) driver.switch_to.frame(driver.find_element(:tag_name, "iframe")) return result end def js_get_as_str(driver, ref) return execute_js driver, "return JSON.stringify(window.Fuzzer['#{ref}'])" end def js_get(driver, ref) return execute_js driver, "return window.Fuzzer['#{ref}']" end def js_set_scribe_delta(driver) return execute_js driver, "window.Fuzzer.initializeScribe()" end def js_set_delta_replay(driver, delta, delta_ref) src = "return window.Fuzzer.setDeltaReplay(arguments[0], arguments[1])" execute_js driver, src, [delta, delta_ref] end def js_set_random_delta(driver) src = "window.Fuzzer.randomDelta = window.Fuzzer.createRandomDelta()" execute_js driver, src end def js_get_cur_doc_delta_as_str(driver) return execute_js driver, "return JSON.stringify(editor.getDelta());" end def js_get_doc_delta_as_str(driver) return execute_js driver, "return JSON.stringify(window.Fuzzer.docDelta);" end def js_set_doc_delta(driver) execute_js driver, "window.Fuzzer.docDelta = window.Fuzzer.cleanup(editor.getDelta());" end def read_deltas_from_file(file) deltas = [] begin File.open("fails/#{file}") do |f| f.readlines.each do |line| deltas << line.chomp! end end return deltas rescue puts "Please provide a valid file name to replay a fuzzer run.".colorize(:red) abort end end def write_deltas_to_file(doc_delta, rand_delta) FileUtils.mkpath('./fails') unless File.directory?('./fails') file_path = "./fails/#{Time.now.to_i.to_s}" File.open(file_path, 'w+') do |f| f.puts doc_delta f.puts rand_delta end puts "Fuzzer failed. 
Writing state to #{file_path} for replays.".colorize(:red) end def delete_fail_file(file_name) begin FileUtils.rm("./fails/#{file_name}") rescue puts "Failed deleting file #{file_name}. Please ensure it still exists.".colorize(:red) abort end end def check_consistency(driver, replay_file) driver.switch_to.default_content success = driver.execute_script "return window.Fuzzer.checkConsistency();" if not success doc_delta = js_get_as_str(driver, "docDelta") rand_delta = js_get_as_str(driver, "randomDelta") actual_delta = js_get_cur_doc_delta_as_str(driver) write_deltas_to_file(doc_delta, rand_delta) unless replay_file puts "Inconsistent deltas:".red puts "doc_delta: #{doc_delta}, rand_delta: #{rand_delta}, actual: #{actual_delta}" abort elsif replay_file highline = HighLine.new delete = highline.agree "Congrats, it passed! Would you like to delete the fail file? (y/n)".colorize(:green) delete_fail_file(replay_file) if delete end driver.switch_to.frame(driver.find_element(:tag_name, "iframe")) end ################################################################################ # WebDriver setup ################################################################################ unless ARGV.length == 2 or ARGV.length == 3 puts "Usage: ruby _browserdriver_ _editor_url_ _fuzzer_file_".colorize(:blue) end browserdriver = ARGV[0].to_sym editor_url = ARGV[1] replay_file = ARGV[2] if browserdriver == :firefox profile = Selenium::WebDriver::Firefox::Profile.new profile.native_events = true driver = Selenium::WebDriver.for browserdriver, :profile => profile else driver = Selenium::WebDriver.for browserdriver end driver.manage.timeouts.implicit_wait = 10 driver.get editor_url driver.switch_to.frame(driver.find_element(:tag_name, "iframe")) editor = driver.find_element(:class, "editor") adapter = SeleniumAdapter.new driver, editor adapter.focus() ################################################################################ # Fuzzer logic 
################################################################################ if replay_file doc_delta, rand_delta = read_deltas_from_file(replay_file) js_set_delta_replay(driver, doc_delta, 'docDelta') js_set_delta_replay(driver, rand_delta, 'randomDelta') doc_delta = js_get(driver, "docDelta") js_set_scribe_delta(driver) adapter.doc_length = doc_delta['endLength'] random_delta = js_get(driver, "randomDelta") adapter.apply_delta(random_delta) check_consistency(driver, replay_file) else js_set_doc_delta(driver) NUM_EDITS.times do |i| js_set_random_delta(driver) random_delta = js_get(driver, "randomDelta") puts i.to_s.colorize(:green) if i % 10 == 0 adapter.apply_delta(random_delta) check_consistency(driver, nil) js_set_doc_delta(driver) end end Bug fix. For some reason, calling setDelta on scribe causes some of the text to be highlighted. This messes up future edits, as we assume that no text is highlighted and the cursor is in the 0th position. This commit sanitizes in the initialization to fix the problem. 
require 'debugger' require 'colorize' require 'highline' require 'selenium-webdriver' require_relative 'selenium_adapter' NUM_EDITS = 500 ################################################################################ # JS Helpers ################################################################################ def execute_js(driver, src, args = nil) driver.switch_to.default_content result = driver.execute_script(src, *args) driver.switch_to.frame(driver.find_element(:tag_name, "iframe")) return result end def js_get_as_str(driver, ref) return execute_js driver, "return JSON.stringify(window.Fuzzer['#{ref}'])" end def js_get(driver, ref) return execute_js driver, "return window.Fuzzer['#{ref}']" end def js_set_scribe_delta(driver) return execute_js driver, "window.Fuzzer.initializeScribe()" end def js_set_delta_replay(driver, delta, delta_ref) src = "return window.Fuzzer.setDeltaReplay(arguments[0], arguments[1])" execute_js driver, src, [delta, delta_ref] end def js_set_random_delta(driver) src = "window.Fuzzer.randomDelta = window.Fuzzer.createRandomDelta()" execute_js driver, src end def js_get_cur_doc_delta_as_str(driver) return execute_js driver, "return JSON.stringify(editor.getDelta());" end def js_get_doc_delta_as_str(driver) return execute_js driver, "return JSON.stringify(window.Fuzzer.docDelta);" end def js_set_doc_delta(driver) execute_js driver, "window.Fuzzer.docDelta = window.Fuzzer.cleanup(editor.getDelta());" end def read_deltas_from_file(file) deltas = [] begin File.open("fails/#{file}") do |f| f.readlines.each do |line| deltas << line.chomp! end end return deltas rescue puts "Please provide a valid file name to replay a fuzzer run.".colorize(:red) abort end end def write_deltas_to_file(doc_delta, rand_delta) FileUtils.mkpath('./fails') unless File.directory?('./fails') file_path = "./fails/#{Time.now.to_i.to_s}" File.open(file_path, 'w+') do |f| f.puts doc_delta f.puts rand_delta end puts "Fuzzer failed. 
Writing state to #{file_path} for replays.".colorize(:red) end def delete_fail_file(file_name) begin FileUtils.rm("./fails/#{file_name}") rescue puts "Failed deleting file #{file_name}. Please ensure it still exists.".colorize(:red) abort end end def check_consistency(driver, replay_file) driver.switch_to.default_content success = driver.execute_script "return window.Fuzzer.checkConsistency();" if not success doc_delta = js_get_as_str(driver, "docDelta") rand_delta = js_get_as_str(driver, "randomDelta") actual_delta = js_get_cur_doc_delta_as_str(driver) write_deltas_to_file(doc_delta, rand_delta) unless replay_file puts "Inconsistent deltas:".red puts "doc_delta: #{doc_delta}, rand_delta: #{rand_delta}, actual: #{actual_delta}" abort elsif replay_file highline = HighLine.new delete = highline.agree "Congrats, it passed! Would you like to delete the fail file? (y/n)".colorize(:green) delete_fail_file(replay_file) if delete end driver.switch_to.frame(driver.find_element(:tag_name, "iframe")) end ################################################################################ # WebDriver setup ################################################################################ unless ARGV.length == 2 or ARGV.length == 3 puts "Usage: ruby _browserdriver_ _editor_url_ _fuzzer_file_".colorize(:blue) end browserdriver = ARGV[0].to_sym editor_url = ARGV[1] replay_file = ARGV[2] if browserdriver == :firefox profile = Selenium::WebDriver::Firefox::Profile.new profile.native_events = true driver = Selenium::WebDriver.for browserdriver, :profile => profile else driver = Selenium::WebDriver.for browserdriver end driver.manage.timeouts.implicit_wait = 10 driver.get editor_url driver.switch_to.frame(driver.find_element(:tag_name, "iframe")) editor = driver.find_element(:class, "editor") adapter = SeleniumAdapter.new driver, editor adapter.focus() ################################################################################ # Fuzzer logic 
################################################################################ def initialize_scribe_from_replay_file(replay_file, driver, adapter, editor) doc_delta, rand_delta = read_deltas_from_file(replay_file) js_set_delta_replay(driver, doc_delta, 'docDelta') js_set_delta_replay(driver, rand_delta, 'randomDelta') doc_delta = js_get(driver, "docDelta") js_set_scribe_delta(driver) # Remove inexplicable highlighting that gets applied when setting delta and # reset cursor to 0th position editor.click() adapter.cursor_pos = doc_delta['endLength'] adapter.move_cursor 0 adapter.doc_length = doc_delta['endLength'] end if replay_file initialize_scribe_from_replay_file(replay_file, driver, adapter, editor) random_delta = js_get(driver, "randomDelta") adapter.apply_delta(random_delta) check_consistency(driver, replay_file) else js_set_doc_delta(driver) NUM_EDITS.times do |i| js_set_random_delta(driver) random_delta = js_get(driver, "randomDelta") puts i.to_s.colorize(:green) if i % 10 == 0 adapter.apply_delta(random_delta) check_consistency(driver, nil) js_set_doc_delta(driver) end end
OpWebsite::Application.routes.draw do resources :sessions, :only => [:index, :show] do get 'votings' end resources :votings, :only => [] do get 'by_name' end resources :members, :only => [:index, :show] root :to => "application#home" # The priority is based upon order of creation: first created -> highest priority. # See how all your routes lay out with "rake routes". # You can have the root of your site routed with "root" # root 'welcome#index' # Example of regular route: # get 'products/:id' => 'catalog#view' # Example of named route that can be invoked with purchase_url(id: product.id) # get 'products/:id/purchase' => 'catalog#purchase', as: :purchase # Example resource route (maps HTTP verbs to controller actions automatically): # resources :products # Example resource route with options: # resources :products do # member do # get 'short' # post 'toggle' # end # # collection do # get 'sold' # end # end # Example resource route with sub-resources: # resources :products do # resources :comments, :sales # resource :seller # end # Example resource route with more complex sub-resources: # resources :products do # resources :comments # resources :sales do # get 'recent', on: :collection # end # end # Example resource route with concerns: # concern :toggleable do # post 'toggle' # end # resources :posts, concerns: :toggleable # resources :photos, concerns: :toggleable # Example resource route within a namespace: # namespace :admin do # # Directs /admin/products/* to Admin::ProductsController # # (app/controllers/admin/products_controller.rb) # resources :products # end end Changed root action. OpWebsite::Application.routes.draw do resources :sessions, :only => [:index, :show] do get 'votings' end resources :votings, :only => [] do get 'by_name' end resources :members, :only => [:index, :show] root :to => "members#index" # The priority is based upon order of creation: first created -> highest priority. # See how all your routes lay out with "rake routes". 
# You can have the root of your site routed with "root" # root 'welcome#index' # Example of regular route: # get 'products/:id' => 'catalog#view' # Example of named route that can be invoked with purchase_url(id: product.id) # get 'products/:id/purchase' => 'catalog#purchase', as: :purchase # Example resource route (maps HTTP verbs to controller actions automatically): # resources :products # Example resource route with options: # resources :products do # member do # get 'short' # post 'toggle' # end # # collection do # get 'sold' # end # end # Example resource route with sub-resources: # resources :products do # resources :comments, :sales # resource :seller # end # Example resource route with more complex sub-resources: # resources :products do # resources :comments # resources :sales do # get 'recent', on: :collection # end # end # Example resource route with concerns: # concern :toggleable do # post 'toggle' # end # resources :posts, concerns: :toggleable # resources :photos, concerns: :toggleable # Example resource route within a namespace: # namespace :admin do # # Directs /admin/products/* to Admin::ProductsController # # (app/controllers/admin/products_controller.rb) # resources :products # end end
service :campfire do |data, payload| repository = payload['repository']['name'] branch = payload['ref'].split('/').last commits = payload['commits'] campfire = Tinder::Campfire.new(data['subdomain'], :ssl => data['ssl'].to_i == 1) throw(:halt, 400) unless campfire && campfire.login(data['email'], data['password']) throw(:halt, 400) unless room = campfire.find_room_by_name(data['room']) if commits.size > 5 commit = commits[payload['after']] room.speak "[#{repository}/#{branch}] #{commit['message']} (+#{commits.size - 1} more commits...) - #{commit['author']['name']} (#{commit['url']})" else commits.each do |commit| room.speak "[#{repository}/#{branch}] #{commit['message']} - #{commit['author']['name']} (#{commit['url']})" end end room.leave campfire.logout end after is a sha, not a commit service :campfire do |data, payload| repository = payload['repository']['name'] branch = payload['ref'].split('/').last commits = payload['commits'] campfire = Tinder::Campfire.new(data['subdomain'], :ssl => data['ssl'].to_i == 1) throw(:halt, 400) unless campfire && campfire.login(data['email'], data['password']) throw(:halt, 400) unless room = campfire.find_room_by_name(data['room']) if commits.size > 5 commit = commits.last room.speak "[#{repository}/#{branch}] #{commit['message']} (+#{commits.size - 1} more commits...) - #{commit['author']['name']} (#{commit['url']})" else commits.each do |commit| room.speak "[#{repository}/#{branch}] #{commit['message']} - #{commit['author']['name']} (#{commit['url']})" end end room.leave campfire.logout end
class Service::Campfire < Service class << self attr_accessor :campfire_class end self.campfire_class = Tinder::Campfire string :subdomain, :room, :token boolean :master_only, :play_sound, :long_url def receive_push url = data['long_url'].to_i == 1 ? summary_url : shorten_url(summary_url) messages = [] messages << "#{summary_message}: #{url}" messages += commit_messages.first(8) if messages.first =~ /pushed 1 new commit/ messages.shift # drop summary message messages.first << " ( #{distinct_commits.first['url']} )" end send_messages messages end def receive_pull_request send_messages summary_message if opened? end alias receive_issues receive_pull_request def send_messages(messages) raise_config_error 'Missing campfire token' if data['token'].to_s.empty? return if data['master_only'].to_i == 1 and branch_name != 'master' play_sound = data['play_sound'].to_i == 1 unless room = find_room raise_config_error 'No such campfire room' end Array(messages).each { |line| room.speak line } room.play "rimshot" if play_sound && room.respond_to?(:play) rescue OpenSSL::SSL::SSLError => boom raise_config_error "SSL Error: #{boom}" rescue Tinder::AuthenticationFailed => boom raise_config_error "Authentication Error: #{boom}" rescue Faraday::Error::ConnectionFailed raise_config_error "Connection refused- invalid campfire subdomain." 
end attr_writer :campfire def campfire @campfire ||= self.class.campfire_class.new(campfire_domain, :ssl => true, :token => data['token']) end def campfire_domain data['subdomain'].to_s.sub /\.campfirenow\.com$/i, '' end def find_room room = campfire.find_room_by_name(data['room']) rescue StandardError end end Campfire hooks listen for push/issues/pull_request events class Service::Campfire < Service class << self attr_accessor :campfire_class end self.campfire_class = Tinder::Campfire string :subdomain, :room, :token boolean :master_only, :play_sound, :long_url default_events :push, :pull_request, :issues def receive_push url = data['long_url'].to_i == 1 ? summary_url : shorten_url(summary_url) messages = [] messages << "#{summary_message}: #{url}" messages += commit_messages.first(8) if messages.first =~ /pushed 1 new commit/ messages.shift # drop summary message messages.first << " ( #{distinct_commits.first['url']} )" end send_messages messages end def receive_pull_request send_messages summary_message if opened? end alias receive_issues receive_pull_request def send_messages(messages) raise_config_error 'Missing campfire token' if data['token'].to_s.empty? return if data['master_only'].to_i == 1 and branch_name != 'master' play_sound = data['play_sound'].to_i == 1 unless room = find_room raise_config_error 'No such campfire room' end Array(messages).each { |line| room.speak line } room.play "rimshot" if play_sound && room.respond_to?(:play) rescue OpenSSL::SSL::SSLError => boom raise_config_error "SSL Error: #{boom}" rescue Tinder::AuthenticationFailed => boom raise_config_error "Authentication Error: #{boom}" rescue Faraday::Error::ConnectionFailed raise_config_error "Connection refused- invalid campfire subdomain." 
end attr_writer :campfire def campfire @campfire ||= self.class.campfire_class.new(campfire_domain, :ssl => true, :token => data['token']) end def campfire_domain data['subdomain'].to_s.sub /\.campfirenow\.com$/i, '' end def find_room room = campfire.find_room_by_name(data['room']) rescue StandardError end end
require "test_helper" =begin Here we test: - Typus::Orm::ActiveRecord::AdminUserV1 =end class TypusUserTest < ActiveSupport::TestCase test "validate email" do assert FactoryGirl.build(:typus_user, :email => "dong").invalid? assert FactoryGirl.build(:typus_user, :email => "john@example.com").valid? assert FactoryGirl.build(:typus_user, :email => nil).invalid? end test "validate :role" do assert FactoryGirl.build(:typus_user, :role => nil).invalid? end test "validate :password" do assert FactoryGirl.build(:typus_user, :password => "0"*5).invalid? assert FactoryGirl.build(:typus_user, :password => "0"*6).valid? assert FactoryGirl.build(:typus_user, :password => "0"*40).valid? assert FactoryGirl.build(:typus_user, :password => "0"*41).invalid? end test "status is protected from mass_assignment" do assert TypusUser.attr_protected[:default].include?(:status) end test "fields" do expected = %w(id first_name last_name email role status salt crypted_password token preferences created_at updated_at).sort output = TypusUser.columns.map(&:name).sort assert_equal expected, output end test "generate" do assert !TypusUser.generate options = { :email => FactoryGirl.build(:typus_user).email } typus_user = TypusUser.generate(options) assert_equal options[:email], typus_user.email typus_user_factory = FactoryGirl.build(:typus_user) options = { :email => typus_user_factory.email, :password => typus_user_factory.password } typus_user = TypusUser.generate(options) assert_equal options[:email], typus_user.email typus_user_factory = FactoryGirl.build(:typus_user) options = { :email => typus_user_factory.email, :role => typus_user_factory.role } typus_user = TypusUser.generate(options) assert_equal options[:email], typus_user.email assert_equal options[:role], typus_user.role end context "TypusUser" do setup do @typus_user = FactoryGirl.create(:typus_user) end should "verify salt never changes" do expected = @typus_user.salt @typus_user.update_attributes(:password => '11111111', 
:password_confirmation => '11111111') assert_equal expected, @typus_user.salt end should "verify authenticated? returns true or false" do assert @typus_user.authenticated?('12345678') assert !@typus_user.authenticated?('87654321') end should "verify preferences are nil by default" do assert @typus_user.preferences.nil? end should "return default_locale when no preferences are set" do assert @typus_user.locale.eql?(:en) end should "be able to set a locale" do @typus_user.locale = :jp expected = {:locale => :jp} assert_equal expected, @typus_user.preferences assert @typus_user.locale.eql?(:jp) end should "be able to set preferences" do @typus_user.preferences = {:chunky => "bacon"} assert @typus_user.preferences.present? end should "set locale preference without overriding previously set preferences" do @typus_user.preferences = {:chunky => "bacon"} @typus_user.locale = :jp expected = {:locale => :jp, :chunky => "bacon"} assert_equal expected, @typus_user.preferences end end test "to_label" do user = FactoryGirl.build(:typus_user) assert_equal user.email, user.to_label user = FactoryGirl.build(:typus_user, :first_name => "John") assert_equal "John", user.to_label user = FactoryGirl.build(:typus_user, :last_name => "Locke") assert_equal "Locke", user.to_label user = FactoryGirl.build(:typus_user, :first_name => "John", :last_name => "Locke") assert_equal "John Locke", user.to_label end test "admin gets a list of all applications" do typus_user = FactoryGirl.build(:typus_user) assert_equal Typus.applications, typus_user.applications end =begin # TODO: Decide if we want this test ... 
test "admin gets a list of application resources for crud extended application" do typus_user = FactoryGirl.build(:typus_user) expected = %w(Asset Case Comment EntryDefault Page Post Article::Entry ReadOnlyEntry).sort assert_equal expected, typus_user.application("CRUD Extended").sort end =end test "admin gets a list of application resources for Admin application" do typus_user = FactoryGirl.build(:typus_user) expected = %w(AdminUser TypusUser DeviseUser).sort assert_equal expected, typus_user.application("Admin").sort end test "editor get a list of all applications" do typus_user = FactoryGirl.build(:typus_user, :role => "editor") expected = ["Admin", "CRUD Extended"] expected.each { |e| assert Typus.applications.include?(e) } end test "editor gets a list of application resources" do typus_user = FactoryGirl.build(:typus_user, :role => "editor") assert_equal %w(Comment Post), typus_user.application("CRUD Extended") assert typus_user.application("Admin").empty? end test "user owns a resource" do typus_user = FactoryGirl.build(:typus_user) resource = FactoryGirl.build(:post, :typus_user => typus_user) assert typus_user.owns?(resource) end test "user does not own a resource" do typus_user = FactoryGirl.create(:typus_user) resource = FactoryGirl.create(:post, :typus_user => FactoryGirl.create(:typus_user)) assert !typus_user.owns?(resource) end test "token changes everytime we save the user" do admin_user = FactoryGirl.create(:typus_user) first_token = admin_user.token admin_user.save second_token = admin_user.token assert !first_token.eql?(second_token) end end Removed old test. require "test_helper" =begin Here we test: - Typus::Orm::ActiveRecord::AdminUserV1 =end class TypusUserTest < ActiveSupport::TestCase test "validate email" do assert FactoryGirl.build(:typus_user, :email => "dong").invalid? assert FactoryGirl.build(:typus_user, :email => "john@example.com").valid? assert FactoryGirl.build(:typus_user, :email => nil).invalid? 
end test "validate :role" do assert FactoryGirl.build(:typus_user, :role => nil).invalid? end test "validate :password" do assert FactoryGirl.build(:typus_user, :password => "0"*5).invalid? assert FactoryGirl.build(:typus_user, :password => "0"*6).valid? assert FactoryGirl.build(:typus_user, :password => "0"*40).valid? assert FactoryGirl.build(:typus_user, :password => "0"*41).invalid? end test "status is protected from mass_assignment" do assert TypusUser.attr_protected[:default].include?(:status) end test "fields" do expected = %w(id first_name last_name email role status salt crypted_password token preferences created_at updated_at).sort output = TypusUser.columns.map(&:name).sort assert_equal expected, output end test "generate" do assert !TypusUser.generate options = { :email => FactoryGirl.build(:typus_user).email } typus_user = TypusUser.generate(options) assert_equal options[:email], typus_user.email typus_user_factory = FactoryGirl.build(:typus_user) options = { :email => typus_user_factory.email, :password => typus_user_factory.password } typus_user = TypusUser.generate(options) assert_equal options[:email], typus_user.email typus_user_factory = FactoryGirl.build(:typus_user) options = { :email => typus_user_factory.email, :role => typus_user_factory.role } typus_user = TypusUser.generate(options) assert_equal options[:email], typus_user.email assert_equal options[:role], typus_user.role end context "TypusUser" do setup do @typus_user = FactoryGirl.create(:typus_user) end should "verify salt never changes" do expected = @typus_user.salt @typus_user.update_attributes(:password => '11111111', :password_confirmation => '11111111') assert_equal expected, @typus_user.salt end should "verify authenticated? returns true or false" do assert @typus_user.authenticated?('12345678') assert !@typus_user.authenticated?('87654321') end should "verify preferences are nil by default" do assert @typus_user.preferences.nil? 
end should "return default_locale when no preferences are set" do assert @typus_user.locale.eql?(:en) end should "be able to set a locale" do @typus_user.locale = :jp expected = {:locale => :jp} assert_equal expected, @typus_user.preferences assert @typus_user.locale.eql?(:jp) end should "be able to set preferences" do @typus_user.preferences = {:chunky => "bacon"} assert @typus_user.preferences.present? end should "set locale preference without overriding previously set preferences" do @typus_user.preferences = {:chunky => "bacon"} @typus_user.locale = :jp expected = {:locale => :jp, :chunky => "bacon"} assert_equal expected, @typus_user.preferences end end test "to_label" do user = FactoryGirl.build(:typus_user) assert_equal user.email, user.to_label user = FactoryGirl.build(:typus_user, :first_name => "John") assert_equal "John", user.to_label user = FactoryGirl.build(:typus_user, :last_name => "Locke") assert_equal "Locke", user.to_label user = FactoryGirl.build(:typus_user, :first_name => "John", :last_name => "Locke") assert_equal "John Locke", user.to_label end test "admin gets a list of all applications" do typus_user = FactoryGirl.build(:typus_user) assert_equal Typus.applications, typus_user.applications end test "admin gets a list of application resources for Admin application" do typus_user = FactoryGirl.build(:typus_user) expected = %w(AdminUser TypusUser DeviseUser).sort assert_equal expected, typus_user.application("Admin").sort end test "editor get a list of all applications" do typus_user = FactoryGirl.build(:typus_user, :role => "editor") expected = ["Admin", "CRUD Extended"] expected.each { |e| assert Typus.applications.include?(e) } end test "editor gets a list of application resources" do typus_user = FactoryGirl.build(:typus_user, :role => "editor") assert_equal %w(Comment Post), typus_user.application("CRUD Extended") assert typus_user.application("Admin").empty? 
end test "user owns a resource" do typus_user = FactoryGirl.build(:typus_user) resource = FactoryGirl.build(:post, :typus_user => typus_user) assert typus_user.owns?(resource) end test "user does not own a resource" do typus_user = FactoryGirl.create(:typus_user) resource = FactoryGirl.create(:post, :typus_user => FactoryGirl.create(:typus_user)) assert !typus_user.owns?(resource) end test "token changes everytime we save the user" do admin_user = FactoryGirl.create(:typus_user) first_token = admin_user.token admin_user.save second_token = admin_user.token assert !first_token.eql?(second_token) end end
Pod::Spec.new do |s| s.platform = :ios s.ios.deployment_target = '7.0' s.name = "AutoLayoutCells" s.version = "0.4.2" s.summary = "AutoLayoutCells makes working with dynamic table view cells easy." s.homepage = "https://github.com/JRG-Developer/AutoLayoutCells" s.license = { :type => "MIT", :file => "LICENSE" } s.author = { "Joshua Greene" => "jrg.developer@gmail.com" } s.source = { :git => "https://github.com/JRG-Developer/AutoLayoutCells.git", :tag => "#{s.version}"} s.requires_arc = true s.framework = "UIKit" s.subspec 'SharedCategories' do |ss| ss.source_files = "AutoLayoutCells/SharedCategories/*{h,m}" end s.subspec 'TableViewCells' do |ss| ss.dependency 'AutoLayoutCells/SharedCategories' ss.dependency 'ALLabel', '~> 1.0' ss.dependency 'AutoLayoutTextViews', '~> 1.0' ss.resource_bundles = {'ALTableViewCellsBundle' => ['AutoLayoutCells/TableViewCells/ResourcesBundle/*']} ss.source_files = "AutoLayoutCells/TableViewCells/*.{h,m}" end end Updated podspec Pod::Spec.new do |s| s.platform = :ios s.ios.deployment_target = '7.0' s.name = "AutoLayoutCells" s.version = "0.4.3" s.summary = "AutoLayoutCells makes working with dynamic table view cells easy." s.homepage = "https://github.com/JRG-Developer/AutoLayoutCells" s.license = { :type => "MIT", :file => "LICENSE" } s.author = { "Joshua Greene" => "jrg.developer@gmail.com" } s.source = { :git => "https://github.com/JRG-Developer/AutoLayoutCells.git", :tag => "#{s.version}"} s.requires_arc = true s.framework = "UIKit" s.subspec 'SharedCategories' do |ss| ss.source_files = "AutoLayoutCells/SharedCategories/*{h,m}" end s.subspec 'TableViewCells' do |ss| ss.dependency 'AutoLayoutCells/SharedCategories' ss.dependency 'ALLabel', '~> 1.0' ss.dependency 'AutoLayoutTextViews', '~> 1.0' ss.resource_bundles = {'ALTableViewCellsBundle' => ['AutoLayoutCells/TableViewCells/ResourcesBundle/*']} ss.source_files = "AutoLayoutCells/TableViewCells/*.{h,m}" end end
Pod::Spec.new do |s| name = "BETOAuth2Client" url = "https://github.com/screeninteraction/#{name}" git_url = "#{url}.git" version = "1.0.2-beta" source_files = "#{name}/**/*.{h,m}" s.name = name s.version = version s.summary = "Lighweight and easy to use OAuth 2 Client based on NSURLSession" s.description = <<-DESC OAuth 2 client for the Cocoa platform (iOS and Mac OS X) * Authenticate through OAuth 2 * Light weight * Allows for blocks or delegates * Offers archivable credentials * Easy to read implementation and interface DESC s.homepage = url s.license = 'MIT' s.author = { "Screen Interaction" => "contact@screeninteraction.com" } s.source = { :git => git_url, :tag => version} s.ios.deployment_target = '7.0' s.osx.deployment_target = '10.9' s.dependency 'BETURLSession', '~> 1.0' s.source_files = source_files s.requires_arc = true end beta1 Pod::Spec.new do |s| name = "BETOAuth2Client" url = "https://github.com/screeninteraction/#{name}" git_url = "#{url}.git" version = "1.0.2-beta1" source_files = "#{name}/**/*.{h,m}" s.name = name s.version = version s.summary = "Lighweight and easy to use OAuth 2 Client based on NSURLSession" s.description = <<-DESC OAuth 2 client for the Cocoa platform (iOS and Mac OS X) * Authenticate through OAuth 2 * Light weight * Allows for blocks or delegates * Offers archivable credentials * Easy to read implementation and interface DESC s.homepage = url s.license = 'MIT' s.author = { "Screen Interaction" => "contact@screeninteraction.com" } s.source = { :git => git_url, :tag => version} s.ios.deployment_target = '7.0' s.osx.deployment_target = '10.9' s.dependency 'BETURLSession', '~> 1.0' s.source_files = source_files s.requires_arc = true end
change BLRippleProcess.podspec # # Be sure to run `pod spec lint BLRippleProcess.podspec' to ensure this is a # valid spec and to remove all comments including this before submitting the spec. # # To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html # To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/ # Pod::Spec.new do |s| # ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # These will help people to find your library, and whilst it # can feel like a chore to fill in it's definitely to your advantage. The # summary should be tweet-length, and the description more in depth. # s.name = "BLRippleProcess" s.version = "0.0.1" s.summary = "A short description of BLRippleProcess." # This description is used to generate tags and improve search results. # * Think: What does it do? Why did you write it? What is the focus? # * Try to keep it short, snappy and to the point. # * Write the description between the DESC delimiters below. # * Finally, don't worry about the indent, CocoaPods strips it! s.description = <<-DESC DESC s.homepage = "https://github.com/qinhubao/BLRippleProcess" # s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif" # ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Licensing your code is important. See http://choosealicense.com for more info. # CocoaPods will detect a license file if there is a named LICENSE* # Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'. # s.license = "MIT" # s.license = { :type => "MIT", :file => "FILE_LICENSE" } # ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Specify the authors of the library, with email addresses. Email addresses # of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also # accepts just a name if you'd rather not provide an email address. 
# # Specify a social_media_url where others can refer to, for example a twitter # profile URL. # s.author = { "qinhubao" => "qiny1010@163.com" } # Or just: s.author = "qinhubao" # s.authors = { "qinhubao" => "qiny1010@163.com" } # s.social_media_url = "http://twitter.com/qinhubao" # ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # If this Pod runs only on iOS or OS X, then specify the platform and # the deployment target. You can optionally include the target after the platform. # # s.platform = :ios s.platform = :ios, "5.0" # When using multiple platforms # s.ios.deployment_target = "5.0" # s.osx.deployment_target = "10.7" # s.watchos.deployment_target = "2.0" # s.tvos.deployment_target = "9.0" # ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Specify the location from where the source should be retrieved. # Supports git, hg, bzr, svn and HTTP. # s.source = { :git => "https://github.com/qinhubao/BLRippleProcess.git", :tag => "0.0.1" } # ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # CocoaPods is smart about how it includes source code. For source files # giving a folder will include any swift, h, m, mm, c & cpp files. # For header files it will include any header in the folder. # Not including the public_header_files will make all headers public. # s.source_files = "GradientProgress", "GradientProgress/*.{h,m}" s.public_header_files = "GradientProgress/*.h" # ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # A list of resources included with the Pod. These are copied into the # target bundle with a build phase script. Anything else will be cleaned. # You can preserve files from being cleaned, please don't preserve # non-essential files like tests, examples and documentation. 
# # s.resource = "icon.png" # s.resources = "Resources/*.png" # s.preserve_paths = "FilesToSave", "MoreFilesToSave" # ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Link your library with frameworks, or libraries. Libraries do not include # the lib prefix of their name. # # s.framework = "SomeFramework" # s.frameworks = "SomeFramework", "AnotherFramework" # s.library = "iconv" # s.libraries = "iconv", "xml2" # ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # If your library depends on compiler flags you can set them in the xcconfig hash # where they will only apply to your library. If you depend on other Podspecs # you can include multiple dependencies to ensure it works. s.requires_arc = true # s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" } # s.dependency "JSONKit", "~> 1.4" end
Adds example method calls on string # Examples of methods called on strings s = "Hello world" puts(s.upcase) # all characters upper puts(s.capitalize) puts(s.capitalize!) # does not return anything so output will not change # only in consecutive call this change will be visible puts(s.reverse) puts(s.swapcase) puts(s.downcase) puts(s.length) puts("Splitted into array:") puts(s.split) # split into array
Pod::Spec.new do |spec| #Information spec.name = 'UIViewController+StoreKit' spec.version = '1.0.6' spec.license = { :type => 'MIT', :file => 'LICENSE' } spec.homepage = 'https://github.com/mergesort/UIViewController-StoreKit' spec.author = { 'Joe Fabisevich' => 'github@fabisevi.ch' } spec.summary = 'A category on UIViewController allowing you to pull up an iTunes item with just one method.' spec.source = { :git => 'https://github.com/mergesort/UIViewController-StoreKit.git', :tag => "#{spec.version}" } spec.source_files = '*.{h,m}' spec.framework = 'Foundation' spec.requires_arc = true spec.social_media_url = 'https://twitter.com/mergesort' spec.ios.deployment_target = '7.0' #Depdencies spec.dependency 'NSString+Validation' end Updating podspec Pod::Spec.new do |spec| #Information spec.name = 'UIViewController+StoreKit' spec.version = '1.0.7' spec.license = { :type => 'MIT', :file => 'LICENSE' } spec.homepage = 'https://github.com/mergesort/UIViewController-StoreKit' spec.author = { 'Joe Fabisevich' => 'github@fabisevi.ch' } spec.summary = 'A category on UIViewController allowing you to pull up an iTunes item with just one method.' spec.source = { :git => 'https://github.com/mergesort/UIViewController-StoreKit.git', :tag => "#{spec.version}" } spec.source_files = '*.{h,m}' spec.framework = 'Foundation' spec.requires_arc = true spec.social_media_url = 'https://twitter.com/mergesort' spec.ios.deployment_target = '7.0' #Depdencies spec.dependency 'NSString+Validation' end
require File.dirname(__FILE__) + '/../spec_helper' describe Netzke::ActiveRecord::RelationExtensions do it "should accept different options in extend_with" do # Preparations 10.times do |i| Factory(:user, :first_name => "First Name #{i}", :role_id => i) end User.scope(:role_id_gt_7, User.where(["role_id > ?", 7])) User.scope(:role_id_gt, lambda{ |param| User.where(["role_id > ?", param]) }) # Tests User.where({}).extend_with(["role_id >= ?", 5]).count.should == 5 User.where({}).extend_with(:role_id_gt_7).count.should == 2 User.where({}).extend_with(:role_id_gt, 2).count.should == 7 User.where({}).extend_with([:role_id_gt, 3]).count.should == 6 User.where({}).extend_with(:role_id => 5).first.first_name.should == "First Name 5" User.where(["role_id < ?", 7]).extend_with(lambda{ |relation| relation.where(["role_id > ?", 4]) }).count.should == 2 # User.where({}).extend_with("select * from users where role_id > 6").all.size.should == 3 end it "should be extendable with extend_with_netzke_conditions" do # Preparations roles = [Factory(:role, :name => "admin"), Factory(:role, :name => "user"), Factory(:role, :name => "superadmin")] # 3 users of each role 9.times do |i| Factory(:user, :role_id => roles[i%3].id) end # Tests User.where({}).extend_with_netzke_conditions(:role_id__eq => roles.last.id).count.should == 3 User.where({}).extend_with_netzke_conditions(:role__name__eq => "admin").count.should == 3 User.where({}).extend_with_netzke_conditions(:role__name__like => "%admin%").count.should == 6 end end commented out test until i understand what it does require File.dirname(__FILE__) + '/../spec_helper' describe Netzke::ActiveRecord::RelationExtensions do it "should accept different options in extend_with" do # Preparations 10.times do |i| Factory(:user, :first_name => "First Name #{i}", :role_id => i) end User.scope(:role_id_gt_7, User.where(["role_id > ?", 7])) User.scope(:role_id_gt, lambda{ |param| User.where(["role_id > ?", param]) }) # Tests 
User.where({}).extend_with(["role_id >= ?", 5]).count.should == 5 User.where({}).extend_with(:role_id_gt_7).count.should == 2 User.where({}).extend_with(:role_id_gt, 2).count.should == 7 User.where({}).extend_with([:role_id_gt, 3]).count.should == 6 User.where({}).extend_with(:role_id => 5).first.first_name.should == "First Name 5" User.where(["role_id < ?", 7]).extend_with(lambda{ |relation| relation.where(["role_id > ?", 4]) }).count.should == 2 # User.where({}).extend_with("select * from users where role_id > 6").all.size.should == 3 end it "should be extendable with extend_with_netzke_conditions" do # Preparations roles = [Factory(:role, :name => "admin"), Factory(:role, :name => "user"), Factory(:role, :name => "superadmin")] # 3 users of each role 9.times do |i| Factory(:user, :role_id => roles[i%3].id) end # Tests # User.where({}).extend_with_netzke_conditions(:role_id__eq => roles.last.id).count.should == 3 # User.where({}).extend_with_netzke_conditions(:role_name__eq => "admin").count.should == 3 # User.where({}).extend_with_netzke_conditions(:role__name__like => "%admin%").count.should == 6 end end
require "albacore" require "release/filesystem" task :default => [:unitTests] desc "Inits the build" task :initBuild do FileSystem.EnsurePath("reports") FileSystem.DeleteDirectory("deploy") FileSystem.EnsurePath("deploy/package/lib") end desc "Generate assembly info." assemblyinfo :assemblyInfo => :initBuild do |asm| asm.version = ENV["GO_PIPELINE_LABEL"] + ".0" asm.company_name = "Ultraviolet Catastrophe" asm.product_name = "Gribble" asm.title = "Gribble" asm.description = "Gribble ORM" asm.copyright = "Copyright (c) 2010 Ultraviolet Catastrophe" asm.output_file = "src/Gribble/Properties/AssemblyInfo.cs" end desc "Builds the library." msbuild :buildLibrary => :assemblyInfo do |msb| msb.properties :configuration => :Release msb.targets :Clean, :Build msb.solution = "src/Gribble/Gribble.csproj" end desc "Builds the test project." msbuild :buildTestProject => :buildLibrary do |msb| msb.properties :configuration => :Release msb.targets :Clean, :Build msb.solution = "src/Tests/Tests.csproj" end desc "NUnit Test Runner" nunit :unitTests => :buildTestProject do |nunit| nunit.command = "src/packages/NUnit.2.5.9.10348/Tools/nunit-console.exe" nunit.assemblies "src/Tests/bin/Release/Tests.dll" nunit.options "/xml=reports/TestResult.xml" end desc "Create the nuspec" nuspec :createSpec => :unitTests do |nuspec| nuspec.id = "gribble" nuspec.version = ENV["GO_PIPELINE_LABEL"] nuspec.authors = "Mike O'Brien" nuspec.owners = "Mike O'Brien" nuspec.description = "Gribble is a simple, Linq enabled ORM designed to work with dynamically created tables." nuspec.summary = "Gribble is a simple, Linq enabled ORM designed to work with dynamically created tables." 
nuspec.language = "en-US" nuspec.licenseUrl = "https://github.com/mikeobrien/Gribble/blob/master/LICENSE" nuspec.projectUrl = "https://github.com/mikeobrien/Gribble" nuspec.working_directory = "deploy/package" nuspec.output_file = "gribble.nuspec" nuspec.tags = "orm sql" end desc "Push the package to the Nuget server" task :prepPackage => :createSpec do FileSystem.CopyFiles("src/Gribble/bin/Release/Gribble.dll", "deploy/package/lib") FileSystem.CopyFiles("src/Gribble/bin/Release/Gribble.pdb", "deploy/package/lib") end desc "Create the nuget package" nugetpack :createPackage => :prepPackage do |nugetpack| nugetpack.nuspec = "gribble.nuspec" nugetpack.base_folder = "deploy/package" nugetpack.output = "deploy/gribble.nupkg" nugetpack.log_level = :verbose end desc "Push the package to the Nuget server" task :pushPackage => :createPackage do end desc "Tag the current release" task :tagRelease do #result = system("git", "tag", "-a", "v#{ENV['GO_PIPELINE_LABEL']}", "-m", "release-v#{ENV['GO_PIPELINE_LABEL']}") #result = system("git", "push", "--tags") end Adding nuget package creation to the build script. require "albacore" require "release/filesystem" task :default => [:unitTests] desc "Inits the build" task :initBuild do FileSystem.EnsurePath("reports") FileSystem.DeleteDirectory("deploy") FileSystem.EnsurePath("deploy/package/lib") end desc "Generate assembly info." assemblyinfo :assemblyInfo => :initBuild do |asm| asm.version = ENV["GO_PIPELINE_LABEL"] + ".0" asm.company_name = "Ultraviolet Catastrophe" asm.product_name = "Gribble" asm.title = "Gribble" asm.description = "Gribble ORM" asm.copyright = "Copyright (c) 2010 Ultraviolet Catastrophe" asm.output_file = "src/Gribble/Properties/AssemblyInfo.cs" end desc "Builds the library." msbuild :buildLibrary => :assemblyInfo do |msb| msb.properties :configuration => :Release msb.targets :Clean, :Build msb.solution = "src/Gribble/Gribble.csproj" end desc "Builds the test project." 
msbuild :buildTestProject => :buildLibrary do |msb| msb.properties :configuration => :Release msb.targets :Clean, :Build msb.solution = "src/Tests/Tests.csproj" end desc "NUnit Test Runner" nunit :unitTests => :buildTestProject do |nunit| nunit.command = "src/packages/NUnit.2.5.9.10348/Tools/nunit-console.exe" nunit.assemblies "src/Tests/bin/Release/Tests.dll" nunit.options "/xml=reports/TestResult.xml" end desc "Create the nuspec" nuspec :createSpec => :unitTests do |nuspec| nuspec.id = "gribble" nuspec.version = ENV["GO_PIPELINE_LABEL"] nuspec.authors = "Mike O'Brien" nuspec.owners = "Mike O'Brien" nuspec.description = "Gribble is a simple, Linq enabled ORM designed to work with dynamically created tables." nuspec.summary = "Gribble is a simple, Linq enabled ORM designed to work with dynamically created tables." nuspec.language = "en-US" nuspec.licenseUrl = "https://github.com/mikeobrien/Gribble/blob/master/LICENSE" nuspec.projectUrl = "https://github.com/mikeobrien/Gribble" nuspec.working_directory = "deploy/package" nuspec.output_file = "gribble.nuspec" nuspec.tags = "orm sql" end desc "Push the package to the Nuget server" task :prepPackage => :createSpec do FileSystem.CopyFiles("src/Gribble/bin/Release/Gribble.dll", "deploy/package/lib") FileSystem.CopyFiles("src/Gribble/bin/Release/Gribble.pdb", "deploy/package/lib") end desc "Create the nuget package" nugetpack :createPackage => :prepPackage do |nugetpack| nugetpack.nuspec = "deploy/package/gribble.nuspec" nugetpack.base_folder = "deploy/package" nugetpack.output = "deploy" end desc "Push the package to the Nuget server" task :pushPackage => :createPackage do end desc "Tag the current release" task :tagRelease do #result = system("git", "tag", "-a", "v#{ENV['GO_PIPELINE_LABEL']}", "-m", "release-v#{ENV['GO_PIPELINE_LABEL']}") #result = system("git", "push", "--tags") end