CombinedText stringlengths 4 3.42M |
|---|
require File.expand_path('boot', __dir__)
require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Load .env before the application config so ENV lookups below see it.
Dotenv::Railtie.load

module TPS
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Paris'

    # French-only application: default to :fr and load every locale file
    # under config/locales, nested directories included.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = :fr
    config.i18n.load_path += Dir[Rails.root.join('config', 'locales', '**', '*.{rb,yml}')]
    config.i18n.available_locales = [:fr]

    # FIX: lib/ was only added to autoload_paths, so its constants were never
    # preloaded in production (only eager_load_paths are). Registering the
    # path with eager_load: true autoloads it in development AND eager-loads
    # it in production.
    config.paths.add "#{config.root}/lib", eager_load: true

    # Extra asset lookup paths (non-standard javascript dir, web fonts).
    config.assets.paths << Rails.root.join('app', 'assets', 'javascript')
    config.assets.paths << Rails.root.join('app', 'assets', 'fonts')
    config.assets.precompile += ['.woff']

    # Canonical host used when building absolute URLs; APP_HOST overrides
    # the development default.
    URL = ENV['APP_HOST'] || "http://localhost:3000/"

    # Background jobs run through delayed_job.
    config.active_job.queue_adapter = :delayed_job

    # Allow <u> in sanitized user HTML in addition to the Rails defaults.
    config.action_view.sanitized_allowed_tags = ActionView::Base.sanitized_allowed_tags + ['u']
  end
end
[#835] Extra paths deserve eager loading in production too
require File.expand_path('boot', __dir__)
require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Load .env before the application config so ENV lookups below see it.
Dotenv::Railtie.load

module TPS
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Paris'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # French-only app: load every locale file under config/locales, recursively.
    config.i18n.default_locale = :fr
    config.i18n.load_path += Dir[Rails.root.join('config', 'locales', '**', '*.{rb,yml}')]
    config.i18n.available_locales = [:fr]

    # Register lib/ so it is autoloaded in development AND eager-loaded in
    # production (plain autoload_paths entries are not preloaded).
    config.paths.add "#{config.root}/lib", eager_load: true

    # Extra asset lookup paths (non-standard javascript dir, web fonts).
    config.assets.paths << Rails.root.join('app', 'assets', 'javascript')
    config.assets.paths << Rails.root.join('app', 'assets', 'fonts')
    config.assets.precompile += ['.woff']

    # Canonical host used when building absolute URLs; APP_HOST overrides
    # the development default.
    URL = ENV['APP_HOST'] || "http://localhost:3000/"

    # Background jobs run through delayed_job.
    config.active_job.queue_adapter = :delayed_job

    # Allow <u> in sanitized user HTML in addition to the Rails defaults.
    config.action_view.sanitized_allowed_tags = ActionView::Base.sanitized_allowed_tags + ['u']
  end
end
|
# Be sure to restart your server when you modify this file

# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# NOTE(review): this line IS active (not commented out), so the app defaults
# to production whenever RAILS_ENV is unset -- confirm this is intentional.
ENV['RAILS_ENV'] ||= 'production'

#GDAL_PATH = ENV['GDAL_PATH'] || ""

# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.5' unless defined? RAILS_GEM_VERSION

# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')

Rails::Initializer.run do |config|
  # Settings in config/environments/* take precedence over those specified here.
  # Application configuration should go into files in config/initializers
  # -- all .rb files in that directory are automatically loaded.
  # See Rails::Configuration for more options.

  # Skip frameworks you're not going to use (only works if using vendor/rails).
  # To use Rails without a database, you must remove the Active Record framework
  # config.frameworks -= [ :active_record, :active_resource, :action_mailer ]

  # Only load the plugins named here, in the order given. By default, all plugins
  # in vendor/plugins are loaded in alphabetical order.
  # :all can be used as a placeholder for all plugins not explicitly named
  # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

  # Add additional load paths for your own custom dirs
  # config.load_paths += %W( #{RAILS_ROOT}/extras )

  # Force all environments to use the same logger level
  # (by default production uses :info, the others :debug)
  # config.log_level = :debug

  # Use SQL instead of Active Record's schema dumper when creating the test database.
  # This is necessary if your schema can't be completely dumped by the schema dumper,
  # like if you have constraints or database-specific column types
  # config.active_record.schema_format = :sql

  # Activate observers that should always be running
  # config.active_record.observers = :cacher, :garbage_collector

  # Make Active Record use UTC-base instead of local time
  # config.active_record.default_timezone = :utc

  # UserObserver runs on every User lifecycle event.
  config.active_record.observers = :user_observer
end
Fix for an i18n flash-message internationalisation error where {{count}} placeholders were rendered literally instead of being interpolated
# Be sure to restart your server when you modify this file

# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# NOTE(review): this line IS active (not commented out), so the app defaults
# to production whenever RAILS_ENV is unset -- confirm this is intentional.
ENV['RAILS_ENV'] ||= 'production'

#GDAL_PATH = ENV['GDAL_PATH'] || ""

# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.5' unless defined? RAILS_GEM_VERSION

# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')

Rails::Initializer.run do |config|
  # Settings in config/environments/* take precedence over those specified here.
  # Application configuration should go into files in config/initializers
  # -- all .rb files in that directory are automatically loaded.
  # See Rails::Configuration for more options.

  # Skip frameworks you're not going to use (only works if using vendor/rails).
  # To use Rails without a database, you must remove the Active Record framework
  # config.frameworks -= [ :active_record, :active_resource, :action_mailer ]

  # Only load the plugins named here, in the order given. By default, all plugins
  # in vendor/plugins are loaded in alphabetical order.
  # :all can be used as a placeholder for all plugins not explicitly named
  # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

  # Pin i18n so {{count}} interpolation placeholders in translated flash
  # messages are substituted rather than shown literally.
  config.gem "i18n", :version => "0.4.2"

  # Add additional load paths for your own custom dirs
  # config.load_paths += %W( #{RAILS_ROOT}/extras )

  # Force all environments to use the same logger level
  # (by default production uses :info, the others :debug)
  # config.log_level = :debug

  # Use SQL instead of Active Record's schema dumper when creating the test database.
  # This is necessary if your schema can't be completely dumped by the schema dumper,
  # like if you have constraints or database-specific column types
  # config.active_record.schema_format = :sql

  # Activate observers that should always be running
  # config.active_record.observers = :cacher, :garbage_collector

  # Make Active Record use UTC-base instead of local time
  # config.active_record.default_timezone = :utc

  # UserObserver runs on every User lifecycle event.
  config.active_record.observers = :user_observer
end
|
require_relative 'boot'
require 'rails/all'
require 'net/ping/tcp'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module Temporary
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    config.encoding = "utf-8"

    # Strip invalid UTF-8 byte sequences from requests before anything else
    # in the middleware stack sees them.
    config.middleware.insert 0, Rack::UTF8Sanitizer

    config.time_zone = 'Helsinki'
    config.action_view.embed_authenticity_token_in_remote_forms = true

    # config.active_record.default_timezone = :local
    config.active_record.time_zone_aware_types = [:datetime, :time]

    config.cache_store = :redis_cache_store
    #, "redis://localhost:6379/0/cache", { expires_in: 8.hours }

    # FIX: %w does NOT interpolate, so the literal string
    # '#{config.root}/app/models/ckeditor' was being added to autoload_paths
    # instead of the real directory. %W (capital) performs interpolation.
    config.autoload_paths += %W(#{config.root}/app/models/ckeditor)
  end
end
Switch the Rails cache store from Redis to the in-memory store
require_relative 'boot'
require 'rails/all'
require 'net/ping/tcp'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module Temporary
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    config.encoding = "utf-8"

    # Strip invalid UTF-8 byte sequences from requests before anything else
    # in the middleware stack sees them.
    config.middleware.insert 0, Rack::UTF8Sanitizer

    config.time_zone = 'Helsinki'
    config.action_view.embed_authenticity_token_in_remote_forms = true

    # config.active_record.default_timezone = :local
    config.active_record.time_zone_aware_types = [:datetime, :time]

    # Per-process in-memory cache (the previous Redis store was dropped).
    config.cache_store = :memory_store
    #, "redis://localhost:6379/0/cache", { expires_in: 8.hours }

    # FIX: %w does NOT interpolate, so the literal string
    # '#{config.root}/app/models/ckeditor' was being added to autoload_paths
    # instead of the real directory. %W (capital) performs interpolation.
    config.autoload_paths += %W(#{config.root}/app/models/ckeditor)
  end
end
|
require 'pavlov_helper'
require_relative '../../../app/interactors/kill_object.rb'
require_relative '../../../app/interactors/queries/users_by_ids.rb'
require 'approvals/rspec'

# Specs for the Queries::UsersByIds Pavlov query object.
describe Queries::UsersByIds do
  include PavlovSupport

  before do
    # Replace the real collaborators with stub classes for isolation.
    stub_classes 'User', 'UserFollowingUsers'
  end

  describe '#call' do
    it 'returns the good objects' do
      stub_classes 'GraphUser'
      user = double(id: 'a1', graph_user_id: '10')
      query = described_class.new(user_ids: [0])
      # By default the query looks users up by _id.
      User.stub(:any_in).with(_id: [0]).and_return([user])
      following_info = double(following_count: 3, followers_count: 2)
      UserFollowingUsers
        .stub(:new)
        .with(user.graph_user_id)
        .and_return(following_info)
      # Approval test: the serialized result is compared against a stored fixture.
      verify(format: :json) { query.call.to_json }
    end

    it 'can search by graph_user ids' do
      graph_user_ids = [0, 1]
      stub_classes 'GraphUser'
      user0 = double(graph_user_id: graph_user_ids[0], id: 'a1')
      user1 = double(graph_user_id: graph_user_ids[1], id: 'a2')
      # by: :graph_user_id switches the lookup key from _id to graph_user_id.
      query = described_class.new(user_ids: graph_user_ids, by: :graph_user_id)
      User.stub(:any_in).with(graph_user_id: graph_user_ids).and_return([user0, user1])
      following_info = double(following_count: 3, followers_count: 2)
      UserFollowingUsers.stub(:new).and_return(following_info)
      Pavlov.stub(:query)
      expect(query.call.length).to eq 2
    end
  end
end
Removed unneeded requires
require 'spec_helper'

# Specs for the Queries::UsersByIds Pavlov query object.
describe Queries::UsersByIds do
  include PavlovSupport

  before do
    # Replace the real collaborators with stub classes for isolation.
    stub_classes 'User', 'UserFollowingUsers'
  end

  describe '#call' do
    it 'returns the good objects' do
      stub_classes 'GraphUser'
      user = double(id: 'a1', graph_user_id: '10')
      query = described_class.new(user_ids: [0])
      # By default the query looks users up by _id.
      User.stub(:any_in).with(_id: [0]).and_return([user])
      following_info = double(following_count: 3, followers_count: 2)
      UserFollowingUsers
        .stub(:new)
        .with(user.graph_user_id)
        .and_return(following_info)
      # Approval test: the serialized result is compared against a stored fixture.
      verify(format: :json) { query.call.to_json }
    end

    it 'can search by graph_user ids' do
      graph_user_ids = [0, 1]
      stub_classes 'GraphUser'
      user0 = double(graph_user_id: graph_user_ids[0], id: 'a1')
      user1 = double(graph_user_id: graph_user_ids[1], id: 'a2')
      # by: :graph_user_id switches the lookup key from _id to graph_user_id.
      query = described_class.new(user_ids: graph_user_ids, by: :graph_user_id)
      User.stub(:any_in).with(graph_user_id: graph_user_ids).and_return([user0, user1])
      following_info = double(following_count: 3, followers_count: 2)
      UserFollowingUsers.stub(:new).and_return(following_info)
      Pavlov.stub(:query)
      expect(query.call.length).to eq 2
    end
  end
end
|
require File.expand_path('../boot', __FILE__)

# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "sprockets/railtie"
require "rails/test_unit/railtie"

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
# FIX: Kernel#require takes a single path argument -- the ":group" option is
# Gemfile DSL syntax and made this call raise ArgumentError at boot.
# rails_12factor is harmless outside production, so require it plainly.
require "rails_12factor"

module Mathemaster
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
  end
end
fifth attempt to serve assets on Heroku
require File.expand_path('../boot', __FILE__)

# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "sprockets/railtie"
require "rails/test_unit/railtie"

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
# rails_12factor enables Heroku-style logging/static-asset serving; required
# unconditionally because require does not accept Gemfile-style :group options.
#require "rails_12factor", :group => :production
require 'rails_12factor'

module Mathemaster
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
  end
end
|
# Set up gems listed in the Gemfile.
# See: http://gembundler.com/bundler_setup.html
# http://stackoverflow.com/questions/7243486/why-do-you-need-require-bundler-setup
ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__)
# FIX: File.exists? is deprecated (removed in Ruby 3.2); File.exist? is the
# supported spelling.
require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE'])

# Require gems we care about
require 'rubygems'
require 'uri'
require 'pathname'
require 'pg'
require 'active_record'
require 'logger'
require 'sinatra'
require "sinatra/reloader" if development?
require 'erb'
require 'pry'
require 'bcrypt'
# FIX: faker was required twice; the duplicate (a no-op, since require is
# idempotent) has been removed.
require 'faker'

# Some helper constants for path-centric logic
APP_ROOT = Pathname.new(File.expand_path('../../', __FILE__))
APP_NAME = APP_ROOT.basename.to_s

configure do
  # By default, Sinatra assumes that the root is the file that calls the configure block.
  # Since this is not the case for us, we set it manually.
  set :root, APP_ROOT.to_path

  # See: http://www.sinatrarb.com/faq.html#sessions
  enable :sessions
  # NOTE(review): the fallback secret is checked into source -- acceptable for
  # development only; SESSION_SECRET must be set in production.
  set :session_secret, ENV['SESSION_SECRET'] || 'this is a secret shhhhh'

  # Point Sinatra at app/views instead of the default ./views.
  set :views, File.join(Sinatra::Application.root, "app", "views")
end

# Set up the controllers and helpers
Dir[APP_ROOT.join('app', 'controllers', '*.rb')].each { |file| require file }
Dir[APP_ROOT.join('app', 'helpers', '*.rb')].each { |file| require file }

# Set up the database and models
require APP_ROOT.join('config', 'database')
Remove duplicate faker gem
# Set up gems listed in the Gemfile.
# See: http://gembundler.com/bundler_setup.html
# http://stackoverflow.com/questions/7243486/why-do-you-need-require-bundler-setup
ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__)
# FIX: File.exists? is deprecated (removed in Ruby 3.2); File.exist? is the
# supported spelling.
require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE'])

# Require gems we care about
require 'rubygems'
require 'uri'
require 'pathname'
require 'pg'
require 'active_record'
require 'logger'
require 'sinatra'
require "sinatra/reloader" if development?
require 'erb'
require 'pry'
require 'bcrypt'
require 'faker'

# Some helper constants for path-centric logic
APP_ROOT = Pathname.new(File.expand_path('../../', __FILE__))
APP_NAME = APP_ROOT.basename.to_s

configure do
  # By default, Sinatra assumes that the root is the file that calls the configure block.
  # Since this is not the case for us, we set it manually.
  set :root, APP_ROOT.to_path

  # See: http://www.sinatrarb.com/faq.html#sessions
  enable :sessions
  # NOTE(review): the fallback secret is checked into source -- acceptable for
  # development only; SESSION_SECRET must be set in production.
  set :session_secret, ENV['SESSION_SECRET'] || 'this is a secret shhhhh'

  # Point Sinatra at app/views instead of the default ./views.
  set :views, File.join(Sinatra::Application.root, "app", "views")
end

# Set up the controllers and helpers
Dir[APP_ROOT.join('app', 'controllers', '*.rb')].each { |file| require file }
Dir[APP_ROOT.join('app', 'helpers', '*.rb')].each { |file| require file }

# Set up the database and models
require APP_ROOT.join('config', 'database')
|
require File.expand_path('../boot', __FILE__)

# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"

if defined?(Bundler)
  # If you precompile assets before deploying to production, use this line
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # If you want your assets lazily compiled in production, use this line
  # Bundler.require(:default, :assets, Rails.env)
end

module Jbhannah
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)
    config.autoload_paths += %W(#{config.root}/lib)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Expires cached post pages/fragments when posts change.
    config.active_record.observers = :post_sweeper

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Arizona'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    # config.active_record.whitelist_attributes = true

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'

    # Precompile assets
    # NOTE(review): the dots in these regexes are unescaped, so e.g.
    # /welcome.(css|js)/ also matches "welcomeX.css" -- likely harmless here,
    # but \. would be stricter.
    config.assets.precompile += ['disqus.js']
    config.assets.precompile += [/welcome.(css|js)/, /posts.(css|js)/]
    config.assets.precompile += [/devise\/sessions.(css|js)/]
  end
end
Precompile CKEditor assets
require File.expand_path('../boot', __FILE__)

# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"

if defined?(Bundler)
  # If you precompile assets before deploying to production, use this line
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # If you want your assets lazily compiled in production, use this line
  # Bundler.require(:default, :assets, Rails.env)
end

module Jbhannah
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)
    config.autoload_paths += %W(#{config.root}/lib)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Expires cached post pages/fragments when posts change.
    config.active_record.observers = :post_sweeper

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Arizona'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    # config.active_record.whitelist_attributes = true

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'

    # Precompile assets
    config.assets.precompile += ['disqus.js']
    config.assets.precompile += [/welcome.(css|js)/, /posts.(css|js)/]
    config.assets.precompile += [/devise\/sessions.(css|js)/]
    # FIX: was "precompile ++ [...]" -- `++` is not a Ruby operator (it parses
    # as binary + followed by unary +, raising NoMethodError, and would not
    # mutate the list anyway). `+=` actually appends the CKEditor assets.
    config.assets.precompile += ['ckeditor/ckeditor']
  end
end
|
#
# = Configuration
#
# This is essentially the "boot" script for the application. (See below for
# the precise order in which files are included and run.)
#
# Configurations in this file will affect all three modes: PRODUCTION,
# DEVELOPMENT and TEST. They can be overridden with mode-specific
# configurations in these three files:
#
# * config/environments/production.rb
# * config/environments/development.rb
# * config/environments/test.rb
#
# Make site-specific modifications in these files:
#
# * config/consts-site.rb
# * config/environment-site.rb
# * config/environments/production-site.rb
# * config/environments/development-site.rb
# * config/environments/test-site.rb
#
# == Startup Procedure
#
# [1. script/server]
# Runs config/boot.rb, then runs commands/server (in rails gem).
#
# [2. config/boot.rb]
# Loads rubygems, loads rails gem, sets load path, DOES NOT RUN ANYTHING.
#
# [3. commands/server]
# Picks server and runs it.
#
# [4. commands/servers/webrick]
# Parses ARGV, runs config/environment.rb, runs server.
#
# [5. config/environment.rb]
# Does all the application-supplied configuration, in this order:
# 1. config/consts-site.rb (optional)
# 2. config/consts.rb
# 3. config/environment.rb
# 4. config/environment-site.rb (optional)
# 5. config/environments/RAILS_ENV.rb
# 6. config/environments/RAILS_ENV-site.rb (optional)
#
# == Global Constants
#
# Global Constants (e.g., DOMAIN and DEFAULT_LOCALE) are initialized in
# config/consts.rb.
#
# == Rails Configurator
#
# The environment files are evaled in the context of a Rails::Initializer
# instance. The "local" variable +config+ gives you access to the
# Rails::Configuration class. This, among other things, lets you set class
# variables in all the major Rails packages:
#
# # This sets @@default_timezone in ActiveRecord::Base.
# config.active_record.default_timezone = :utc
#
# Global constants _can_ be defined in these configurator blocks, too. Rails
# automatically copies them into the Object class namespace (thereby making
# them available to the entire application). However, currently, no global
# constants are defined this way -- they are defined outside, directly in the
# main namespace.
#
################################################################################
# Make sure it's already booted.
require File.join(File.dirname(__FILE__), 'boot')
# This must be here -- config/boot.rb greps this file looking for it(!!)
RAILS_GEM_VERSION = '2.1.1'
# Short-hand for the three execution modes.
PRODUCTION = (RAILS_ENV == 'production')
DEVELOPMENT = (RAILS_ENV == 'development')
TESTING = (RAILS_ENV == 'test')
# Should be one of [:normal, :silent]
# :silent turns off event logging and email notifications
# Process-wide run level. Defaults to :normal; :silent is used to turn off
# event logging and email notifications.
class RunLevel
  @@runlevel = :normal

  class << self
    # Switch back to normal operation (events logged, mail sent).
    def normal
      @@runlevel = :normal
    end

    # Suppress event logging and email notifications.
    def silent
      @@runlevel = :silent
    end

    # True unless the run level has been switched to :silent.
    def is_normal?
      @@runlevel == :normal
    end
  end
end
# RUN_LEVEL = :normal # :silent
# Do site-specific global constants first.
file = File.join(File.dirname(__FILE__), 'consts-site')
require file if File.exists?(file + '.rb')
# Now provide defaults for the rest.
require File.join(File.dirname(__FILE__), 'consts')
# --------------------------------------------------------------------
# General non-mode-specific, non-site-specific configurations here.
# --------------------------------------------------------------------
# Sacrificial goat and rubber chicken to get Globalite to behave correctly
# for rake tasks.
:some_new_symbol
Rails::Initializer.run do |config|
  # Add our local classes and modules (e.g., Textile and LoginSystem) and class
  # extensions (e.g., String and Symbol extensions) to the include path.
  config.load_paths += %W(
    #{RAILS_ROOT}/app/classes
    #{RAILS_ROOT}/app/extensions
  )

  # Use the database for sessions instead of the file system
  # (create the session table with 'rake create_sessions_table')
  # config.action_controller.session_store = :active_record_store

  # A secret is required to generate an integrity hash for cookie session data.
  # SECURITY NOTE(review): this secret is hard-coded in source control; anyone
  # with repo access can forge session cookies. It should come from the
  # environment or the site-specific config instead.
  config.action_controller.session = {
    :session_key => 'mo_session',
    :secret => '1f58da43b4419cd9c1a7ffb87c062a910ebd2925d3475aefe298e2a44d5e86541125c91c2fb8482b7c04f7dd89ecf997c09a6e28a2d01fc4819c75d2996e6641'
  }

  # Enable page/fragment caching by setting a file-based store (remember to
  # create the caching directory and make it readable to the application) .
  # config.action_controller.fragment_cache_store = :file_store, "#{RAILS_ROOT}/cache"

  # Make Active Record use UTC instead of local time. This is critical if we
  # want to sync up remote servers. It causes Rails to store dates in UTC and
  # convert from UTC to whatever we've set the timezone to when reading them
  # back in. It shouldn't actually make any difference how the database is
  # configured. It takes dates as a string, stores them however it chooses,
  # performing whatever conversions it deems fit, then returns them back to us
  # in exactly the same format we gave them to it. (NOTE: only the first line
  # should be necessary, but for whatever reason, Rails is failing to do the
  # other configs on some platforms.)
  config.time_zone = ENV['TZ']
  if config.time_zone.nil?
    # Localization isn't loaded yet, so this message cannot be translated.
    raise 'TZ environment variable must be set. Run "rake -D time" for a list of tasks for finding appropriate time zone names.'
  end

  # This instructs ActionView how to mark form fields which have an error.
  # I just change the CSS class to "has_error", which gives it a red border.
  # This is superior to the default, which encapsulates the field in a div,
  # because that throws the layout off. Just changing the border, while less
  # conspicuous, has no effect on the layout.
  config.action_view.field_error_proc = Proc.new { |html_tag, instance|
    html_tag.sub(/(<\w+)/, '\1 class="has_error"')
  }

  # Configure SMTP settings for ActionMailer.
  config.action_mailer.smtp_settings = {
    :address => MAIL_DOMAIN,
    :port => 25,
    :domain => DOMAIN,
    # :authentication => :login,
    # :user_name => "<username>",
    # :password => "<password>",
  }

  # Include optional site-specific configs (environment-site.rb, evaluated in
  # this same binding so it can use the local `config`).
  file = __FILE__.sub(/.rb$/, '-site.rb')
  eval(IO.read(file), binding, file) if File.exists?(file)
end
# -------------------------------------
# "Temporary" third-party bug-fixes.
# -------------------------------------
# Get rid of ?<timestamp> so caching works better.
# See http://www.deathy.net/blog/2007/06/26/rails-asset-timestamping-and-why-its-bad/ for more details.
ENV['RAILS_ASSET_ID'] = ''
# RedCloth 4.x has quite a few bugs still. This should roughly fix them until
# Jason Garber has time to fix them properly in the parser rules. :stopdoc:
module RedCloth
  class TextileDoc
    # Render the Textile source to HTML, working around RedCloth 4.x parser
    # bugs with textual pre- and post-processing.
    # NOTE: mutates the receiver (the gsub! pre-filters edit the source text
    # in place) before delegating to RedCloth's own formatter.
    def to_html(*rules)
      # Pre-filters: losing square brackets if next to quotes, this
      # introduces a space -- not perfect, but close.
      self.gsub!('["', '[ "')
      self.gsub!('"]', '" ]')
      apply_rules(rules)
      result = to(RedCloth::Formatters::HTML).to_s.clone
      # Post-filters: RedCloth is not catching all the italics, and is seeing
      # spans where they don't belong, so rewrite both in the emitted HTML.
      result.gsub!(/_+([A-Z][A-Za-z0-9]+)_+/, '<i>\\1</i>')
      result.gsub!(/<span>(.*?)<\/span>/, '%\\1%')
      return result
    end
  end
end
# There appears to be a bug in read_multipart. I checked, this is never used
# by the live server. But it *is* used by integration tests, and it fails
# hideously if you are foolish enough to try to upload a file in such tests.
# Apparently it simply forgot to unescape the parameter names. Easy fix. -JPH
module ActionController
  class AbstractRequest
    class << self
      # Keep a handle on the original implementation so we can delegate to it.
      alias fubar_read_multipart read_multipart

      # Wrap the stock multipart parser and URI-unescape each parameter name
      # (the values are left untouched).
      def read_multipart(*args)
        params = fubar_read_multipart(*args)
        for key, val in params
          params.delete(key)
          params[URI.unescape(key)] = val
        end
        return params
      end
    end
  end
end
# Add the option to "orphan" attachments if you use ":dependent => :orphan" in
# the has_many association options. This has the effect of doing *nothing* to
# the attachements. The other options allowed by Rails already are:
#
#   :delete_all  Delete directly from database without callbacks.
#   :destroy     Call "destroy" on all attachments.
#   nil          Set the parent id to NULL.
#
# New option:
#
#   :orphan      Do nothing.
#
module ActiveRecord
  module Associations
    class HasManyAssociation
      alias original_delete_records delete_records

      # Skip record deletion entirely when the association was declared with
      # :dependent => :orphan; otherwise defer to stock Rails behavior.
      def delete_records(records)
        if @reflection.options[:dependent] != :orphan
          original_delete_records(records)
        end
      end
    end

    module ClassMethods
      alias original_configure_dependency_for_has_many configure_dependency_for_has_many

      # Suppress the destroy/nullify callbacks Rails would normally install
      # when the association uses the custom :orphan option.
      def configure_dependency_for_has_many(reflection)
        if reflection.options[:dependent] != :orphan
          original_configure_dependency_for_has_many(reflection)
        end
      end
    end
  end
end
Fixed bug in RedCloth bug-fix.
#
# = Configuration
#
# This is essentially the "boot" script for the application. (See below for
# the precise order in which files are included and run.)
#
# Configurations in this file will affect all three modes: PRODUCTION,
# DEVELOPMENT and TEST. They can be overridden with mode-specific
# configurations in these three files:
#
# * config/environments/production.rb
# * config/environments/development.rb
# * config/environments/test.rb
#
# Make site-specific modifications in these files:
#
# * config/consts-site.rb
# * config/environment-site.rb
# * config/environments/production-site.rb
# * config/environments/development-site.rb
# * config/environments/test-site.rb
#
# == Startup Procedure
#
# [1. script/server]
# Runs config/boot.rb, then runs commands/server (in rails gem).
#
# [2. config/boot.rb]
# Loads rubygems, loads rails gem, sets load path, DOES NOT RUN ANYTHING.
#
# [3. commands/server]
# Picks server and runs it.
#
# [4. commands/servers/webrick]
# Parses ARGV, runs config/environment.rb, runs server.
#
# [5. config/environment.rb]
# Does all the application-supplied configuration, in this order:
# 1. config/consts-site.rb (optional)
# 2. config/consts.rb
# 3. config/environment.rb
# 4. config/environment-site.rb (optional)
# 5. config/environments/RAILS_ENV.rb
# 6. config/environments/RAILS_ENV-site.rb (optional)
#
# == Global Constants
#
# Global Constants (e.g., DOMAIN and DEFAULT_LOCALE) are initialized in
# config/consts.rb.
#
# == Rails Configurator
#
# The environment files are evaled in the context of a Rails::Initializer
# instance. The "local" variable +config+ gives you access to the
# Rails::Configuration class. This, among other things, lets you set class
# variables in all the major Rails packages:
#
# # This sets @@default_timezone in ActiveRecord::Base.
# config.active_record.default_timezone = :utc
#
# Global constants _can_ be defined in these configurator blocks, too. Rails
# automatically copies them into the Object class namespace (thereby making
# them available to the entire application). However, currently, no global
# constants are defined this way -- they are defined outside, directly in the
# main namespace.
#
################################################################################
# Make sure it's already booted.
require File.join(File.dirname(__FILE__), 'boot')
# This must be here -- config/boot.rb greps this file looking for it(!!)
RAILS_GEM_VERSION = '2.1.1'
# Short-hand for the three execution modes.
# Each constant is a plain boolean computed once at boot from RAILS_ENV.
PRODUCTION = (RAILS_ENV == 'production')
DEVELOPMENT = (RAILS_ENV == 'development')
TESTING = (RAILS_ENV == 'test')
# Should be one of [:normal, :silent]
# :silent turns off event logging and email notifications
class RunLevel
  # Use a class-instance variable rather than a class variable (@@runlevel):
  # class variables are shared across the whole inheritance tree, a
  # well-known Ruby foot-gun. The public interface is unchanged.
  @runlevel = :normal

  class << self
    # Enable event logging and email notifications (the default).
    def normal
      @runlevel = :normal
    end

    # Suppress event logging and email notifications.
    def silent
      @runlevel = :silent
    end

    # True when running at the default :normal level.
    def is_normal?
      @runlevel == :normal
    end
  end
end
# RUN_LEVEL = :normal # :silent
# Do site-specific global constants first.
file = File.join(File.dirname(__FILE__), 'consts-site')
# File.exist? -- the File.exists? alias is deprecated (removed in Ruby 3.2).
require file if File.exist?(file + '.rb')
# Now provide defaults for the rest.
require File.join(File.dirname(__FILE__), 'consts')
# --------------------------------------------------------------------
# General non-mode-specific, non-site-specific configurations here.
# --------------------------------------------------------------------
# Sacrificial goat and rubber chicken to get Globalite to behave correctly
# for rake tasks.
:some_new_symbol
Rails::Initializer.run do |config|
  # Add our local classes and modules (e.g., Textile and LoginSystem) and class
  # extensions (e.g., String and Symbol extensions) to the include path.
  config.load_paths += %W(
    #{RAILS_ROOT}/app/classes
    #{RAILS_ROOT}/app/extensions
  )
  # Use the database for sessions instead of the file system
  # (create the session table with 'rake create_sessions_table')
  # config.action_controller.session_store = :active_record_store
  # A secret is required to generate an integrity hash for cookie session data.
  config.action_controller.session = {
    :session_key => 'mo_session',
    :secret => '1f58da43b4419cd9c1a7ffb87c062a910ebd2925d3475aefe298e2a44d5e86541125c91c2fb8482b7c04f7dd89ecf997c09a6e28a2d01fc4819c75d2996e6641'
  }
  # Enable page/fragment caching by setting a file-based store (remember to
  # create the caching directory and make it readable to the application) .
  # config.action_controller.fragment_cache_store = :file_store, "#{RAILS_ROOT}/cache"
  # Make Active Record use UTC instead of local time. This is critical if we
  # want to sync up remote servers. It causes Rails to store dates in UTC and
  # convert from UTC to whatever we've set the timezone to when reading them
  # back in. It shouldn't actually make any difference how the database is
  # configured. It takes dates as a string, stores them however it chooses,
  # performing whatever conversions it deems fit, then returns them back to us
  # in exactly the same format we gave them to it. (NOTE: only the first line
  # should be necessary, but for whatever reason, Rails is failing to do the
  # other configs on some platforms.)
  config.time_zone = ENV['TZ']
  if config.time_zone.nil?
    # Localization isn't loaded yet.
    raise 'TZ environment variable must be set. Run "rake -D time" for a list of tasks for finding appropriate time zone names.'
  end
  # This instructs ActionView how to mark form fields which have an error.
  # I just change the CSS class to "has_error", which gives it a red border.
  # This is superior to the default, which encapsulates the field in a div,
  # because that throws the layout off. Just changing the border, while less
  # conspicuous, has no effect on the layout.
  config.action_view.field_error_proc = Proc.new { |html_tag, instance|
    html_tag.sub(/(<\w+)/, '\1 class="has_error"')
  }
  # Configure SMTP settings for ActionMailer.
  config.action_mailer.smtp_settings = {
    :address => MAIL_DOMAIN,
    :port => 25,
    :domain => DOMAIN,
    # :authentication => :login,
    # :user_name => "<username>",
    # :password => "<password>",
  }
  # Include optional site-specific configs.
  # Escape the dot so only a literal ".rb" suffix is replaced, and use
  # File.exist? (File.exists? is deprecated, removed in Ruby 3.2).
  file = __FILE__.sub(/\.rb$/, '-site.rb')
  eval(IO.read(file), binding, file) if File.exist?(file)
end
# -------------------------------------
# "Temporary" third-party bug-fixes.
# -------------------------------------
# Get rid of ?<timestamp> so caching works better.
# See http://www.deathy.net/blog/2007/06/26/rails-asset-timestamping-and-why-its-bad/ for more details.
# An empty RAILS_ASSET_ID disables the timestamp suffix entirely.
ENV['RAILS_ASSET_ID'] = ''
# RedCloth 4.x has quite a few bugs still. This should roughly fix them until
# Jason Garber has time to fix them properly in the parser rules. :stopdoc:
module RedCloth
class TextileDoc
# Render this Textile document as HTML, working around RedCloth 4.x
# parser bugs with pre- and post-filters.
def to_html(*rules)
# Pre-filters: losing square brackets if next to quotes, this
# introduces a space -- not perfect, but close.
self.gsub!('["', '[ "')
self.gsub!('"]', '" ]')
apply_rules(rules)
result = to(RedCloth::Formatters::HTML).to_s.clone
# Post-filters: not catching all the italics, and seeing spans where
# they don't belong. The (^|\W)...(\W|$) anchors keep underscores that
# are embedded inside words from being italicized.
result.gsub!(/(^|\W)_+([A-Z][A-Za-z0-9]+)_+(\W|$)/, '\\1<i>\\2</i>\\3')
result.gsub!(/<span>(.*?)<\/span>/, '%\\1%')
return result
end
end
end
# There appears to be a bug in read_multipart. I checked, this is never used
# by the live server. But it *is* used by integration tests, and it fails
# hideously if you are foolish enough to try to upload a file in such tests.
# Apparently it simply forgot to unescape the parameter names. Easy fix. -JPH
module ActionController
  class AbstractRequest
    class << self
      alias fubar_read_multipart read_multipart

      # Wrap the stock multipart parser and URI-unescape the parameter
      # names it forgot to decode.
      def read_multipart(*args)
        params = fubar_read_multipart(*args)
        # Iterate over a snapshot of the keys: deleting and re-inserting
        # entries while iterating the hash itself (as the old for-loop
        # did) is unsafe and raises in some Ruby versions.
        params.keys.each do |key|
          params[URI.unescape(key)] = params.delete(key)
        end
        params
      end
    end
  end
end
# Add the option to "orphan" attachments if you use ":dependent => :orphan" in
# the has_many association options. This has the effect of doing *nothing* to
# the attachments. The other options allowed by Rails already are:
#
# :delete_all Delete directly from database without callbacks.
# :destroy Call "destroy" on all attachments.
# nil Set the parent id to NULL.
#
# New option:
#
# :orphan Do nothing.
#
module ActiveRecord
module Associations
class HasManyAssociation
alias original_delete_records delete_records
# Leave child records completely untouched for :dependent => :orphan;
# otherwise defer to Rails' stock deletion behaviour.
def delete_records(records)
if @reflection.options[:dependent] != :orphan
original_delete_records(records)
end
end
end
module ClassMethods
alias original_configure_dependency_for_has_many configure_dependency_for_has_many
# Skip Rails' dependency callback/validation setup for :orphan, which
# the stock implementation would reject as an unknown option.
def configure_dependency_for_has_many(reflection)
if reflection.options[:dependent] != :orphan
original_configure_dependency_for_has_many(reflection)
end
end
end
end
end
|
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module PRX
  APP_VERSION = 'v4'.freeze

  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    I18n.config.enforce_available_locales = true
    config.i18n.enforce_available_locales = true
    config.i18n.default_locale = :en
    config.autoload_paths += %W( #{config.root}/app/representers/concerns )
    config.autoload_paths += %W( #{config.root}/app/workers )
    # Disable the asset pipeline.
    config.assets.enabled = false
    config.generators do |g|
      g.test_framework :mini_test, spec: true, fixture: false
    end
    # CORS: PRX-owned origins get full API access; all other origins are
    # limited to read-only /api and the public /pub endpoints.
    config.middleware.insert_after Rails::Rack::Logger, Rack::Cors do
      allow do
        origins /.*\.prx\.(?:org|dev|tech|docker)$/
        resource '/api/*', methods: [:get, :put, :post, :delete, :options], headers: :any
      end
      allow do
        origins '*'
        resource '/api/*', methods: [:get]
      end
      allow do
        origins '*'
        resource '/pub/*', methods: [:get, :head, :options], headers: :any
      end
    end
    # explicitly set auth host, rather than defaulting to id.prx.org
    if ENV['ID_HOST'].present?
      protocol = ENV['ID_HOST'].include?('.docker') ? 'http' : 'https'
      PrxAuth::Rails.middleware = false
      config.middleware.insert_before 'ActionDispatch::ParamsParser', 'Rack::PrxAuth',
        cert_location: "#{protocol}://#{ENV['ID_HOST']}/api/v1/certs",
        issuer: ENV['ID_HOST']
    end
    config.active_job.queue_adapter = :shoryuken
    config.active_job.queue_name_prefix = Rails.env
    config.active_job.queue_name_delimiter = '_'
    config.active_record.raise_in_transactional_callbacks = true
    prx_url_options = { host: ENV['PRX_HOST'], protocol: 'https' }
    config.action_mailer.default_url_options = prx_url_options
    # *.docker development hosts have no TLS, so fall back to plain http
    # there -- same convention as the ID_HOST handling above.
    proto = (ENV['CMS_HOST'] || '').include?('.docker') ? 'http' : 'https'
    cms_url_options = { host: ENV['CMS_HOST'], protocol: proto }
    Rails.application.routes.default_url_options = cms_url_options
  end
end
Use http in dev
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module PRX
APP_VERSION = 'v4'.freeze
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
I18n.config.enforce_available_locales = true
config.i18n.enforce_available_locales = true
config.i18n.default_locale = :en
config.autoload_paths += %W( #{config.root}/app/representers/concerns )
config.autoload_paths += %W( #{config.root}/app/workers )
# Disable the asset pipeline.
config.assets.enabled = false
config.generators do |g|
g.test_framework :mini_test, spec: true, fixture: false
end
# CORS: PRX-owned origins get full API access; all other origins are
# limited to read-only /api and the public /pub endpoints.
config.middleware.insert_after Rails::Rack::Logger, Rack::Cors do
allow do
origins /.*\.prx\.(?:org|dev|tech|docker)$/
resource '/api/*', methods: [:get, :put, :post, :delete, :options], headers: :any
end
allow do
origins '*'
resource '/api/*', methods: [:get]
end
allow do
origins '*'
resource '/pub/*', methods: [:get, :head, :options], headers: :any
end
end
# explicitly set auth host, rather than defaulting to id.prx.org
if ENV['ID_HOST'].present?
protocol = ENV['ID_HOST'].include?('.docker') ? 'http' : 'https'
PrxAuth::Rails.middleware = false
config.middleware.insert_before 'ActionDispatch::ParamsParser', 'Rack::PrxAuth',
cert_location: "#{protocol}://#{ENV['ID_HOST']}/api/v1/certs",
issuer: ENV['ID_HOST']
end
# Background jobs run through shoryuken on environment-prefixed queues
# (e.g. "production_...").
config.active_job.queue_adapter = :shoryuken
config.active_job.queue_name_prefix = Rails.env
config.active_job.queue_name_delimiter = '_'
config.active_record.raise_in_transactional_callbacks = true
prx_url_options = { host: ENV['PRX_HOST'], protocol: 'https' }
config.action_mailer.default_url_options = prx_url_options
# *.docker development hosts have no TLS, so use plain http there.
proto = (ENV['CMS_HOST'] || '').include?('.docker') ? 'http' : 'https'
cms_url_options = { host: ENV['CMS_HOST'], protocol: proto }
Rails.application.routes.default_url_options = cms_url_options
end
end
|
RAILS_GEM_VERSION = '2.3.2' unless defined? RAILS_GEM_VERSION
require File.join(File.dirname(__FILE__), 'boot')
require 'yaml'
# Load per-environment application settings from config/settings.yml into
# the APP_CONFIG constant; fall back to an empty hash when the file or the
# current environment's section is missing.
config_file_path = File.join(RAILS_ROOT, *%w(config settings.yml))
if File.exist?(config_file_path)
config = YAML.load_file(config_file_path)
APP_CONFIG = config.has_key?(RAILS_ENV) ? config[RAILS_ENV] : {}
else
puts "WARNING: configuration file #{config_file_path} not found."
APP_CONFIG = {}
end
# Cookie/session domain; override via :default_host in settings.yml.
DEFAULT_HOST = APP_CONFIG[:default_host] || "spotus.local"
Rails::Initializer.run do |config|
  config.gem "haml", :version => '>=2.0.6'
  config.gem "fastercsv"
  config.gem "mysql", :version => '2.7'
  config.gem 'thoughtbot-factory_girl', :lib => 'factory_girl', :source => 'http://gems.github.com'
  config.gem "rubyist-aasm", :lib => "aasm", :version => '>=2.0.5', :source => 'http://gems.github.com'
  config.gem 'mislav-will_paginate', :lib => 'will_paginate', :version => '>=2.3.7', :source => 'http://gems.github.com/'
  config.gem "rspec-rails", :lib => false, :version => "= 1.2.2"
  config.gem "rspec", :lib => false, :version => "= 1.2.2"
  config.gem "cucumber", :lib => false, :version => "= 0.1.16"
  config.gem "webrat", :lib => false, :version => ">= 0.4.4"
  config.gem "money", :version => ">=2.1.3"
  config.time_zone = 'UTC'
  config.load_paths += %W( #{RAILS_ROOT}/app/sweepers )
  # Fallback session secret when settings.yml does not provide one.
  DEFAULT_SECRET = "552e024ba5bbf493d1ae37aacb875359804da2f1002fa908f304c7b0746ef9ab67875b69e66361eb9484fc0308cabdced715f7e97f02395874934d401a07d3e0"
  # Only rescue the NoMethodError raised when a nested settings key is
  # missing (nil lookup); the old bare `rescue` modifier silently
  # swallowed every StandardError, hiding real configuration bugs.
  secret = begin
    APP_CONFIG[:action_controller][:session][:secret]
  rescue NoMethodError
    DEFAULT_SECRET
  end
  config.action_controller.session = { :session_key => '_spotus_session', :secret => secret }
end
# use this domain for cookies so switching networks doesn't drop cookies
ActionController::Base.session_options[:domain] = DEFAULT_HOST
# These are the sizes of the domain (i.e. 0 for localhost, 1 for something.com)
# for each of your environments
SubdomainFu.tld_sizes = { :development => 1,
:test => 1,
:staging => 2,
:production => 1 }
# These are the subdomains that will be equivalent to no subdomain
SubdomainFu.mirrors = %w(www spotus)
# This is the "preferred mirror" if you would rather show this subdomain
# in the URL than no subdomain at all.
# SubdomainFu.preferred_mirror = "www"
b4 removing environment.rb from repo
RAILS_GEM_VERSION = '2.3.2' unless defined? RAILS_GEM_VERSION
require File.join(File.dirname(__FILE__), 'boot')
require 'yaml'
# Load per-environment application settings from config/settings.yml into
# the APP_CONFIG constant; fall back to an empty hash when the file or the
# current environment's section is missing.
config_file_path = File.join(RAILS_ROOT, *%w(config settings.yml))
if File.exist?(config_file_path)
config = YAML.load_file(config_file_path)
APP_CONFIG = config.has_key?(RAILS_ENV) ? config[RAILS_ENV] : {}
else
puts "WARNING: configuration file #{config_file_path} not found."
APP_CONFIG = {}
end
# Cookie/session domain; override via :default_host in settings.yml.
DEFAULT_HOST = APP_CONFIG[:default_host] || "spotus.local"
Rails::Initializer.run do |config|
  config.gem "haml", :version => '>=2.0.6'
  config.gem "fastercsv"
  #config.gem "mysql", :version => '2.7'
  config.gem 'thoughtbot-factory_girl', :lib => 'factory_girl', :source => 'http://gems.github.com'
  config.gem "rubyist-aasm", :lib => "aasm", :version => '>=2.0.5', :source => 'http://gems.github.com'
  config.gem 'mislav-will_paginate', :lib => 'will_paginate', :version => '>=2.3.7', :source => 'http://gems.github.com/'
  config.gem "rspec-rails", :lib => false, :version => "= 1.2.2"
  config.gem "rspec", :lib => false, :version => "= 1.2.2"
  config.gem "cucumber", :lib => false, :version => "= 0.1.16"
  config.gem "webrat", :lib => false, :version => ">= 0.4.4"
  config.gem "money", :version => ">=2.1.3"
  config.time_zone = 'UTC'
  config.load_paths += %W( #{RAILS_ROOT}/app/sweepers )
  # Fallback session secret when settings.yml does not provide one.
  DEFAULT_SECRET = "552e024ba5bbf493d1ae37aacb875359804da2f1002fa908f304c7b0746ef9ab67875b69e66361eb9484fc0308cabdced715f7e97f02395874934d401a07d3e0"
  # Only rescue the NoMethodError raised when a nested settings key is
  # missing (nil lookup); the old bare `rescue` modifier silently
  # swallowed every StandardError, hiding real configuration bugs.
  secret = begin
    APP_CONFIG[:action_controller][:session][:secret]
  rescue NoMethodError
    DEFAULT_SECRET
  end
  config.action_controller.session = { :session_key => '_spotus_session', :secret => secret }
end
# use this domain for cookies so switching networks doesn't drop cookies
ActionController::Base.session_options[:domain] = DEFAULT_HOST
# These are the sizes of the domain (i.e. 0 for localhost, 1 for something.com)
# for each of your environments
SubdomainFu.tld_sizes = { :development => 1,
:test => 1,
:staging => 2,
:production => 1 }
# These are the subdomains that will be equivalent to no subdomain
SubdomainFu.mirrors = %w(www spotus)
# This is the "preferred mirror" if you would rather show this subdomain
# in the URL than no subdomain at all.
# SubdomainFu.preferred_mirror = "www"
|
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.4' unless defined? RAILS_GEM_VERSION
# RAILS_GEM_VERSION = '2.2.0' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
require 'yaml'
# YAML.load_file both reads and closes the file; the old
# YAML.load(File.open(...)) leaked the file handle until GC.
configuration_bindings = YAML.load_file("#{RAILS_ROOT}/config/config.yml")
Rails::Initializer.run do |config|
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# See Rails::Configuration for more options.
# Skip frameworks you're not going to use (only works if using vendor/rails).
# To use Rails without a database, you must remove the Active Record framework
# config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
# Only load the plugins named here, in the order given. By default, all plugins
# in vendor/plugins are loaded in alphabetical order.
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Add additional load paths for your own custom dirs
# config.load_paths += %W( #{RAILS_ROOT}/extras )
# Force all environments to use the same logger level
# (by default production uses :info, the others :debug)
# config.log_level = :debug
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
# NOTE(review): the session_key is set from the same 'secret' config value
# as :secret -- presumably intentional, but worth confirming.
config.action_controller.session = {
:session_key => configuration_bindings['base']['rails']['secret'],
:secret => configuration_bindings['base']['rails']['secret']
}
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with 'rake db:sessions:create')
# config.action_controller.session_store = :active_record_store
# Use SQL instead of Active Record's schema dumper when creating the test database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Activate observers that should always be running
# config.active_record.observers = :cacher, :garbage_collector
# Make Active Record use UTC-base instead of local time
# config.active_record.default_timezone = :utc
# Added from the acts_as_authenticated plugin 6/3/07
config.active_record.observers = :user_observer, :listed_taxon_sweeper
# Gems
config.gem 'mislav-will_paginate', :lib => 'will_paginate', :source => 'http://gems.github.com'
config.gem 'rubyist-aasm', :lib => 'aasm', :source => 'http://gems.github.com', :version => '2.0.2'
# config.gem "ruby-debug" # Apparently this doesn't work with passenger...
config.gem "GeoRuby", :lib => 'geo_ruby'
config.gem "mojombo-chronic", :lib => 'chronic', :source => 'http://gems.github.com'
config.gem 'bluecloth'
config.gem "htmlentities"
config.gem "right_http_connection"
config.gem "right_aws"
config.gem "mocha"
config.gem "thoughtbot-paperclip", :lib => 'paperclip', :source => 'http://gems.github.com'
config.gem "ambethia-smtp-tls", :lib => "smtp-tls", :source => "http://gems.github.com/"
config.gem "kueda-flickraw", :lib => "flickraw", :source => "http://gems.github.com/"
config.gem 'rest-client', :lib => 'rest_client'
config.gem "carlosparamio-geoplanet", :lib => 'geoplanet', :source => "http://gems.github.com/"
config.gem 'geoip'
config.gem 'alexvollmer-daemon-spawn', :lib => 'daemon-spawn', :source => "http://gems.github.com/"
# Can't do this until Rails starts including the rake tasks of plugin gems
# config.gem "freelancing-god-thinking-sphinx", :lib => 'thinking_sphinx',
# :source => 'http://gems.github.com'
# Set default time zone to UTC
config.time_zone = 'UTC'
end
# Windows flag, for disabling things that might not work in Windoze
WINDOWS = false
require 'geoplanet'
require 'geoip'
require 'net-flickr/lib/net/flickr'
require 'catalogue_of_life'
require 'ubio'
require 'model_tips'
require 'meta_service'
require 'wikipedia_service'
require 'batch_tools'
require 'georuby_extra'
# GeoIP setup, for IP geocoding.
# YAML.load_file both reads and closes the file; the old
# YAML.load(File.open(...)) leaked the file handle until GC.
geoip_config = YAML.load_file("#{RAILS_ROOT}/config/geoip.yml")
GEOIP = GeoIP.new(geoip_config[RAILS_ENV]['city'])
### API KEYS ###
UBIO_KEY = configuration_bindings['base']['ubio']['UBIO_KEY']
# Yahoo Developer Network
YDN_APP_ID = configuration_bindings['base']['yahoo_dev_network']['YDN_APP_ID']
GeoPlanet.appid = YDN_APP_ID
# Google Analytics configs
# See http://www.rubaidh.com/projects/google-analytics-plugin/
Rubaidh::GoogleAnalytics.tracker_id = configuration_bindings['base']['google_analytics']['tracker_id']
Rubaidh::GoogleAnalytics.domain_name = configuration_bindings['base']['google_analytics']['domain_name']
Rubaidh::GoogleAnalytics.environments = ['production']
# General settings
SITE_NAME = configuration_bindings['base']['general']['SITE_NAME']
OBSERVATIONS_TILE_SERVER = configuration_bindings[Rails.env]['tile_servers']['observations']
SPHERICAL_MERCATOR = SphericalMercator.new
Added gem deps for Picasa plugin.
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.4' unless defined? RAILS_GEM_VERSION
# RAILS_GEM_VERSION = '2.2.0' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
require 'yaml'
# YAML.load_file both reads and closes the file; the old
# YAML.load(File.open(...)) leaked the file handle until GC.
configuration_bindings = YAML.load_file("#{RAILS_ROOT}/config/config.yml")
Rails::Initializer.run do |config|
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# See Rails::Configuration for more options.
# Skip frameworks you're not going to use (only works if using vendor/rails).
# To use Rails without a database, you must remove the Active Record framework
# config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
# Only load the plugins named here, in the order given. By default, all plugins
# in vendor/plugins are loaded in alphabetical order.
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Add additional load paths for your own custom dirs
# config.load_paths += %W( #{RAILS_ROOT}/extras )
# Force all environments to use the same logger level
# (by default production uses :info, the others :debug)
# config.log_level = :debug
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
# NOTE(review): the session_key is set from the same 'secret' config value
# as :secret -- presumably intentional, but worth confirming.
config.action_controller.session = {
:session_key => configuration_bindings['base']['rails']['secret'],
:secret => configuration_bindings['base']['rails']['secret']
}
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with 'rake db:sessions:create')
# config.action_controller.session_store = :active_record_store
# Use SQL instead of Active Record's schema dumper when creating the test database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Activate observers that should always be running
# config.active_record.observers = :cacher, :garbage_collector
# Make Active Record use UTC-base instead of local time
# config.active_record.default_timezone = :utc
# Added from the acts_as_authenticated plugin 6/3/07
config.active_record.observers = :user_observer, :listed_taxon_sweeper
# Gems
config.gem 'mislav-will_paginate', :lib => 'will_paginate', :source => 'http://gems.github.com'
config.gem 'rubyist-aasm', :lib => 'aasm', :source => 'http://gems.github.com', :version => '2.0.2'
# config.gem "ruby-debug" # Apparently this doesn't work with passenger...
config.gem "GeoRuby", :lib => 'geo_ruby'
config.gem "mojombo-chronic", :lib => 'chronic', :source => 'http://gems.github.com'
config.gem 'bluecloth'
config.gem "htmlentities"
config.gem "right_http_connection"
config.gem "right_aws"
config.gem "mocha"
config.gem "thoughtbot-paperclip", :lib => 'paperclip', :source => 'http://gems.github.com'
config.gem "ambethia-smtp-tls", :lib => "smtp-tls", :source => "http://gems.github.com/"
config.gem "kueda-flickraw", :lib => "flickraw", :source => "http://gems.github.com/"
config.gem 'rest-client', :lib => 'rest_client'
config.gem "carlosparamio-geoplanet", :lib => 'geoplanet', :source => "http://gems.github.com/"
config.gem 'geoip'
config.gem 'alexvollmer-daemon-spawn', :lib => 'daemon-spawn', :source => "http://gems.github.com/"
# Gem dependencies for the Picasa plugin.
config.gem 'nokogiri'
config.gem 'objectify-xml', :lib => 'objectify_xml'
# Can't do this until Rails starts including the rake tasks of plugin gems
# config.gem "freelancing-god-thinking-sphinx", :lib => 'thinking_sphinx',
# :source => 'http://gems.github.com'
# Set default time zone to UTC
config.time_zone = 'UTC'
end
# Windows flag, for disabling things that might not work in Windoze
WINDOWS = false
require 'geoplanet'
require 'geoip'
require 'net-flickr/lib/net/flickr'
require 'catalogue_of_life'
require 'ubio'
require 'model_tips'
require 'meta_service'
require 'wikipedia_service'
require 'batch_tools'
require 'georuby_extra'
# GeoIP setup, for IP geocoding.
# YAML.load_file both reads and closes the file; the old
# YAML.load(File.open(...)) leaked the file handle until GC.
geoip_config = YAML.load_file("#{RAILS_ROOT}/config/geoip.yml")
GEOIP = GeoIP.new(geoip_config[RAILS_ENV]['city'])
### API KEYS ###
UBIO_KEY = configuration_bindings['base']['ubio']['UBIO_KEY']
# Yahoo Developer Network
YDN_APP_ID = configuration_bindings['base']['yahoo_dev_network']['YDN_APP_ID']
GeoPlanet.appid = YDN_APP_ID
# Google Analytics configs
# See http://www.rubaidh.com/projects/google-analytics-plugin/
Rubaidh::GoogleAnalytics.tracker_id = configuration_bindings['base']['google_analytics']['tracker_id']
Rubaidh::GoogleAnalytics.domain_name = configuration_bindings['base']['google_analytics']['domain_name']
Rubaidh::GoogleAnalytics.environments = ['production']
# General settings
SITE_NAME = configuration_bindings['base']['general']['SITE_NAME']
OBSERVATIONS_TILE_SERVER = configuration_bindings[Rails.env]['tile_servers']['observations']
SPHERICAL_MERCATOR = SphericalMercator.new
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module YouCarryIt
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
# Skip loading the full app environment when precompiling assets.
config.assets.initialize_on_precompile = false
end
end
Fix assets again
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module YouCarryIt
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
# Serve static files from public/ directly from the app server.
config.serve_static_assets = true
end
end
|
## Gets loaded once, upon app boot or during a rake task.
require 'bundler/setup'
require 'aws-sdk'
require 'elasticsearch'
require 'faraday_middleware/aws_sigv4'
require 'yaml'
# Process-wide configuration holder plus the shared Elasticsearch client.
class Environment
  # Loads (and memoizes) the YAML config. The path can be overridden with
  # the `config` environment variable; defaults to ./config.yaml next to
  # this file.
  def self.config
    @config ||= YAML.load_file(ENV["config"] || File.join(File.dirname(__FILE__), "config.yaml"))
  end

  # Validates the required elasticsearch settings, then builds the shared
  # client. Prints a human-readable message and exits the process when a
  # required setting is missing.
  def self.init_client!
    # Define elasticsearch client.
    if Environment.config['elasticsearch']['host'].nil?
      puts "The elasticsearch server's hostname is not set in the configuration file"
      exit
    end
    if Environment.config['elasticsearch']['port'].nil?
      puts "The elasticsearch server's port number is not set in the configuration file"
      exit
    end
    if Environment.config['elasticsearch']['index_read'].nil?
      puts "The elasticsearch server's index name for reading is not set in the configuration file"
      exit
    end
    if Environment.config['elasticsearch']['index_write'].nil?
      puts "The elasticsearch server's index name for writing is not set in the configuration file"
      exit
    end

    log = ENV['log'] || false
    endpoint = "http://#{Environment.config['elasticsearch']['host']}:#{Environment.config['elasticsearch']['port']}"
    @elasticsearch_client = Elasticsearch::Client.new url: endpoint, log: log do |f|
      # Sign requests when talking to AWS-hosted Elasticsearch, but not to
      # a local node.
      if Environment.config['aws'] && (Environment.config['elasticsearch']['host'] != '127.0.0.1')
        # FIX: `credentials:` is deprecated in faraday_middleware-aws-sigv4;
        # `credentials_provider:` is the supported option for refreshing
        # credential objects such as instance-profile credentials.
        f.request :aws_sigv4,
                  credentials_provider: Aws::InstanceProfileCredentials.new,
                  service: 'es',
                  region: Environment.config['aws']['region']
      end
    end
  end

  # Accessor for the client built by init_client! (nil until then).
  def self.client
    @elasticsearch_client
  end
end
Environment.init_client!
Fix for deprecation warning from aws-sigv4
## Gets loaded once, upon app boot or during a rake task.
require 'bundler/setup'
require 'aws-sdk'
require 'elasticsearch'
require 'faraday_middleware/aws_sigv4'
require 'yaml'
# Holds process-wide configuration and the shared Elasticsearch client.
class Environment
  # Memoized YAML configuration; path overridable via the `config` env var,
  # otherwise ./config.yaml next to this file.
  def self.config
    @config ||= begin
      path = ENV["config"] || File.join(File.dirname(__FILE__), "config.yaml")
      YAML.load_file(path)
    end
  end

  # Validates the required elasticsearch settings and builds the client.
  # A missing setting prints a message and terminates the process.
  def self.init_client!
    # Define elasticsearch client.
    es = Environment.config['elasticsearch']
    required = {
      'host'        => 'hostname',
      'port'        => 'port number',
      'index_read'  => 'index name for reading',
      'index_write' => 'index name for writing',
    }
    required.each do |key, label|
      next unless es[key].nil?
      puts "The elasticsearch server's #{label} is not set in the configuration file"
      exit
    end

    verbose = ENV['log'] || false
    endpoint = "http://#{es['host']}:#{es['port']}"
    @elasticsearch_client = Elasticsearch::Client.new url: endpoint, log: verbose do |f|
      # Only sign requests when AWS settings exist and the node is remote.
      if Environment.config['aws'] && es['host'] != '127.0.0.1'
        f.request :aws_sigv4,
                  credentials_provider: Aws::InstanceProfileCredentials.new,
                  service: 'es',
                  region: Environment.config['aws']['region']
      end
    end
  end

  # The client built by init_client! (nil until then).
  def self.client
    @elasticsearch_client
  end
end
Environment.init_client!
|
# Application-wide Sinatra configuration for SAT.
SAT::Application.configure do |config|
  # directory structure
  config.set :root, File.expand_path('../..', __FILE__)
  config.set :views, File.join(config.root, 'app', 'views')

  # i18n
  # NOTE(review): load path is relative to the process working directory,
  # not config.root -- confirm the app always boots from the project root.
  I18n.load_path = Dir[File.join('config', 'locales', '*.{rb,yml}').to_s]
  I18n.default_locale = 'en'

  # link header
  config.helpers Sinatra::LinkHeader

  # logging
  file = File.new("#{config.root}/log/#{config.environment}.log", 'a+')
  file.sync = true
  config.use Rack::CommonLogger, file

  # method override
  config.enable :method_override

  # partial
  config.register Sinatra::Partial
  config.set :partial_template_engine, :slim
  config.enable :partial_underscores

  # sessions
  config.enable :sessions

  # show exceptions
  config.enable :show_exceptions

  # sprockets
  # FIX: bare `map` is Rack::Builder DSL and is not defined inside
  # `configure` -- mount the asset pipeline through the app's builder.
  config.builder.map '/assets' do
    environment = Sprockets::Environment.new
    environment.append_path File.join(config.root, 'app', 'assets', 'javascripts')
    environment.append_path File.join(config.root, 'app', 'assets', 'stylesheets')
    run environment
  end

  # template engine
  config.set :slim, layout_engine: :slim, layout: '../layouts/application'.to_sym
end
Oops — didn't test before pushing; mount the asset pipeline through the app's builder.
# Application-wide Sinatra configuration for SAT.
SAT::Application.configure do |config|
  # directory structure
  config.set :root, File.expand_path('../..', __FILE__)
  config.set :views, File.join(config.root, 'app', 'views')

  # i18n
  # NOTE(review): load path is relative to the process working directory,
  # not config.root -- confirm the app always boots from the project root.
  I18n.load_path = Dir[File.join('config', 'locales', '*.{rb,yml}').to_s]
  I18n.default_locale = 'en'

  # link header
  config.helpers Sinatra::LinkHeader

  # logging
  file = File.new("#{config.root}/log/#{config.environment}.log", 'a+')
  file.sync = true
  config.use Rack::CommonLogger, file

  # method override
  config.enable :method_override

  # partial
  config.register Sinatra::Partial
  config.set :partial_template_engine, :slim
  config.enable :partial_underscores

  # sessions
  config.enable :sessions

  # show exceptions
  config.enable :show_exceptions

  # sprockets
  # Mounted via the app's Rack builder (bare `map` is not available
  # inside `configure`).
  config.builder.map '/assets' do
    environment = Sprockets::Environment.new
    environment.append_path File.join(config.root, 'app', 'assets', 'javascripts')
    environment.append_path File.join(config.root, 'app', 'assets', 'stylesheets')
    run environment
  end

  # template engine
  config.set :slim, layout_engine: :slim, layout: '../layouts/application'.to_sym
end
|
# Be sure to restart your web server when you modify this file.

# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'

# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.0.2' unless defined? RAILS_GEM_VERSION

# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')

Rails::Initializer.run do |config|
  # Settings in config/environments/* take precedence over those specified here

  # Skip frameworks you're not going to use (only works if using vendor/rails)
  # config.frameworks -= [ :action_web_service, :action_mailer ]

  # Only load the plugins named here, by default all plugins in vendor/plugins are loaded
  # config.plugins = %W( exception_notification ssl_requirement )

  # Add additional load paths for your own custom dirs
  # config.load_paths += %W( #{RAILS_ROOT}/extras )

  # Force all environments to use the same logger level
  # (by default production uses :info, the others :debug)
  # config.log_level = :debug

  # Use the database for sessions instead of the file system
  # (create the session table with 'rake db:sessions:create')
  # config.action_controller.session_store = :active_record_store

  # Use SQL instead of Active Record's schema dumper when creating the test database.
  # This is necessary if your schema can't be completely dumped by the schema dumper,
  # like if you have constraints or database-specific column types
  # config.active_record.schema_format = :sql

  # Activate observers that should always be running
  # config.active_record.observers = :cacher, :garbage_collector

  # Make Active Record use UTC-base instead of local time
  # config.active_record.default_timezone = :utc

  # See Rails::Configuration for more options
  # SECURITY(review): the session secret is committed to version control;
  # rotate it and load it from an untracked file or the environment.
  config.action_controller.session = {
    :session_key => '_bunnylove_session_id',
    :secret => '967dffdd493bbb47ffc17be815cbfe85c0348740fdbdc87c65b10e9e1bd25a15dfc5aa70f1f1e1beff8e5e4faaadab1084b5e40044abeeee69d4a2e5a791162a'
  }
end

# Add new inflection rules using the following format
# (all these examples are active by default):
# Inflector.inflections do |inflect|
#   inflect.plural /^(ox)$/i, '\1en'
#   inflect.singular /^(ox)en/i, '\1'
#   inflect.irregular 'person', 'people'
#   inflect.uncountable %w( fish sheep )
# end

# Add new mime types for use in respond_to blocks:
# Mime::Type.register "text/richtext", :rtf
# Mime::Type.register "application/x-mobile", :mobile

# Include your application configuration below
require 'acts_as_authenticated'
ActiveRecord::Base.send(:include, ActiveRecord::Acts::Authenticated)

# Named date/time formats used by views (e.g. time.to_s(:full)).
ActiveSupport::CoreExtensions::Time::Conversions::DATE_FORMATS.merge!(
  :full => "%d %B %Y",
  :dmy => "%d/%m/%Y",
  :short_timestamp => "at %I:%M %p on %d %B, %Y"
)
ActiveSupport::CoreExtensions::Date::Conversions::DATE_FORMATS.merge!(
  :full => "%d %B %Y",
  :dmy => "%d/%m/%Y"
)

ExceptionNotifier.exception_recipients = %w(chris@feedmechocolate.com)
# FIX: set an explicit sender so notification mail is not rejected by MTAs
# that refuse the library's default from-address.
ExceptionNotifier.sender_address = %("Application Error" <notifier@bunnylove.org.uk>)
ExceptionNotifier.email_prefix = "[Bunny Love]"
Specify sender address for exception notifier emails
git-svn-id: 801577a2cbccabf54a3a609152c727abd26e524a@1324 a18515e9-6cfd-0310-9624-afb4ebaee84e
# Be sure to restart your web server when you modify this file.

# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'

# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.0.2' unless defined? RAILS_GEM_VERSION

# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')

Rails::Initializer.run do |config|
  # Settings in config/environments/* take precedence over those specified here

  # Skip frameworks you're not going to use (only works if using vendor/rails)
  # config.frameworks -= [ :action_web_service, :action_mailer ]

  # Only load the plugins named here, by default all plugins in vendor/plugins are loaded
  # config.plugins = %W( exception_notification ssl_requirement )

  # Add additional load paths for your own custom dirs
  # config.load_paths += %W( #{RAILS_ROOT}/extras )

  # Force all environments to use the same logger level
  # (by default production uses :info, the others :debug)
  # config.log_level = :debug

  # Use the database for sessions instead of the file system
  # (create the session table with 'rake db:sessions:create')
  # config.action_controller.session_store = :active_record_store

  # Use SQL instead of Active Record's schema dumper when creating the test database.
  # This is necessary if your schema can't be completely dumped by the schema dumper,
  # like if you have constraints or database-specific column types
  # config.active_record.schema_format = :sql

  # Activate observers that should always be running
  # config.active_record.observers = :cacher, :garbage_collector

  # Make Active Record use UTC-base instead of local time
  # config.active_record.default_timezone = :utc

  # See Rails::Configuration for more options
  # SECURITY(review): the session secret is committed to version control;
  # rotate it and load it from an untracked file or the environment.
  config.action_controller.session = {
    :session_key => '_bunnylove_session_id',
    :secret => '967dffdd493bbb47ffc17be815cbfe85c0348740fdbdc87c65b10e9e1bd25a15dfc5aa70f1f1e1beff8e5e4faaadab1084b5e40044abeeee69d4a2e5a791162a'
  }
end

# Add new inflection rules using the following format
# (all these examples are active by default):
# Inflector.inflections do |inflect|
#   inflect.plural /^(ox)$/i, '\1en'
#   inflect.singular /^(ox)en/i, '\1'
#   inflect.irregular 'person', 'people'
#   inflect.uncountable %w( fish sheep )
# end

# Add new mime types for use in respond_to blocks:
# Mime::Type.register "text/richtext", :rtf
# Mime::Type.register "application/x-mobile", :mobile

# Include your application configuration below
require 'acts_as_authenticated'
ActiveRecord::Base.send(:include, ActiveRecord::Acts::Authenticated)

# Named date/time formats used by views (e.g. time.to_s(:full)).
ActiveSupport::CoreExtensions::Time::Conversions::DATE_FORMATS.merge!(
  :full => "%d %B %Y",
  :dmy => "%d/%m/%Y",
  :short_timestamp => "at %I:%M %p on %d %B, %Y"
)
ActiveSupport::CoreExtensions::Date::Conversions::DATE_FORMATS.merge!(
  :full => "%d %B %Y",
  :dmy => "%d/%m/%Y"
)

# Error-notification mail settings: recipients, explicit sender (so MTAs
# don't reject the default from-address), and a subject prefix.
ExceptionNotifier.exception_recipients = %w(chris@feedmechocolate.com)
ExceptionNotifier.sender_address = %("Application Error" <notifier@bunnylove.org.uk>)
ExceptionNotifier.email_prefix = "[Bunny Love]"
# Rails bootstrap: load config/application.rb, then run all initializers.
# Load the rails application
require File.expand_path('../application', __FILE__)

# Initialize the rails application
BraintreeRailsExample::Application.initialize!
require pp explicitly
# Rails bootstrap: load config/application.rb, then run all initializers.
# Load the rails application
require File.expand_path('../application', __FILE__)
# Explicitly required so `pp` is available everywhere; not guaranteed to be
# loaded for us on this Ruby/Rails combination.
require 'pp'

# Initialize the rails application
BraintreeRailsExample::Application.initialize!
|
# Homebrew formula for HHVM 3.11, built from source with upstream clang.
class Hhvm < Formula
  desc "JIT compiler and runtime for the PHP and Hack languages"
  homepage "http://hhvm.com/"
  url "http://dl.hhvm.com/source/hhvm-3.11.0.tar.bz2"
  sha256 "cc813d1de7bd2a30b29b1c99dcf3d4ae8865c0044bdf838283ed5ded4097759c"
  head "https://github.com/facebook/hhvm.git"

  option "with-debug", <<-EOS.undent
    Make an unoptimized build with assertions enabled. This will run PHP and
    Hack code dramatically slower than a release build, and is suitable mostly
    for debugging HHVM itself.
  EOS

  # Needs libdispatch APIs only available in Mavericks and newer.
  depends_on :macos => :mavericks

  # We need to build with upstream clang -- the version Apple ships doesn't
  # support TLS, which HHVM uses heavily. (And gcc compiles HHVM fine, but
  # causes ld to trip an assert and fail, for unclear reasons.)
  # FIX: also need llvm's libc++ ("with-libcxx") so the clang-built objects
  # link against a matching C++ runtime (see facebook/hhvm#6730).
  depends_on "llvm" => [:build, "with-clang", "with-libcxx"]

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "cmake" => :build
  depends_on "dwarfutils" => :build
  depends_on "gawk" => :build
  depends_on "libelf" => :build
  depends_on "libtool" => :build
  depends_on "md5sha1sum" => :build
  depends_on "ocaml" => :build
  depends_on "pkg-config" => :build
  depends_on "boost"
  depends_on "freetype"
  depends_on "gd"
  depends_on "gettext"
  depends_on "glog"
  depends_on "gmp"
  depends_on "icu4c"
  depends_on "imagemagick"
  depends_on "jemalloc"
  depends_on "jpeg"
  depends_on "libevent"
  depends_on "libmemcached"
  depends_on "libpng"
  depends_on "libzip"
  depends_on "lz4"
  depends_on "mcrypt"
  depends_on "oniguruma"
  depends_on "openssl"
  depends_on "pcre"
  depends_on "readline"
  depends_on "sqlite"
  depends_on "tbb"

  def install
    # Work around https://github.com/Homebrew/homebrew/issues/42957 by making
    # brew's superenv forget which libraries it wants to inject into ld
    # invocations. (We tell cmake below where they all are, so we don't need
    # them to be injected like that.)
    ENV["HOMEBREW_LIBRARY_PATHS"] = ""

    cmake_args = %W[
      -DCMAKE_INSTALL_PREFIX=#{prefix}
      -DDEFAULT_CONFIG_DIR=#{etc}/hhvm
    ]

    # Must use upstream clang -- see above.
    cmake_args += %W[
      -DCMAKE_CXX_COMPILER=#{Formula["llvm"].opt_bin}/clang++
      -DCMAKE_C_COMPILER=#{Formula["llvm"].opt_bin}/clang
      -DCMAKE_ASM_COMPILER=#{Formula["llvm"].opt_bin}/clang
    ]

    # Features which don't work on OS X yet since they haven't been ported yet.
    cmake_args += %W[
      -DENABLE_MCROUTER=OFF
      -DENABLE_EXTENSION_MCROUTER=OFF
      -DENABLE_EXTENSION_IMAP=OFF
    ]

    # Required to specify a socket path if you are using the bundled async SQL
    # client (which is very strongly recommended).
    cmake_args << "-DMYSQL_UNIX_SOCK_ADDR=/tmp/mysql.sock"

    # We tell HHVM below where readline is, but due to the machinery of CMake's
    # subprojects, it's hard for HHVM to tell one of its subproject dependencies
    # where readline is, so be more aggressive in a way that makes it through.
    cmake_args << "-DCMAKE_C_FLAGS=-I#{Formula["readline"].opt_include} -L#{Formula["readline"].opt_lib}"
    cmake_args << "-DCMAKE_CXX_FLAGS=-I#{Formula["readline"].opt_include} -L#{Formula["readline"].opt_lib}"

    # Dependency information.
    cmake_args += %W[
      -DBOOST_INCLUDEDIR=#{Formula["boost"].opt_include}
      -DBOOST_LIBRARYDIR=#{Formula["boost"].opt_lib}
      -DFREETYPE_INCLUDE_DIRS=#{Formula["freetype"].opt_include}/freetype2
      -DFREETYPE_LIBRARIES=#{Formula["freetype"].opt_lib}/libfreetype.dylib
      -DICU_INCLUDE_DIR=#{Formula["icu4c"].opt_include}
      -DICU_I18N_LIBRARY=#{Formula["icu4c"].opt_lib}/libicui18n.dylib
      -DICU_LIBRARY=#{Formula["icu4c"].opt_lib}/libicuuc.dylib
      -DICU_DATA_LIBRARY=#{Formula["icu4c"].opt_lib}/libicudata.dylib
      -DJEMALLOC_INCLUDE_DIR=#{Formula["jemalloc"].opt_include}
      -DJEMALLOC_LIB=#{Formula["jemalloc"].opt_lib}/libjemalloc.dylib
      -DLIBDWARF_INCLUDE_DIRS=#{Formula["dwarfutils"].opt_include}
      -DLIBDWARF_LIBRARIES=#{Formula["dwarfutils"].opt_lib}/libdwarf.a
      -DLIBELF_INCLUDE_DIRS=#{Formula["libelf"].opt_include}/libelf
      -DLIBELF_LIBRARIES=#{Formula["libelf"].opt_lib}/libelf.a
      -DLIBEVENT_INCLUDE_DIR=#{Formula["libevent"].opt_include}
      -DLIBEVENT_LIB=#{Formula["libevent"].opt_lib}/libevent.dylib
      -DLIBGLOG_INCLUDE_DIR=#{Formula["glog"].opt_include}
      -DLIBGLOG_LIBRARY=#{Formula["glog"].opt_lib}/libglog.dylib
      -DLIBINTL_INCLUDE_DIR=#{Formula["gettext"].opt_include}
      -DLIBINTL_LIBRARIES=#{Formula["gettext"].opt_lib}/libintl.dylib
      -DLIBMAGICKWAND_INCLUDE_DIRS=#{Formula["imagemagick"].opt_include}/ImageMagick-6
      -DLIBMAGICKWAND_LIBRARIES=#{Formula["imagemagick"].opt_lib}/libMagickWand-6.Q16.dylib
      -DLIBMEMCACHED_INCLUDE_DIR=#{Formula["libmemcached"].opt_include}
      -DLIBMEMCACHED_LIBRARY=#{Formula["libmemcached"].opt_lib}/libmemcached.dylib
      -DLIBSQLITE3_INCLUDE_DIR=#{Formula["sqlite"].opt_include}
      -DLIBSQLITE3_LIBRARY=#{Formula["sqlite"].opt_lib}/libsqlite3.dylib
      -DPC_SQLITE3_FOUND=1
      -DLIBZIP_INCLUDE_DIR_ZIP=#{Formula["libzip"].opt_include}
      -DLIBZIP_INCLUDE_DIR_ZIPCONF=#{Formula["libzip"].opt_lib}/libzip/include
      -DLIBZIP_LIBRARY=#{Formula["libzip"].opt_lib}/libzip.dylib
      -DLZ4_INCLUDE_DIR=#{Formula["lz4"].opt_include}
      -DLZ4_LIBRARY=#{Formula["lz4"].opt_lib}/liblz4.dylib
      -DOPENSSL_INCLUDE_DIR=#{Formula["openssl"].opt_include}
      -DOPENSSL_CRYPTO_LIBRARY=#{Formula["openssl"].opt_lib}/libcrypto.dylib
      -DCRYPT_LIB=#{Formula["openssl"].opt_lib}/libcrypto.dylib
      -DOPENSSL_SSL_LIBRARY=#{Formula["openssl"].opt_lib}/libssl.dylib
      -DPCRE_INCLUDE_DIR=#{Formula["pcre"].opt_include}
      -DPCRE_LIBRARY=#{Formula["pcre"].opt_lib}/libpcre.dylib
      -DREADLINE_INCLUDE_DIR=#{Formula["readline"].opt_include}
      -DREADLINE_LIBRARY=#{Formula["readline"].opt_lib}/libreadline.dylib
      -DTBB_INSTALL_DIR=#{Formula["tbb"].opt_prefix}
    ]

    # brew's PCRE always has the JIT enabled; work around issue where the CMake
    # scripts will pick up the wrong PCRE and think it is disabled.
    cmake_args << "-DSYSTEM_PCRE_HAS_JIT=1"

    # Debug builds. This switch is all that's needed, it sets all the right
    # cflags and other config changes.
    cmake_args << "-DCMAKE_BUILD_TYPE=Debug" if build.with? "debug"

    system "cmake", *cmake_args
    system "make"
    system "make", "install"

    # Install default config files only on first install, never clobbering
    # the user's edited copies.
    ini = etc/"hhvm"
    (ini/"php.ini").write php_ini unless File.exist?(ini/"php.ini")
    (ini/"server.ini").write server_ini unless File.exist?(ini/"server.ini")
  end

  test do
    (testpath/"test.php").write <<-EOS.undent
      <?php
      exit(is_integer(HHVM_VERSION_ID) ? 0 : 1);
    EOS
    system "#{bin}/hhvm", testpath/"test.php"
  end

  plist_options :manual => "hhvm -m daemon -c #{HOMEBREW_PREFIX}/etc/hhvm/php.ini -c #{HOMEBREW_PREFIX}/etc/hhvm/server.ini"

  # launchd plist running hhvm in server mode with the installed ini files.
  def plist
    <<-EOS.undent
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
        <key>Label</key>
        <string>#{plist_name}</string>
        <key>RunAtLoad</key>
        <true/>
        <key>KeepAlive</key>
        <true/>
        <key>ProgramArguments</key>
        <array>
          <string>#{opt_bin}/hhvm</string>
          <string>-m</string>
          <string>server</string>
          <string>-c</string>
          <string>#{etc}/hhvm/php.ini</string>
          <string>-c</string>
          <string>#{etc}/hhvm/server.ini</string>
        </array>
        <key>WorkingDirectory</key>
        <string>#{HOMEBREW_PREFIX}</string>
      </dict>
      </plist>
    EOS
  end

  # https://github.com/hhvm/packaging/blob/master/hhvm/deb/skeleton/etc/hhvm/php.ini
  def php_ini
    <<-EOS.undent
      ; php options
      session.save_handler = files
      session.save_path = #{var}/lib/hhvm/sessions
      session.gc_maxlifetime = 1440

      ; hhvm specific
      hhvm.log.level = Warning
      hhvm.log.always_log_unhandled_exceptions = true
      hhvm.log.runtime_error_reporting_level = 8191
      hhvm.mysql.typed_results = false
    EOS
  end

  # https://github.com/hhvm/packaging/blob/master/hhvm/deb/skeleton/etc/hhvm/server.ini
  def server_ini
    <<-EOS.undent
      ; php options
      pid = #{var}/run/hhvm/pid

      ; hhvm specific
      hhvm.server.port = 9000
      hhvm.server.type = fastcgi
      hhvm.server.default_document = index.php
      hhvm.log.use_log_file = true
      hhvm.log.file = #{var}/log/hhvm/error.log
      hhvm.repo.central.path = #{var}/run/hhvm/hhvm.hhbc
    EOS
  end
end
Added --with-libcxx flag to llvm
This fixes #21, #23, facebook/hhvm#6730
# Homebrew formula for HHVM 3.11, built from source with upstream clang
# and llvm's libc++.
class Hhvm < Formula
  desc "JIT compiler and runtime for the PHP and Hack languages"
  homepage "http://hhvm.com/"
  url "http://dl.hhvm.com/source/hhvm-3.11.0.tar.bz2"
  sha256 "cc813d1de7bd2a30b29b1c99dcf3d4ae8865c0044bdf838283ed5ded4097759c"
  head "https://github.com/facebook/hhvm.git"

  option "with-debug", <<-EOS.undent
    Make an unoptimized build with assertions enabled. This will run PHP and
    Hack code dramatically slower than a release build, and is suitable mostly
    for debugging HHVM itself.
  EOS

  # Needs libdispatch APIs only available in Mavericks and newer.
  depends_on :macos => :mavericks

  # We need to build with upstream clang -- the version Apple ships doesn't
  # support TLS, which HHVM uses heavily. (And gcc compiles HHVM fine, but
  # causes ld to trip an assert and fail, for unclear reasons.)
  # "with-libcxx" provides llvm's libc++ so the clang-built objects link
  # against a matching C++ runtime.
  depends_on "llvm" => [:build, "with-clang", "with-libcxx"]

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "cmake" => :build
  depends_on "dwarfutils" => :build
  depends_on "gawk" => :build
  depends_on "libelf" => :build
  depends_on "libtool" => :build
  depends_on "md5sha1sum" => :build
  depends_on "ocaml" => :build
  depends_on "pkg-config" => :build
  depends_on "boost"
  depends_on "freetype"
  depends_on "gd"
  depends_on "gettext"
  depends_on "glog"
  depends_on "gmp"
  depends_on "icu4c"
  depends_on "imagemagick"
  depends_on "jemalloc"
  depends_on "jpeg"
  depends_on "libevent"
  depends_on "libmemcached"
  depends_on "libpng"
  depends_on "libzip"
  depends_on "lz4"
  depends_on "mcrypt"
  depends_on "oniguruma"
  depends_on "openssl"
  depends_on "pcre"
  depends_on "readline"
  depends_on "sqlite"
  depends_on "tbb"

  def install
    # Work around https://github.com/Homebrew/homebrew/issues/42957 by making
    # brew's superenv forget which libraries it wants to inject into ld
    # invocations. (We tell cmake below where they all are, so we don't need
    # them to be injected like that.)
    ENV["HOMEBREW_LIBRARY_PATHS"] = ""

    cmake_args = %W[
      -DCMAKE_INSTALL_PREFIX=#{prefix}
      -DDEFAULT_CONFIG_DIR=#{etc}/hhvm
    ]

    # Must use upstream clang -- see above.
    cmake_args += %W[
      -DCMAKE_CXX_COMPILER=#{Formula["llvm"].opt_bin}/clang++
      -DCMAKE_C_COMPILER=#{Formula["llvm"].opt_bin}/clang
      -DCMAKE_ASM_COMPILER=#{Formula["llvm"].opt_bin}/clang
    ]

    # Features which don't work on OS X yet since they haven't been ported yet.
    cmake_args += %W[
      -DENABLE_MCROUTER=OFF
      -DENABLE_EXTENSION_MCROUTER=OFF
      -DENABLE_EXTENSION_IMAP=OFF
    ]

    # Required to specify a socket path if you are using the bundled async SQL
    # client (which is very strongly recommended).
    cmake_args << "-DMYSQL_UNIX_SOCK_ADDR=/tmp/mysql.sock"

    # We tell HHVM below where readline is, but due to the machinery of CMake's
    # subprojects, it's hard for HHVM to tell one of its subproject dependencies
    # where readline is, so be more aggressive in a way that makes it through.
    cmake_args << "-DCMAKE_C_FLAGS=-I#{Formula["readline"].opt_include} -L#{Formula["readline"].opt_lib}"
    cmake_args << "-DCMAKE_CXX_FLAGS=-I#{Formula["readline"].opt_include} -L#{Formula["readline"].opt_lib}"

    # Dependency information.
    cmake_args += %W[
      -DBOOST_INCLUDEDIR=#{Formula["boost"].opt_include}
      -DBOOST_LIBRARYDIR=#{Formula["boost"].opt_lib}
      -DFREETYPE_INCLUDE_DIRS=#{Formula["freetype"].opt_include}/freetype2
      -DFREETYPE_LIBRARIES=#{Formula["freetype"].opt_lib}/libfreetype.dylib
      -DICU_INCLUDE_DIR=#{Formula["icu4c"].opt_include}
      -DICU_I18N_LIBRARY=#{Formula["icu4c"].opt_lib}/libicui18n.dylib
      -DICU_LIBRARY=#{Formula["icu4c"].opt_lib}/libicuuc.dylib
      -DICU_DATA_LIBRARY=#{Formula["icu4c"].opt_lib}/libicudata.dylib
      -DJEMALLOC_INCLUDE_DIR=#{Formula["jemalloc"].opt_include}
      -DJEMALLOC_LIB=#{Formula["jemalloc"].opt_lib}/libjemalloc.dylib
      -DLIBDWARF_INCLUDE_DIRS=#{Formula["dwarfutils"].opt_include}
      -DLIBDWARF_LIBRARIES=#{Formula["dwarfutils"].opt_lib}/libdwarf.a
      -DLIBELF_INCLUDE_DIRS=#{Formula["libelf"].opt_include}/libelf
      -DLIBELF_LIBRARIES=#{Formula["libelf"].opt_lib}/libelf.a
      -DLIBEVENT_INCLUDE_DIR=#{Formula["libevent"].opt_include}
      -DLIBEVENT_LIB=#{Formula["libevent"].opt_lib}/libevent.dylib
      -DLIBGLOG_INCLUDE_DIR=#{Formula["glog"].opt_include}
      -DLIBGLOG_LIBRARY=#{Formula["glog"].opt_lib}/libglog.dylib
      -DLIBINTL_INCLUDE_DIR=#{Formula["gettext"].opt_include}
      -DLIBINTL_LIBRARIES=#{Formula["gettext"].opt_lib}/libintl.dylib
      -DLIBMAGICKWAND_INCLUDE_DIRS=#{Formula["imagemagick"].opt_include}/ImageMagick-6
      -DLIBMAGICKWAND_LIBRARIES=#{Formula["imagemagick"].opt_lib}/libMagickWand-6.Q16.dylib
      -DLIBMEMCACHED_INCLUDE_DIR=#{Formula["libmemcached"].opt_include}
      -DLIBMEMCACHED_LIBRARY=#{Formula["libmemcached"].opt_lib}/libmemcached.dylib
      -DLIBSQLITE3_INCLUDE_DIR=#{Formula["sqlite"].opt_include}
      -DLIBSQLITE3_LIBRARY=#{Formula["sqlite"].opt_lib}/libsqlite3.dylib
      -DPC_SQLITE3_FOUND=1
      -DLIBZIP_INCLUDE_DIR_ZIP=#{Formula["libzip"].opt_include}
      -DLIBZIP_INCLUDE_DIR_ZIPCONF=#{Formula["libzip"].opt_lib}/libzip/include
      -DLIBZIP_LIBRARY=#{Formula["libzip"].opt_lib}/libzip.dylib
      -DLZ4_INCLUDE_DIR=#{Formula["lz4"].opt_include}
      -DLZ4_LIBRARY=#{Formula["lz4"].opt_lib}/liblz4.dylib
      -DOPENSSL_INCLUDE_DIR=#{Formula["openssl"].opt_include}
      -DOPENSSL_CRYPTO_LIBRARY=#{Formula["openssl"].opt_lib}/libcrypto.dylib
      -DCRYPT_LIB=#{Formula["openssl"].opt_lib}/libcrypto.dylib
      -DOPENSSL_SSL_LIBRARY=#{Formula["openssl"].opt_lib}/libssl.dylib
      -DPCRE_INCLUDE_DIR=#{Formula["pcre"].opt_include}
      -DPCRE_LIBRARY=#{Formula["pcre"].opt_lib}/libpcre.dylib
      -DREADLINE_INCLUDE_DIR=#{Formula["readline"].opt_include}
      -DREADLINE_LIBRARY=#{Formula["readline"].opt_lib}/libreadline.dylib
      -DTBB_INSTALL_DIR=#{Formula["tbb"].opt_prefix}
    ]

    # brew's PCRE always has the JIT enabled; work around issue where the CMake
    # scripts will pick up the wrong PCRE and think it is disabled.
    cmake_args << "-DSYSTEM_PCRE_HAS_JIT=1"

    # Debug builds. This switch is all that's needed, it sets all the right
    # cflags and other config changes.
    cmake_args << "-DCMAKE_BUILD_TYPE=Debug" if build.with? "debug"

    system "cmake", *cmake_args
    system "make"
    system "make", "install"

    # Install default ini files only when absent, never clobbering edits.
    ini = etc/"hhvm"
    (ini/"php.ini").write php_ini unless File.exist? (ini/"php.ini")
    (ini/"server.ini").write server_ini unless File.exist? (ini/"server.ini")
  end

  test do
    (testpath/"test.php").write <<-EOS.undent
      <?php
      exit(is_integer(HHVM_VERSION_ID) ? 0 : 1);
    EOS
    system "#{bin}/hhvm", testpath/"test.php"
  end

  plist_options :manual => "hhvm -m daemon -c #{HOMEBREW_PREFIX}/etc/hhvm/php.ini -c #{HOMEBREW_PREFIX}/etc/hhvm/server.ini"

  # launchd plist running hhvm in server mode with the installed ini files.
  def plist
    <<-EOS.undent
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
        <key>Label</key>
        <string>#{plist_name}</string>
        <key>RunAtLoad</key>
        <true/>
        <key>KeepAlive</key>
        <true/>
        <key>ProgramArguments</key>
        <array>
          <string>#{opt_bin}/hhvm</string>
          <string>-m</string>
          <string>server</string>
          <string>-c</string>
          <string>#{etc}/hhvm/php.ini</string>
          <string>-c</string>
          <string>#{etc}/hhvm/server.ini</string>
        </array>
        <key>WorkingDirectory</key>
        <string>#{HOMEBREW_PREFIX}</string>
      </dict>
      </plist>
    EOS
  end

  # https://github.com/hhvm/packaging/blob/master/hhvm/deb/skeleton/etc/hhvm/php.ini
  def php_ini
    <<-EOS.undent
      ; php options
      session.save_handler = files
      session.save_path = #{var}/lib/hhvm/sessions
      session.gc_maxlifetime = 1440

      ; hhvm specific
      hhvm.log.level = Warning
      hhvm.log.always_log_unhandled_exceptions = true
      hhvm.log.runtime_error_reporting_level = 8191
      hhvm.mysql.typed_results = false
    EOS
  end

  # https://github.com/hhvm/packaging/blob/master/hhvm/deb/skeleton/etc/hhvm/server.ini
  def server_ini
    <<-EOS.undent
      ; php options
      pid = #{var}/run/hhvm/pid

      ; hhvm specific
      hhvm.server.port = 9000
      hhvm.server.type = fastcgi
      hhvm.server.default_document = index.php
      hhvm.log.use_log_file = true
      hhvm.log.file = #{var}/log/hhvm/error.log
      hhvm.repo.central.path = #{var}/run/hhvm/hhvm.hhbc
    EOS
  end
end
|
require 'libraries'
module Hurl
# Lazily builds (or returns) the module-wide Redis connection.
def self.redis
  @redis ||= Redis.new(:host => '127.0.0.1', :port => 6379, :thread_safe => true)
end

# Injection point so boot code or tests can supply their own connection.
def self.redis=(redis)
  @redis = redis
end
# Serializes an object to JSON and deflates it for compact Redis storage.
def self.encode(object)
  Zlib::Deflate.deflate Yajl::Encoder.encode(object)
end

# Reverse of encode; returns nil on any inflate/parse failure
# (deliberate best-effort -- callers treat nil as "not found").
def self.decode(object)
  Yajl::Parser.parse(Zlib::Inflate.inflate(object)) rescue nil
end
class App < Sinatra::Base
  register Mustache::Sinatra
  helpers Hurl::Helpers

  dir = File.dirname(File.expand_path(__FILE__))

  # NOTE(review): `set :public` was renamed :public_folder in later Sinatra
  # versions -- confirm against the pinned Sinatra release.
  set :public,    "#{dir}/public"
  set :static,    true
  set :mustache, {
    :namespace => Object,
    :views     => 'views/',
    :templates => 'templates/'
  }

  enable :sessions

  def initialize(*args)
    super
    # DEBUG env var turns on the debug-logging helpers used by routes.
    @debug = ENV['DEBUG']
    setup_default_hurls
  end

  # Convenience accessor for the module-wide Redis connection.
  def redis
    Hurl.redis
  end
#
# routes
#
before do
if load_session
@user = User.find_by_email(@session['email'])
end
@flash = session.delete('flash')
end
get '/' do
@hurl = {}
mustache :index
end
get '/hurls/?' do
redirect('/') and return unless logged_in?
@hurls = @user.hurls
mustache :hurls
end
get '/hurls/:id/?' do
@hurl = find_hurl_or_view(params[:id])
@hurl ? mustache(:index) : not_found
end
delete '/hurls/:id/?' do
redirect('/') and return unless logged_in?
if @hurl = find_hurl_or_view(params[:id])
@user.remove_hurl(@hurl['id'])
end
request.xhr? ? "ok" : redirect('/')
end
get '/hurls/:id/:view_id/?' do
@hurl = find_hurl_or_view(params[:id])
@view = find_hurl_or_view(params[:view_id])
@view_id = params[:view_id]
@hurl && @view ? mustache(:index) : not_found
end
get '/views/:id/?' do
@view = find_hurl_or_view(params[:id])
@view ? mustache(:view, :layout => false) : not_found
end
get '/test.json' do
content_type 'application/json'
File.read('test/json')
end
get '/test.xml' do
content_type 'application/xml'
File.read('test/xml')
end
get '/about/?' do
mustache :about
end
get '/stats/?' do
mustache :stats
end
get '/logout/?' do
clear_session
session['flash'] = 'see you later!'
redirect '/'
end
post '/login/?' do
email, password = params.values_at(:email, :password)
if User.authenticate(email, password)
create_session(:email => email)
json :success => true
else
json :error => 'incorrect email or password'
end
end
post '/signup/?' do
email, password = params.values_at(:email, :password)
user = User.create(:email => email, :password => password)
if user.valid?
create_session(:email => email)
stat :users
session['flash'] = 'welcome to hurl!'
json :success => true
else
json :error => user.errors.to_s
end
end
post '/' do
return json(:error => "Calm down and try my margarita!") if rate_limited?
url, method, auth = params.values_at(:url, :method, :auth)
return json(:error => "That's... wait.. what?!") if invalid_url?(url)
curl = Curl::Easy.new(url)
sent_headers = []
curl.on_debug do |type, data|
# track request headers
sent_headers << data if type == Curl::CURLINFO_HEADER_OUT
end
curl.follow_location = true if params[:follow_redirects]
# ensure a method is set
method = (method.to_s.empty? ? 'GET' : method).upcase
# update auth
add_auth(auth, curl, params)
# arbitrary headers
add_headers_from_arrays(curl, params["header-keys"], params["header-vals"])
# arbitrary params
fields = make_fields(method, params["param-keys"], params["param-vals"])
begin
debug { puts "#{method} #{url}" }
curl.send("http_#{method.downcase}", *fields)
debug do
puts sent_headers.join("\n")
puts fields.join('&') if fields.any?
puts curl.header_str
end
header = pretty_print_headers(curl.header_str)
body = pretty_print(curl.content_type, curl.body_str)
request = pretty_print_requests(sent_headers, fields)
json :header => header,
:body => body,
:request => request,
:hurl_id => save_hurl(params),
:prev_hurl => @user ? @user.second_to_last_hurl_id : nil,
:view_id => save_view(header, body, request)
rescue => e
json :error => e.to_s
end
end
#
# error handlers
#
not_found do
mustache :"404"
end
error do
mustache :"500"
end
#
# route helpers
#
# Spam guard: is this a URL hurl should refuse to fetch?
# Rejects anything that points back at hurl itself.
def invalid_url?(url)
  forbidden_host = 'hurl.it'
  url.include?(forbidden_host)
end
# Configure authentication on the curl handle based on the auth type.
# Only HTTP Basic is supported: base64("user:pass") goes into the
# Authorization header. Other auth values are silently ignored.
def add_auth(auth, curl, params)
  if auth == 'basic'
    username, password = params.values_at(:username, :password)
    # BUG FIX: Base64.encode64 inserts a "\n" every 60 characters and
    # .strip only removes leading/trailing whitespace, so credentials
    # longer than ~44 bytes kept embedded newlines and corrupted the
    # header. strict_encode64 emits no line breaks at all.
    encoded = Base64.strict_encode64("#{username}:#{password}")
    curl.headers['Authorization'] = "Basic #{encoded}"
  end
end
# Copy parallel key/value arrays into curl's request headers, skipping
# any pair whose value is blank. Scalars and nil are coerced to arrays.
def add_headers_from_arrays(curl, keys, values)
  Array(keys).zip(Array(values)).each do |name, value|
    curl.headers[name] = value unless value.to_s.empty?
  end
end
# Build Curl::PostField objects from parallel key/value arrays.
# Only POST requests carry a body, so any other verb yields an empty
# list; pairs with a blank name or blank value are dropped.
def make_fields(method, keys, values)
  return [] unless method == 'POST'
  pairs = Array(keys).zip(Array(values))
  pairs.reject { |name, value| name.to_s.empty? || value.to_s.empty? }
       .map { |name, value| Curl::PostField.content(name, value) }
end
# Persist a rendered view (header/body/request trio) in redis, keyed by
# the SHA of its contents, and return that id. Content-addressed, so
# identical views share one redis entry.
def save_view(header, body, request)
hash = { 'header' => header, 'body' => body, 'request' => request }
id = sha(hash.to_s)
json = encode(hash)
redis.set(id, json)
id
end
# Persist the raw request params in redis keyed by their SHA and return
# the id. setnx only writes when the key is new, so the :hurls stat is
# bumped once per unique hurl; logged-in users also get the id appended
# to their history.
def save_hurl(params)
id = sha(params.to_s)
json = encode(params.merge(:id => id))
was_set = redis.setnx(id, json)
stat :hurls if was_set
@user.add_hurl(id) if @user
id
end
# Fetch and decode a stored hurl or view by id; returns nil when the id
# is unknown or the stored payload cannot be decoded.
def find_hurl_or_view(id)
decode redis.get(id)
end
# has this person made too many requests?
# Soft per-IP limiter: allows up to 11 requests per rolling 30-second
# window. NOTE(review): set + expire are two separate redis calls (not
# atomic) and the TTL is refreshed on every request under the limit, so
# the window slides — acceptable for a best-effort spam guard.
def rate_limited?
tries = redis.get(key="tries:#{@env['REMOTE_ADDR']}").to_i
if tries > 10
true
else
# give the key a new value and tell it to expire in 30 seconds
redis.set(key, tries+1)
redis.expire(key, 30)
false
end
end
end
end
Fixing newline issue mentioned here: http://github.com/defunkt/hurl/issues#issue/25
require 'libraries'
module Hurl
# Lazily-created shared redis connection (localhost:6379, thread safe).
def self.redis
return @redis if @redis
@redis = Redis.new(:host => '127.0.0.1', :port => 6379, :thread_safe => true)
end
# Inject a redis connection (used by tests / alternate configurations).
def self.redis=(redis)
@redis = redis
end
# Serialize an object for storage: JSON-encode, then zlib-compress.
def self.encode(object)
Zlib::Deflate.deflate Yajl::Encoder.encode(object)
end
# Inverse of encode: inflate then JSON-parse. Returns nil on any failure
# (nil input, corrupt data) — callers rely on nil meaning "not found".
def self.decode(object)
Yajl::Parser.parse(Zlib::Inflate.inflate(object)) rescue nil
end
class App < Sinatra::Base
register Mustache::Sinatra
helpers Hurl::Helpers
dir = File.dirname(File.expand_path(__FILE__))
set :public, "#{dir}/public"
set :static, true
set :mustache, {
:namespace => Object,
:views => 'views/',
:templates => 'templates/'
}
enable :sessions
def initialize(*args)
super
@debug = ENV['DEBUG']
setup_default_hurls
end
def redis
Hurl.redis
end
#
# routes
#
before do
if load_session
@user = User.find_by_email(@session['email'])
end
@flash = session.delete('flash')
end
get '/' do
@hurl = {}
mustache :index
end
get '/hurls/?' do
redirect('/') and return unless logged_in?
@hurls = @user.hurls
mustache :hurls
end
get '/hurls/:id/?' do
@hurl = find_hurl_or_view(params[:id])
@hurl ? mustache(:index) : not_found
end
delete '/hurls/:id/?' do
redirect('/') and return unless logged_in?
if @hurl = find_hurl_or_view(params[:id])
@user.remove_hurl(@hurl['id'])
end
request.xhr? ? "ok" : redirect('/')
end
get '/hurls/:id/:view_id/?' do
@hurl = find_hurl_or_view(params[:id])
@view = find_hurl_or_view(params[:view_id])
@view_id = params[:view_id]
@hurl && @view ? mustache(:index) : not_found
end
get '/views/:id/?' do
@view = find_hurl_or_view(params[:id])
@view ? mustache(:view, :layout => false) : not_found
end
get '/test.json' do
content_type 'application/json'
File.read('test/json')
end
get '/test.xml' do
content_type 'application/xml'
File.read('test/xml')
end
get '/about/?' do
mustache :about
end
get '/stats/?' do
mustache :stats
end
get '/logout/?' do
clear_session
session['flash'] = 'see you later!'
redirect '/'
end
post '/login/?' do
email, password = params.values_at(:email, :password)
if User.authenticate(email, password)
create_session(:email => email)
json :success => true
else
json :error => 'incorrect email or password'
end
end
post '/signup/?' do
email, password = params.values_at(:email, :password)
user = User.create(:email => email, :password => password)
if user.valid?
create_session(:email => email)
stat :users
session['flash'] = 'welcome to hurl!'
json :success => true
else
json :error => user.errors.to_s
end
end
post '/' do
return json(:error => "Calm down and try my margarita!") if rate_limited?
url, method, auth = params.values_at(:url, :method, :auth)
return json(:error => "That's... wait.. what?!") if invalid_url?(url)
curl = Curl::Easy.new(url)
sent_headers = []
curl.on_debug do |type, data|
# track request headers
sent_headers << data if type == Curl::CURLINFO_HEADER_OUT
end
curl.follow_location = true if params[:follow_redirects]
# ensure a method is set
method = (method.to_s.empty? ? 'GET' : method).upcase
# update auth
add_auth(auth, curl, params)
# arbitrary headers
add_headers_from_arrays(curl, params["header-keys"], params["header-vals"])
# arbitrary params
fields = make_fields(method, params["param-keys"], params["param-vals"])
begin
debug { puts "#{method} #{url}" }
curl.send("http_#{method.downcase}", *fields)
debug do
puts sent_headers.join("\n")
puts fields.join('&') if fields.any?
puts curl.header_str
end
header = pretty_print_headers(curl.header_str)
body = pretty_print(curl.content_type, curl.body_str)
request = pretty_print_requests(sent_headers, fields)
json :header => header,
:body => body,
:request => request,
:hurl_id => save_hurl(params),
:prev_hurl => @user ? @user.second_to_last_hurl_id : nil,
:view_id => save_view(header, body, request)
rescue => e
json :error => e.to_s
end
end
#
# error handlers
#
not_found do
mustache :"404"
end
error do
mustache :"500"
end
#
# route helpers
#
# Spam guard: is this a URL hurl should refuse to fetch?
# Rejects anything that points back at hurl itself.
def invalid_url?(url)
  forbidden_host = 'hurl.it'
  url.include?(forbidden_host)
end
# Configure authentication on the curl handle based on the auth type.
# Only HTTP Basic is implemented: base64("user:pass") goes into the
# Authorization header. Other auth values are ignored.
def add_auth(auth, curl, params)
  return unless auth == 'basic'
  credentials = params.values_at(:username, :password).join(':')
  # strict_encode64 never inserts line breaks — identical output to the
  # previous encode64 + gsub("\n", '') combination.
  curl.headers['Authorization'] = "Basic #{Base64.strict_encode64(credentials)}"
end
# Copy parallel key/value arrays into curl's request headers, skipping
# any pair whose value is blank. Scalars and nil are coerced to arrays.
def add_headers_from_arrays(curl, keys, values)
  Array(keys).zip(Array(values)).each do |name, value|
    curl.headers[name] = value unless value.to_s.empty?
  end
end
# Build Curl::PostField objects from parallel key/value arrays.
# Only POST requests carry a body, so any other verb yields an empty
# list; pairs with a blank name or blank value are dropped.
def make_fields(method, keys, values)
  return [] unless method == 'POST'
  pairs = Array(keys).zip(Array(values))
  pairs.reject { |name, value| name.to_s.empty? || value.to_s.empty? }
       .map { |name, value| Curl::PostField.content(name, value) }
end
def save_view(header, body, request)
hash = { 'header' => header, 'body' => body, 'request' => request }
id = sha(hash.to_s)
json = encode(hash)
redis.set(id, json)
id
end
def save_hurl(params)
id = sha(params.to_s)
json = encode(params.merge(:id => id))
was_set = redis.setnx(id, json)
stat :hurls if was_set
@user.add_hurl(id) if @user
id
end
def find_hurl_or_view(id)
decode redis.get(id)
end
# has this person made too many requests?
def rate_limited?
tries = redis.get(key="tries:#{@env['REMOTE_ADDR']}").to_i
if tries > 10
true
else
# give the key a new value and tell it to expire in 30 seconds
redis.set(key, tries+1)
redis.expire(key, 30)
false
end
end
end
end
|
#! /bin/env ruby
#
#########################################################################################
# Author: Alice "Duchess" Archer
# Copyright (c) 2015 Isaac Archer under the MIT License
# Project Name: Husk
# Project Description:
# Husk is a configurable irc bot using rirc framework
# it is configured via a ruby script file that sets variables for the irc bot
# it is plugable using the plugin design and manager in rirc
#########################################################################################
# This is the commands file for husk
#########################################################################################
module Command_mod
def initialize
@command_prefix = [
/^`info$/,
/^`join ##?/,
/^`part$/,
/^`plsgo$/,
/^`help /,
/^`help$/,
/^`load /,
/^`unload /,
/^`reload /,
/^`list$/,
/^`list channels$/,
/^`list admins$/
]
#/^`ignore /,
#/^`unignore /,
#/^`list ignore/,
#/^`msg /,
#/^`act /,
end
# Tell +name+ they lack admin rights and list the configured admins.
# NOTE(review): this shadows Kernel#warn wherever the module is mixed in.
def warn(name, bot)
  bot.notice(name, "You are not in the admin list, please contact an admin for help.")
  bot.notice(name, "admins:")
  bot.admins.each do |admin|
    bot.notice(name, " ↪ #{admin}")
  end
end
# Dispatch an incoming message against the known command patterns.
# Returns false when no pattern matches, true otherwise. Handlers are
# selected by the 1-based position of the matching regex inside
# @command_prefix, so the pattern list and this case must stay in sync.
def commands(message, bot, plug)
  return false unless message.message_regex(Regexp.union(@command_prefix))
  @command_prefix.each_with_index do |pattern, index|
    next unless message.message_regex(pattern)
    case index + 1
    when 1  then info_g(message, bot)
    when 2  then join(message, bot)
    when 3  then part(message, bot)
    when 4  then quit(message, bot)
    when 5  then help_plugin(message, bot, plug)
    when 6  then help_g(message, bot)
    when 7  then load_p(message, bot, plug)
    when 8  then unload(message, bot, plug)
    when 9  then reload(message, bot, plug)
    when 10 then list_plugins(message, bot, plug)
    when 11 then list_channels(message, bot)
    when 12 then list_admins(message, bot)
    end
  end
  true
end
def info_g(msg, bot)
bot.notice(msg.nick, "this is an instance of the Husk irc bot. instance nick: #{bot.nick_name}")
bot.notice(msg.nick, " ↪ is a modular/plugable irc bot with reloadable commands")
bot.notice(msg.nick, " ↪ is a fully configurable irc bot with ssl and server pass support")
bot.notice(msg.nick, " ↪ is based on the rirc framework (https://github.com/The-Duchess/ruby-irc-framework)")
bot.notice(msg.nick, " ↪ is open source under the MIT license")
bot.notice(msg.nick, " ↪ can be found here https://github.com/The-Duchess/husk")
end
# Admin-only: join the channel named in the message ("`join #chan").
# BUG FIX: the old validity check was tokens[1].to_s.match("/^#/") — a
# String pattern, which Ruby compiles as Regexp.new("/^#/") (a literal
# slash followed by an anchor). That regex can never match a channel
# name, so every join attempt was rejected as invalid. Channel names
# simply need to start with '#'.
def join(message, bot)
  if !bot.admins.include? message.nick
    warn(message.nick, bot)
    return
  end
  tokens = message.message.split(" ")
  unless tokens[1].to_s.start_with?('#')
    bot.notice(message.nick, "#{tokens[1]} is an invalid channel name")
    return
  end
  bot.join("#{tokens[1]}")
  # bot.notice("#{tokens[1]}", "hello: for help with this bot use `help")
end
def part(message, bot)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
bot.part(message.channel, "")
end
def quit(message, bot)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
bot.quit("")
abort
end
# Send the help text for the plugin named in "`help <plugin>"; tells the
# user when no such plugin is loaded.
def help_plugin(message, bot, plug)
  plugin_name = message.message.split(" ")[1]
  help = plug.plugin_help(plugin_name)
  if help.nil?
    bot.notice(message.nick, "plugin #{plugin_name} not found")
  else
    bot.notice(message.nick, help)
  end
end
def help_g(message, bot)
bot.notice(message.nick, "commands")
bot.notice(message.nick, " ↪ `help <plugin name> : help on the plugin")
bot.notice(message.nick, " ↪ `info : for information on the bot")
bot.notice(message.nick, " ↪ `list : lists active plugins by name")
end
def load_p(message, bot, plug)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
tokens = message.message.split(" ")
response = plug.plugin_load(tokens[1])
bot.notice(message.nick, response)
end
def unload(message, bot, plug)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
tokens = message.message.split(" ")
response = plug.unload(tokens[1])
bot.privmsg(message.nick, response)
end
def reload(message, bot, plug)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
tokens = message.message.split(" ")
response = plug.reload(tokens[1])
bot.notice(message.nick, response)
end
# Notice the requester with the loaded plugin names (or say none are loaded).
def list_plugins(message, bot, plug)
if plug.get_names.length == 0 then bot.notice(message.nick, "no plugins are loaded"); return; end
bot.notice(message.nick, "Loaded Plugins")
plug.get_names.each { |a| bot.notice(message.nick, " ↪ #{a}") }
end
# Notice the requester with the channels the bot currently sits in.
def list_channels(message, bot)
if bot.channels.length == 0 then bot.notice(message.nick, "#{bot.nick_name} is not in any channels"); return; end
bot.notice(message.nick, "Active Chans")
bot.channels.each { |a| bot.notice(message.nick, " ↪ #{a}") }
end
# Notice the requester with the configured admin nicks.
def list_admins(message, bot)
if bot.admins.length == 0 then bot.notice(message.nick, "#{bot.nick_name} does not have any admins"); return; end
bot.notice(message.nick, "Admins")
bot.admins.each { |a| bot.notice(message.nick, " ↪ #{a}") }
end
#def ignore(message)
#end
#def unignore(message)
#end
#def list_ignore(message)
#end
#def send_msg(message)
#end
#def send_act(message)
#end
end
class Command_obj
include Command_mod
end
Add `ignore`, `unignore`, and `list ignore` admin commands to the command dispatcher
#! /bin/env ruby
#
#########################################################################################
# Author: Alice "Duchess" Archer
# Copyright (c) 2015 Isaac Archer under the MIT License
# Project Name: Husk
# Project Description:
# Husk is a configurable irc bot using rirc framework
# it is configured via a ruby script file that sets variables for the irc bot
# it is plugable using the plugin design and manager in rirc
#########################################################################################
# This is the commands file for husk
#########################################################################################
module Command_mod
def initialize
@command_prefix = [
/^`info$/,
/^`join ##?/,
/^`part$/,
/^`plsgo$/,
/^`help /,
/^`help$/,
/^`load /,
/^`unload /,
/^`reload /,
/^`list$/,
/^`list channels$/,
/^`list admins$/,
/^`ignore /,
/^`unignore /,
/^`list ignore/
]
#/^`ignore /,
#/^`unignore /,
#/^`list ignore/,
#/^`msg /,
#/^`act /,
end
def warn(name, bot)
bot.notice(name, "You are not in the admin list, please contact an admin for help.")
bot.notice(name, "admins:")
bot.admins.each { |a| bot.notice(name, " ↪ #{a}") }
end
def commands(message, bot, plug)
commands_reg = Regexp.union(@command_prefix)
if message.message_regex(commands_reg)
i = 1
@command_prefix.each do |a|
if message.message_regex(a)
if i == 1
info_g(message, bot)
elsif i == 2
join(message, bot)
elsif i == 3
part(message, bot)
elsif i == 4
quit(message, bot)
elsif i == 5
help_plugin(message, bot, plug)
elsif i == 6
help_g(message, bot)
elsif i == 7
load_p(message, bot, plug)
elsif i == 8
unload(message, bot, plug)
elsif i == 9
reload(message, bot, plug)
elsif i == 10
list_plugins(message, bot, plug)
elsif i == 11
list_channels(message, bot)
elsif i == 12
list_admins(message, bot)
elsif i == 13
ignore(message, bot)
elsif i == 14
unignore(message, bot)
elsif i == 15
list_ignore(message, bot)
else
# oh shit
end
end
i = i + 1
end
else
return false
end
return true
end
def info_g(msg, bot)
bot.notice(msg.nick, "this is an instance of the Husk irc bot. instance nick: #{bot.nick_name}")
bot.notice(msg.nick, " ↪ is a modular/plugable irc bot with reloadable commands")
bot.notice(msg.nick, " ↪ is a fully configurable irc bot with ssl and server pass support")
bot.notice(msg.nick, " ↪ is based on the rirc framework (https://github.com/The-Duchess/ruby-irc-framework)")
bot.notice(msg.nick, " ↪ is open source under the MIT license")
bot.notice(msg.nick, " ↪ can be found here https://github.com/The-Duchess/husk")
end
# Admin-only: join the channel named in the message ("`join #chan").
# BUG FIX: the old validity check was tokens[1].to_s.match("/^#/") — a
# String pattern, which Ruby compiles as Regexp.new("/^#/") (a literal
# slash followed by an anchor). That regex can never match a channel
# name, so every join attempt was rejected as invalid. Channel names
# simply need to start with '#'.
def join(message, bot)
  if !bot.admins.include? message.nick
    warn(message.nick, bot)
    return
  end
  tokens = message.message.split(" ")
  unless tokens[1].to_s.start_with?('#')
    bot.notice(message.nick, "#{tokens[1]} is an invalid channel name")
    return
  end
  bot.join("#{tokens[1]}")
  # bot.notice("#{tokens[1]}", "hello: for help with this bot use `help")
end
def part(message, bot)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
bot.part(message.channel, "")
end
def quit(message, bot)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
bot.quit("")
abort
end
def help_plugin(message, bot, plug)
tokens = message.message.split(" ")
help = plug.plugin_help(tokens[1])
if help != nil
bot.notice(message.nick, help)
else
bot.notice(message.nick, "plugin #{tokens[1]} not found")
end
end
def help_g(message, bot)
bot.notice(message.nick, "commands")
bot.notice(message.nick, " ↪ `help <plugin name> : help on the plugin")
bot.notice(message.nick, " ↪ `info : for information on the bot")
bot.notice(message.nick, " ↪ `list : lists active plugins by name")
end
def load_p(message, bot, plug)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
tokens = message.message.split(" ")
response = plug.plugin_load(tokens[1])
bot.notice(message.nick, response)
end
def unload(message, bot, plug)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
tokens = message.message.split(" ")
response = plug.unload(tokens[1])
bot.privmsg(message.nick, response)
end
def reload(message, bot, plug)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
tokens = message.message.split(" ")
response = plug.reload(tokens[1])
bot.notice(message.nick, response)
end
def list_plugins(message, bot, plug)
if plug.get_names.length == 0 then bot.notice(message.nick, "no plugins are loaded"); return; end
bot.notice(message.nick, "Loaded Plugins")
plug.get_names.each { |a| bot.notice(message.nick, " ↪ #{a}") }
end
def list_channels(message, bot)
if bot.channels.length == 0 then bot.notice(message.nick, "#{bot.nick_name} is not in any channels"); return; end
bot.notice(message.nick, "Active Chans")
bot.channels.each { |a| bot.notice(message.nick, " ↪ #{a}") }
end
def list_admins(message, bot)
if bot.admins.length == 0 then bot.notice(message.nick, "#{bot.nick_name} does not have any admins"); return; end
bot.notice(message.nick, "Admins")
bot.admins.each { |a| bot.notice(message.nick, " ↪ #{a}") }
end
# Admin-only: add the nick named in "`ignore <nick>" to the ignore list.
def ignore(message, bot)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
tokens = message.message.split(" ")
bot.add_ignore(tokens[1].to_s)
bot.notice(message.nick, "#{tokens[1]} added to ignore list")
end
# Admin-only: remove the nick named in "`unignore <nick>" from the ignore list.
def unignore(message, bot)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
tokens = message.message.split(" ")
bot.remove_ignore(tokens[1].to_s)
bot.notice(message.nick, "#{tokens[1]} removed from ignore list")
end
# Admin-only: show every nick the bot currently ignores.
def list_ignore(message, bot)
if !bot.admins.include? message.nick
warn(message.nick, bot)
return
end
bot.notice(message.nick, "List of Ignored Users")
bot.ignore.each { |a| bot.notice(message.nick, " ↪ #{a}") }
end
#def send_msg(message)
#end
#def send_act(message)
#end
end
class Command_obj
include Command_mod
end
|
# Strip the named gems from the Gemfile.
# FIX: escapes the gem name and consumes the whole line, so entries with
# version constraints or options ("gem 'foo', '~> 1.0'") and indented or
# double-quoted entries are removed too. The old pattern only matched a
# bare "gem 'foo'" immediately followed by a newline.
def remove_gem(*names)
  names.each do |name|
    gsub_file 'Gemfile', /^\s*gem ['"]#{Regexp.escape(name)}['"].*\n/, ''
  end
end
# Delete every whole-line comment from the Gemfile (lines whose first
# non-blank character is '#'). Trailing inline comments are left alone.
def remove_comment_of_gem
gsub_file('Gemfile', /^\s*#.*$\n/, '')
end
# Download +src+ from the rails-template repository into +dest+
# (defaults to the same relative path), replacing any existing file.
def get_remote(src, dest = nil)
dest ||= src
repo = 'https://raw.github.com/80percent/rails-template/master/files/'
remote_file = repo + src
remove_file dest
get(remote_file, dest)
end
remove_comment_of_gem
# gitignore
get_remote('gitignore', '.gitignore')
# postgresql
say 'Applying postgresql...'
remove_gem('sqlite3')
gem 'pg'
get_remote('config/database.yml.example')
gsub_file "config/database.yml.example", /database: myapp_development/, "database: #{app_name}_development"
gsub_file "config/database.yml.example", /database: myapp_test/, "database: #{app_name}_test"
gsub_file "config/database.yml.example", /database: myapp_production/, "database: #{app_name}_production"
get_remote('config/database.yml.example', 'config/database.yml')
gsub_file "config/database.yml", /database: myapp_development/, "database: #{app_name}_development"
gsub_file "config/database.yml", /database: myapp_test/, "database: #{app_name}_test"
gsub_file "config/database.yml", /database: myapp_production/, "database: #{app_name}_production"
# environment variables set
say 'Applying figaro...'
gem 'figaro'
get_remote('config/application.yml.example')
get_remote('config/application.yml.example', 'config/application.yml')
get_remote('config/spring.rb')
after_bundle do
say "Stop spring if exsit"
run "spring stop"
end
# bootstrap sass
say 'Applying bootstrap3...'
gem 'bootstrap-sass'
remove_file 'app/assets/stylesheets/application.css'
get_remote('application.scss', 'app/assets/stylesheets/application.scss')
inject_into_file 'app/assets/javascripts/application.js', after: "//= require rails-ujs\n" do "//= require bootstrap-sprockets\n" end
say 'Applying simple_form...'
gem 'simple_form', github: 'christiannelson/simple_form', branch: 'rails-5.1'
after_bundle do
generate 'simple_form:install', '--bootstrap'
end
say 'Applying font-awesome & slim & high_voltage...'
gem 'font-awesome-sass'
gem 'slim-rails'
gem 'high_voltage', '~> 3.0.0'
get_remote('visitors_controller.rb', 'app/controllers/visitors_controller.rb')
get_remote('index.html.slim', 'app/views/visitors/index.html.slim')
get_remote('about.html.slim', 'app/views/pages/about.html.slim')
remove_file('app/views/layouts/application.html.erb')
get_remote('application.html.slim', 'app/views/layouts/application.html.slim')
gsub_file 'app/views/layouts/application.html.slim', /myapp/, "#{app_name}"
get_remote('favicon.ico', 'app/assets/images/favicon.ico')
say 'Applying action cable config...'
inject_into_file 'config/environments/production.rb', after: "# Mount Action Cable outside main process or domain\n" do <<-EOF
config.action_cable.allowed_request_origins = [ "\#{ENV['PROTOCOL']}://\#{ENV['DOMAIN']}" ]
EOF
end
# initialize files
# uploader directory
# application.yml
say 'Applying carrierwave & upyun...'
gem 'carrierwave'
gem 'carrierwave-upyun'
get_remote('config/initializers/carrierwave.rb')
get_remote('image_uploader.rb', 'app/uploaders/image_uploader.rb')
# initialize files
say 'Applying status page...'
gem 'status-page'
get_remote('config/initializers/status_page.rb')
say "Applying browser_warrior..."
gem 'browser_warrior'
after_bundle do
generate 'browser_warrior:install'
end
say 'Applying redis & sidekiq...'
gem 'redis-namespace'
gem 'sidekiq'
get_remote('config/initializers/sidekiq.rb')
get_remote('config/routes.rb')
say 'Applying kaminari & rails-i18n...'
gem 'kaminari', '~> 1.0.1'
gem 'rails-i18n', '~> 5.0.3'
after_bundle do
generate 'kaminari:config'
generate 'kaminari:views', 'bootstrap3'
end
say 'Applying mina & its plugins...'
gem 'mina', '~> 1.0.6', require: false
gem 'mina-puma', '~> 1.0.1', require: false
gem 'mina-multistage', '~> 1.0.3', require: false
gem 'mina-sidekiq', '~> 1.0.1', require: false
gem 'mina-logs', '~> 1.0.1', require: false
get_remote('config/deploy.rb')
get_remote('config/puma.rb')
gsub_file 'config/puma.rb', /\/data\/www\/myapp\/shared/, "/data/www/#{app_name}/shared"
get_remote('config/deploy/production.rb')
gsub_file 'config/deploy/production.rb', /\/data\/www\/myapp/, "/data/www/#{app_name}"
get_remote('config/nginx.conf.example')
gsub_file 'config/nginx.conf.example', /myapp/, "#{app_name}"
get_remote('config/nginx.ssl.conf.example')
gsub_file 'config/nginx.ssl.conf.example', /myapp/, "#{app_name}"
get_remote('config/logrotate.conf.example')
gsub_file 'config/logrotate.conf.example', /myapp/, "#{app_name}"
get_remote('config/monit.conf.example')
gsub_file 'config/monit.conf.example', /myapp/, "#{app_name}"
say 'Applying lograge & basic application config...'
gem 'lograge'
inject_into_file 'config/application.rb', after: "class Application < Rails::Application\n" do <<-EOF
config.generators.assets = false
config.generators.helper = false
config.time_zone = 'Beijing'
config.i18n.available_locales = [:en, :'zh-CN']
config.i18n.default_locale = :'zh-CN'
config.lograge.enabled = true
EOF
end
say 'Applying rspec test framework...'
gem_group :development do
gem 'rails_apps_testing'
end
gem_group :development, :test do
gem 'rspec-rails'
gem 'factory_girl_rails'
end
gem_group :test do
gem 'capybara'
gem 'database_cleaner'
gem 'launchy'
gem 'selenium-webdriver'
end
after_bundle do
generate 'testing:configure', 'rspec --force'
end
get_remote 'README.md'
gsub_file 'README.md', /myapp/, "#{app_name}"
# `ack` is a really quick tool for searching code
get_remote 'ackrc', '.ackrc'
after_bundle do
say 'Done! init `git` and `database`...'
git :init
git add: '.'
git commit: '-m "init rails"'
rake 'db:create'
say "Build successfully! `cd #{app_name}` and use `rails s` to start your rails app..."
end
Add jquery-rails and require jQuery before bootstrap-sprockets in application.js
# Strip the named gems from the Gemfile.
# FIX: escapes the gem name and consumes the whole line, so entries with
# version constraints or options ("gem 'foo', '~> 1.0'") and indented or
# double-quoted entries are removed too. The old pattern only matched a
# bare "gem 'foo'" immediately followed by a newline.
def remove_gem(*names)
  names.each do |name|
    gsub_file 'Gemfile', /^\s*gem ['"]#{Regexp.escape(name)}['"].*\n/, ''
  end
end
# Delete every whole-line comment from the Gemfile (lines whose first
# non-blank character is '#'). Trailing inline comments are left alone.
def remove_comment_of_gem
gsub_file('Gemfile', /^\s*#.*$\n/, '')
end
# Download +src+ from the rails-template repository into +dest+
# (defaults to the same relative path), replacing any existing file.
def get_remote(src, dest = nil)
dest ||= src
repo = 'https://raw.github.com/80percent/rails-template/master/files/'
remote_file = repo + src
remove_file dest
get(remote_file, dest)
end
remove_comment_of_gem
# gitignore
get_remote('gitignore', '.gitignore')
# postgresql
say 'Applying postgresql...'
remove_gem('sqlite3')
gem 'pg'
get_remote('config/database.yml.example')
gsub_file "config/database.yml.example", /database: myapp_development/, "database: #{app_name}_development"
gsub_file "config/database.yml.example", /database: myapp_test/, "database: #{app_name}_test"
gsub_file "config/database.yml.example", /database: myapp_production/, "database: #{app_name}_production"
get_remote('config/database.yml.example', 'config/database.yml')
gsub_file "config/database.yml", /database: myapp_development/, "database: #{app_name}_development"
gsub_file "config/database.yml", /database: myapp_test/, "database: #{app_name}_test"
gsub_file "config/database.yml", /database: myapp_production/, "database: #{app_name}_production"
# environment variables set
say 'Applying figaro...'
gem 'figaro'
get_remote('config/application.yml.example')
get_remote('config/application.yml.example', 'config/application.yml')
get_remote('config/spring.rb')
after_bundle do
say "Stop spring if exsit"
run "spring stop"
end
# jquery, bootstrap needed
say 'Applying jquery...'
gem 'jquery-rails'
inject_into_file 'app/assets/javascripts/application.js', after: "//= require rails-ujs\n" do "//= require jquery\n" end
# bootstrap sass
say 'Applying bootstrap3...'
gem 'bootstrap-sass'
remove_file 'app/assets/stylesheets/application.css'
get_remote('application.scss', 'app/assets/stylesheets/application.scss')
inject_into_file 'app/assets/javascripts/application.js', after: "//= require jquery\n" do "//= require bootstrap-sprockets\n" end
say 'Applying simple_form...'
gem 'simple_form', github: 'christiannelson/simple_form', branch: 'rails-5.1'
after_bundle do
generate 'simple_form:install', '--bootstrap'
end
say 'Applying font-awesome & slim & high_voltage...'
gem 'font-awesome-sass'
gem 'slim-rails'
gem 'high_voltage', '~> 3.0.0'
get_remote('visitors_controller.rb', 'app/controllers/visitors_controller.rb')
get_remote('index.html.slim', 'app/views/visitors/index.html.slim')
get_remote('about.html.slim', 'app/views/pages/about.html.slim')
remove_file('app/views/layouts/application.html.erb')
get_remote('application.html.slim', 'app/views/layouts/application.html.slim')
gsub_file 'app/views/layouts/application.html.slim', /myapp/, "#{app_name}"
get_remote('favicon.ico', 'app/assets/images/favicon.ico')
say 'Applying action cable config...'
inject_into_file 'config/environments/production.rb', after: "# Mount Action Cable outside main process or domain\n" do <<-EOF
config.action_cable.allowed_request_origins = [ "\#{ENV['PROTOCOL']}://\#{ENV['DOMAIN']}" ]
EOF
end
# initialize files
# uploader directory
# application.yml
say 'Applying carrierwave & upyun...'
gem 'carrierwave'
gem 'carrierwave-upyun'
get_remote('config/initializers/carrierwave.rb')
get_remote('image_uploader.rb', 'app/uploaders/image_uploader.rb')
# initialize files
say 'Applying status page...'
gem 'status-page'
get_remote('config/initializers/status_page.rb')
say "Applying browser_warrior..."
gem 'browser_warrior'
after_bundle do
generate 'browser_warrior:install'
end
say 'Applying redis & sidekiq...'
gem 'redis-namespace'
gem 'sidekiq'
get_remote('config/initializers/sidekiq.rb')
get_remote('config/routes.rb')
say 'Applying kaminari & rails-i18n...'
gem 'kaminari', '~> 1.0.1'
gem 'rails-i18n', '~> 5.0.3'
after_bundle do
generate 'kaminari:config'
generate 'kaminari:views', 'bootstrap3'
end
say 'Applying mina & its plugins...'
gem 'mina', '~> 1.0.6', require: false
gem 'mina-puma', '~> 1.0.1', require: false
gem 'mina-multistage', '~> 1.0.3', require: false
gem 'mina-sidekiq', '~> 1.0.1', require: false
gem 'mina-logs', '~> 1.0.1', require: false
get_remote('config/deploy.rb')
get_remote('config/puma.rb')
gsub_file 'config/puma.rb', /\/data\/www\/myapp\/shared/, "/data/www/#{app_name}/shared"
get_remote('config/deploy/production.rb')
gsub_file 'config/deploy/production.rb', /\/data\/www\/myapp/, "/data/www/#{app_name}"
get_remote('config/nginx.conf.example')
gsub_file 'config/nginx.conf.example', /myapp/, "#{app_name}"
get_remote('config/nginx.ssl.conf.example')
gsub_file 'config/nginx.ssl.conf.example', /myapp/, "#{app_name}"
get_remote('config/logrotate.conf.example')
gsub_file 'config/logrotate.conf.example', /myapp/, "#{app_name}"
get_remote('config/monit.conf.example')
gsub_file 'config/monit.conf.example', /myapp/, "#{app_name}"
say 'Applying lograge & basic application config...'
gem 'lograge'
inject_into_file 'config/application.rb', after: "class Application < Rails::Application\n" do <<-EOF
config.generators.assets = false
config.generators.helper = false
config.time_zone = 'Beijing'
config.i18n.available_locales = [:en, :'zh-CN']
config.i18n.default_locale = :'zh-CN'
config.lograge.enabled = true
EOF
end
say 'Applying rspec test framework...'
gem_group :development do
gem 'rails_apps_testing'
end
gem_group :development, :test do
gem 'rspec-rails'
gem 'factory_girl_rails'
end
gem_group :test do
gem 'capybara'
gem 'database_cleaner'
gem 'launchy'
gem 'selenium-webdriver'
end
after_bundle do
generate 'testing:configure', 'rspec --force'
end
get_remote 'README.md'
gsub_file 'README.md', /myapp/, "#{app_name}"
# `ack` is a really quick tool for searching code
get_remote 'ackrc', '.ackrc'
after_bundle do
say 'Done! init `git` and `database`...'
git :init
git add: '.'
git commit: '-m "init rails"'
rake 'db:create'
say "Build successfully! `cd #{app_name}` and use `rails s` to start your rails app..."
end
|
# Plugin registration: metadata shown on Redmine's plugin admin page.
# The :settings partial provides the configuration form.
Redmine::Plugin.register :rmplus_devtools do
name 'RMPlus Devtools plugin'
author 'Alexey Glukhov'
description 'Collection of tools useful for Redmine developers'
version '0.1.0'
url 'https://github.com/pineapple-thief/rmplus_devtools.git'
author_url 'https://github.com/pineapple-thief'
settings :partial => 'settings/rmplus_devtools'
end
# In development, watch each plugin's assets/ directory and mirror file
# changes into its public/ directory on the fly, so asset edits do not
# require restarting Redmine.
Rails.application.config.after_initialize do
  if Rails.env.development?
    # FIX: the setting belongs to this plugin (:rmplus_devtools); the old
    # code read Setting.plugin_a_common_libs, a different plugin's settings.
    enable_assets_listeners = Setting.plugin_rmplus_devtools[:enable_assets_listeners]
    if enable_assets_listeners
      $listeners = []
      Rails.logger.debug "Initializing listeners..."
      Redmine::Plugin.registered_plugins.each do |name, plugin|
        source = plugin.assets_directory
        if File.exist?(source) && File.directory?(source)
          destination = plugin.public_directory
          assets_listener = Listen.to source do |modified, added, removed|
            modified.each do |modified_path|
              if File.file?(modified_path)
                target = File.join(destination, modified_path.gsub(source, ''))
                FileUtils.cp(modified_path, target)
              end
            end
            added.each do |added_path|
              # map the source path into the destination tree once
              target = File.join(destination, added_path.gsub(source, ''))
              if File.directory?(added_path)
                # FIX: create the mirrored directory under public/; the old
                # code ran mkdir_p on the source path itself (a no-op).
                FileUtils.mkdir_p(target)
              elsif File.file?(added_path)
                FileUtils.cp(added_path, target)
              end
            end
            removed.each do |removed_path|
              target = File.join(destination, removed_path.gsub(source, ''))
              FileUtils.remove_entry(target, true)
            end
          end
          Rails.logger.debug "Starting assets listener for plugin #{name}"
          assets_listener.start
          $listeners << assets_listener
        end
      end
      at_exit do
        Rails.logger.debug "Stopping listeners..."
        $listeners.each { |listener| listener.stop }
      end
    end
  end
end
Fix settings lookup: read Setting.plugin_rmplus_devtools instead of Setting.plugin_a_common_libs
Redmine::Plugin.register :rmplus_devtools do
name 'RMPlus Devtools plugin'
author 'Alexey Glukhov'
description 'Collection of tools useful for Redmine developers'
version '0.1.0'
url 'https://github.com/pineapple-thief/rmplus_devtools.git'
author_url 'https://github.com/pineapple-thief'
settings :partial => 'settings/rmplus_devtools'
end
# In development, watch each plugin's assets/ directory and mirror file
# changes into its public/ directory on the fly, so asset edits do not
# require restarting Redmine.
Rails.application.config.after_initialize do
  if Rails.env.development?
    enable_assets_listeners = Setting.plugin_rmplus_devtools[:enable_assets_listeners]
    if enable_assets_listeners
      $listeners = []
      Rails.logger.debug "Initializing listeners..."
      Redmine::Plugin.registered_plugins.each do |name, plugin|
        source = plugin.assets_directory
        if File.exist?(source) && File.directory?(source)
          destination = plugin.public_directory
          assets_listener = Listen.to source do |modified, added, removed|
            modified.each do |modified_path|
              if File.file?(modified_path)
                target = File.join(destination, modified_path.gsub(source, ''))
                FileUtils.cp(modified_path, target)
              end
            end
            added.each do |added_path|
              # map the source path into the destination tree once
              target = File.join(destination, added_path.gsub(source, ''))
              if File.directory?(added_path)
                # FIX: create the mirrored directory under public/; the old
                # code ran mkdir_p on the source path itself (a no-op).
                FileUtils.mkdir_p(target)
              elsif File.file?(added_path)
                FileUtils.cp(added_path, target)
              end
            end
            removed.each do |removed_path|
              target = File.join(destination, removed_path.gsub(source, ''))
              FileUtils.remove_entry(target, true)
            end
          end
          Rails.logger.debug "Starting assets listener for plugin #{name}"
          assets_listener.start
          $listeners << assets_listener
        end
      end
      at_exit do
        Rails.logger.debug "Stopping listeners..."
        $listeners.each { |listener| listener.stop }
      end
    end
  end
end
require 'factory_girl'

# Load every factory definition that lives under test/factories.
factory_files = Dir[File.join(RAILS_ROOT, 'test', 'factories', '*.rb')]
factory_files.each { |factory_file| require factory_file }
Added specs/factories/*.rb, test/factories.rb, and spec/factories.rb to list of loaded files.
require 'factory_girl'

# Load factory definitions from both test/ and spec/, supporting either a
# factories/ directory of files or a single factories.rb.
#
# Fix: the previous version used %(...) — a plain string literal — where a
# %w(...) word array was intended, and never closed the File.join parenthesis,
# which is a syntax error. Splatting a %w array passes each path segment as a
# separate File.join argument.
Dir[File.join(RAILS_ROOT, *%w(test factories *.rb))].each { |file| require file }
Dir[File.join(RAILS_ROOT, *%w(test factories.rb))].each { |file| require file }
Dir[File.join(RAILS_ROOT, *%w(spec factories *.rb))].each { |file| require file }
Dir[File.join(RAILS_ROOT, *%w(spec factories.rb))].each { |file| require file }
require 'redmine'

# Registers the Github Hook plugin; the version string comes from the
# plugin's own RedmineGithubHook::VERSION constant.
Redmine::Plugin.register :redmine_github_hook do
  name 'Redmine Github Hook plugin'
  author 'Jakob Skjerning'
  description 'This plugin allows your Redmine installation to receive Github post-receive notifications'
  version RedmineGithubHook::VERSION
end
add plugin URL and author URL
require 'redmine'

# Registers the Github Hook plugin, including homepage and author links shown
# in Redmine's plugin listing; version comes from RedmineGithubHook::VERSION.
Redmine::Plugin.register :redmine_github_hook do
  name 'Redmine Github Hook plugin'
  author 'Jakob Skjerning'
  description 'This plugin allows your Redmine installation to receive Github post-receive notifications'
  url 'https://github.com/koppen/redmine_github_hook'
  author_url 'https://github.com/koppen/'
  version RedmineGithubHook::VERSION
end
|
# Registers the Pivot Table plugin: a per-project module with a public index
# permission and a project-menu entry placed right after Activity.
Redmine::Plugin.register :redmine_pivot_table do
  name 'Redmine Pivot Table plugin'
  author 'Daiju Kito'
  description 'Pivot table plugin for Redmine using pivottable.js'
  version '0.0.2'
  # NOTE(review): placeholder URL — should point at the plugin's real homepage.
  url 'http://example.com/path/to/plugin'
  project_module :pivottables do
    # :public => true — visible to anyone who can see the project.
    permission :pivottables, {:pivottables => [:index]}, :public => true
  end
  menu :project_menu, :pivottables, { :controller => 'pivottables', :action => 'index' }, :after => :activity, :param => :project_id
end
Updated URL to github
# Registers the Pivot Table plugin: a per-project module with a public index
# permission and a project-menu entry placed right after Activity.
Redmine::Plugin.register :redmine_pivot_table do
  name 'Redmine Pivot Table plugin'
  author 'Daiju Kito'
  description 'Pivot table plugin for Redmine using pivottable.js'
  version '0.0.2'
  url 'https://github.com/deecay/redmine_pivot_table'
  project_module :pivottables do
    # :public => true — visible to anyone who can see the project.
    permission :pivottables, {:pivottables => [:index]}, :public => true
  end
  menu :project_menu, :pivottables, { :controller => 'pivottables', :action => 'index' }, :after => :activity, :param => :project_id
end
|
# Delegate plugin bootstrapping to the Rails-specific initializer that lives
# next to this file (rails/init.rb).
require File.dirname(__FILE__) + "/rails/init.rb"
remove init.rb
|
# Force Globalize loading.
if Rails::VERSION::STRING.match /^1\.2+/
  # Rails 1.2.x: load the Globalize plugin explicitly by path.
  load_plugin(File.join(RAILS_ROOT, 'vendor', 'plugins', 'globalize'))
else
  # Specify the plugins loading order: Click To Globalize should be the last one.
  # NOTE(review): relies on `config` being available in this init binding —
  # confirm on the targeted Rails version.
  plugins = (Dir["#{config.plugin_paths}/*"] - [ File.dirname(__FILE__) ]).map { |plugin| plugin.split(File::SEPARATOR).last}
  Rails::Initializer.run { |config| config.plugins = plugins }
end
# Make Globalize's API available everywhere.
Object.send :include, Globalize
require 'click_to_globalize'
# Load the configured base language and fail fast when none is set.
Locale.load_configured_base_language
raise NoBaseLanguageError if Locale.base_language.blank?
Make sure Locale#load_configured_base_language is not performed in 'test' environment. This reverts commit b10bd091f4eb674f6fdc47f42ab16e4f2031265f
# Force Globalize loading.
if Rails::VERSION::STRING.match /^1\.2+/
  # Rails 1.2.x: load the Globalize plugin explicitly by path.
  load_plugin(File.join(RAILS_ROOT, 'vendor', 'plugins', 'globalize'))
else
  # Specify the plugins loading order: Click To Globalize should be the last one.
  # NOTE(review): relies on `config` being available in this init binding —
  # confirm on the targeted Rails version.
  plugins = (Dir["#{config.plugin_paths}/*"] - [ File.dirname(__FILE__) ]).map { |plugin| plugin.split(File::SEPARATOR).last}
  Rails::Initializer.run { |config| config.plugins = plugins }
end
# Make Globalize's API available everywhere.
Object.send :include, Globalize
require 'click_to_globalize'
# FIXME: base-language loading is skipped in tests to avoid requiring a
# configured locale there; find a cleaner seam for this.
unless RAILS_ENV == 'test'
  Locale.load_configured_base_language
  raise NoBaseLanguageError if Locale.base_language.blank?
end
require File.join(File.dirname(__FILE__), 'lib', 'tabtab')
require File.join(File.dirname(__FILE__), 'lib', 'tab')

# Mix TabTab's tab DSL into every controller, at both the class level
# (singleton include) and the instance level.
class ActionController::Base
  protected
  # NOTE(review): `protected` does not change the visibility of methods pulled
  # in by the includes below (modules keep their own visibility) — confirm it
  # is intentional here.
  class << self
    include ::TabTab::ControllerMethods
  end
  include ::TabTab::ControllerMethods
  include ::TabTab::ControllerInstanceMethods
end

# View helpers for rendering the tabs.
class ActionView::Base
  include ::TabTab::ViewHelpers
end
A lot less hacky
require 'tabtab'

# Mix TabTab's tab DSL into every controller, at both the class level
# (singleton include) and the instance level.
class ActionController::Base
  protected
  # NOTE(review): `protected` does not change the visibility of methods pulled
  # in by the includes below (modules keep their own visibility) — confirm it
  # is intentional here.
  class << self
    include ::TabTab::ControllerMethods
  end
  include ::TabTab::ControllerMethods
  include ::TabTab::ControllerInstanceMethods
end

# View helpers for rendering the tabs.
class ActionView::Base
  include ::TabTab::ViewHelpers
end
|
require 'resolv'
require 'uri'
module Heroku
  module Helpers
    # Prints a left-justified check label, evaluates the block, and reports
    # the outcome on the same line: truthy => "Passed", false => "Failed"
    # with a remedy URL, nil => "Skipped". Returns the block's result.
    def run_check(message, fix_url, options={})
      display(message.to_s.ljust(30), false)
      outcome = yield
      if outcome.nil?
        display("Skipped", false)
      elsif outcome
        display("Passed", false)
      else
        display("Failed \t remedy: #{fix_url}", false)
      end
      display
      outcome
    end
  end
end
module Dns
  extend self

  # Resolves +dname+ and returns the targets of all CNAME records as strings.
  def cnames(dname)
    names = []
    Resolv::DNS.open do |dns|
      dns.getresources(dname, Resolv::DNS::Resource::IN::CNAME).each do |record|
        names << record.name.to_s
      end
    end
    names
  end

  # Returns the targets of TXT records shaped like "ALIAS for <target>"
  # (the convention some DNS providers use for apex-domain aliases).
  def aliases(dname)
    found = []
    Resolv::DNS.open do |dns|
      dns.getresources(dname, Resolv::DNS::Resource::IN::TXT).each do |record|
        found << record.data.split(" for ").last if record.data.start_with?("ALIAS for ")
      end
    end
    found
  end
end
# Predicates used by the production check command. Each returns true (pass),
# false (fail) or nil (skip); run_check renders those three states.
module Checks
  extend self

  # Domains owned by Heroku; anything else counts as a custom domain.
  HEROKU_DOMAINS = ["herokuapp.com", "herokussl.com", "heroku-shadowapp.com", "heroku-shadowssl.com"]
  # Legacy Heroku A-record IPs (not referenced by the checks below).
  HEROKU_IPS = ["75.101.145.87", "75.101.163.44", "174.129.212.2", "50.16.232.130", "50.16.215.196"]

  # Attempt to access dynos of the app.
  # If the current user does not have access to the app,
  # an error message will be displayed.
  def can_access?(app_name)
    web_dynos(app_name)
  end

  # All domain names attached to the app (Heroku-provided and custom).
  def domain_names(app_name)
    api.get_domains(app_name).body.map {|d| d["domain"]}
  end

  # Only the non-Heroku (custom) domain names.
  def custom_domain_names(app_name)
    domain_names(app_name).select{|dname| custom?(dname)}
  end

  # Running processes whose name contains "web".
  def web_dynos(app_name)
    api.get_ps(app_name).body.select do |ps|
      ps["process"].include?("web")
    end
  end

  # Production-tier heroku-postgresql add-ons (dev and basic plans excluded).
  def heroku_pgdb(app_name)
    api.get_addons(app_name).body.select do |ao|
      ao["name"].include?("heroku-postgresql") &&
        !ao["name"].include?("dev") &&
        !ao["name"].include?("basic")
    end
  end

  # True unless exactly one web dyno is running.
  # NOTE(review): zero web dynos also returns true here — confirm intended.
  def dyno_redundancy?(app_name)
    web_dynos(app_name).length != 1
  end

  # App runs on the Cedar stack.
  def cedar?(app_name)
    api.get_app(app_name).body["stack"] == "cedar"
  end

  def database_url(app_name)
    api.get_config_vars(app_name).body["DATABASE_URL"]
  end

  # Config vars that look like HEROKU_POSTGRES* connection URLs.
  def postgres_urls(app_name)
    api.get_config_vars(app_name).body.select do |k,v|
      k.downcase.include?("heroku_postgres")
    end
  end

  # Skip check if there is no DATABASE_URL set on the app.
  # Otherwise we check whether a Heroku PostgreSQL production db is installed.
  def prod_db?(app_name)
    return nil unless database_url(app_name)
    heroku_pgdb(app_name).length >= 1
  end

  # Follower databases have the same username, password, and database name.
  # The only difference between a follower url and a master url is the host.
  # Skip if the app doesn't have a database_url set in the config.
  def follower_db?(app_name)
    return nil unless database_url(app_name)
    uri = URI.parse(database_url(app_name))
    postgres_urls(app_name).select do |name, url|
      tmp_uri = URI.parse(url)
      [:user, :password, :path].all? do |k|
        uri.send(k) == tmp_uri.send(k)
      end
    end.length >= 2
  end

  # Any attached database hosted in us-west (i.e. in a different region).
  def cross_reg_follower?(app_name)
    return nil unless database_url(app_name)
    postgres_urls(app_name).any? do |name, url|
      url.include?("us-west")
    end
  end

  def web_app?(app_name)
    web_dynos(app_name).length >= 1
  end

  # Skipped for non-web apps or apps with no domains; otherwise requires
  # at least one ssl:endpoint add-on.
  def ssl_endpoint?(app_name)
    return nil unless web_app?(app_name)
    return nil if domain_names(app_name).empty?
    api.get_addons(app_name).body.select do |ao|
      ao["name"].include?("ssl:endpoint")
    end.length >= 1
  end

  # A domain not belonging to any Heroku-owned zone.
  def custom?(dname)
    HEROKU_DOMAINS.none?{|hd| dname.include?(hd)}
  end

  # All CNAMEs of +dname+ resolve to the app's herokuapp.com host.
  def dns_cname?(app_name, dname)
    s = Dns.cnames(dname)
    s.any? && s.all? {|c| c == app_name+".herokuapp.com"}
  end

  # All TXT ALIAS records of +dname+ point at the app's herokuapp.com host.
  def dns_alias?(app_name, dname)
    s = Dns.aliases(dname)
    s.any? && s.all? {|c| c == app_name+".herokuapp.com"}
  end

  # Every custom domain is wired to the app via CNAME or ALIAS.
  def dns?(app_name)
    return nil unless web_app?(app_name)
    custom_domain_names(app_name).all? do |dname|
      dns_cname?(app_name, dname) || dns_alias?(app_name, dname)
    end
  end

  # Legacy API: the response body reads "No drains..." when none are set up.
  def log_drains?(app_name)
    !heroku.list_drains(app_name).body.include?("No")
  end
end
# check the production status of an app
#
class Heroku::Command::Production < Heroku::Command::Base
  include Checks

  # check
  #
  # check the production status of an app
  def check
    display("=== Production check for #{app}")
    # Each run_check block returns true (pass), false (fail) or nil (skip).
    if can_access?(app)
      run_check("Cedar", "http://bit.ly/NIMhag") {cedar?(app)}
      run_check("Dyno Redundancy","http://bit.ly/SSHYev"){dyno_redundancy?(app)}
      run_check("Production Database", "http://bit.ly/PWsbrJ") {prod_db?(app)}
      run_check("Follower Database", "http://bit.ly/MGsk39") {follower_db?(app)}
      run_check("Cross-Region Follower", "http://bit.ly/Rjypmw") {cross_reg_follower?(app)}
      run_check("SSL Endpoint", "http://bit.ly/PfzI7x") {ssl_endpoint?(app)}
      # NOTE(review): same remedy URL as SSL Endpoint — confirm a DNS-specific link.
      run_check("DNS Configuration", "http://bit.ly/PfzI7x") {dns?(app)}
      run_check("Log Drains", "http://bit.ly/MGtYSq") {log_drains?(app)}
    end
  end
end
Update Follower Database remedy URL
- Previous URL used the #follower_databases fragment
- This HTML ID is now actually #follower-databases
- New shortened URL uses the currently correct URL fragment
require 'resolv'
require 'uri'
module Heroku
  module Helpers
    # Prints a left-justified check label, evaluates the block, and reports
    # the outcome on the same line: truthy => "Passed", false => "Failed"
    # with a remedy URL, nil => "Skipped". Returns the block's result.
    def run_check(message, fix_url, options={})
      display(message.to_s.ljust(30), false)
      outcome = yield
      if outcome.nil?
        display("Skipped", false)
      elsif outcome
        display("Passed", false)
      else
        display("Failed \t remedy: #{fix_url}", false)
      end
      display
      outcome
    end
  end
end
module Dns
  extend self

  # Resolves +dname+ and returns the targets of all CNAME records as strings.
  def cnames(dname)
    names = []
    Resolv::DNS.open do |dns|
      dns.getresources(dname, Resolv::DNS::Resource::IN::CNAME).each do |record|
        names << record.name.to_s
      end
    end
    names
  end

  # Returns the targets of TXT records shaped like "ALIAS for <target>"
  # (the convention some DNS providers use for apex-domain aliases).
  def aliases(dname)
    found = []
    Resolv::DNS.open do |dns|
      dns.getresources(dname, Resolv::DNS::Resource::IN::TXT).each do |record|
        found << record.data.split(" for ").last if record.data.start_with?("ALIAS for ")
      end
    end
    found
  end
end
# Predicates used by the production check command. Each returns true (pass),
# false (fail) or nil (skip); run_check renders those three states.
module Checks
  extend self

  # Domains owned by Heroku; anything else counts as a custom domain.
  HEROKU_DOMAINS = ["herokuapp.com", "herokussl.com", "heroku-shadowapp.com", "heroku-shadowssl.com"]
  # Legacy Heroku A-record IPs (not referenced by the checks below).
  HEROKU_IPS = ["75.101.145.87", "75.101.163.44", "174.129.212.2", "50.16.232.130", "50.16.215.196"]

  # Attempt to access dynos of the app.
  # If the current user does not have access to the app,
  # an error message will be displayed.
  def can_access?(app_name)
    web_dynos(app_name)
  end

  # All domain names attached to the app (Heroku-provided and custom).
  def domain_names(app_name)
    api.get_domains(app_name).body.map {|d| d["domain"]}
  end

  # Only the non-Heroku (custom) domain names.
  def custom_domain_names(app_name)
    domain_names(app_name).select{|dname| custom?(dname)}
  end

  # Running processes whose name contains "web".
  def web_dynos(app_name)
    api.get_ps(app_name).body.select do |ps|
      ps["process"].include?("web")
    end
  end

  # Production-tier heroku-postgresql add-ons (dev and basic plans excluded).
  def heroku_pgdb(app_name)
    api.get_addons(app_name).body.select do |ao|
      ao["name"].include?("heroku-postgresql") &&
        !ao["name"].include?("dev") &&
        !ao["name"].include?("basic")
    end
  end

  # True unless exactly one web dyno is running.
  # NOTE(review): zero web dynos also returns true here — confirm intended.
  def dyno_redundancy?(app_name)
    web_dynos(app_name).length != 1
  end

  # App runs on the Cedar stack.
  def cedar?(app_name)
    api.get_app(app_name).body["stack"] == "cedar"
  end

  def database_url(app_name)
    api.get_config_vars(app_name).body["DATABASE_URL"]
  end

  # Config vars that look like HEROKU_POSTGRES* connection URLs.
  def postgres_urls(app_name)
    api.get_config_vars(app_name).body.select do |k,v|
      k.downcase.include?("heroku_postgres")
    end
  end

  # Skip check if there is no DATABASE_URL set on the app.
  # Otherwise we check whether a Heroku PostgreSQL production db is installed.
  def prod_db?(app_name)
    return nil unless database_url(app_name)
    heroku_pgdb(app_name).length >= 1
  end

  # Follower databases have the same username, password, and database name.
  # The only difference between a follower url and a master url is the host.
  # Skip if the app doesn't have a database_url set in the config.
  def follower_db?(app_name)
    return nil unless database_url(app_name)
    uri = URI.parse(database_url(app_name))
    postgres_urls(app_name).select do |name, url|
      tmp_uri = URI.parse(url)
      [:user, :password, :path].all? do |k|
        uri.send(k) == tmp_uri.send(k)
      end
    end.length >= 2
  end

  # Any attached database hosted in us-west (i.e. in a different region).
  def cross_reg_follower?(app_name)
    return nil unless database_url(app_name)
    postgres_urls(app_name).any? do |name, url|
      url.include?("us-west")
    end
  end

  def web_app?(app_name)
    web_dynos(app_name).length >= 1
  end

  # Skipped for non-web apps or apps with no domains; otherwise requires
  # at least one ssl:endpoint add-on.
  def ssl_endpoint?(app_name)
    return nil unless web_app?(app_name)
    return nil if domain_names(app_name).empty?
    api.get_addons(app_name).body.select do |ao|
      ao["name"].include?("ssl:endpoint")
    end.length >= 1
  end

  # A domain not belonging to any Heroku-owned zone.
  def custom?(dname)
    HEROKU_DOMAINS.none?{|hd| dname.include?(hd)}
  end

  # All CNAMEs of +dname+ resolve to the app's herokuapp.com host.
  def dns_cname?(app_name, dname)
    s = Dns.cnames(dname)
    s.any? && s.all? {|c| c == app_name+".herokuapp.com"}
  end

  # All TXT ALIAS records of +dname+ point at the app's herokuapp.com host.
  def dns_alias?(app_name, dname)
    s = Dns.aliases(dname)
    s.any? && s.all? {|c| c == app_name+".herokuapp.com"}
  end

  # Every custom domain is wired to the app via CNAME or ALIAS.
  def dns?(app_name)
    return nil unless web_app?(app_name)
    custom_domain_names(app_name).all? do |dname|
      dns_cname?(app_name, dname) || dns_alias?(app_name, dname)
    end
  end

  # Legacy API: the response body reads "No drains..." when none are set up.
  def log_drains?(app_name)
    !heroku.list_drains(app_name).body.include?("No")
  end
end
# check the production status of an app
#
class Heroku::Command::Production < Heroku::Command::Base
  include Checks

  # check
  #
  # check the production status of an app
  def check
    display("=== Production check for #{app}")
    # Each run_check block returns true (pass), false (fail) or nil (skip).
    if can_access?(app)
      run_check("Cedar", "http://bit.ly/NIMhag") {cedar?(app)}
      run_check("Dyno Redundancy","http://bit.ly/SSHYev"){dyno_redundancy?(app)}
      run_check("Production Database", "http://bit.ly/PWsbrJ") {prod_db?(app)}
      run_check("Follower Database", "http://bit.ly/XoOJJv") {follower_db?(app)}
      run_check("Cross-Region Follower", "http://bit.ly/Rjypmw") {cross_reg_follower?(app)}
      run_check("SSL Endpoint", "http://bit.ly/PfzI7x") {ssl_endpoint?(app)}
      # NOTE(review): same remedy URL as SSL Endpoint — confirm a DNS-specific link.
      run_check("DNS Configuration", "http://bit.ly/PfzI7x") {dns?(app)}
      run_check("Log Drains", "http://bit.ly/MGtYSq") {log_drains?(app)}
    end
  end
end
|
# Copyright (c) 2008 Phusion
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Lets ActiveRecord models declare per-attribute defaults that are applied on
# initialize for any attribute the caller did not supply.
module DefaultValueForPlugin
  # Container for a literal default value.
  class NormalValueContainer
    def initialize(value)
      @value = value
    end

    # Returns the stored value; +instance+ is ignored.
    def evaluate(instance)
      return @value
    end
  end

  # Container for a block default, evaluated against the record instance.
  class BlockValueContainer
    def initialize(block)
      @block = block
    end

    def evaluate(instance)
      return @block.call(instance)
    end
  end

  module ClassMethods
    # Declares a default for +attribute+: either a literal +value+ or a block
    # receiving the record. Installs the initialize hook lazily on first use.
    def default_value_for(attribute, value = nil, &block)
      if !method_defined?(:initialize_with_defaults)
        include(InstanceMethods)
        alias_method_chain :initialize, :defaults
        class_inheritable_accessor :_default_attribute_values
        # Fix: use an ordered hash so defaults are applied in declaration
        # order. A plain Hash is unordered on Ruby 1.8, so a block default
        # that reads another defaulted attribute could observe it unset.
        self._default_attribute_values = ActiveSupport::OrderedHash.new
      end
      if block_given?
        container = BlockValueContainer.new(block)
      else
        container = NormalValueContainer.new(value)
      end
      _default_attribute_values[attribute.to_s] = container
    end

    # Bulk form: hash of attribute => value-or-Proc.
    def default_values(values)
      values.each_pair do |key, value|
        if value.kind_of? Proc
          default_value_for(key, &value)
        else
          default_value_for(key, value)
        end
      end
    end
  end

  module InstanceMethods
    # Wraps ActiveRecord#initialize: after normal initialization, assigns the
    # declared defaults to every attribute not supplied by the caller
    # (mass-assignment-protected attributes count as not supplied).
    def initialize_with_defaults(attrs = nil)
      initialize_without_defaults(attrs) do
        if attrs
          # NOTE: stringify_keys! mutates the caller's hash in place.
          stringified_attrs = attrs.stringify_keys!
          safe_attrs = remove_attributes_protected_from_mass_assignment(stringified_attrs)
          safe_attribute_names = safe_attrs.keys.map do |x|
            x.to_s
          end
        end
        self.class._default_attribute_values.each do |attribute, container|
          if safe_attribute_names.nil? || !safe_attribute_names.include?(attribute)
            __send__("#{attribute}=", container.evaluate(self))
          end
        end
        yield(self) if block_given?
      end
    end
  end
end

ActiveRecord::Base.extend(DefaultValueForPlugin::ClassMethods)
Respecting default_value_for order by using a ActiveSupport::OrderedHash instead of a plain hash to store the default_attribute_values
# Copyright (c) 2008 Phusion
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Lets ActiveRecord models declare per-attribute defaults that are applied on
# initialize for any attribute the caller did not supply.
module DefaultValueForPlugin
  # Container for a literal default value.
  class NormalValueContainer
    def initialize(value)
      @value = value
    end
    # Returns the stored value; +instance+ is ignored.
    def evaluate(instance)
      return @value
    end
  end
  # Container for a block default, evaluated against the record instance.
  class BlockValueContainer
    def initialize(block)
      @block = block
    end
    def evaluate(instance)
      return @block.call(instance)
    end
  end
  module ClassMethods
    # Declares a default for +attribute+: either a literal +value+ or a block
    # receiving the record. Installs the initialize hook lazily on first use.
    def default_value_for(attribute, value = nil, &block)
      if !method_defined?(:initialize_with_defaults)
        include(InstanceMethods)
        alias_method_chain :initialize, :defaults
        class_inheritable_accessor :_default_attribute_values
        # Ordered hash: defaults are applied in declaration order.
        self._default_attribute_values = ActiveSupport::OrderedHash.new
      end
      if block_given?
        container = BlockValueContainer.new(block)
      else
        container = NormalValueContainer.new(value)
      end
      _default_attribute_values[attribute.to_s] = container
    end
    # Bulk form: hash of attribute => value-or-Proc.
    def default_values(values)
      values.each_pair do |key, value|
        if value.kind_of? Proc
          default_value_for(key, &value)
        else
          default_value_for(key, value)
        end
      end
    end
  end
  module InstanceMethods
    # Wraps ActiveRecord#initialize: after normal initialization, assigns the
    # declared defaults to every attribute not supplied by the caller
    # (mass-assignment-protected attributes count as not supplied).
    def initialize_with_defaults(attrs = nil)
      initialize_without_defaults(attrs) do
        if attrs
          # NOTE: stringify_keys! mutates the caller's hash in place.
          stringified_attrs = attrs.stringify_keys!
          safe_attrs = remove_attributes_protected_from_mass_assignment(stringified_attrs)
          safe_attribute_names = safe_attrs.keys.map do |x|
            x.to_s
          end
        end
        self.class._default_attribute_values.each do |attribute, container|
          if safe_attribute_names.nil? || !safe_attribute_names.include?(attribute)
            __send__("#{attribute}=", container.evaluate(self))
          end
        end
        yield(self) if block_given?
      end
    end
  end
end
ActiveRecord::Base.extend(DefaultValueForPlugin::ClassMethods)
|
require_dependency "full_text_search/hooks/search_index_options_content_bottom_hook"
require_dependency "full_text_search/hooks/issues_show_description_bottom_hook"
require_dependency "full_text_search/hooks/similar_issues_helper"
require "full_text_search/searcher"
# Registers the Full Text Search plugin; display_score defaults to off and is
# configurable through the 'settings/full_text_search' partial.
Redmine::Plugin.register :full_text_search do
  name 'Full Text Search plugin'
  author 'Kenji Okimoto'
  description 'This plugin provides full text search for Redmine'
  version '0.5.0'
  url 'https://github.com/okkez/redmine_full_text_search'
  author_url 'https://github.com/okkez/redmine_full_text_search'
  settings default: { display_score: "0" }, partial: "settings/full_text_search"
end
# After every code (re)load: pick the Groonga backend matching the database
# adapter, make the core Redmine models searchable, and hook the search UI.
Rails.configuration.to_prepare do
  case
  when Redmine::Database.postgresql?
    require "full_text_search/pgroonga"
    FullTextSearch::SearcherRecord.prepend(FullTextSearch::PGroonga)
  when Redmine::Database.mysql?
    require "full_text_search/mroonga"
    FullTextSearch::SearcherRecord.prepend(FullTextSearch::Mroonga)
  else
    # Do nothing: other adapters fall back to Redmine's standard search.
  end
  [Project, News, Issue, Document, Changeset, Message, Journal, WikiPage, WikiContent, CustomValue, Attachment].each do |klass|
    klass.include(FullTextSearch::Model)
  end
  # Similar-issues search is Issue-specific.
  Issue.include(FullTextSearch::SimilarSearcher::Model)
  SearchHelper.prepend(FullTextSearch::Hooks::SearchHelper)
  SearchController.prepend(FullTextSearch::Hooks::ControllerSearchIndex)
  IssuesHelper.include(FullTextSearch::Hooks::SimilarIssuesHelper)
end
Set callback for Journal
require_dependency "full_text_search/hooks/search_index_options_content_bottom_hook"
require_dependency "full_text_search/hooks/issues_show_description_bottom_hook"
require_dependency "full_text_search/hooks/similar_issues_helper"
require "full_text_search/searcher"
# Registers the Full Text Search plugin; display_score defaults to off and is
# configurable through the 'settings/full_text_search' partial.
Redmine::Plugin.register :full_text_search do
  name 'Full Text Search plugin'
  author 'Kenji Okimoto'
  description 'This plugin provides full text search for Redmine'
  version '0.5.0'
  url 'https://github.com/okkez/redmine_full_text_search'
  author_url 'https://github.com/okkez/redmine_full_text_search'
  settings default: { display_score: "0" }, partial: "settings/full_text_search"
end
# After every code (re)load: pick the Groonga backend matching the database
# adapter, make the core Redmine models searchable, and hook the search UI.
Rails.configuration.to_prepare do
  case
  when Redmine::Database.postgresql?
    require "full_text_search/pgroonga"
    FullTextSearch::SearcherRecord.prepend(FullTextSearch::PGroonga)
  when Redmine::Database.mysql?
    require "full_text_search/mroonga"
    FullTextSearch::SearcherRecord.prepend(FullTextSearch::Mroonga)
  else
    # Do nothing: other adapters fall back to Redmine's standard search.
  end
  [Project, News, Issue, Document, Changeset, Message, Journal, WikiPage, WikiContent, CustomValue, Attachment].each do |klass|
    klass.include(FullTextSearch::Model)
  end
  # Similar-record search hooks for both issues and their journals.
  Issue.include(FullTextSearch::SimilarSearcher::Model)
  Journal.include(FullTextSearch::SimilarSearcher::Model)
  SearchHelper.prepend(FullTextSearch::Hooks::SearchHelper)
  SearchController.prepend(FullTextSearch::Hooks::ControllerSearchIndex)
  IssuesHelper.include(FullTextSearch::Hooks::SimilarIssuesHelper)
end
|
# Timelog Mailer Plugin
# (C) 2014 vjt@openssl.it
# MIT License
#
require 'redmine'

# Registers the Time Log Mailer plugin (requires Redmine >= 2.4.0).
Redmine::Plugin.register :timelog_mailer do
  name 'Time Log Mailer'
  author 'Marcello Barnaba'
  description 'E-mails issue recipients when time entries are logged'
  version '0.0.1'
  url 'https://github.com/vjt/redmine-timelog-mailer'
  author_url 'http://sindro.me'
  requires_redmine :version_or_higher => '2.4.0'
end

ActionDispatch::Callbacks.to_prepare do
  TimeEntryObserver.instance # Instantiate and register the observer
end
Version 0.0.2
# Timelog Mailer Plugin
# (C) 2014 vjt@openssl.it
# MIT License
#
require 'redmine'

# Registers the Time Log Mailer plugin (requires Redmine >= 2.4.0).
Redmine::Plugin.register :timelog_mailer do
  name 'Time Log Mailer'
  author 'Marcello Barnaba'
  description 'E-mails project members when time entries are logged'
  version '0.0.2'
  url 'https://github.com/vjt/redmine-timelog-mailer'
  author_url 'http://sindro.me'
  requires_redmine :version_or_higher => '2.4.0'
end

ActionDispatch::Callbacks.to_prepare do
  TimeEntryObserver.instance # Instantiate and register the observer
end
|
Add ruby script
# Prompts for a pod name and rewrites the "CocoaPod" template placeholder
# throughout the project tree, skipping Xcode metadata bundles.
print 'pod name > '
# Fix: chomp, not chop — chop removes the last character unconditionally,
# so piped input without a trailing newline lost its final character.
cocoapod = gets.chomp

file_names = Dir["#{Dir.pwd}/**/*.*"]

# Xcode project/workspace metadata that must not be rewritten.
ignored_file_types = ['.xccheckout',
                      '.xcodeproj',
                      '.xcworkspace',
                      '.xcuserdatad',
                      '.xcuserstate']

file_names.each do |file_name|
  # The glob can also match directories with a dotted name (e.g. Foo.framework);
  # File.read would raise on those.
  next unless File.file?(file_name)
  next if ignored_file_types.include?(File.extname(file_name))

  text = File.read(file_name)
  new_contents = text.gsub(/CocoaPod/, cocoapod)
  # Fix: write, not puts — puts appended a trailing newline to files that
  # lacked one, mutating untouched files on every run. Also skip files with
  # no occurrences at all.
  File.write(file_name, new_contents) if new_contents != text
end
|
module BOAST
class Dimension
  # DSL factory alias: Dimension.parens(...) == Dimension.new(...).
  def self.parens(*args, &block)
    self::new(*args, &block)
  end

  attr_reader :val1, :val2, :size

  # One argument: a size n spanning get_array_start .. get_array_start + n - 1.
  # Two arguments: explicit lower/upper bounds (size stays nil).
  # No arguments: an unspecified dimension.
  def initialize(v1 = nil, v2 = nil)
    @size = nil
    @val1 = nil
    @val2 = nil
    if v2.nil? && v1
      start = BOAST::get_array_start
      @val1 = start
      @val2 = v1 + start - 1
      @size = v1
    else
      @val1 = v1
      @val2 = v2
    end
  end

  # Renders the dimension for the current language: "lo:hi" in Fortran, the
  # extent (hi - lo + 1) in the C family. Returns nil when nothing is set,
  # and just the single bound when only val1 is present.
  def to_str
    unless @val2
      return nil if @val1.nil?
      return @val1.to_s
    end
    lang = BOAST::get_lang
    if lang == BOAST::FORTRAN
      "#{@val1}:#{@val2}"
    elsif [BOAST::C, BOAST::CL, BOAST::CUDA].include?(lang)
      (@val2 - @val1 + 1).to_s
    else
      ""
    end
  end

  def to_s
    to_str
  end
end
class ConstArray < Array
  # Wraps a literal array of constants. +type+ (a BOAST type class) is
  # instantiated to decide whether Fortran values need the "_wp" kind suffix.
  def initialize(array, type = nil)
    super(array)
    @type = type::new if type
  end

  def to_s
    to_str
  end

  # Language-dependent rendering of the constant initializer.
  def to_str
    lang = BOAST::get_lang
    return to_str_fortran if lang == BOAST::FORTRAN
    return to_str_c if [BOAST::C, BOAST::CL, BOAST::CUDA].include?(lang)
  end

  # Fortran array constructor (/ a, b, ... /) with continuation lines;
  # 8-byte types get the "_wp" kind suffix on every element.
  def to_str_fortran
    return "" if first.nil?
    suffix = (@type && @type.size == 8) ? "_wp" : ""
    rendered = map { |value| "#{value}#{suffix}" }
    "(/ &\n" + rendered.join(", &\n") + " /)"
  end

  # C brace initializer, one element per line.
  def to_str_c
    return "" if first.nil?
    "{\n" + map { |value| value.to_s }.join(",\n") + "}"
  end
end
# A typed variable of the BOAST code-generation DSL. Knows how to render its
# declaration and its uses for Fortran, C, OpenCL and CUDA.
class Variable
  include BOAST::Arithmetic

  alias_method :orig_method_missing, :method_missing

  # Struct-member sugar: var.foo resolves to a reference to member "foo" when
  # the variable's type declares one; otherwise normal method_missing applies.
  def method_missing(m, *a, &b)
    return self.struct_reference(type.members[m.to_s]) if type.members[m.to_s]
    # return self.get_element(m.to_s) if type.getters[m.to_s]
    # return self.set_element(m.to_s) if type.setters[m.to_s]
    return self.orig_method_missing(m, *a, &b)
  end

  # DSL factory alias: Variable.parens(...) == Variable.new(...).
  def self.parens(*args,&block)
    return self::new(*args,&block)
  end

  attr_reader :name
  attr_accessor :direction
  attr_accessor :constant
  attr_reader :allocate
  attr_reader :type
  attr_reader :dimension
  attr_reader :local
  attr_reader :texture
  attr_reader :sampler
  attr_reader :restrict
  attr_accessor :replace_constant
  attr_accessor :force_replace_constant

  # +hash+ accepts long and short option keys (:direction/:dir,
  # :constant/:const, :dimension/:dim, :local/:shared) plus :texture,
  # :allocate, :restrict and :replace_constant. OpenCL textures get a
  # companion sampler variable.
  def initialize(name,type,hash={})
    @name = name.to_s
    @direction = hash[:direction] ? hash[:direction] : hash[:dir]
    @constant = hash[:constant] ? hash[:constant] : hash[:const]
    @dimension = hash[:dimension] ? hash[:dimension] : hash[:dim]
    @local = hash[:local] ? hash[:local] : hash[:shared]
    @texture = hash[:texture]
    @allocate = hash[:allocate]
    @restrict = hash[:restrict]
    @force_replace_constant = false
    if not hash[:replace_constant].nil? then
      @replace_constant = hash[:replace_constant]
    else
      @replace_constant = true
    end
    if @texture and BOAST::get_lang == BOAST::CL then
      @sampler = Variable::new("sampler_#{name}", BOAST::CustomType,:type_name => "sampler_t" ,:replace_constant => false, :constant => "CLK_NORMALIZED_COORDS_FALSE | CLK_ADDRESS_NONE | CLK_FILTER_NEAREST")
    else
      @sampler = nil
    end
    @type = type::new(hash)
    @hash = hash
    # Scalar outputs are passed by pointer in C-family languages (see to_str).
    if (@direction == :out or @direction == :inout) and not @dimension then
      @scalar_output = true
    else
      @scalar_output = false
    end
  end

  # Duplicate this variable, optionally renaming it and overriding options.
  def copy(name=nil,options={})
    name = @name if not name
    h = @hash.clone
    options.each { |k,v|
      h[k] = v
    }
    return Variable::new(name, @type.class, h)
  end

  # Build a variable from an existing type instance; direction is cleared.
  def Variable.from_type(name, type, options={})
    hash = type.to_hash
    options.each { |k,v|
      hash[k] = v
    }
    hash[:direction] = nil
    hash[:dir] = nil
    return Variable::new(name, type.class, hash)
  end

  def to_s
    self.to_str
  end

  # Renders a use of the variable: the literal constant when constant
  # replacement applies, a pointer dereference for scalar outputs, otherwise
  # just the name.
  def to_str
    if @force_replace_constant or ( @replace_constant and @constant and BOAST::get_replace_constants and not @dimension ) then
      s = @constant.to_s
      s += "_wp" if BOAST::get_lang == BOAST::FORTRAN and @type and @type.size == 8
      return s
    end
    if @scalar_output then
      return "(*#{self.name})"
    end
    return @name
  end

  def to_var
    return self
  end

  # Assignment expression: self = x.
  def set(x)
    return Expression::new(BOAST::Set, self, x)
  end

  # Pointer dereference in C-family languages; no-op in Fortran.
  def dereference
    return self.copy("*(#{self.name})", :dimension => nil, :dim => nil, :direction => nil, :dir => nil) if [BOAST::C, BOAST::CL, BOAST::CUDA].include?( BOAST::get_lang )
    return self if BOAST::get_lang == BOAST::FORTRAN
    #return Expression::new("*",nil,self)
  end

  # Member access: "." in C-family languages, "%" in Fortran.
  def struct_reference(x)
    return x.copy(self.name+"."+x.name) if [BOAST::C, BOAST::CL, BOAST::CUDA].include?( BOAST::get_lang )
    return x.copy(self.name+"%"+x.name) if BOAST::get_lang == BOAST::FORTRAN
  end

  # Post-increment expression.
  def inc
    return Expression::new("++",self,nil)
  end

  # Array indexing expression.
  def [](*args)
    return Index::new(self,args)
  end

  def indent
    return " "*BOAST::get_indent_level
  end

  # Statement terminator: ";\n" for the C family, "\n" for Fortran.
  def finalize
    s = ""
    s += ";" if [BOAST::C, BOAST::CL, BOAST::CUDA].include?( BOAST::get_lang )
    s+="\n"
    return s
  end

  # C / OpenCL / CUDA declaration. When +final+ the line is indented,
  # terminated and printed to the current output.
  def decl_c(final=true, device=false)
    return decl_texture(final) if @texture
    s = ""
    s += self.indent if final
    s += "const " if @constant or @direction == :in
    s += "__global " if @direction and @dimension and not (@hash[:register] or @hash[:private] or @local) and BOAST::get_lang == BOAST::CL
    s += "__local " if @local and BOAST::get_lang == BOAST::CL
    s += "__shared__ " if @local and not device and BOAST::get_lang == BOAST::CUDA
    s += @type.decl
    if(@dimension and not @constant and not @allocate and (not @local or (@local and device))) then
      s += " *"
      if @restrict then
        if BOAST::get_lang == BOAST::CL
          s += " restrict"
        else
          s += " __restrict__"
        end
      end
    end
    # Scalar outputs are passed by pointer.
    if not @dimension and ( @direction == :out or @direction == :inout ) then
      s += " *"
    end
    s += " #{@name}"
    if @dimension and @constant then
      s += "[]"
    end
    if @dimension and ((@local and not device) or (@allocate and not @constant)) then
      s +="["
      s += @dimension.reverse.join("*")
      s +="]"
    end
    s += " = #{@constant}" if @constant
    s += self.finalize if final
    BOAST::get_output.print s if final
    return s
  end

  # Header (prototype) form of the declaration; Fortran scalars also become
  # pointers here since they are passed by reference.
  def header(lang=C,final=true)
    return decl_texture(final) if @texture
    s = ""
    s += self.indent if final
    s += "const " if @constant or @direction == :in
    s += "__global " if @direction and @dimension and BOAST::get_lang == BOAST::CL
    s += "__local " if @local and BOAST::get_lang == BOAST::CL
    s += "__shared__ " if @local and BOAST::get_lang == BOAST::CUDA
    s += @type.decl
    if(@dimension and not @constant and not @local) then
      s += " *"
    end
    if not @dimension and ( lang == BOAST::FORTRAN or @direction == :out or @direction == :inout ) then
      s += " *"
    end
    s += " #{@name}"
    if(@dimension and @constant) then
      s += "[]"
    end
    if(@dimension and @local) then
      s +="["
      s += @dimension.reverse.join("*")
      s +="]"
    end
    s += " = #{@constant}" if @constant
    s += self.finalize if final
    BOAST::get_output.print s if final
    return s
  end

  # Language dispatch for declarations.
  def decl(final=true,device=false)
    return self.decl_fortran(final) if BOAST::get_lang == BOAST::FORTRAN
    return self.decl_c(final, device) if [BOAST::C, BOAST::CL, BOAST::CUDA].include?( BOAST::get_lang )
  end

  # Texture declaration (OpenCL image / CUDA texture). Read-only.
  def decl_texture(final=true)
    raise "Unsupported language #{BOAST::get_lang} for texture!" if not [BOAST::CL, BOAST::CUDA].include?( BOAST::get_lang )
    raise "Write is unsupported for textures!" if not (@constant or @direction == :in)
    dim_number = 1
    if @dimension then
      # Fix: assignment, not comparison — the previous `==` left dim_number
      # at 1, so multi-dimensional textures were declared as 1D/2D and the
      # "> 3" guard below could never fire.
      dim_number = @dimension.size
    end
    raise "Unsupported number of dimension: #{dim_number}!" if dim_number > 3
    s = ""
    s += self.indent if final
    if BOAST::get_lang == BOAST::CL then
      s += "__read_only "
      if dim_number < 3 then
        s += "image2d_t " #from OCL 1.2+ image1d_t is defined
      else
        s += "image3d_t "
      end
    else
      s += "texture<#{@type.decl}, cudaTextureType#{dim_number}D, cudaReadModeElementType> "
    end
    s += @name
    s += self.finalize if final
    BOAST::get_output.print s if final
    return s
  end

  # Fortran declaration with intent, parameter and dimension attributes.
  def decl_fortran(final=true)
    s = ""
    s += self.indent if final
    s += @type.decl
    s += ", intent(#{@direction})" if @direction
    s += ", parameter" if @constant
    if(@dimension) then
      s += ", dimension("
      dim = @dimension[0].to_str
      if dim then
        s += dim
        @dimension[1..-1].each { |d|
          s += ", "
          s += d
        }
      else
        # Unknown extent: assumed-size array.
        s += "*"
      end
      s += ")"
    end
    s += " :: #{@name}"
    if @constant
      s += " = #{@constant}"
      s += "_wp" if not @dimension and @type and @type.size == 8
    end
    s += self.finalize if final
    BOAST::get_output.print s if final
    return s
  end
end
end
Corrected bug for FORTRAN intent out scalars.
module BOAST
class Dimension
  # DSL factory alias: Dimension.parens(...) == Dimension.new(...).
  def self.parens(*args, &block)
    self::new(*args, &block)
  end

  attr_reader :val1, :val2, :size

  # One argument: a size n spanning get_array_start .. get_array_start + n - 1.
  # Two arguments: explicit lower/upper bounds (size stays nil).
  # No arguments: an unspecified dimension.
  def initialize(v1 = nil, v2 = nil)
    @size = nil
    @val1 = nil
    @val2 = nil
    if v2.nil? && v1
      start = BOAST::get_array_start
      @val1 = start
      @val2 = v1 + start - 1
      @size = v1
    else
      @val1 = v1
      @val2 = v2
    end
  end

  # Renders the dimension for the current language: "lo:hi" in Fortran, the
  # extent (hi - lo + 1) in the C family. Returns nil when nothing is set,
  # and just the single bound when only val1 is present.
  def to_str
    unless @val2
      return nil if @val1.nil?
      return @val1.to_s
    end
    lang = BOAST::get_lang
    if lang == BOAST::FORTRAN
      "#{@val1}:#{@val2}"
    elsif [BOAST::C, BOAST::CL, BOAST::CUDA].include?(lang)
      (@val2 - @val1 + 1).to_s
    else
      ""
    end
  end

  def to_s
    to_str
  end
end
# An Array of literal values that can print itself as a FORTRAN or
# C-family constant-array initializer.
class ConstArray < Array
  # type:: optional BOAST type class; an instance is kept so 8-byte
  #        FORTRAN constants can receive the _wp kind suffix.
  def initialize(array,type = nil)
    super(array)
    @type = type::new if type
  end

  def to_s
    to_str
  end

  # Dispatch on the current output language; nil for other languages.
  def to_str
    case BOAST::get_lang
    when BOAST::FORTRAN
      to_str_fortran
    when BOAST::C, BOAST::CL, BOAST::CUDA
      to_str_c
    end
  end

  # "(/ &\n v1, &\n v2 /)" — continuation-line FORTRAN initializer,
  # each element suffixed with _wp for 8-byte types.
  def to_str_fortran
    return "" if first.nil?
    suffix = (@type and @type.size == 8) ? "_wp" : ""
    out = "(/ &\n" + first.to_s + suffix
    self[1..-1].each do |v|
      out << ", &\n" << v.to_s << suffix
    end
    out << " /)"
  end

  # "{\n v1,\n v2}" — C brace initializer list.
  def to_str_c
    return "" if first.nil?
    out = "{\n" + first.to_s
    self[1..-1].each do |v|
      out << ",\n" << v.to_s
    end
    out << "}"
  end
end
# A typed variable of the BOAST DSL.  A Variable carries its name, type,
# transfer direction, dimensions and target-specific qualifiers, and can
# render its own declaration for FORTRAN, C, OpenCL and CUDA.
class Variable
  include BOAST::Arithmetic

  alias_method :orig_method_missing, :method_missing
  # Resolve unknown messages as struct-member accesses when the
  # variable's type declares a member of that name.
  def method_missing(m, *a, &b)
    return self.struct_reference(type.members[m.to_s]) if type.members[m.to_s]
#    return self.get_element(m.to_s) if type.getters[m.to_s]
#    return self.set_element(m.to_s) if type.setters[m.to_s]
    return self.orig_method_missing(m, *a, &b)
  end

  # DSL-style constructor alias.
  def self.parens(*args,&block)
    return self::new(*args,&block)
  end

  attr_reader :name
  attr_accessor :direction
  attr_accessor :constant
  attr_reader :allocate
  attr_reader :type
  attr_reader :dimension
  attr_reader :local
  attr_reader :texture
  attr_reader :sampler
  attr_reader :restrict
  attr_accessor :replace_constant
  attr_accessor :force_replace_constant

  # hash accepts long and short option keys (:direction/:dir,
  # :constant/:const, :dimension/:dim, :local/:shared) plus :texture,
  # :allocate, :restrict and :replace_constant.
  def initialize(name,type,hash={})
    @name = name.to_s
    @direction = hash[:direction] ? hash[:direction] : hash[:dir]
    @constant = hash[:constant] ? hash[:constant] : hash[:const]
    @dimension = hash[:dimension] ? hash[:dimension] : hash[:dim]
    @local = hash[:local] ? hash[:local] : hash[:shared]
    @texture = hash[:texture]
    @allocate = hash[:allocate]
    @restrict = hash[:restrict]
    @force_replace_constant = false
    if not hash[:replace_constant].nil? then
      @replace_constant = hash[:replace_constant]
    else
      @replace_constant = true
    end
    # OpenCL textures need an accompanying sampler object.
    if @texture and BOAST::get_lang == BOAST::CL then
      @sampler = Variable::new("sampler_#{name}", BOAST::CustomType,:type_name => "sampler_t" ,:replace_constant => false, :constant => "CLK_NORMALIZED_COORDS_FALSE | CLK_ADDRESS_NONE | CLK_FILTER_NEAREST")
    else
      @sampler = nil
    end
    @type = type::new(hash)
    @hash = hash
    # Scalar outputs are passed by pointer in the C-family languages.
    if (@direction == :out or @direction == :inout) and not @dimension then
      @scalar_output = true
    else
      @scalar_output = false
    end
  end

  # Duplicate this variable, optionally renaming it and overriding
  # construction options.
  def copy(name=nil,options={})
    name = @name if not name
    h = @hash.clone
    options.each { |k,v|
      h[k] = v
    }
    return Variable::new(name, @type.class, h)
  end

  # Build a Variable from an existing type instance; any direction in
  # the type's hash is deliberately cleared.
  def Variable.from_type(name, type, options={})
    hash = type.to_hash
    options.each { |k,v|
      hash[k] = v
    }
    hash[:direction] = nil
    hash[:dir] = nil
    return Variable::new(name, type.class, hash)
  end

  def to_s
    self.to_str
  end

  # Render a *use* of the variable: the literal constant when constant
  # replacement applies, a pointer dereference for scalar outputs in
  # C-family languages, otherwise just the name.
  def to_str
    if @force_replace_constant or ( @replace_constant and @constant and BOAST::get_replace_constants and not @dimension ) then
      s = @constant.to_s
      s += "_wp" if BOAST::get_lang == BOAST::FORTRAN and @type and @type.size == 8
      return s
    end
    if @scalar_output and [BOAST::C, BOAST::CL, BOAST::CUDA].include?( BOAST::get_lang ) then
      return "(*#{self.name})"
    end
    return @name
  end

  def to_var
    return self
  end

  # Assignment expression: self = x.
  def set(x)
    return Expression::new(BOAST::Set, self, x)
  end

  # Pointer dereference in C-family languages; a no-op in FORTRAN.
  def dereference
    return self.copy("*(#{self.name})", :dimension => nil, :dim => nil, :direction => nil, :dir => nil) if [BOAST::C, BOAST::CL, BOAST::CUDA].include?( BOAST::get_lang )
    return self if BOAST::get_lang == BOAST::FORTRAN
    #return Expression::new("*",nil,self)
  end

  # Member access: "." in C-family languages, "%" in FORTRAN.
  def struct_reference(x)
    return x.copy(self.name+"."+x.name) if [BOAST::C, BOAST::CL, BOAST::CUDA].include?( BOAST::get_lang )
    return x.copy(self.name+"%"+x.name) if BOAST::get_lang == BOAST::FORTRAN
  end

  # Increment expression.
  def inc
    return Expression::new("++",self,nil)
  end

  # Array indexing: var[i, j, ...].
  def [](*args)
    return Index::new(self,args)
  end

  def indent
    return " "*BOAST::get_indent_level
  end

  # Statement terminator: ";\n" for C-family languages, "\n" otherwise.
  def finalize
    s = ""
    s += ";" if [BOAST::C, BOAST::CL, BOAST::CUDA].include?( BOAST::get_lang )
    s+="\n"
    return s
  end

  # Emit the C/CL/CUDA declaration.  device:: selects the device-side
  # flavour (e.g. no __shared__, pointer form for local arrays).
  def decl_c(final=true, device=false)
    return decl_texture(final) if @texture
    s = ""
    s += self.indent if final
    s += "const " if @constant or @direction == :in
    s += "__global " if @direction and @dimension and not (@hash[:register] or @hash[:private] or @local) and BOAST::get_lang == BOAST::CL
    s += "__local " if @local and BOAST::get_lang == BOAST::CL
    s += "__shared__ " if @local and not device and BOAST::get_lang == BOAST::CUDA
    s += @type.decl
    if(@dimension and not @constant and not @allocate and (not @local or (@local and device))) then
      s += " *"
      if @restrict then
        if BOAST::get_lang == BOAST::CL
          s += " restrict"
        else
          s += " __restrict__"
        end
      end
    end
    # Scalar outputs are declared as pointers.
    if not @dimension and ( @direction == :out or @direction == :inout ) then
      s += " *"
    end
    s += " #{@name}"
    if @dimension and @constant then
      s += "[]"
    end
    # Locally stored arrays get a flattened size expression.
    if @dimension and ((@local and not device) or (@allocate and not @constant)) then
      s +="["
      s += @dimension.reverse.join("*")
      s +="]"
    end
    s += " = #{@constant}" if @constant
    s += self.finalize if final
    BOAST::get_output.print s if final
    return s
  end

  # Emit a C-style declaration suitable for a function header; lang
  # controls pointer form for scalars when binding FORTRAN.
  def header(lang=C,final=true)
    return decl_texture(final) if @texture
    s = ""
    s += self.indent if final
    s += "const " if @constant or @direction == :in
    s += "__global " if @direction and @dimension and BOAST::get_lang == BOAST::CL
    s += "__local " if @local and BOAST::get_lang == BOAST::CL
    s += "__shared__ " if @local and BOAST::get_lang == BOAST::CUDA
    s += @type.decl
    if(@dimension and not @constant and not @local) then
      s += " *"
    end
    # FORTRAN passes scalars by reference, as do out/inout directions.
    if not @dimension and ( lang == BOAST::FORTRAN or @direction == :out or @direction == :inout ) then
      s += " *"
    end
    s += " #{@name}"
    if(@dimension and @constant) then
      s += "[]"
    end
    if(@dimension and @local) then
      s +="["
      s += @dimension.reverse.join("*")
      s +="]"
    end
    s += " = #{@constant}" if @constant
    s += self.finalize if final
    BOAST::get_output.print s if final
    return s
  end

  # Emit the declaration in the current target language.
  def decl(final=true,device=false)
    return self.decl_fortran(final) if BOAST::get_lang == BOAST::FORTRAN
    return self.decl_c(final, device) if [BOAST::C, BOAST::CL, BOAST::CUDA].include?( BOAST::get_lang )
  end

  # Emit the declaration as a texture object (CL image / CUDA texture).
  # Raises unless the language is CL/CUDA; textures are read-only.
  def decl_texture(final=true)
    raise "Unsupported language #{BOAST::get_lang} for texture!" if not [BOAST::CL, BOAST::CUDA].include?( BOAST::get_lang )
    raise "Write is unsupported for textures!" if not (@constant or @direction == :in)
    dim_number = 1
    if @dimension then
      # BUGFIX: was "dim_number == @dimension.size" — a no-op comparison
      # that left dim_number stuck at 1, so the rank check and the
      # 2D/3D image choice never saw the real number of dimensions.
      dim_number = @dimension.size
    end
    raise "Unsupported number of dimension: #{dim_number}!" if dim_number > 3
    s = ""
    s += self.indent if final
    if BOAST::get_lang == BOAST::CL then
      s += "__read_only "
      if dim_number < 3 then
        s += "image2d_t " #from OCL 1.2+ image1d_t is defined
      else
        s += "image3d_t "
      end
    else
      s += "texture<#{@type.decl}, cudaTextureType#{dim_number}D, cudaReadModeElementType> "
    end
    s += @name
    s += self.finalize if final
    BOAST::get_output.print s if final
    return s
  end

  # Emit the FORTRAN declaration, e.g.
  # "integer, intent(in), dimension(0:n-1) :: foo".
  def decl_fortran(final=true)
    s = ""
    s += self.indent if final
    s += @type.decl
    s += ", intent(#{@direction})" if @direction
    s += ", parameter" if @constant
    if(@dimension) then
      s += ", dimension("
      dim = @dimension[0].to_str
      if dim then
        s += dim
        @dimension[1..-1].each { |d|
          s += ", "
          s += d
        }
      else
        # Unknown extent: assumed-size array.
        s += "*"
      end
      s += ")"
    end
    s += " :: #{@name}"
    if @constant
      s += " = #{@constant}"
      # Working-precision kind suffix for 8-byte scalar constants.
      s += "_wp" if not @dimension and @type and @type.size == 8
    end
    s += self.finalize if final
    BOAST::get_output.print s if final
    return s
  end
end
end
|
module RightData
# One node of a lazily-scanned directory tree.  Leaves are files (or
# unreadable/empty directories); inner nodes aggregate duplicate and
# "ignorable" counts so fully-redundant sub-trees can be detected.
class FileSystemItem
  attr_reader :relativePath
  attr_reader :parent
  attr_reader :ignore_children
  attr_reader :duplicate_children
  attr_accessor :duplicates
  attr_accessor :ignorable

  # path:: the full path for the root node, or a directory-entry name
  #        when args[:parent] is given.
  def initialize path, args
    if args[:parent]
      @relativePath = File.basename(path)
      @parent = args[:parent]
    else
      @relativePath = path
      @parent = nil
    end
    @ignorable = false
    @duplicates = [] # for this node
    @duplicate_children = 0 # counts for children
    @ignore_children = 0
    self
  end

  # Total number of files in this sub-tree (directory leaves count 0).
  def files
    return 0 if leaf? && File.directory?(fullPath)
    return 1 if leaf?
    return children.map {|n| n.files}.inject {|sum, n| sum + n }
  end

  # Number of files flagged ignorable in this sub-tree.
  def ignore_files
    return 0 if leaf? && File.directory?(fullPath)
    return ignorable? ? 1 : 0 if leaf?
    return children.map {|n| n.ignore_files}.inject {|sum, n| sum + n }
  end

  # Number of files that have at least one known duplicate.
  def duplicate_files
    return 0 if leaf? && File.directory?(fullPath)
    return duplicate? ? 1 : 0 if leaf?
    return children.map {|n| n.duplicate_files}.inject {|sum, n| sum + n }
  end

  def basename; @relativePath; end

  # Memoized singleton rooted at the filesystem root.
  def self.rootItem
    @rootItem ||= self.new '/', :parent => nil
  end

  # Children are scanned from disk on first access; nil for files and
  # for unreadable directories.
  def children
    unless @children
      if File.directory?(fullPath) and File.readable?(fullPath)
        @children = Dir.entries(fullPath).select { |path|
          path != '.' and path != '..'
        }.map { |path|
          FileSystemItem.new path, :parent => self
        }
      else
        @children = nil
      end
    end
    @children
  end

  def path; fullPath; end

  # Absolute path rebuilt by walking the parent chain.
  def fullPath
    @parent ? File.join(@parent.fullPath, @relativePath) : @relativePath
  end

  def childAtIndex n
    children[n]
  end

  # -1 (not 0) when the node has no child list at all.
  def numberOfChildren
    children == nil ? -1 : children.size
  end

  def children?; !children.nil? && !children.empty?; end

  # A leaf is a duplicate when it has recorded duplicates; an inner node
  # is a duplicate when every child is either ignored or duplicated.
  def duplicate?
    if leaf?
      !duplicates.empty?
    else # Dup if all ignored / dup children
      ((@ignore_children + @duplicate_children) == numberOfChildren)
    end
  end

  def ignorable?; ignorable; end

  def increment_ignorable_children
    @ignore_children += 1
    update_duplicate_ignorable_status
  end

  # Propagate upwards once every child is classified.
  # NOTE(review): on the root node parent is nil, so this would raise if
  # the root ever becomes fully ignored/duplicated — confirm intended.
  def update_duplicate_ignorable_status
    parent.increment_duplicate_children if((@ignore_children + @duplicate_children) == numberOfChildren)
  end

  def increment_duplicate_children
    @duplicate_children += 1
    update_duplicate_ignorable_status
  end

  def leaf?; !children?; end

  # Pre-order traversal; the block's return value decides whether the
  # node's children are visited.
  def traverse(&block) # Allow proc to decide if we traverse
    if block.call(self) && children?
      children.each { |c| c.traverse(&block) }
    end
  end

  # Children that are neither ignored nor duplicates.
  def other_children
    children.size - ignore_children - duplicate_children
  end

  def to_param; to_s; end

  def to_s
    "<Tree :path => #{self.path}, :files => #{self.files}>"
  end

  # Inspect the nodes:
  # Print a shell-comment style report: one line per visited node plus a
  # summary.  Unique/partial entries are prefixed with "#" so only the
  # removable (dup/ignore) paths remain un-commented.
  def report(pre="")
    pre += " " if !pre.empty?
    self.traverse do |n|
      # Is this a leaf (e.g. a file)?
      if n.leaf?
        msg = nil
        msg = "# dup(#{n.duplicates.count})" if n.duplicate?
        msg = "# ign" if n.ignorable?
        if msg
          puts "#{pre}#{n.path} #{msg}" # Remove the dups/igns!
        else
          puts "# #{n.path} unique"
        end
        false # Don't traverse deeper!
      else
        if n.duplicate_children + n.ignore_children == n.children.size
          puts "#{pre}#{n.path} # #{n.duplicate_children} dups / #{n.ignore_children} ignores"
          false # Don't traverse deeper!
        elsif n.children.size == 0
          puts "#{pre}#{n.path} # Empty... "
          false
        else
          puts "# #{n.path} # Not #{n.duplicate_children} dup/ #{n.ignore_children} ign / #{n.other_children} other "
          true
        end
      end
    end
    puts "# #{self.ignore_files} ignores, #{self.duplicate_files} dups of #{self.files} files"
  end
end
end
Added escaping
module RightData
# One node of a lazily-scanned directory tree.  Leaves are files (or
# unreadable/empty directories); inner nodes aggregate duplicate and
# "ignorable" counts so fully-redundant sub-trees can be detected.
# This revision quotes paths in the report output.
class FileSystemItem
  attr_reader :relativePath
  attr_reader :parent
  attr_reader :ignore_children
  attr_reader :duplicate_children
  attr_accessor :duplicates
  attr_accessor :ignorable

  # path:: the full path for the root node, or a directory-entry name
  #        when args[:parent] is given.
  def initialize path, args
    if args[:parent]
      @relativePath = File.basename(path)
      @parent = args[:parent]
    else
      @relativePath = path
      @parent = nil
    end
    @ignorable = false
    @duplicates = [] # for this node
    @duplicate_children = 0 # counts for children
    @ignore_children = 0
    self
  end

  # Total number of files in this sub-tree (directory leaves count 0).
  def files
    return 0 if leaf? && File.directory?(fullPath)
    return 1 if leaf?
    return children.map {|n| n.files}.inject {|sum, n| sum + n }
  end

  # Number of files flagged ignorable in this sub-tree.
  def ignore_files
    return 0 if leaf? && File.directory?(fullPath)
    return ignorable? ? 1 : 0 if leaf?
    return children.map {|n| n.ignore_files}.inject {|sum, n| sum + n }
  end

  # Number of files that have at least one known duplicate.
  def duplicate_files
    return 0 if leaf? && File.directory?(fullPath)
    return duplicate? ? 1 : 0 if leaf?
    return children.map {|n| n.duplicate_files}.inject {|sum, n| sum + n }
  end

  def basename; @relativePath; end

  # Memoized singleton rooted at the filesystem root.
  def self.rootItem
    @rootItem ||= self.new '/', :parent => nil
  end

  # Children are scanned from disk on first access; nil for files and
  # for unreadable directories.
  def children
    unless @children
      if File.directory?(fullPath) and File.readable?(fullPath)
        @children = Dir.entries(fullPath).select { |path|
          path != '.' and path != '..'
        }.map { |path|
          FileSystemItem.new path, :parent => self
        }
      else
        @children = nil
      end
    end
    @children
  end

  def path; fullPath; end

  # Absolute path rebuilt by walking the parent chain.
  def fullPath
    @parent ? File.join(@parent.fullPath, @relativePath) : @relativePath
  end

  def childAtIndex n
    children[n]
  end

  # -1 (not 0) when the node has no child list at all.
  def numberOfChildren
    children == nil ? -1 : children.size
  end

  def children?; !children.nil? && !children.empty?; end

  # A leaf is a duplicate when it has recorded duplicates; an inner node
  # is a duplicate when every child is either ignored or duplicated.
  def duplicate?
    if leaf?
      !duplicates.empty?
    else # Dup if all ignored / dup children
      ((@ignore_children + @duplicate_children) == numberOfChildren)
    end
  end

  def ignorable?; ignorable; end

  def increment_ignorable_children
    @ignore_children += 1
    update_duplicate_ignorable_status
  end

  # Propagate upwards once every child is classified.
  # NOTE(review): on the root node parent is nil, so this would raise if
  # the root ever becomes fully ignored/duplicated — confirm intended.
  def update_duplicate_ignorable_status
    parent.increment_duplicate_children if((@ignore_children + @duplicate_children) == numberOfChildren)
  end

  def increment_duplicate_children
    @duplicate_children += 1
    update_duplicate_ignorable_status
  end

  def leaf?; !children?; end

  # Pre-order traversal; the block's return value decides whether the
  # node's children are visited.
  def traverse(&block) # Allow proc to decide if we traverse
    if block.call(self) && children?
      children.each { |c| c.traverse(&block) }
    end
  end

  # Children that are neither ignored nor duplicates.
  def other_children
    children.size - ignore_children - duplicate_children
  end

  def to_param; to_s; end

  def to_s
    "<Tree :path => #{self.path}, :files => #{self.files}>"
  end

  # Inspect the nodes:
  # Print a shell-comment style report: one line per visited node plus a
  # summary.  Removable (dup/ignore) paths are quoted and left
  # un-commented; everything else is prefixed with "#".
  def report(pre="")
    pre += " " if !pre.empty?
    self.traverse do |n|
      # Is this a leaf (e.g. a file)?
      if n.leaf?
        msg = nil
        msg = "# dup(#{n.duplicates.count})" if n.duplicate?
        msg = "# ign" if n.ignorable?
        if msg
          puts "#{pre}'#{n.path}' #{msg}" # Remove the dups/igns!
        else
          puts "# #{n.path} unique"
        end
        false # Don't traverse deeper!
      else
        if n.duplicate_children + n.ignore_children == n.children.size
          puts "#{pre}'#{n.path}' # #{n.duplicate_children} dups / #{n.ignore_children} ignores"
          false # Don't traverse deeper!
        elsif n.children.size == 0
          puts "#{pre}'#{n.path}' # Empty... "
          false
        else
          puts "# #{n.path} # Not #{n.duplicate_children} dup/ #{n.ignore_children} ign / #{n.other_children} other "
          true
        end
      end
    end
    puts "# #{self.ignore_files} ignores, #{self.duplicate_files} dups of #{self.files} files"
  end
end
end
|
#
# Author:: AJ Christensen (<aj@opscode.com>)
# Copyright:: Copyright (c) 2008 OpsCode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
# Unit specs for Chef::Provider::Group: loading the current state,
# idempotency checks (compare_group), the create/remove/manage/modify
# actions, and the human-readable change descriptions.
# BUGFIX: the describe target previously read Chef::Provider::User by
# copy-paste mistake — every resource and provider built below is the
# *Group* provider, so the spec output labelled the wrong class.
describe Chef::Provider::Group do
  before do
    @node = Chef::Node.new
    @events = Chef::EventDispatch::Dispatcher.new
    @run_context = Chef::RunContext.new(@node, {}, @events)
    @new_resource = Chef::Resource::Group.new("wheel", @run_context)
    @new_resource.gid 500
    @new_resource.members "aj"
    @provider = Chef::Provider::Group.new(@new_resource, @run_context)
    @current_resource = Chef::Resource::Group.new("aj", @run_context)
    @current_resource.gid 500
    @current_resource.members "aj"
    @provider.current_resource = @current_resource
  end
  it "assumes the group exists by default" do
    @provider.group_exists.should be_true
  end
  # load_current_resource reads /etc/group via Etc.getgrnam.
  describe "when establishing the current state of the group" do
    before do
      @pw_group = mock("Struct::Group",
        :name => "wheel",
        :gid => 20,
        :mem => [ "root", "aj" ]
      )
      Etc.stub!(:getgrnam).and_return(@pw_group)
    end
    it "sets the group name of the current resource to the group name of the new resource" do
      @provider.load_current_resource
      @provider.current_resource.group_name.should == 'wheel'
    end
    it "does not modify the desired gid if set" do
      @provider.load_current_resource
      @new_resource.gid.should == 500
    end
    it "sets the desired gid to the current gid if none is set" do
      @new_resource.instance_variable_set(:@gid, nil)
      @provider.load_current_resource
      @new_resource.gid.should == 20
    end
    it "looks up the group in /etc/group with getgrnam" do
      Etc.should_receive(:getgrnam).with(@new_resource.group_name).and_return(@pw_group)
      @provider.load_current_resource
      @provider.current_resource.gid.should == 20
      @provider.current_resource.members.should == %w{root aj}
    end
    it "should flip the value of exists if it cannot be found in /etc/group" do
      Etc.stub!(:getgrnam).and_raise(ArgumentError)
      @provider.load_current_resource
      @provider.group_exists.should be_false
    end
    it "should return the current resource" do
      @provider.load_current_resource.should equal(@provider.current_resource)
    end
  end
  # compare_group returns true when a converge is required.
  describe "when determining if the system is already in the target state" do
    [ :gid, :members ].each do |attribute|
      it "should return true if #{attribute} doesn't match" do
        @current_resource.stub!(attribute).and_return("looooooooooooooooooool")
        @provider.compare_group.should be_true
      end
    end
    it "should return false if gid and members are equal" do
      @provider.compare_group.should be_false
    end
    it "should return false if append is true and the group member(s) already exists" do
      @current_resource.members << "extra_user"
      @new_resource.stub!(:append).and_return(true)
      @provider.compare_group.should be_false
    end
    it "should return true if append is true and the group member(s) do not already exist" do
      @new_resource.members << "extra_user"
      @new_resource.stub!(:append).and_return(true)
      @provider.compare_group.should be_true
    end
  end
  describe "when creating a group" do
    it "should call create_group if the group does not exist" do
      @provider.group_exists = false
      @provider.should_receive(:create_group).and_return(true)
      @provider.run_action(:create)
    end
    it "should set the the new_resources updated flag when it creates the group" do
      @provider.group_exists = false
      @provider.stub!(:create_group)
      @provider.run_action(:create)
      @provider.new_resource.should be_updated
    end
    it "should check to see if the group has mismatched attributes if the group exists" do
      @provider.group_exists = true
      @provider.stub!(:compare_group).and_return(false)
      @provider.run_action(:create)
      @provider.new_resource.should_not be_updated
    end
    it "should call manage_group if the group exists and has mismatched attributes" do
      @provider.group_exists = true
      @provider.stub!(:compare_group).and_return(true)
      @provider.should_receive(:manage_group).and_return(true)
      @provider.run_action(:create)
    end
    it "should set the the new_resources updated flag when it creates the group if we call manage_group" do
      @provider.group_exists = true
      @provider.stub!(:compare_group).and_return(true)
      @provider.stub!(:manage_group).and_return(true)
      @provider.run_action(:create)
      @new_resource.should be_updated
    end
  end
  describe "when removing a group" do
    it "should not call remove_group if the group does not exist" do
      @provider.group_exists = false
      @provider.should_not_receive(:remove_group)
      @provider.run_action(:remove)
      @provider.new_resource.should_not be_updated
    end
    it "should call remove_group if the group exists" do
      @provider.group_exists = true
      @provider.should_receive(:remove_group)
      @provider.run_action(:remove)
      @provider.new_resource.should be_updated
    end
  end
  describe "when updating a group" do
    before(:each) do
      @provider.group_exists = true
      @provider.stub!(:manage_group).and_return(true)
    end
    it "should run manage_group if the group exists and has mismatched attributes" do
      @provider.should_receive(:compare_group).and_return(true)
      @provider.should_receive(:manage_group).and_return(true)
      @provider.run_action(:manage)
    end
    it "should set the new resources updated flag to true if manage_group is called" do
      @provider.stub!(:compare_group).and_return(true)
      @provider.stub!(:manage_group).and_return(true)
      @provider.run_action(:manage)
      @new_resource.should be_updated
    end
    it "should not run manage_group if the group does not exist" do
      @provider.group_exists = false
      @provider.should_not_receive(:manage_group)
      @provider.run_action(:manage)
    end
    it "should not run manage_group if the group exists but has no differing attributes" do
      @provider.should_receive(:compare_group).and_return(false)
      @provider.should_not_receive(:manage_group)
      @provider.run_action(:manage)
    end
  end
  describe "when modifying the group" do
    before(:each) do
      @provider.group_exists = true
      @provider.stub!(:manage_group).and_return(true)
    end
    it "should run manage_group if the group exists and has mismatched attributes" do
      @provider.should_receive(:compare_group).and_return(true)
      @provider.should_receive(:manage_group).and_return(true)
      @provider.run_action(:modify)
    end
    it "should set the new resources updated flag to true if manage_group is called" do
      @provider.stub!(:compare_group).and_return(true)
      @provider.stub!(:manage_group).and_return(true)
      @provider.run_action(:modify)
      @new_resource.should be_updated
    end
    it "should not run manage_group if the group exists but has no differing attributes" do
      @provider.should_receive(:compare_group).and_return(false)
      @provider.should_not_receive(:manage_group)
      @provider.run_action(:modify)
    end
    it "should raise a Chef::Exceptions::Group if the group doesn't exist" do
      @provider.group_exists = false
      lambda { @provider.run_action(:modify) }.should raise_error(Chef::Exceptions::Group)
    end
  end
  describe "when determining the reason for a change" do
    it "should report which group members are missing if members are missing and appending to the group" do
      @new_resource.members << "user1"
      @new_resource.members << "user2"
      @new_resource.stub!(:append).and_return true
      @provider.compare_group.should be_true
      @provider.change_desc.should == "would add missing member(s): user1, user2"
    end
    it "should report that the group members will be overwritten if not appending" do
      @new_resource.members << "user1"
      @new_resource.stub!(:append).and_return false
      @provider.compare_group.should be_true
      @provider.change_desc.should == "would replace group members with new list of members"
    end
    it "should report the gid will be changed when it does not match" do
      @current_resource.stub!(:gid).and_return("BADF00D")
      @provider.compare_group.should be_true
      @provider.change_desc.should == "would change gid #{@current_resource.gid} to #{@new_resource.gid}"
    end
    it "should report no change reason when no change is required" do
      @provider.compare_group.should be_false
      @provider.change_desc.should == nil
    end
  end
end
Fix tests that expected particular strings of text, which fail after modifying the converge-by descriptions
#
# Author:: AJ Christensen (<aj@opscode.com>)
# Copyright:: Copyright (c) 2008 OpsCode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
# Unit specs for Chef::Provider::Group: loading the current state,
# idempotency checks (compare_group), the create/remove/manage/modify
# actions, and the human-readable change descriptions.
# BUGFIX: the describe target previously read Chef::Provider::User by
# copy-paste mistake — every resource and provider built below is the
# *Group* provider, so the spec output labelled the wrong class.
describe Chef::Provider::Group do
  before do
    @node = Chef::Node.new
    @events = Chef::EventDispatch::Dispatcher.new
    @run_context = Chef::RunContext.new(@node, {}, @events)
    @new_resource = Chef::Resource::Group.new("wheel", @run_context)
    @new_resource.gid 500
    @new_resource.members "aj"
    @provider = Chef::Provider::Group.new(@new_resource, @run_context)
    @current_resource = Chef::Resource::Group.new("aj", @run_context)
    @current_resource.gid 500
    @current_resource.members "aj"
    @provider.current_resource = @current_resource
  end
  it "assumes the group exists by default" do
    @provider.group_exists.should be_true
  end
  # load_current_resource reads /etc/group via Etc.getgrnam.
  describe "when establishing the current state of the group" do
    before do
      @pw_group = mock("Struct::Group",
        :name => "wheel",
        :gid => 20,
        :mem => [ "root", "aj" ]
      )
      Etc.stub!(:getgrnam).and_return(@pw_group)
    end
    it "sets the group name of the current resource to the group name of the new resource" do
      @provider.load_current_resource
      @provider.current_resource.group_name.should == 'wheel'
    end
    it "does not modify the desired gid if set" do
      @provider.load_current_resource
      @new_resource.gid.should == 500
    end
    it "sets the desired gid to the current gid if none is set" do
      @new_resource.instance_variable_set(:@gid, nil)
      @provider.load_current_resource
      @new_resource.gid.should == 20
    end
    it "looks up the group in /etc/group with getgrnam" do
      Etc.should_receive(:getgrnam).with(@new_resource.group_name).and_return(@pw_group)
      @provider.load_current_resource
      @provider.current_resource.gid.should == 20
      @provider.current_resource.members.should == %w{root aj}
    end
    it "should flip the value of exists if it cannot be found in /etc/group" do
      Etc.stub!(:getgrnam).and_raise(ArgumentError)
      @provider.load_current_resource
      @provider.group_exists.should be_false
    end
    it "should return the current resource" do
      @provider.load_current_resource.should equal(@provider.current_resource)
    end
  end
  # compare_group returns true when a converge is required.
  describe "when determining if the system is already in the target state" do
    [ :gid, :members ].each do |attribute|
      it "should return true if #{attribute} doesn't match" do
        @current_resource.stub!(attribute).and_return("looooooooooooooooooool")
        @provider.compare_group.should be_true
      end
    end
    it "should return false if gid and members are equal" do
      @provider.compare_group.should be_false
    end
    it "should return false if append is true and the group member(s) already exists" do
      @current_resource.members << "extra_user"
      @new_resource.stub!(:append).and_return(true)
      @provider.compare_group.should be_false
    end
    it "should return true if append is true and the group member(s) do not already exist" do
      @new_resource.members << "extra_user"
      @new_resource.stub!(:append).and_return(true)
      @provider.compare_group.should be_true
    end
  end
  describe "when creating a group" do
    it "should call create_group if the group does not exist" do
      @provider.group_exists = false
      @provider.should_receive(:create_group).and_return(true)
      @provider.run_action(:create)
    end
    it "should set the the new_resources updated flag when it creates the group" do
      @provider.group_exists = false
      @provider.stub!(:create_group)
      @provider.run_action(:create)
      @provider.new_resource.should be_updated
    end
    it "should check to see if the group has mismatched attributes if the group exists" do
      @provider.group_exists = true
      @provider.stub!(:compare_group).and_return(false)
      @provider.run_action(:create)
      @provider.new_resource.should_not be_updated
    end
    it "should call manage_group if the group exists and has mismatched attributes" do
      @provider.group_exists = true
      @provider.stub!(:compare_group).and_return(true)
      @provider.should_receive(:manage_group).and_return(true)
      @provider.run_action(:create)
    end
    it "should set the the new_resources updated flag when it creates the group if we call manage_group" do
      @provider.group_exists = true
      @provider.stub!(:compare_group).and_return(true)
      @provider.stub!(:manage_group).and_return(true)
      @provider.run_action(:create)
      @new_resource.should be_updated
    end
  end
  describe "when removing a group" do
    it "should not call remove_group if the group does not exist" do
      @provider.group_exists = false
      @provider.should_not_receive(:remove_group)
      @provider.run_action(:remove)
      @provider.new_resource.should_not be_updated
    end
    it "should call remove_group if the group exists" do
      @provider.group_exists = true
      @provider.should_receive(:remove_group)
      @provider.run_action(:remove)
      @provider.new_resource.should be_updated
    end
  end
  describe "when updating a group" do
    before(:each) do
      @provider.group_exists = true
      @provider.stub!(:manage_group).and_return(true)
    end
    it "should run manage_group if the group exists and has mismatched attributes" do
      @provider.should_receive(:compare_group).and_return(true)
      @provider.should_receive(:manage_group).and_return(true)
      @provider.run_action(:manage)
    end
    it "should set the new resources updated flag to true if manage_group is called" do
      @provider.stub!(:compare_group).and_return(true)
      @provider.stub!(:manage_group).and_return(true)
      @provider.run_action(:manage)
      @new_resource.should be_updated
    end
    it "should not run manage_group if the group does not exist" do
      @provider.group_exists = false
      @provider.should_not_receive(:manage_group)
      @provider.run_action(:manage)
    end
    it "should not run manage_group if the group exists but has no differing attributes" do
      @provider.should_receive(:compare_group).and_return(false)
      @provider.should_not_receive(:manage_group)
      @provider.run_action(:manage)
    end
  end
  describe "when modifying the group" do
    before(:each) do
      @provider.group_exists = true
      @provider.stub!(:manage_group).and_return(true)
    end
    it "should run manage_group if the group exists and has mismatched attributes" do
      @provider.should_receive(:compare_group).and_return(true)
      @provider.should_receive(:manage_group).and_return(true)
      @provider.run_action(:modify)
    end
    it "should set the new resources updated flag to true if manage_group is called" do
      @provider.stub!(:compare_group).and_return(true)
      @provider.stub!(:manage_group).and_return(true)
      @provider.run_action(:modify)
      @new_resource.should be_updated
    end
    it "should not run manage_group if the group exists but has no differing attributes" do
      @provider.should_receive(:compare_group).and_return(false)
      @provider.should_not_receive(:manage_group)
      @provider.run_action(:modify)
    end
    it "should raise a Chef::Exceptions::Group if the group doesn't exist" do
      @provider.group_exists = false
      lambda { @provider.run_action(:modify) }.should raise_error(Chef::Exceptions::Group)
    end
  end
  describe "when determining the reason for a change" do
    it "should report which group members are missing if members are missing and appending to the group" do
      @new_resource.members << "user1"
      @new_resource.members << "user2"
      @new_resource.stub!(:append).and_return true
      @provider.compare_group.should be_true
      @provider.change_desc.should == "add missing member(s): user1, user2"
    end
    it "should report that the group members will be overwritten if not appending" do
      @new_resource.members << "user1"
      @new_resource.stub!(:append).and_return false
      @provider.compare_group.should be_true
      @provider.change_desc.should == "replace group members with new list of members"
    end
    it "should report the gid will be changed when it does not match" do
      @current_resource.stub!(:gid).and_return("BADF00D")
      @provider.compare_group.should be_true
      @provider.change_desc.should == "change gid #{@current_resource.gid} to #{@new_resource.gid}"
    end
    it "should report no change reason when no change is required" do
      @provider.compare_group.should be_false
      @provider.change_desc.should == nil
    end
  end
end
|
Initial specifications for the group resource
#
# Author:: AJ Christensen (<aj@opscode.com>)
# Copyright:: Copyright (c) 2008 OpsCode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.expand_path(File.join(File.dirname(__FILE__), "..", "..", "spec_helper"))
# Specs for the defaults Chef::Resource::Group sets in its constructor.
describe Chef::Resource::Group, "initialize" do
  before(:each) do
    @resource = Chef::Resource::Group.new("admin")
  end
  it "should create a new Chef::Resource::Group" do
    @resource.should be_a_kind_of(Chef::Resource)
    @resource.should be_a_kind_of(Chef::Resource::Group)
  end
  it "should set the resource_name to :group" do
    @resource.resource_name.should eql(:group)
  end
  # NOTE(review): relies on a "groupname" accessor — confirm the
  # resource still exposes it (alongside any group_name variant).
  it "should set the groupname equal to the argument to initialize" do
    @resource.groupname.should eql("admin")
  end
  it "should set gid to nil" do
    @resource.gid.should eql(nil)
  end
  it "should set action to :create" do
    @resource.action.should eql(:create)
  end
  # Every supported action must be in the allowed_actions whitelist.
  %w{create remove modify manage}.each do |action|
    it "should allow action #{action}" do
      @resource.allowed_actions.detect { |a| a == action.to_sym }.should eql(action.to_sym)
    end
  end
end
|
require "forwardable"
# Gem bootstrap for Celluloid projects: resolves the enclosing gem's
# paths from its *.gemspec, extends $LOAD_PATH, optionally refreshes the
# shared "culture" checkout, then loads the culture gem loader and the
# gem's version file. All of this runs at require time (load-path
# mutation and a git shell-out are intentional side effects here).
module Celluloid
module Sync
class << self
# `undef ... rescue nil` drops any earlier definition so this file can
# be loaded more than once without method-redefinition warnings.
undef gem_path rescue nil
# Absolute path of the gem root (two directories above this file).
def gem_path
File.expand_path("../../", __FILE__)
end
undef gem_name rescue nil
# Gem name derived from the first *.gemspec found in gem_path.
# NOTE(review): if no gemspec exists, `first` is nil and this raises
# NoMethodError (nil.gsub) before the "Missing gemspec." guard below
# ever gets a chance to fire — confirm whether that is acceptable.
def gem_name
Dir["#{File.expand_path('../../', __FILE__)}/*.gemspec"].first.gsub(".gemspec", "").split("/").last
end
undef gem_name? rescue nil
# Whether a gemspec-derived name could be determined.
def gem_name?
!gem_name.nil?
end
undef lib_path rescue nil
# Absolute path of the gem's lib/ directory.
def lib_path
File.expand_path("../../lib", __FILE__)
end
undef lib_gempath rescue nil
# lib/ subdirectory for this gem, with dashes mapped to nested dirs
# (e.g. "celluloid-io" -> lib/celluloid/io).
def lib_gempath
"#{lib_path}/#{gem_name.split('-').join('/')}"
end
undef scenario rescue nil
# Basename of the running executable (e.g. "bundle", "rake").
def scenario
File.basename($PROGRAM_NAME)
end
undef bundler? rescue nil
# True when invoked via Bundler's `bundle` executable.
def bundler?
scenario == "bundle"
end
end
fail "Missing gemspec." unless gem_name?
$LOAD_PATH.push(gem_path)
$LOAD_PATH.push(lib_path)
# TODO: This will likely need to be done differently if INSIDE a cut gem.
case scenario
when "bundle"
if ARGV.first == "update"
puts "Celluloid::Sync // Gem: #{gem_name}"
# Refresh the shared culture checkout on `bundle update`.
`cd #{gem_path}/culture; git pull origin master`
end
end
require("#{gem_path}/culture/gems/loader")
# Load lib/<gem>/version.rb when present.
if File.exist?(version = "#{lib_gempath}/version.rb")
require(version)
end
end
end
remove console output
require "forwardable"
module Celluloid
module Sync
class << self
undef gem_path rescue nil
def gem_path
File.expand_path("../../", __FILE__)
end
undef gem_name rescue nil
def gem_name
Dir["#{File.expand_path('../../', __FILE__)}/*.gemspec"].first.gsub(".gemspec", "").split("/").last
end
undef gem_name? rescue nil
def gem_name?
!gem_name.nil?
end
undef lib_path rescue nil
def lib_path
File.expand_path("../../lib", __FILE__)
end
undef lib_gempath rescue nil
def lib_gempath
"#{lib_path}/#{gem_name.split('-').join('/')}"
end
undef scenario rescue nil
def scenario
File.basename($PROGRAM_NAME)
end
undef bundler? rescue nil
def bundler?
scenario == "bundle"
end
end
fail "Missing gemspec." unless gem_name?
$LOAD_PATH.push(gem_path)
$LOAD_PATH.push(lib_path)
# TODO: This will likely need to be done differently if INSIDE a cut gem.
case scenario
when "bundle"
if ARGV.first == "update"
`cd #{gem_path}/culture; git pull origin master`
end
end
require("#{gem_path}/culture/gems/loader")
if File.exist?(version = "#{lib_gempath}/version.rb")
require(version)
end
end
end
|
#!/usr/bin/env ruby
require 'open3'
require 'open-uri'
require 'rubygems'
require 'terminal-notifier'
require 'thor'
require 'yaml'
if RUBY_VERSION =~ /1.9/
Encoding.default_external = Encoding::UTF_8
Encoding.default_internal = Encoding::UTF_8
end
# Scans plain-text/Markdown note files for tags, either inline hashtags
# (" #tag") or a `tags:` list when the file parses as a YAML hash.
class Tagger
  # Lists every tag found in options[:source] (default: current
  # directory) together with its occurrence count.
  # Returns the counted list; with options[:file] writes the bare tag
  # list there instead, and options[:sublime] also refreshes the Sublime
  # Text autocompletion file.
  def list(options)
    source = options[:source] || Dir.pwd
    puts "Listing tags in: " + source
    scanned = []
    tags = []
    tagsn = []
    dir = source + '/*.{txt,md,mmd,markdown,taskpaper}'
    # Scan for tags in the text of all files
    Dir.glob(dir) do |p|
      content = File.read(p) # read once; File.open leaked the handle
      # Hashtags
      scanned << content.scan(/( #[\w\d-]+)(?=\s|$)/i)
      # YAML meta data tags. Only trust Hash documents: the old
      # `yaml['tags']` on a String did a substring lookup, and raised
      # on unparsable notes.
      yaml = load_yaml(p)
      scanned << yaml['tags'] if yaml.is_a?(Hash) && !yaml['tags'].nil?
    end
    # count duplicate entries into a tag => count hash
    thash = Hash.new(0)
    scanned.flatten.map(&:lstrip).sort.each { |v| thash[v] += 1 }
    thash.each do |k, v|
      tagsn << "#{k} (#{v})"
      tags << k
    end
    if options[:sublime]
      # create/update JSON file for Sublime Text autocompletion
      sublime = '{"scope": "text","completions":[' + tags.map { |e| '"' + e.strip + '"' }.join(",") + ']}'
      fpath = ENV['HOME'] + '/Library/Application Support/Sublime Text 2/Packages/User/tags.sublime-completions'
      File.open(fpath, 'w') { |file| file.puts sublime }
      puts "Sublime Text autocompletion list updated"
    end
    if options[:file]
      File.open(options[:file], 'w') { |file| file.puts tags }
      puts "List of tags written to: " + options[:file] # was "writen"
    else
      tagsn
    end
  end

  # Finds chunks tagged " #tag" (or files whose YAML tags include +tag+)
  # under options[:source]. Writes matching chunks to options[:file] if
  # given, otherwise returns a bullet list of matching file paths;
  # options[:open] additionally opens the matches in Sublime Text.
  def find(tag, options)
    source = options[:source] || Dir.pwd
    puts "Searching in: " + source
    scanned = []
    found = []
    dir = source + '/*.{txt,md,mmd,markdown,taskpaper}'
    # Scan for tags in the text of all files
    Dir.glob(dir) do |p|
      # Read once: the old code re-read the same File handle later,
      # which returned "" since the pointer was already at EOF.
      content = File.read(p)
      chunks = content.split(/\n\n[\-_\* ]{3,}\n|\n\n(?=#+.+\n)/)
      chunks.each do |chunk|
        if chunk =~ / ##{tag}[\s$]/
          # URI.escape was removed in Ruby 3; DEFAULT_PARSER.escape is
          # the equivalent percent-encoding.
          scanned << chunk + "\n\n[" + File.basename(p, File.extname(p)) + "](file://" + URI::DEFAULT_PARSER.escape(p) + ")"
          found << ("'" + p + "'")
        end
      end
      # YAML meta data tags; guard against non-Hash / missing tags
      # (previously nil.include? crashed on most plain notes).
      yaml = load_yaml(p)
      if yaml.is_a?(Hash) && yaml['tags'].is_a?(Array) && yaml['tags'].include?(tag)
        scanned << content
        found << ("'" + p + "'")
      end
    end
    if options[:open]
      founds = found.join(" ")
      `subl -n #{founds}`
    end
    if options[:file]
      File.open(options[:file], 'w') { |file| file.puts scanned.join("\n\n---\n\n") }
      puts "Result in file: " + options[:file]
    else
      founds = "- " + found.join("\n- ")
    end
  end

  private

  # YAML.load_file that returns nil instead of raising when the file is
  # not valid YAML (most plain notes are not).
  def load_yaml(path)
    YAML.load_file(path)
  rescue Psych::SyntaxError
    nil
  end
end
# Thor command-line interface around Tagger.
class Tagh < Thor
  desc "list [-s source]", "list tags."
  # Bug fix: the option was registered as :sourcel (typo) while
  # Tagger#list reads options[:source], so -s was silently ignored.
  option :source, :aliases => "-s"
  option :sublime, :aliases => "-u"
  option :file, :aliases => "-f"
  def list()
    r = Tagger.new
    puts r.list(options)
  end

  desc "find TAG [-s source]", "find items tagged TAG in [source]"
  option :source, :aliases => "-s"
  option :file, :aliases => "-f"
  option :open, :aliases => "-o"
  def find(tag)
    r = Tagger.new
    puts r.find(tag, options)
  end
end
Tagh.start(ARGV)
skeleton for merge, added --min, --max as options for list
#!/usr/bin/env ruby
require 'open3'
require 'open-uri'
require 'rubygems'
require 'terminal-notifier'
require 'thor'
require 'yaml'
if RUBY_VERSION =~ /1.9/
Encoding.default_external = Encoding::UTF_8
Encoding.default_internal = Encoding::UTF_8
end
# Scans plain-text/Markdown note files for tags, either inline hashtags
# (" #tag") or a `tags:` list when the file parses as a YAML hash.
class Tagger
  # Lists every tag found in options[:source] (default: current
  # directory) with its occurrence count, keeping only tags whose count
  # falls within options['min']..options['max'].
  # Returns the counted list; with options[:file] writes the bare tag
  # list there instead, and options[:sublime] also refreshes the Sublime
  # Text autocompletion file.
  def list(options)
    source = options[:source] || Dir.pwd
    # Thor delivers --min/--max under string keys.
    min = options['min'] ? options['min'].to_i : 1
    max = options['max'] ? options['max'].to_i : 999999
    # (leftover `puts min` debug output removed)
    puts "Listing tags in: " + source
    scanned = []
    tags = []
    tagsn = []
    dir = source + '/*.{txt,md,mmd,markdown,taskpaper}'
    # Scan for tags in the text of all files
    Dir.glob(dir) do |p|
      content = File.read(p) # read once; File.open leaked the handle
      # Hashtags
      scanned << content.scan(/( #[\w\d-]+)(?=\s|$)/i)
      # YAML meta data tags. Only trust Hash documents: the old
      # `yaml['tags']` on a String did a substring lookup, and raised
      # on unparsable notes.
      yaml = load_yaml(p)
      scanned << yaml['tags'] if yaml.is_a?(Hash) && !yaml['tags'].nil?
    end
    # count duplicate entries into a tag => count hash
    thash = Hash.new(0)
    scanned.flatten.map(&:lstrip).sort.each { |v| thash[v] += 1 }
    thash.each do |k, v|
      if v.between?(min, max)
        tagsn << "#{k} (#{v})"
        tags << k
      end
    end
    if options[:sublime]
      # create/update JSON file for Sublime Text autocompletion
      sublime = '{"scope": "text","completions":[' + tags.map { |e| '"' + e.strip + '"' }.join(",") + ']}'
      fpath = ENV['HOME'] + '/Library/Application Support/Sublime Text 2/Packages/User/tags.sublime-completions'
      File.open(fpath, 'w') { |file| file.puts sublime }
      puts "Sublime Text autocompletion list updated"
    end
    if options[:file]
      File.open(options[:file], 'w') { |file| file.puts tags }
      puts "List of tags written to: " + options[:file] # was "writen"
    else
      tagsn
    end
  end

  # Finds chunks tagged " #tag" (or files whose YAML tags include +tag+)
  # under options[:source]. Writes matching chunks to options[:file] if
  # given, otherwise returns a bullet list of matching file paths;
  # options[:open] additionally opens the matches in Sublime Text.
  def find(tag, options)
    source = options[:source] || Dir.pwd
    puts "Searching in: " + source
    scanned = []
    found = []
    dir = source + '/*.{txt,md,mmd,markdown,taskpaper}'
    # Scan for tags in the text of all files
    Dir.glob(dir) do |p|
      # Read once: the old code re-read the same File handle later,
      # which returned "" since the pointer was already at EOF.
      content = File.read(p)
      chunks = content.split(/\n\n[\-_\* ]{3,}\n|\n\n(?=#+.+\n)/)
      chunks.each do |chunk|
        if chunk =~ / ##{tag}[\s$]/
          # URI.escape was removed in Ruby 3; DEFAULT_PARSER.escape is
          # the equivalent percent-encoding.
          scanned << chunk + "\n\n[" + File.basename(p, File.extname(p)) + "](file://" + URI::DEFAULT_PARSER.escape(p) + ")"
          found << ("'" + p + "'")
        end
      end
      # YAML meta data tags; guard against non-Hash / missing tags
      # (previously nil.include? crashed on most plain notes).
      yaml = load_yaml(p)
      if yaml.is_a?(Hash) && yaml['tags'].is_a?(Array) && yaml['tags'].include?(tag)
        scanned << content
        found << ("'" + p + "'")
      end
    end
    if options[:open]
      founds = found.join(" ")
      `subl -n #{founds}`
    end
    if options[:file]
      File.open(options[:file], 'w') { |file| file.puts scanned.join("\n\n---\n\n") }
      puts "Result in file: " + options[:file]
    else
      founds = "- " + found.join("\n- ")
    end
  end

  # Skeleton: announces the planned merge of all but the last tag into
  # the last one. No file rewriting is implemented yet.
  def merge(tags, options)
    source = options[:source] || Dir.pwd
    puts "Merging tags #{tags[0..-2].join(', ')} into #{tags[-1]} in: " + source
  end

  private

  # YAML.load_file that returns nil instead of raising when the file is
  # not valid YAML (most plain notes are not).
  def load_yaml(path)
    YAML.load_file(path)
  rescue Psych::SyntaxError
    nil
  end
end
# Thor command-line interface around Tagger.
class Tagh < Thor
  desc "list [-s source]", "list tags."
  # Bug fix: the option was registered as :sourcel (typo) while
  # Tagger#list reads options[:source], so -s was silently ignored.
  option :source, :aliases => "-s"
  option :sublime, :aliases => "-u"
  option :file, :aliases => "-f"
  # Minimum/maximum occurrence count for a tag to be listed.
  option :min
  option :max
  def list()
    r = Tagger.new
    puts r.list(options)
  end

  desc "find TAG [-s source]", "find items tagged TAG in [source]"
  option :source, :aliases => "-s"
  option :file, :aliases => "-f"
  option :open, :aliases => "-o"
  def find(tag)
    r = Tagger.new
    puts r.find(tag, options)
  end

  desc "merge TAGS", "merge all TAGS into the last one specified"
  option :source, :aliases => "-s"
  def merge(*tags)
    r = Tagger.new
    puts r.merge(tags, options)
  end
end
Tagh.start(ARGV)
|
module Accern
# Gem version: first pre-release beta of the 3.0.0 series. Frozen so
# the shared string cannot be mutated.
VERSION = '3.0.0.beta1'.freeze
end
gem version beta2
module Accern
# Gem version: second pre-release beta of the 3.0.0 series. Frozen so
# the shared string cannot be mutated.
VERSION = '3.0.0.beta2'.freeze
end
|
module Aether
# Gem version (semantic versioning).
VERSION = '0.1.0'
end
v0.1.1
module Aether
# Gem version (semantic versioning).
VERSION = '0.1.1'
end
|
require 'net/http'
require 'uri'
module Akismet
class Client
# The API key obtained at akismet.com.
# @return [String]
attr_reader :api_key
# The URL of the home page of the application making the request.
# @return [String]
attr_reader :home_url
# The name of the application making the request
# @return [String]
attr_reader :app_name
# The version of the application making the request
# @return [String]
attr_reader :app_version
#@!group Constructors
# @param [String] api_key
# The API key obtained at akismet.com.
# @param [String] home_url
# The URL of the home page of the application making the request.
# @option options [String] :app_name
# The name of the application making the request, e.g. "jonahb.com".
# Forms part of the User-Agent header submitted to Akismet.
# @option options [String] :app_version
# The version of the application making the request, e.g. "1.0". Forms
# part of the User-Agent header submitted to Akismet. Ignored if
# :app_name is not privded.
#
def initialize(api_key, home_url, options = {})
@api_key = api_key
@home_url = home_url
@app_name = options[ :app_name ]
@app_version = options[ :app_version ]
@http_session = nil
end
#@!group Managing Connections
# Initializes a client, opens it, yields it to the given block, and closes
# it when the block returns.
#
# @example Submit several spam reports over a single TCP connection
# # `comments` is an array of model objects; `request` is a racklike HTTP request
# Akismet::Client.open('api_key', 'http://example.com') do |client|
# for comment in comments
# client.spam request.ip, request.user_agent, text: comment.text
# end
# end
#
# @param (see #initialize)
# @option (see #initialize)
# @yieldparam [Client] client
# @return [Client]
# @see #open
#
def self.open(api_key, home_url, options = {})
  raise "Block required" unless block_given?
  # Forward options so :app_name/:app_version reach the client;
  # previously they were accepted here but silently dropped.
  client = new(api_key, home_url, options)
  client.open { yield client }
  client
end
# Opens the client, creating a new TCP connection.
#
# If a block is given, yields to the block, closes the client when the
# block returns, and returns the return value of the block. If a
# block is not given, returns self and leaves the client open, relying on
# the caller to close the client with {#close}.
#
# Note that opening and closing the client is only required if you want to
# make several calls under one TCP connection. Otherwise, you can simply
# call {#check}, {#ham}, or {#spam}, which call {#open} for you if
# necessary.
#
# Due to a peculiarity of the Akismet API, {#verify_key} always creates its
# own connection.
#
# @overload open
# Opens the client, yields to the block, and closes the client when the
# block returns.
# @yield
# A block to be called when the client is open
# @return [Object]
# The return value of the block
# @raise [StandardError]
# The client is already open
# @overload open
# @return [self]
# @raise [StandardError]
# The client is already open
#
def open
raise "Already open" if open?
@http_session = Net::HTTP.new( "#{ api_key }.rest.akismet.com", 80 )
begin
@http_session.start
block_given? ? yield : self
ensure
close if block_given?
end
end
# Closes the Client.
# @return [self]
# @see #open
#
def close
@http_session.finish if open?
@http_session = nil
self
end
# Whether the Client is open.
# @return [Boolean]
#
def open?
@http_session && @http_session.started?
end
#@!group Verifying Keys
# Checks the validity of the API key.
# @example
# Akismet::Client.new('apikey', 'http://example.com').verify_key
# @return [Boolean]
#
def verify_key
response = Net::HTTP.start('rest.akismet.com', 80) do |session|
invoke session, 'verify-key', blog: home_url, key: api_key
end
unless %w{ valid invalid }.include?(response.body)
raise_with_response response
end
response.body == 'valid'
end
#@!group Checking
# Checks whether a comment is spam and whether it is "blatant."
# @param [String] user_ip
# The IP address of the submitter of the comment.
# @param [String] user_agent
# The user agent of the web browser submitting the comment. Typically
# the HTTP_USER_AGENT CGI variable. Not to be confused with the user
# agent of the Akismet library.
# @param [Hash{Symbol => Object}] params
# @option params [String] :referrer
# The value of the HTTP_REFERER header. Note that the parameter is
# spelled with two consecutive 'r's.
# @option params [String] :post_url
# The URL of the post, article, etc. on which the comment was made
# @option params [String] :type
# 'comment', 'trackback', 'pingback', or a made-up value like
# 'registration'
# @option params [String] :text
# The text of the comment.
# @option params [String] :author
# The comment author's name
# @option params [String] :author_email
# The comment author's email address
# @option params [String] :author_url
# The comment author's home page URL
# @return [(Boolean, Boolean)]
# An array containing two booleans. The first indicates whether the
# comment is spam. The second indicates whether it is "blatant,"
# i.e. whether it can be deleted without review.
# @raise [Akismet::Error]
#
def check(user_ip, user_agent, params = {})
response = invoke_comment_method('comment-check',
user_ip,
user_agent,
params)
unless %w{ true false }.include?(response.body)
raise_with_response response
end
[
response.body == 'true',
response['X-akismet-pro-tip'] == 'discard'
]
end
alias_method :comment_check, :check
# Checks whether a comment is spam.
# @param (see #check)
# @option (see #check)
# @return [Boolean]
# @raise (see #check)
#
def spam?(user_ip, user_agent, params = {})
check(user_ip, user_agent, params)[0]
end
#@!group Reporting
# Submits a comment that has been identified as not-spam (ham). If the
# Client is not open, opens it for the duration of the call.
#
# @param (see #check)
# @option (see #check)
# @return [void]
# @raise (see #check)
#
def ham(user_ip, user_agent, params = {})
response = invoke_comment_method('submit-ham',
user_ip,
user_agent,
params)
unless response.body == 'Thanks for making the web a better place.'
raise_with_response response
end
end
alias_method :submit_ham, :ham
# Submits a comment that has been identified as spam. If the Client is not
# open, opens it for the duration of the call.
#
# @param (see #check)
# @option (see #check)
# @return [void]
# @raise (see #check)
#
def spam(user_ip, user_agent, params = {})
response = invoke_comment_method('submit-spam',
user_ip,
user_agent,
params)
unless response.body == 'Thanks for making the web a better place.'
raise_with_response response
end
end
alias_method :submit_spam, :spam
private
# Yields an HTTP session to the given block. Uses this instance's open
# session if any; otherwise opens one and closes it when the block
# returns.
# @yield [Net::HTTP]
#
def in_http_session
if open?
yield @http_session
else
open { yield @http_session }
end
end
# @param [Net::HTTPResponse] response
def raise_with_response( response )
raise Error, response['X-akismet-debug-help'] || 'Unknown error'
end
# @param [String] method_name
# @param [String] user_ip
# @param [String] user_agent
# @param [Hash] params
# @return [Net::HTTPResponse]
#
def invoke_comment_method(method_name, user_ip, user_agent, params = {})
params = params.each_with_object(Hash.new) do |(name, value), hash|
hash[PARAM_NAME_REPLACEMENTS[name] || name] = value
end
params = params.merge(blog: home_url,
user_ip: user_ip,
user_agent: user_agent)
in_http_session do |session|
invoke session, method_name, params
end
end
# @param [Net::HTTP] http_session
# A started HTTP session
# @param [String] method_name
# @return [Net::HTTPResponse]
# @raise [Akismet::Error]
# An HTTP response other than 200 is received.
#
def invoke(http_session, method_name, params = {})
response = http_session.post("/1.1/#{ method_name }",
URI.encode_www_form(params),
http_headers)
unless response.is_a?( Net::HTTPOK )
raise Error, "HTTP #{ response.code } received (expected 200)"
end
response
end
# @return [Hash]
def http_headers
{
'User-Agent' => user_agent,
'Content-Type' => 'application/x-www-form-urlencoded'
}
end
# From the Akismet documentation:
# If possible, your user agent string should always use the following
# format: Application Name/Version | Plugin Name/Version
# @return [String]
#
def user_agent
[user_agent_app, user_agent_plugin].compact.join(" | ")
end
# Returns nil if the Client was instantiated without an app_name.
# @return [String]
#
def user_agent_app
app_name && [app_name, app_version].compact.join("/")
end
# @return [String]
def user_agent_plugin
"Ruby Akismet/#{ Akismet::VERSION }"
end
PARAM_NAME_REPLACEMENTS = {
post_url: :permalink,
text: :comment_content,
type: :comment_type,
author: :comment_author,
author_url: :comment_author_url,
author_email: :comment_author_email
}
end
end
Document why Akismet::Error is raised
require 'net/http'
require 'uri'
module Akismet
class Client
# The API key obtained at akismet.com.
# @return [String]
attr_reader :api_key
# The URL of the home page of the application making the request.
# @return [String]
attr_reader :home_url
# The name of the application making the request
# @return [String]
attr_reader :app_name
# The version of the application making the request
# @return [String]
attr_reader :app_version
#@!group Constructors
# @param [String] api_key
# The API key obtained at akismet.com.
# @param [String] home_url
# The URL of the home page of the application making the request.
# @option options [String] :app_name
# The name of the application making the request, e.g. "jonahb.com".
# Forms part of the User-Agent header submitted to Akismet.
# @option options [String] :app_version
# The version of the application making the request, e.g. "1.0". Forms
# part of the User-Agent header submitted to Akismet. Ignored if
# :app_name is not privded.
#
def initialize(api_key, home_url, options = {})
@api_key = api_key
@home_url = home_url
@app_name = options[ :app_name ]
@app_version = options[ :app_version ]
@http_session = nil
end
#@!group Managing Connections
# Initializes a client, opens it, yields it to the given block, and closes
# it when the block returns.
#
# @example Submit several spam reports over a single TCP connection
# # `comments` is an array of model objects; `request` is a racklike HTTP request
# Akismet::Client.open('api_key', 'http://example.com') do |client|
# for comment in comments
# client.spam request.ip, request.user_agent, text: comment.text
# end
# end
#
# @param (see #initialize)
# @option (see #initialize)
# @yieldparam [Client] client
# @return [Client]
# @see #open
#
def self.open(api_key, home_url, options = {})
  raise "Block required" unless block_given?
  # Forward options so :app_name/:app_version reach the client;
  # previously they were accepted here but silently dropped.
  client = new(api_key, home_url, options)
  client.open { yield client }
  client
end
# Opens the client, creating a new TCP connection.
#
# If a block is given, yields to the block, closes the client when the
# block returns, and returns the return value of the block. If a
# block is not given, returns self and leaves the client open, relying on
# the caller to close the client with {#close}.
#
# Note that opening and closing the client is only required if you want to
# make several calls under one TCP connection. Otherwise, you can simply
# call {#check}, {#ham}, or {#spam}, which call {#open} for you if
# necessary.
#
# Due to a peculiarity of the Akismet API, {#verify_key} always creates its
# own connection.
#
# @overload open
# Opens the client, yields to the block, and closes the client when the
# block returns.
# @yield
# A block to be called when the client is open
# @return [Object]
# The return value of the block
# @raise [StandardError]
# The client is already open
# @overload open
# @return [self]
# @raise [StandardError]
# The client is already open
#
def open
raise "Already open" if open?
@http_session = Net::HTTP.new( "#{ api_key }.rest.akismet.com", 80 )
begin
@http_session.start
block_given? ? yield : self
ensure
close if block_given?
end
end
# Closes the Client.
# @return [self]
# @see #open
#
def close
@http_session.finish if open?
@http_session = nil
self
end
# Whether the Client is open.
# @return [Boolean]
#
def open?
@http_session && @http_session.started?
end
#@!group Verifying Keys
# Checks the validity of the API key.
# @example
# Akismet::Client.new('apikey', 'http://example.com').verify_key
# @return [Boolean]
#
def verify_key
response = Net::HTTP.start('rest.akismet.com', 80) do |session|
invoke session, 'verify-key', blog: home_url, key: api_key
end
unless %w{ valid invalid }.include?(response.body)
raise_with_response response
end
response.body == 'valid'
end
#@!group Checking
# Checks whether a comment is spam and whether it is "blatant."
# @param [String] user_ip
# The IP address of the submitter of the comment.
# @param [String] user_agent
# The user agent of the web browser submitting the comment. Typically
# the HTTP_USER_AGENT CGI variable. Not to be confused with the user
# agent of the Akismet library.
# @param [Hash{Symbol => Object}] params
# @option params [String] :referrer
# The value of the HTTP_REFERER header. Note that the parameter is
# spelled with two consecutive 'r's.
# @option params [String] :post_url
# The URL of the post, article, etc. on which the comment was made
# @option params [String] :type
# 'comment', 'trackback', 'pingback', or a made-up value like
# 'registration'
# @option params [String] :text
# The text of the comment.
# @option params [String] :author
# The comment author's name
# @option params [String] :author_email
# The comment author's email address
# @option params [String] :author_url
# The comment author's home page URL
# @return [(Boolean, Boolean)]
# An array containing two booleans. The first indicates whether the
# comment is spam. The second indicates whether it is "blatant,"
# i.e. whether it can be deleted without review.
# @raise [Akismet::Error]
# The Akismet service returned an error
#
def check(user_ip, user_agent, params = {})
response = invoke_comment_method('comment-check',
user_ip,
user_agent,
params)
unless %w{ true false }.include?(response.body)
raise_with_response response
end
[
response.body == 'true',
response['X-akismet-pro-tip'] == 'discard'
]
end
alias_method :comment_check, :check
# Checks whether a comment is spam.
# @param (see #check)
# @option (see #check)
# @return [Boolean]
# @raise (see #check)
#
def spam?(user_ip, user_agent, params = {})
check(user_ip, user_agent, params)[0]
end
#@!group Reporting
# Submits a comment that has been identified as not-spam (ham). If the
# Client is not open, opens it for the duration of the call.
#
# @param (see #check)
# @option (see #check)
# @return [void]
# @raise (see #check)
#
def ham(user_ip, user_agent, params = {})
response = invoke_comment_method('submit-ham',
user_ip,
user_agent,
params)
unless response.body == 'Thanks for making the web a better place.'
raise_with_response response
end
end
alias_method :submit_ham, :ham
# Submits a comment that has been identified as spam. If the Client is not
# open, opens it for the duration of the call.
#
# @param (see #check)
# @option (see #check)
# @return [void]
# @raise (see #check)
#
def spam(user_ip, user_agent, params = {})
response = invoke_comment_method('submit-spam',
user_ip,
user_agent,
params)
unless response.body == 'Thanks for making the web a better place.'
raise_with_response response
end
end
alias_method :submit_spam, :spam
private
# Yields an HTTP session to the given block. Uses this instance's open
# session if any; otherwise opens one and closes it when the block
# returns.
# @yield [Net::HTTP]
#
def in_http_session
if open?
yield @http_session
else
open { yield @http_session }
end
end
# @param [Net::HTTPResponse] response
def raise_with_response( response )
raise Error, response['X-akismet-debug-help'] || 'Unknown error'
end
# @param [String] method_name
# @param [String] user_ip
# @param [String] user_agent
# @param [Hash] params
# @return [Net::HTTPResponse]
#
def invoke_comment_method(method_name, user_ip, user_agent, params = {})
params = params.each_with_object(Hash.new) do |(name, value), hash|
hash[PARAM_NAME_REPLACEMENTS[name] || name] = value
end
params = params.merge(blog: home_url,
user_ip: user_ip,
user_agent: user_agent)
in_http_session do |session|
invoke session, method_name, params
end
end
# @param [Net::HTTP] http_session
# A started HTTP session
# @param [String] method_name
# @return [Net::HTTPResponse]
# @raise [Akismet::Error]
# An HTTP response other than 200 is received.
#
def invoke(http_session, method_name, params = {})
response = http_session.post("/1.1/#{ method_name }",
URI.encode_www_form(params),
http_headers)
unless response.is_a?( Net::HTTPOK )
raise Error, "HTTP #{ response.code } received (expected 200)"
end
response
end
# @return [Hash]
def http_headers
{
'User-Agent' => user_agent,
'Content-Type' => 'application/x-www-form-urlencoded'
}
end
# From the Akismet documentation:
# If possible, your user agent string should always use the following
# format: Application Name/Version | Plugin Name/Version
# @return [String]
#
def user_agent
[user_agent_app, user_agent_plugin].compact.join(" | ")
end
# Returns nil if the Client was instantiated without an app_name.
# @return [String]
#
def user_agent_app
app_name && [app_name, app_version].compact.join("/")
end
# @return [String]
def user_agent_plugin
"Ruby Akismet/#{ Akismet::VERSION }"
end
PARAM_NAME_REPLACEMENTS = {
post_url: :permalink,
text: :comment_content,
type: :comment_type,
author: :comment_author,
author_url: :comment_author_url,
author_email: :comment_author_email
}
end
end
|
module Alegra
class Request
attr_accessor :path, :token, :session
def initialize(host, path, token=nil)
@token = token
@path = path
@session = Faraday.new url: host
end
def get(url, params={})
params = JSON.generate(params)
response = @session.get do |req|
req.url "#{ @path }#{ url }"
req.headers['Content-Type'] = 'application/json'
req.headers['Accept'] = 'application/json'
req.headers['Authorization'] = "Basic #{ @token }"
end
cast_error(response) unless (response.status == 200 || response.status == 201)
return JSON.parse(response.body)
end
def post(url, params={})
params = JSON.generate(params)
response = @session.post do |req|
req.url "#{ @path }#{ url }"
req.headers['Content-Type'] = 'application/json'
req.headers['Accept'] = 'application/json'
req.headers['Authorization'] = "Basic #{ @token }"
req.body = params
end
cast_error(response) unless (response.status == 200 || response.status == 201)
return JSON.parse(response.body)
end
# Raises a StandardError describing a failed HTTP +response+.
#
# Fixes: the 403/404/405 entries all used the duplicate key 403, so
# only the last survived and 'Restricted access!'/'Not found!' were
# unreachable; the 400 message is now built only when the status is
# actually 400, so JSON.parse no longer runs (and possibly raises on a
# non-JSON body) for every other status; message typos corrected.
def cast_error(response)
  error_map = {
    500 => 'Server error! Something went wrong in the server.',
    401 => 'Authentication error!',
    402 => 'Required payment!',
    403 => 'Restricted access!',
    404 => 'Not found!',
    405 => 'Operation not allowed!',
  }
  message = if response.status == 400
              "Bad request!, #{JSON.parse(response.body)['message']}"
            else
              error_map[response.status]
            end
  raise StandardError, "Status: #{response.status}. Error: #{message}"
end
end
end
fix key names in the error map hash
module Alegra
class Request
attr_accessor :path, :token, :session
def initialize(host, path, token=nil)
@token = token
@path = path
@session = Faraday.new url: host
end
def get(url, params={})
params = JSON.generate(params)
response = @session.get do |req|
req.url "#{ @path }#{ url }"
req.headers['Content-Type'] = 'application/json'
req.headers['Accept'] = 'application/json'
req.headers['Authorization'] = "Basic #{ @token }"
end
cast_error(response) unless (response.status == 200 || response.status == 201)
return JSON.parse(response.body)
end
def post(url, params={})
params = JSON.generate(params)
response = @session.post do |req|
req.url "#{ @path }#{ url }"
req.headers['Content-Type'] = 'application/json'
req.headers['Accept'] = 'application/json'
req.headers['Authorization'] = "Basic #{ @token }"
req.body = params
end
cast_error(response) unless (response.status == 200 || response.status == 201)
return JSON.parse(response.body)
end
# Raises a StandardError describing a failed HTTP +response+.
#
# Fixes: the 400 message is now built only when the status is actually
# 400, so JSON.parse no longer runs (and possibly raises on a non-JSON
# body) for every other status; message typos corrected ("Sever error!
# Something were wrong" / "does not allowed").
def cast_error(response)
  error_map = {
    500 => 'Server error! Something went wrong in the server.',
    401 => 'Authentication error!',
    402 => 'Required payment!',
    403 => 'Restricted access!',
    404 => 'Not found!',
    405 => 'Operation not allowed!',
  }
  message = if response.status == 400
              "Bad request!, #{JSON.parse(response.body)['message']}"
            else
              error_map[response.status]
            end
  raise StandardError, "Status: #{response.status}. Error: #{message}"
end
end
end |
require 'active_support/logger'
require 'active_support/core_ext/module/delegation'
require 'request_store'
# JSON-line logger with a request-scoped context hash (RequestStore).
# Messages may be a String or a Hash (Hash may carry a :msg key plus
# arbitrary attributes); when a block is given, the block is executed
# and its start/end time and duration are logged with the message.
class AmaysimLogger
class << self
# Log +msg+ at :info severity; a given block is timed.
def info(msg = nil, _progname = nil)
log(msg, :info, block_given? ? -> { yield } : nil)
end
# Log at :debug severity.
def debug(msg = nil, _progname = nil)
log(msg, :debug, block_given? ? -> { yield } : nil)
end
# Log at :warn severity.
def warn(msg = nil, _progname = nil)
log(msg, :warn, block_given? ? -> { yield } : nil)
end
# Log at :error severity.
def error(msg = nil, _progname = nil)
log(msg, :error, block_given? ? -> { yield } : nil)
end
# Log at :unknown severity.
def unknown(msg = nil, _progname = nil)
log(msg, :unknown, block_given? ? -> { yield } : nil)
end
# Merge +params+ into the request-scoped context included in every log
# line; if the stored context is not a Hash it is replaced by +params+.
def add_to_log_context(params = {})
context_is_a_hash = log_context.is_a?(Hash)
new_params = log_context.merge(params) if context_is_a_hash
self.log_context = context_is_a_hash ? new_params : params
end
# Request-scoped context, defaulting to an empty Hash.
def log_context
RequestStore[:log_context] ||= {}
end
def log_context=(context)
RequestStore[:log_context] = context
end
# Underlying logger; writes to STDOUT and is memoized per process.
def logger
@logger ||= ActiveSupport::Logger.new(STDOUT)
end
delegate :level, :level=, to: :logger
delegate :formatter, :formatter=, to: :logger
delegate :info?, :debug?, :warn?, :error?, :unknown?, to: :logger
private
# Central dispatch: builds the JSON payload and either emits it
# directly or wraps the given callable with duration tracking.
def log(log_msg, log_level, execute)
msg, params = msg_and_attributes(log_msg)
log_params = create_log_params(msg, params)
log_with = ->(log_content) { logger.send(log_level, log_content) }
if execute
log_with_duration(log_params, log_with, execute)
else
log_with.call(format_params(log_params))
end
end
# Split a Hash message into [msg, attributes]; anything else becomes
# [String, {}]. NOTE(review): mutates a Hash argument (deletes :msg).
def msg_and_attributes(log_msg)
if log_msg.is_a?(Hash)
[log_msg.delete(:msg), log_msg]
else
[log_msg.to_s, {}]
end
end
# Timestamp string with the zone abbreviation appended.
def log_timestamp(time = Time.now)
"#{time} #{time.zone}"
end
# Payload order: message/timestamp first, then context, then per-call
# params — later keys win on collision.
def create_log_params(msg, params)
timestamped_message = { msg: msg, log_timestamp: log_timestamp }
timestamped_message.merge(log_context).merge(params)
end
# rubocop:disable Metrics/MethodLength
# Runs +execute+, recording start/end times and duration in seconds.
# Exceptions are added to the payload and re-raised; the log line is
# still emitted from the ensure block.
def log_with_duration(log_params, log_with, execute)
start_time = Time.now
log_params[:start_time] = log_timestamp(start_time)
execute.call
rescue StandardError => e
log_params[:exception] = e.class
log_params[:exception_msg] = e
raise e
ensure
end_time = Time.now
log_params[:end_time] = log_timestamp(end_time)
log_params[:duration] = (end_time - start_time)
log_with.call(format_params(log_params))
end
# Serialize the payload as a single JSON line.
def format_params(params)
params.to_json
end
end
end
HB-278 fix time zone issues
require 'active_support/logger'
require 'active_support/core_ext/module/delegation'
require 'active_support/core_ext/time/zones'
require 'request_store'
# JSON-line logger with a request-scoped context hash (RequestStore).
# Messages may be a String or a Hash (Hash may carry a :msg key plus
# arbitrary attributes); when a block is given, the block is executed
# and its start/end time and duration are logged with the message.
class AmaysimLogger
class << self
# Log +msg+ at :info severity; a given block is timed.
def info(msg = nil, _progname = nil)
log(msg, :info, block_given? ? -> { yield } : nil)
end
# Log at :debug severity.
def debug(msg = nil, _progname = nil)
log(msg, :debug, block_given? ? -> { yield } : nil)
end
# Log at :warn severity.
def warn(msg = nil, _progname = nil)
log(msg, :warn, block_given? ? -> { yield } : nil)
end
# Log at :error severity.
def error(msg = nil, _progname = nil)
log(msg, :error, block_given? ? -> { yield } : nil)
end
# Log at :unknown severity.
def unknown(msg = nil, _progname = nil)
log(msg, :unknown, block_given? ? -> { yield } : nil)
end
# Merge +params+ into the request-scoped context included in every log
# line; if the stored context is not a Hash it is replaced by +params+.
def add_to_log_context(params = {})
context_is_a_hash = log_context.is_a?(Hash)
new_params = log_context.merge(params) if context_is_a_hash
self.log_context = context_is_a_hash ? new_params : params
end
# Request-scoped context, defaulting to an empty Hash.
def log_context
RequestStore[:log_context] ||= {}
end
def log_context=(context)
RequestStore[:log_context] = context
end
# Underlying logger; writes to STDOUT and is memoized per process.
def logger
@logger ||= ActiveSupport::Logger.new(STDOUT)
end
delegate :level, :level=, to: :logger
delegate :formatter, :formatter=, to: :logger
delegate :info?, :debug?, :warn?, :error?, :unknown?, to: :logger
private
# Central dispatch: builds the JSON payload and either emits it
# directly or wraps the given callable with duration tracking.
def log(log_msg, log_level, execute)
msg, params = msg_and_attributes(log_msg)
log_params = create_log_params(msg, params)
log_with = ->(log_content) { logger.send(log_level, log_content) }
if execute
log_with_duration(log_params, log_with, execute)
else
log_with.call(format_params(log_params))
end
end
# Split a Hash message into [msg, attributes]; anything else becomes
# [String, {}]. NOTE(review): mutates a Hash argument (deletes :msg).
def msg_and_attributes(log_msg)
if log_msg.is_a?(Hash)
[log_msg.delete(:msg), log_msg]
else
[log_msg.to_s, {}]
end
end
# Timestamp string with the zone abbreviation appended; normalized to
# the Sydney time zone (needs ActiveSupport zones + tzinfo data).
def log_timestamp(time = Time.now)
time = time.in_time_zone('Sydney')
"#{time} #{time.zone}"
end
# Payload order: message/timestamp first, then context, then per-call
# params — later keys win on collision.
def create_log_params(msg, params)
timestamped_message = { msg: msg, log_timestamp: log_timestamp }
timestamped_message.merge(log_context).merge(params)
end
# rubocop:disable Metrics/MethodLength
# Runs +execute+, recording start/end times and duration in seconds.
# Exceptions are added to the payload and re-raised; the log line is
# still emitted from the ensure block.
def log_with_duration(log_params, log_with, execute)
start_time = Time.now
log_params[:start_time] = log_timestamp(start_time)
execute.call
rescue StandardError => e
log_params[:exception] = e.class
log_params[:exception_msg] = e
raise e
ensure
end_time = Time.now
log_params[:end_time] = log_timestamp(end_time)
log_params[:duration] = (end_time - start_time)
log_with.call(format_params(log_params))
end
# Serialize the payload as a single JSON line.
def format_params(params)
params.to_json
end
end
end
|
require 'active_support/logger'
require 'request_store'
# JSON structured logger with a per-request context (via RequestStore).
# Earlier revision: msg is a required argument and the standard Logger
# API (level/formatter/predicates) is not yet delegated.
class AmaysimLogger
class << self
# Each severity helper accepts a String, or a Hash with :msg plus extra
# attributes, and an optional block whose execution is timed and logged.
def info(msg, _progname = nil)
log(msg, :info, block_given? ? -> { yield } : nil)
end
def debug(msg, _progname = nil)
log(msg, :debug, block_given? ? -> { yield } : nil)
end
def warn(msg, _progname = nil)
log(msg, :warn, block_given? ? -> { yield } : nil)
end
def error(msg, _progname = nil)
log(msg, :error, block_given? ? -> { yield } : nil)
end
def unknown(msg, _progname = nil)
log(msg, :unknown, block_given? ? -> { yield } : nil)
end
# Merge params into the request-scoped context; if the stored context is
# not a Hash it is replaced outright by params.
def add_to_log_context(params = {})
context_is_a_hash = log_context.is_a?(Hash)
new_params = log_context.merge(params) if context_is_a_hash
self.log_context = context_is_a_hash ? new_params : params
end
# Per-request context stored in RequestStore, lazily initialised.
def log_context
RequestStore[:log_context] ||= {}
end
def log_context=(context)
RequestStore[:log_context] = context
end
# Memoised underlying logger writing to STDOUT.
def logger
@logger ||= ActiveSupport::Logger.new(STDOUT)
end
private
# Build the entry and emit it; with a callable, time it instead.
def log(log_msg, log_level, execute)
msg, params = msg_and_attributes(log_msg)
log_params = create_log_params(msg, params)
log_with = ->(log_content) { logger.send(log_level, log_content) }
if execute
log_with_duration(log_params, log_with, execute)
else
log_with.call(format_params(log_params))
end
end
# Split a String/Hash message into [message, attributes].
def msg_and_attributes(log_msg)
if log_msg.is_a?(Hash)
[log_msg.delete(:msg), log_msg]
else
[log_msg.to_s, {}]
end
end
def log_timestamp(time = Time.now)
"#{time} #{time.zone}"
end
def create_log_params(msg, params)
timestamped_message = { msg: msg, log_timestamp: log_timestamp }
timestamped_message.merge(log_context).merge(params)
end
# rubocop:disable Metrics/MethodLength
# Time the callable; the entry is always logged via ensure, with any
# StandardError recorded on the entry and re-raised.
def log_with_duration(log_params, log_with, execute)
start_time = Time.now
log_params[:start_time] = log_timestamp(start_time)
execute.call
rescue StandardError => e
log_params[:exception] = e.class
log_params[:exception_msg] = e
raise e
ensure
end_time = Time.now
log_params[:end_time] = log_timestamp(end_time)
log_params[:duration] = (end_time - start_time)
log_with.call(format_params(log_params))
end
# Serialise the final entry as a single JSON line.
def format_params(params)
params.to_json
end
end
end
HB-278 adapt the logger to be used in rails
require 'active_support/logger'
require 'active_support/core_ext/module/delegation'
require 'request_store'
# JSON structured logger with a per-request context (via RequestStore).
# This revision adds delegation of the standard Logger API so the class
# can be used as a drop-in Rails logger (timestamps still in local zone).
class AmaysimLogger
class << self
# Each severity helper accepts a String, or a Hash with :msg plus extra
# attributes, and an optional block whose execution is timed and logged.
def info(msg = nil, _progname = nil)
log(msg, :info, block_given? ? -> { yield } : nil)
end
def debug(msg = nil, _progname = nil)
log(msg, :debug, block_given? ? -> { yield } : nil)
end
def warn(msg = nil, _progname = nil)
log(msg, :warn, block_given? ? -> { yield } : nil)
end
def error(msg = nil, _progname = nil)
log(msg, :error, block_given? ? -> { yield } : nil)
end
def unknown(msg = nil, _progname = nil)
log(msg, :unknown, block_given? ? -> { yield } : nil)
end
# Merge params into the request-scoped context; if the stored context is
# not a Hash it is replaced outright by params.
def add_to_log_context(params = {})
context_is_a_hash = log_context.is_a?(Hash)
new_params = log_context.merge(params) if context_is_a_hash
self.log_context = context_is_a_hash ? new_params : params
end
# Per-request context stored in RequestStore, lazily initialised.
def log_context
RequestStore[:log_context] ||= {}
end
def log_context=(context)
RequestStore[:log_context] = context
end
# Memoised underlying logger writing to STDOUT.
def logger
@logger ||= ActiveSupport::Logger.new(STDOUT)
end
# Expose the standard Logger API by delegating to the logger instance.
delegate :level, :level=, to: :logger
delegate :formatter, :formatter=, to: :logger
delegate :info?, :debug?, :warn?, :error?, :unknown?, to: :logger
private
# Build the entry and emit it; with a callable, time it instead.
def log(log_msg, log_level, execute)
msg, params = msg_and_attributes(log_msg)
log_params = create_log_params(msg, params)
log_with = ->(log_content) { logger.send(log_level, log_content) }
if execute
log_with_duration(log_params, log_with, execute)
else
log_with.call(format_params(log_params))
end
end
# Split a String/Hash message into [message, attributes].
def msg_and_attributes(log_msg)
if log_msg.is_a?(Hash)
[log_msg.delete(:msg), log_msg]
else
[log_msg.to_s, {}]
end
end
def log_timestamp(time = Time.now)
"#{time} #{time.zone}"
end
def create_log_params(msg, params)
timestamped_message = { msg: msg, log_timestamp: log_timestamp }
timestamped_message.merge(log_context).merge(params)
end
# rubocop:disable Metrics/MethodLength
# Time the callable; the entry is always logged via ensure, with any
# StandardError recorded on the entry and re-raised.
def log_with_duration(log_params, log_with, execute)
start_time = Time.now
log_params[:start_time] = log_timestamp(start_time)
execute.call
rescue StandardError => e
log_params[:exception] = e.class
log_params[:exception_msg] = e
raise e
ensure
end_time = Time.now
log_params[:end_time] = log_timestamp(end_time)
log_params[:duration] = (end_time - start_time)
log_with.call(format_params(log_params))
end
# Serialise the final entry as a single JSON line.
def format_params(params)
params.to_json
end
end
end
|
# Gem version constant; frozen to prevent accidental mutation.
module Animal
VERSION = '0.2.0'.freeze
end
Version bump
# Gem version constant; frozen to prevent accidental mutation.
module Animal
VERSION = '0.2.1'.freeze
end
|
require 'selenium-webdriver'
# Wraps a remote Selenium/Appium WebDriver session.
module Appium
  class Session
    attr_reader :driver

    # host/port locate the Appium server; capabilities must respond to
    # #to_hash; timeout (seconds) bounds the HTTP client's requests.
    def initialize(host, port, capabilities, timeout = 30)
      client = Selenium::WebDriver::Remote::Http::Default.new
      client.timeout = timeout
      @driver = Selenium::WebDriver.for(:remote,
        desired_capabilities: capabilities.to_hash,
        url: "http://#{host}:#{port}#{Appium.root_path}",
        http_client: client
      )
    end

    # Quit the driver, swallowing driver errors so teardown never raises.
    # (Replaces the inline `rescue nil` modifier with an explicit rescue
    # of StandardError — identical semantics, clearer intent.)
    def terminate
      @driver.quit
    rescue StandardError
      nil
    end
  end
end
Keep connection errors encapsulated.
require 'selenium-webdriver'
# Wraps a remote Selenium/Appium WebDriver session.
module Appium
  class Session
    attr_reader :driver

    # host/port locate the Appium server; capabilities must respond to
    # #to_hash; timeout (seconds) bounds the HTTP client's requests.
    # A refused TCP connection is wrapped in our own ConnectionError so
    # callers don't depend on Errno details.
    def initialize(host, port, capabilities, timeout = 30)
      client = Selenium::WebDriver::Remote::Http::Default.new
      client.timeout = timeout
      @driver = Selenium::WebDriver.for(:remote,
        desired_capabilities: capabilities.to_hash,
        url: "http://#{host}:#{port}#{Appium.root_path}",
        http_client: client
      )
    rescue Errno::ECONNREFUSED
      raise ConnectionError
    end

    # Quit the driver, swallowing driver errors so teardown never raises.
    # (Replaces the inline `rescue nil` modifier with an explicit rescue
    # of StandardError — identical semantics, clearer intent.)
    def terminate
      @driver.quit
    rescue StandardError
      nil
    end

    # Raised when the Appium server refuses the connection.
    class ConnectionError < RuntimeError; end
  end
end
|
require 'open3'
require 'nokogiri'
# Wrapper around the aqbanking/aqhbci command line tools.
module AqBanking
  # Represents an HBCI bank user known to aqbanking.
  class User
    attr_accessor :username, :bank, :user, :server, :hbciversion, :context

    # Requires :username, :bank and :user; :server, :hbciversion and
    # :context are optional.
    def initialize(options = {})
      User.complain_missing_parameters(:username, :bank, :user, options)
      @username = options[:username]
      @bank = options[:bank]
      @user = options[:user]
      @server = options[:server]
      @hbciversion = options[:hbciversion]
      @context = options[:context]
    end

    class << self
      # Register a new user with aqhbci and, when a :pin is supplied,
      # immediately fetch the system id for it. Returns the new User.
      def add(options = {})
        pin = options.delete(:pin)
        options = {
          tokentype: 'pintan',
          hbciversion: 300,
          context: '1'
        }.merge(options)
        complain_missing_parameters(:username, :bank, :user, :server, options)
        Commander.aqhbci('adduser', options)
        user = User.new(options)
        if user && pin
          # Fixed: the block takes no argument (the unused |f| param was
          # removed); with_pin only needs to set up the pin file.
          Commander.with_pin(user, pin) do
            aqhbci('getsysid', user: options[:user])
          end
        end
        user
      end

      # Delete the user (and its accounts); returns true on success.
      def remove(options = {})
        fail 'Missing options: user' unless options[:user]
        _, status = Commander.aqhbci('deluser', user: options[:user], with_accounts: true)
        status.success?
      end

      # List all users known to aqbanking as AqBanking::User instances.
      def list
        output, _ = Commander.aqhbci('listusers', xml: true)
        results = []
        doc = Nokogiri::XML(output)
        doc.xpath('//user').each do |node|
          user = AqBanking::User.new(username: node.xpath('//UserName').first.content,
                                     bank: node.xpath('//BankCode').first.content,
                                     user: node.xpath('//UserId').first.content)
          results << user
        end
        results
      end

      # Raise unless every named parameter is present (truthy) in the
      # trailing options hash.
      def complain_missing_parameters(*args)
        hash = args.last.is_a?(Hash) ? args.pop : {}
        missing = []
        args.each do |param|
          missing << param unless hash[param]
        end
        fail "Missing options: #{missing.join(', ')}" unless missing.empty?
      end
    end
  end
end
Remove block parameter
require 'open3'
require 'nokogiri'
# Wrapper around the aqbanking/aqhbci command line tools.
module AqBanking
  # Represents an HBCI bank user known to aqbanking.
  class User
    attr_accessor :username, :bank, :user, :server, :hbciversion, :context

    # Requires :username, :bank and :user in the options hash;
    # :server, :hbciversion and :context are optional.
    def initialize(options = {})
      User.complain_missing_parameters(:username, :bank, :user, options)
      @username = options[:username]
      @bank = options[:bank]
      @user = options[:user]
      @server = options[:server]
      @hbciversion = options[:hbciversion]
      @context = options[:context]
    end

    class << self
      # Register a new user with aqhbci; when a :pin is supplied, fetch
      # the system id for it right away. Returns the new User.
      def add(options = {})
        pin = options.delete(:pin)
        defaults = { tokentype: 'pintan', hbciversion: 300, context: '1' }
        options = defaults.merge(options)
        complain_missing_parameters(:username, :bank, :user, :server, options)
        Commander.aqhbci('adduser', options)
        user = User.new(options)
        if user && pin
          Commander.with_pin(user, pin) do
            aqhbci('getsysid', user: options[:user])
          end
        end
        user
      end

      # Delete the user (and its accounts); returns true on success.
      def remove(options = {})
        fail 'Missing options: user' unless options[:user]
        _, status = Commander.aqhbci('deluser', user: options[:user], with_accounts: true)
        status.success?
      end

      # All users known to aqbanking, as AqBanking::User instances.
      def list
        output, _ = Commander.aqhbci('listusers', xml: true)
        Nokogiri::XML(output).xpath('//user').map do |node|
          AqBanking::User.new(username: node.xpath('//UserName').first.content,
                              bank: node.xpath('//BankCode').first.content,
                              user: node.xpath('//UserId').first.content)
        end
      end

      # Raise unless every named parameter is present (truthy) in the
      # trailing options hash.
      def complain_missing_parameters(*args)
        hash = args.last.is_a?(Hash) ? args.pop : {}
        missing = args.reject { |param| hash[param] }
        fail "Missing options: #{missing.join(', ')}" unless missing.empty?
      end
    end
  end
end
|
module Authie
class Session < ActiveRecord::Base
# Define some errors which may be used
class InactiveSession < Error; end
class ExpiredSession < Error; end
class BrowserMismatch < Error; end
class NoParentSessionForRevert < Error; end
# Set table name
self.table_name = "authie_sessions"
# Relationships
belongs_to :user, {:polymorphic => true}.merge(Authie.config.user_relationship_options)
belongs_to :parent, :class_name => "Authie::Session"
# Scopes
scope :active, -> { where(:active => true) }
# NOTE(review): despite its name this scope orders by last_activity_at DESC.
scope :asc, -> { order(:last_activity_at => :desc) }
# Attributes
serialize :data, Hash
attr_accessor :controller
# Generate a random token and, when attached to a controller, capture the
# user agent and push the session cookie to the browser.
before_create do
self.token = SecureRandom.base64(32)
if controller
self.user_agent = controller.request.user_agent
set_cookie!
end
end
# Remove the session cookie when the session row is destroyed.
before_destroy do
cookies.delete(:user_session) if controller
end
# This method should be called each time a user performs an
# action while authenticated with this session.
def touch!
self.check_security!
self.last_activity_at = Time.now
self.last_activity_ip = controller.request.ip
self.last_activity_path = controller.request.path
self.requests += 1
self.save!
end
# Sets the cookie on the associated controller.
def set_cookie!
cookies[:user_session] = {
:value => token,
:secure => controller.request.ssl?,
:httponly => true,
:expires => self.expires_at
}
end
# Check the security of the session to ensure it can be used.
# Invalidates the session and raises BrowserMismatch, InactiveSession or
# ExpiredSession when a check fails; no-op without a controller.
def check_security!
if controller
if cookies[:browser_id] != self.browser_id
invalidate!
raise BrowserMismatch, "Browser ID mismatch"
end
unless self.active?
invalidate!
raise InactiveSession, "Session is no longer active"
end
if self.expired?
invalidate!
raise ExpiredSession, "Persistent session has expired"
end
if self.inactive?
invalidate!
raise InactiveSession, "Non-persistent session has expired"
end
end
end
# Has this persistent session expired?
def expired?
self.expires_at &&
self.expires_at < Time.now
end
# Has a non-persistent session become inactive?
def inactive?
self.expires_at.nil? &&
self.last_activity_at &&
self.last_activity_at < Authie.config.session_inactivity_timeout.ago
end
# Allow this session to persist rather than expiring at the end of the
# current browser session
def persist!
self.expires_at = Authie.config.persistent_session_length.from_now
self.save!
set_cookie!
end
# Is this a persistent session?
def persistent?
!!expires_at
end
# Activate an old session
def activate!
self.active = true
self.save!
end
# Mark this session as invalid
def invalidate!
self.active = false
self.save!
if controller
cookies.delete(:user_session)
end
end
# Set some additional data in this session
def set(key, value)
self.data ||= {}
self.data[key.to_s] = value
self.save!
end
# Get some additional data from this session
def get(key)
(self.data ||= {})[key.to_s]
end
# Invalidate all sessions but this one for this user
def invalidate_others!
self.class.where.not(:id => self.id).where(:user => self.user).each do |s|
s.invalidate!
end
end
# Note that we have just seen the user enter their password.
def see_password!
self.password_seen_at = Time.now
self.save!
end
# Have we seen the user's password recently in this session?
def recently_seen_password?
!!(self.password_seen_at && self.password_seen_at >= Authie.config.sudo_session_timeout.ago)
end
# Is two factor authentication required for this request?
def two_factored?
!!(two_factored_at || self.parent_id)
end
# Mark this request as two factor authorised
def mark_as_two_factored!
self.two_factored_at = Time.now
self.two_factored_ip = controller.request.ip
self.save!
end
# Create a new session for impersonating for the given user
def impersonate!(user)
self.class.start(controller, :user => user, :parent => self)
end
# Revert back to the parent session
def revert_to_parent!
if self.parent
self.invalidate!
self.parent.activate!
self.parent.controller = self.controller
self.parent.set_cookie!
self.parent
else
raise NoParentSessionForRevert, "Session does not have a parent therefore cannot be reverted."
end
end
# Find a session from the database for the given controller instance.
# Returns a session object or :none if no session is found.
def self.get_session(controller)
cookies = controller.send(:cookies)
if cookies[:user_session] && session = self.active.where(:token => cookies[:user_session]).first
session.controller = controller
session
else
:none
end
end
# Create a new session and return the newly created session object.
# Any other sessions for the browser will be invalidated.
# NOTE(review): uses save (not save!) so a validation failure returns an
# unpersisted session silently — confirm this is intended.
def self.start(controller, params = {})
cookies = controller.send(:cookies)
self.where(:browser_id => cookies[:browser_id]).each(&:invalidate!)
session = self.new(params)
session.controller = controller
session.browser_id = cookies[:browser_id]
session.login_at = Time.now
session.login_ip = controller.request.ip
session.save
session
end
# Cleanup any old sessions.
def self.cleanup
self.active.where("expires_at IS NULL AND last_activity_at < ?", Authie.config.session_inactivity_timeout.ago).each(&:invalidate!)
end
private
# Return all cookies on the associated controller
def cookies
controller.send(:cookies)
end
end
end
ensure that user agent strings aren't too big for the database column
module Authie
class Session < ActiveRecord::Base
# Define some errors which may be used
class InactiveSession < Error; end
class ExpiredSession < Error; end
class BrowserMismatch < Error; end
class NoParentSessionForRevert < Error; end
# Set table name
self.table_name = "authie_sessions"
# Relationships
belongs_to :user, {:polymorphic => true}.merge(Authie.config.user_relationship_options)
belongs_to :parent, :class_name => "Authie::Session"
# Scopes
scope :active, -> { where(:active => true) }
# NOTE(review): despite its name this scope orders by last_activity_at DESC.
scope :asc, -> { order(:last_activity_at => :desc) }
# Attributes
serialize :data, Hash
attr_accessor :controller
# Truncate overlong user agent strings so they fit the 255-character
# database column.
before_validation do
if self.user_agent.is_a?(String)
self.user_agent = self.user_agent[0,255]
end
end
# Generate a random token and, when attached to a controller, capture the
# user agent and push the session cookie to the browser.
before_create do
self.token = SecureRandom.base64(32)
if controller
self.user_agent = controller.request.user_agent
set_cookie!
end
end
# Remove the session cookie when the session row is destroyed.
before_destroy do
cookies.delete(:user_session) if controller
end
# This method should be called each time a user performs an
# action while authenticated with this session.
def touch!
self.check_security!
self.last_activity_at = Time.now
self.last_activity_ip = controller.request.ip
self.last_activity_path = controller.request.path
self.requests += 1
self.save!
end
# Sets the cookie on the associated controller.
def set_cookie!
cookies[:user_session] = {
:value => token,
:secure => controller.request.ssl?,
:httponly => true,
:expires => self.expires_at
}
end
# Check the security of the session to ensure it can be used.
# Invalidates the session and raises BrowserMismatch, InactiveSession or
# ExpiredSession when a check fails; no-op without a controller.
def check_security!
if controller
if cookies[:browser_id] != self.browser_id
invalidate!
raise BrowserMismatch, "Browser ID mismatch"
end
unless self.active?
invalidate!
raise InactiveSession, "Session is no longer active"
end
if self.expired?
invalidate!
raise ExpiredSession, "Persistent session has expired"
end
if self.inactive?
invalidate!
raise InactiveSession, "Non-persistent session has expired"
end
end
end
# Has this persistent session expired?
def expired?
self.expires_at &&
self.expires_at < Time.now
end
# Has a non-persistent session become inactive?
def inactive?
self.expires_at.nil? &&
self.last_activity_at &&
self.last_activity_at < Authie.config.session_inactivity_timeout.ago
end
# Allow this session to persist rather than expiring at the end of the
# current browser session
def persist!
self.expires_at = Authie.config.persistent_session_length.from_now
self.save!
set_cookie!
end
# Is this a persistent session?
def persistent?
!!expires_at
end
# Activate an old session
def activate!
self.active = true
self.save!
end
# Mark this session as invalid
def invalidate!
self.active = false
self.save!
if controller
cookies.delete(:user_session)
end
end
# Set some additional data in this session
def set(key, value)
self.data ||= {}
self.data[key.to_s] = value
self.save!
end
# Get some additional data from this session
def get(key)
(self.data ||= {})[key.to_s]
end
# Invalidate all sessions but this one for this user
def invalidate_others!
self.class.where.not(:id => self.id).where(:user => self.user).each do |s|
s.invalidate!
end
end
# Note that we have just seen the user enter their password.
def see_password!
self.password_seen_at = Time.now
self.save!
end
# Have we seen the user's password recently in this session?
def recently_seen_password?
!!(self.password_seen_at && self.password_seen_at >= Authie.config.sudo_session_timeout.ago)
end
# Is two factor authentication required for this request?
def two_factored?
!!(two_factored_at || self.parent_id)
end
# Mark this request as two factor authorised
def mark_as_two_factored!
self.two_factored_at = Time.now
self.two_factored_ip = controller.request.ip
self.save!
end
# Create a new session for impersonating for the given user
def impersonate!(user)
self.class.start(controller, :user => user, :parent => self)
end
# Revert back to the parent session
def revert_to_parent!
if self.parent
self.invalidate!
self.parent.activate!
self.parent.controller = self.controller
self.parent.set_cookie!
self.parent
else
raise NoParentSessionForRevert, "Session does not have a parent therefore cannot be reverted."
end
end
# Find a session from the database for the given controller instance.
# Returns a session object or :none if no session is found.
def self.get_session(controller)
cookies = controller.send(:cookies)
if cookies[:user_session] && session = self.active.where(:token => cookies[:user_session]).first
session.controller = controller
session
else
:none
end
end
# Create a new session and return the newly created session object.
# Any other sessions for the browser will be invalidated.
# NOTE(review): uses save (not save!) so a validation failure returns an
# unpersisted session silently — confirm this is intended.
def self.start(controller, params = {})
cookies = controller.send(:cookies)
self.where(:browser_id => cookies[:browser_id]).each(&:invalidate!)
session = self.new(params)
session.controller = controller
session.browser_id = cookies[:browser_id]
session.login_at = Time.now
session.login_ip = controller.request.ip
session.save
session
end
# Cleanup any old sessions.
def self.cleanup
self.active.where("expires_at IS NULL AND last_activity_at < ?", Authie.config.session_inactivity_timeout.ago).each(&:invalidate!)
end
private
# Return all cookies on the associated controller
def cookies
controller.send(:cookies)
end
end
end
|
# Gem version constant; frozen for consistency with the other version
# modules in this codebase and to prevent accidental mutation.
module Authie
  VERSION = '3.1.3'.freeze
end
bump to 3.1.4
# Gem version constant; frozen for consistency with the other version
# modules in this codebase and to prevent accidental mutation.
module Authie
  VERSION = '3.1.4'.freeze
end
|
# Gem version constant; frozen for consistency with the other version
# modules in this codebase and to prevent accidental mutation.
module Authie
  VERSION = '1.0.1'.freeze
end
bump version to 1.0.2
# Gem version constant; frozen for consistency with the other version
# modules in this codebase and to prevent accidental mutation.
module Authie
  VERSION = '1.0.2'.freeze
end
|
# Gem version constant; frozen for consistency with the other version
# modules in this codebase and to prevent accidental mutation.
module Author
  VERSION = "1.1.6.alpha".freeze
end
Bump :gem: version 1.2.0.alpha
# Gem version constant; frozen for consistency with the other version
# modules in this codebase and to prevent accidental mutation.
module Author
  VERSION = "1.2.0.alpha".freeze
end
|
require 'awful/dynamodb_streams'
module Awful
# Short-form CLI alias: `dyn` forwards to the Awful::DynamoDB Thor CLI.
module Short
def dyn(*args)
Awful::DynamoDB.new.invoke(*args)
end
end
# Thor CLI for inspecting and manipulating DynamoDB tables.
class DynamoDB < Cli
# Map table status to a display colour (see #color); unknown statuses
# fall back to yellow.
COLORS = {
CREATING: :yellow,
UPDATING: :yellow,
DELETING: :red,
ACTIVE: :green,
}
# Helpers below are not exposed as Thor commands.
no_commands do
def color(string)
set_color(string, COLORS.fetch(string.to_sym, :yellow))
end
## return array of tables names matching name
def all_matching_tables(name)
tables = []
last_evaluated = nil
loop do # get 100 at a time from sdk
response = dynamodb.list_tables(exclusive_start_table_name: last_evaluated)
matching = response.table_names.select do |table|
table.match(name)
end
tables = tables + matching
last_evaluated = response.last_evaluated_table_name
break unless last_evaluated
end
tables
end
end
desc 'ls [PATTERN]', 'list dynamodb tables [matching PATTERN]'
method_option :long, aliases: '-l', default: false, desc: 'Long listing'
# List table names, or full details per table with --long.
def ls(name = /./)
tables = all_matching_tables(name)
if options[:long]
tables.map do |table|
dynamodb.describe_table(table_name: table).table
end.tap do |list|
print_table list.map { |t| [ t.table_name, color(t.table_status), t.item_count, t.table_size_bytes, t.creation_date_time ] }
end
else
tables.tap { |t| puts t }
end
end
desc 'dump NAME', 'dump table with name'
# Print the full table description as YAML for every matching table.
def dump(name)
all_matching_tables(name).map do |table_name|
dynamodb.describe_table(table_name: table_name).table.to_hash.tap do |table|
puts YAML.dump(stringify_keys(table))
end
end
end
desc 'status NAME', 'get status of NAMEd table'
def status(name)
dynamodb.describe_table(table_name: name).table.table_status.tap(&method(:puts))
end
desc 'key NAME', 'get hash or range key of named table'
method_option :type, aliases: '-t', type: :string, default: :hash, desc: 'type of key to get: hash or range'
# NOTE(review): `.output` is not a standard String method — presumably a
# project extension; confirm it exists (or whether .tap was intended).
def key(name)
dynamodb.describe_table(table_name: name).table.key_schema.find do |schema|
schema.key_type == options[:type].to_s.upcase
end.attribute_name.output(&method(:puts))
end
desc 'create_table NAME', 'create table with NAME'
# Build a create_table request from a config file, keeping only the keys
# the SDK accepts for the table, its LSIs and GSIs.
def create_table(name, file = nil)
opt = load_cfg(options, file)
params = only_keys_matching(opt, %i[attribute_definitions key_schema])
params[:table_name] = name
params[:provisioned_throughput] = only_keys_matching(opt[:provisioned_throughput], %i[read_capacity_units write_capacity_units])
## scrub unwanted keys from LSIs
if opt.has_key?(:local_secondary_indexes)
params[:local_secondary_indexes] = opt[:local_secondary_indexes].map do |lsi|
only_keys_matching(lsi, %i[index_name key_schema projection])
end
end
## scrub unwanted keys from GSIs
if opt.has_key?(:global_secondary_indexes)
params[:global_secondary_indexes] = opt[:global_secondary_indexes].map do |gsi|
only_keys_matching(gsi, %i[index_name key_schema projection]).tap do |g|
if gsi[:provisioned_throughput]
g[:provisioned_throughput] = only_keys_matching(gsi[:provisioned_throughput], %i[read_capacity_units write_capacity_units])
end
end
end
end
dynamodb.create_table(params)
end
desc 'throughput NAME', 'get or update provisioned throughput for table NAME'
method_option :read_capacity_units, aliases: '-r', type: :numeric, default: nil, desc: 'Read capacity units'
method_option :write_capacity_units, aliases: '-w', type: :numeric, default: nil, desc: 'Write capacity units'
method_option :gsi, aliases: '-g', type: :array, default: [], desc: 'GSIs to update'
method_option :all, type: :boolean, default: false, desc: 'Update all GSIs for the table'
method_option :table, type: :boolean, default: true, desc: 'Update througput on table'
# With no capacity options this just prints current throughput; otherwise
# it builds and sends an update_table request for the table and/or GSIs.
def throughput(name)
table = dynamodb.describe_table(table_name: name).table
## current is hash of current provisioned throughput
current = table.provisioned_throughput.to_h
## loop-safe version of GSIs (in case nil)
global_secondary_indexes = table.global_secondary_indexes || []
## get throughput for each GSI
global_secondary_indexes.each do |gsi|
current[gsi.index_name] = gsi.provisioned_throughput.to_h
end
## if no updates requested, just print throughput and return table details
unless options[:read_capacity_units] or options[:write_capacity_units]
puts YAML.dump(stringify_keys(current))
return table
end
## parameters for update request
params = { table_name: name }
## add table throughput unless told not to
params[:provisioned_throughput] = {
read_capacity_units: options[:read_capacity_units] || current[:read_capacity_units],
write_capacity_units: options[:write_capacity_units] || current[:write_capacity_units]
} if options[:table]
## list of requested GSIs, or all for this table
gsis = options[:gsi]
gsis = global_secondary_indexes.map(&:index_name) if options[:all]
params[:global_secondary_index_updates] = gsis.map do |gsi|
{
update: {
index_name: gsi,
provisioned_throughput: {
read_capacity_units: options[:read_capacity_units] || current[gsi][:read_capacity_units],
write_capacity_units: options[:write_capacity_units] || current[gsi][:write_capacity_units]
}
}
}
end
## make the update request
params.reject! { |_,v| v.empty? } # sdk hates empty global_secondary_index_updates
dynamodb.update_table(params)
end
desc 'enable_streams NAME', 'enable/disable streams on the table'
method_option :stream_view_type, aliases: '-t', default: 'NEW_IMAGE', desc: 'view type for the stream (NEW_IMAGE, OLD_IMAGE, NEW_AND_OLD_IMAGES, KEYS_ONLY)'
method_option :disable, aliases: '-d', default: false, desc: 'disable the stream'
def enable_streams(name)
stream_specification = {stream_enabled: !options[:disable]}
stream_specification.merge!(stream_view_type: options[:stream_view_type].upcase) unless options[:disable]
dynamodb.update_table(table_name: name, stream_specification: stream_specification)
end
desc 'delete NAME', 'delete table with NAME'
# Interactive: the user must type the exact table name to confirm.
def delete_table(name)
confirmation = ask("to delete #{name} and all its data, type the name of table to delete:", :yellow)
if confirmation == name
say("deleting table #{name}")
dynamodb.delete_table(table_name: name)
else
say("confirmation failed for #{name}", :red)
end
end
desc 'copy [region/]SRC [region/]DEST', 'copy data from table region/SRC to table region/DEST'
method_option :dots, aliases: '-d', type: :boolean, default: false, desc: 'Show dots for put_item progress'
method_option :no_clobber, aliases: '-n', type: :boolean, default: false, desc: 'Do not overwrite existing items'
# Scan the source table page by page and put each item to the
# destination, optionally skipping items whose primary key exists.
def copy(src, dst)
src_table, src_region = src.split('/').reverse # parse region/table into [table, region]
dst_table, dst_region = dst.split('/').reverse
## clients are potentially for different regions
src_client = Aws::DynamoDB::Client.new({region: src_region}.reject{|_,v| v.nil?})
dst_client = Aws::DynamoDB::Client.new({region: dst_region}.reject{|_,v| v.nil?})
## params for put_item call
params = {table_name: dst_table}
## add condition not to overwrite existing primary keys (hash or composite hash AND range)
if options[:no_clobber]
keys = dst_client.describe_table(table_name: dst_table).table.key_schema.map(&:attribute_name)
params.merge!(condition_expression: keys.map{|key| "attribute_not_exists(#{key})"}.join(' AND '))
end
## lame progress indicator, pass true for put, false for skip
dots = options[:dots] ? ->(x){print x ? '.' : 'x'} : ->(_){}
## loop on each batch of scanned items
exclusive_start_key = nil
loop do
r = src_client.scan(table_name: src_table, exclusive_start_key: exclusive_start_key, return_consumed_capacity: 'INDEXES')
puts "[#{Time.now}] [#{src_table}] scanned:#{r.count} key:#{r.last_evaluated_key || 'nil'}"
## loop items and put to destination
put = skipped = 0
r.items.each do |item|
begin
dst_client.put_item(params.merge(item: item))
put += 1
dots.call(true)
rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException #item key exists
skipped += 1
dots.call(false)
end
end
print "\n" if options[:dots]
puts "[#{Time.now}] [#{dst_table}] put:#{put} skipped:#{skipped}"
## loop if there are more keys to scan
exclusive_start_key = r.last_evaluated_key
break unless exclusive_start_key
end
end
desc 'scan NAME', 'scan table with NAME'
method_option :output, aliases: '-o', type: :string, default: nil, desc: 'Output filename (default: stdout)'
# Dump every item as one JSON line, following pagination.
# NOTE(review): the exclusive_start_key parameter is immediately
# overwritten with nil below, so passing it has no effect — confirm.
def scan(name, exclusive_start_key = nil)
fd = options[:output] ? File.open(options[:output], 'w') : $stdout.dup # open output file or stdout
exclusive_start_key = nil
loop do
r = dynamodb_simple.scan('TableName' => name, 'ExclusiveStartKey' => exclusive_start_key)
r['Items'].each do |item|
fd.puts JSON.generate(item)
end
exclusive_start_key = r['LastEvaluatedKey']
break unless exclusive_start_key
end
fd.close
end
desc 'query NAME', 'query table with NAME'
method_option :hash_key, aliases: '-k', type: :string, default: nil, desc: 'Hash key'
method_option :hash_key_value, aliases: '-v', type: :string, default: nil, desc: 'Hash key value'
method_option :output, aliases: '-o', type: :string, default: nil, desc: 'Output filename (default: stdout)'
method_option :count, aliases: '-c', type: :boolean, default: false, desc: 'Return count instead of items'
# Query by hash key, emitting JSON lines (or just a count with --count).
# NOTE(review): as in #scan, the exclusive_start_key parameter is
# immediately overwritten with nil — confirm it is intentionally unused.
def query(name, exclusive_start_key = nil)
fd = options[:output] ? File.open(options[:output], 'w') : $stdout.dup # open output file or stdout
exclusive_start_key = nil
count = 0
loop do
r = dynamodb_simple.query(
'TableName' => name,
'ExclusiveStartKey' => exclusive_start_key,
'Select' => options[:count] ? 'COUNT' : 'ALL_ATTRIBUTES',
'KeyConditionExpression' => "#{options[:hash_key]} = :hash_key_value",
'ExpressionAttributeValues' => { ":hash_key_value" => { S: options[:hash_key_value] } }
)
count += r.fetch('Count', 0)
r.fetch('Items', []).each do |item|
fd.puts JSON.generate(item)
end
exclusive_start_key = r['LastEvaluatedKey']
break unless exclusive_start_key
end
fd.close
puts count if options[:count]
end
desc 'put_items NAME', 'puts json items into the table with NAME'
method_option :no_clobber, aliases: '-n', type: :boolean, default: false, desc: 'Do not overwrite existing items'
# Read JSON lines from a file or stdin and put each as an item.
def put_items(name, file = nil)
params = {'TableName' => name}
## set a condition not to overwrite items with existing primary key(s)
if options[:no_clobber]
keys = dynamodb.describe_table(table_name: name).table.key_schema.map(&:attribute_name)
params.merge!('ConditionExpression' => keys.map{|key| "attribute_not_exists(#{key})"}.join(' AND '))
end
## input data
io = (file and File.open(file)) || ((not $stdin.tty?) and $stdin)
put_count = 0
skip_count = 0
io.each_line do |line|
begin
dynamodb_simple.put_item(params.merge('Item' => JSON.parse(line)))
put_count += 1
rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException #item key exists
skip_count += 1
end
end
## return counts
[put_count, skip_count].tap do |put, skip|
puts "put #{put} items, skipped #{skip} items"
end
end
desc 'batch_write NAME', 'batch write items to table NAME'
# Demo command: writes 25 synthetic items in a single batch request.
def batch_write(name)
items = (1..25).map do |n|
{
put_request: {
item: {
"store_id" => "store#{n}",
"object_id" => "object#{n}",
"object_value" => "value#{n}"
}
}
}
end
p items
r = dynamodb.batch_write_item(request_items: {name => items})
p r
end
## see lambda_events.rb for subcommands
desc 'streams SUBCOMMANDS', 'subcommands for dynamodb streams'
subcommand 'streams', Streams
end
end
add --count option to dyn scan task
require 'awful/dynamodb_streams'
module Awful
module Short
  # Convenience shortcut so an awful console/session can run the DynamoDB
  # CLI as e.g. `dyn :ls, ['mytable']` instead of instantiating the class.
  def dyn(*args)
    Awful::DynamoDB.new.invoke(*args)
  end
end
class DynamoDB < Cli
COLORS = {
CREATING: :yellow,
UPDATING: :yellow,
DELETING: :red,
ACTIVE: :green,
}
no_commands do
  # Colorize a table-status string via the COLORS map (unknown -> yellow).
  def color(string)
    set_color(string, COLORS.fetch(string.to_sym, :yellow))
  end

  ## Return an array of all table names matching +name+, following the
  ## SDK's pagination (list_tables yields at most 100 names per call).
  def all_matching_tables(name)
    tables = []
    last_seen = nil
    loop do
      page = dynamodb.list_tables(exclusive_start_table_name: last_seen)
      tables.concat(page.table_names.select { |t| t.match(name) })
      last_seen = page.last_evaluated_table_name
      break unless last_seen
    end
    tables
  end
end
desc 'ls [PATTERN]', 'list dynamodb tables [matching PATTERN]'
method_option :long, aliases: '-l', default: false, desc: 'Long listing'
# List table names matching PATTERN (default: all). With --long, print a
# table of name, colorized status, item count, byte size and creation time
# and return the table descriptions; otherwise print and return the names.
def ls(name = /./)
  tables = all_matching_tables(name)
  unless options[:long]
    puts tables
    return tables
  end
  descriptions = tables.map { |t| dynamodb.describe_table(table_name: t).table }
  rows = descriptions.map do |t|
    [t.table_name, color(t.table_status), t.item_count, t.table_size_bytes, t.creation_date_time]
  end
  print_table rows
  descriptions
end
desc 'dump NAME', 'dump table with name'
# Print a YAML description of every table matching NAME; returns the array
# of table-description hashes.
def dump(name)
  all_matching_tables(name).map do |table_name|
    description = dynamodb.describe_table(table_name: table_name).table.to_hash
    puts YAML.dump(stringify_keys(description))
    description
  end
end
desc 'status NAME', 'get status of NAMEd table'
# Print and return the table's status string (e.g. ACTIVE, CREATING).
def status(name)
  dynamodb.describe_table(table_name: name).table.table_status.tap do |table_status|
    puts table_status
  end
end
desc 'key NAME', 'get hash or range key of named table'
method_option :type, aliases: '-t', type: :string, default: :hash, desc: 'type of key to get: hash or range'
# Print and return the attribute name of the table's HASH (default) or
# RANGE key, selected with --type.
def key(name)
  dynamodb.describe_table(table_name: name).table.key_schema.find do |schema|
    schema.key_type == options[:type].to_s.upcase
  end.attribute_name.tap(&method(:puts)) # was `.output`: not a String method (NoMethodError); siblings use .tap
end
desc 'create_table NAME', 'create table with NAME'
# Create table NAME from a config file / options hash shaped like the output
# of `dump`: describe_table responses carry read-only fields AWS rejects in
# create_table requests, so only the accepted keys are copied through.
def create_table(name, file = nil)
  opt = load_cfg(options, file)
  params = only_keys_matching(opt, %i[attribute_definitions key_schema])
  params[:table_name] = name
  params[:provisioned_throughput] = only_keys_matching(opt[:provisioned_throughput], %i[read_capacity_units write_capacity_units])
  ## scrub unwanted keys from LSIs (only present when the source table had any)
  if opt.has_key?(:local_secondary_indexes)
    params[:local_secondary_indexes] = opt[:local_secondary_indexes].map do |lsi|
      only_keys_matching(lsi, %i[index_name key_schema projection])
    end
  end
  ## scrub unwanted keys from GSIs; GSIs also carry their own throughput
  if opt.has_key?(:global_secondary_indexes)
    params[:global_secondary_indexes] = opt[:global_secondary_indexes].map do |gsi|
      only_keys_matching(gsi, %i[index_name key_schema projection]).tap do |g|
        if gsi[:provisioned_throughput]
          g[:provisioned_throughput] = only_keys_matching(gsi[:provisioned_throughput], %i[read_capacity_units write_capacity_units])
        end
      end
    end
  end
  dynamodb.create_table(params)
end
desc 'throughput NAME', 'get or update provisioned throughput for table NAME'
method_option :read_capacity_units, aliases: '-r', type: :numeric, default: nil, desc: 'Read capacity units'
method_option :write_capacity_units, aliases: '-w', type: :numeric, default: nil, desc: 'Write capacity units'
method_option :gsi, aliases: '-g', type: :array, default: [], desc: 'GSIs to update'
method_option :all, type: :boolean, default: false, desc: 'Update all GSIs for the table'
method_option :table, type: :boolean, default: true, desc: 'Update througput on table'
# With no -r/-w option: print the current throughput of the table and each
# GSI as YAML and return the table description. With -r and/or -w: send an
# update_table request covering the table itself (unless --no-table) and
# the GSIs named with -g (or every GSI with --all); a capacity not given on
# the command line keeps its current value.
def throughput(name)
  table = dynamodb.describe_table(table_name: name).table
  ## current is hash of current provisioned throughput
  current = table.provisioned_throughput.to_h
  ## loop-safe version of GSIs (in case nil)
  global_secondary_indexes = table.global_secondary_indexes || []
  ## get throughput for each GSI, keyed by index name alongside the table's own
  global_secondary_indexes.each do |gsi|
    current[gsi.index_name] = gsi.provisioned_throughput.to_h
  end
  ## if no updates requested, just print throughput and return table details
  unless options[:read_capacity_units] or options[:write_capacity_units]
    puts YAML.dump(stringify_keys(current))
    return table
  end
  ## parameters for update request
  params = { table_name: name }
  ## add table throughput unless told not to (--no-table)
  params[:provisioned_throughput] = {
    read_capacity_units: options[:read_capacity_units] || current[:read_capacity_units],
    write_capacity_units: options[:write_capacity_units] || current[:write_capacity_units]
  } if options[:table]
  ## list of requested GSIs, or all for this table
  gsis = options[:gsi]
  gsis = global_secondary_indexes.map(&:index_name) if options[:all]
  params[:global_secondary_index_updates] = gsis.map do |gsi|
    {
      update: {
        index_name: gsi,
        provisioned_throughput: {
          read_capacity_units: options[:read_capacity_units] || current[gsi][:read_capacity_units],
          write_capacity_units: options[:write_capacity_units] || current[gsi][:write_capacity_units]
        }
      }
    }
  end
  ## make the update request
  params.reject! { |_,v| v.empty? } # sdk hates empty global_secondary_index_updates
  dynamodb.update_table(params)
end
desc 'enable_streams NAME', 'enable/disable streams on the table'
method_option :stream_view_type, aliases: '-t', default: 'NEW_IMAGE', desc: 'view type for the stream (NEW_IMAGE, OLD_IMAGE, NEW_AND_OLD_IMAGES, KEYS_ONLY)'
method_option :disable, aliases: '-d', default: false, desc: 'disable the stream'
# Enable the table's stream with the requested view type, or disable it
# entirely with --disable (view type is omitted when disabling).
def enable_streams(name)
  spec = { stream_enabled: !options[:disable] }
  unless options[:disable]
    spec[:stream_view_type] = options[:stream_view_type].upcase
  end
  dynamodb.update_table(table_name: name, stream_specification: spec)
end
desc 'delete NAME', 'delete table with NAME'
# Delete the table after interactive confirmation: the user must type the
# exact table name back before anything destructive happens.
def delete_table(name)
  typed = ask("to delete #{name} and all its data, type the name of table to delete:", :yellow)
  if typed == name
    say("deleting table #{name}")
    dynamodb.delete_table(table_name: name)
  else
    say("confirmation failed for #{name}", :red)
  end
end
desc 'copy [region/]SRC [region/]DEST', 'copy data from table region/SRC to table region/DEST'
method_option :dots, aliases: '-d', type: :boolean, default: false, desc: 'Show dots for put_item progress'
method_option :no_clobber, aliases: '-n', type: :boolean, default: false, desc: 'Do not overwrite existing items'
# Copy every item from SRC to DEST, optionally across regions. Items are
# read with paginated scans and written one put_item at a time; with
# --no-clobber, items whose primary key already exists in DEST are skipped.
# Progress is logged per scan page (and per item with --dots).
def copy(src, dst)
  src_table, src_region = src.split('/').reverse # parse region/table into [table, region]
  dst_table, dst_region = dst.split('/').reverse # region is nil when not given
  ## clients are potentially for different regions (nil region -> SDK default)
  src_client = Aws::DynamoDB::Client.new({region: src_region}.reject{|_,v| v.nil?})
  dst_client = Aws::DynamoDB::Client.new({region: dst_region}.reject{|_,v| v.nil?})
  ## params for put_item call
  params = {table_name: dst_table}
  ## add condition not to overwrite existing primary keys (hash or composite hash AND range)
  if options[:no_clobber]
    keys = dst_client.describe_table(table_name: dst_table).table.key_schema.map(&:attribute_name)
    params.merge!(condition_expression: keys.map{|key| "attribute_not_exists(#{key})"}.join(' AND '))
  end
  ## lame progress indicator, pass true for put, false for skip
  dots = options[:dots] ? ->(x){print x ? '.' : 'x'} : ->(_){}
  ## loop on each batch of scanned items
  exclusive_start_key = nil
  loop do
    r = src_client.scan(table_name: src_table, exclusive_start_key: exclusive_start_key, return_consumed_capacity: 'INDEXES')
    puts "[#{Time.now}] [#{src_table}] scanned:#{r.count} key:#{r.last_evaluated_key || 'nil'}"
    ## loop items and put to destination
    put = skipped = 0
    r.items.each do |item|
      begin
        dst_client.put_item(params.merge(item: item))
        put += 1
        dots.call(true)
      rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException #item key exists
        skipped += 1
        dots.call(false)
      end
    end
    print "\n" if options[:dots]
    puts "[#{Time.now}] [#{dst_table}] put:#{put} skipped:#{skipped}"
    ## loop if there are more keys to scan (last_evaluated_key is nil on the final page)
    exclusive_start_key = r.last_evaluated_key
    break unless exclusive_start_key
  end
end
desc 'scan NAME', 'scan table with NAME'
method_option :output, aliases: '-o', type: :string, default: nil, desc: 'Output filename (default: stdout)'
method_option :count, aliases: '-c', type: :boolean, default: false, desc: 'Return count instead of items'
# Scan table NAME, writing each item as one JSON line to --output (or
# stdout). With --count, only the accumulated item count is printed.
# The optional EXCLUSIVE_START_KEY argument resumes the scan from that key;
# previously it was unconditionally reset to nil, making the parameter dead.
def scan(name, exclusive_start_key = nil)
  fd = options[:output] ? File.open(options[:output], 'w') : $stdout.dup # open output file or stdout
  count = 0
  begin
    loop do
      r = dynamodb_simple.scan(
        'TableName' => name,
        'Select' => options[:count] ? 'COUNT' : 'ALL_ATTRIBUTES',
        'ExclusiveStartKey' => exclusive_start_key
      )
      count += r.fetch('Count', 0)
      r.fetch('Items', []).each do |item|
        fd.puts JSON.generate(item)
      end
      exclusive_start_key = r['LastEvaluatedKey'] # nil once the scan is complete
      break unless exclusive_start_key
    end
  ensure
    fd.close # close output even on error; $stdout.dup is safe to close
  end
  puts count if options[:count]
end
desc 'query NAME', 'query table with NAME'
method_option :hash_key, aliases: '-k', type: :string, default: nil, desc: 'Hash key'
method_option :hash_key_value, aliases: '-v', type: :string, default: nil, desc: 'Hash key value'
method_option :output, aliases: '-o', type: :string, default: nil, desc: 'Output filename (default: stdout)'
method_option :count, aliases: '-c', type: :boolean, default: false, desc: 'Return count instead of items'
# Query table NAME for items whose hash key (-k) equals the given string
# value (-v), writing each item as one JSON line to --output (or stdout).
# With --count, only the accumulated item count is printed.
# The optional EXCLUSIVE_START_KEY argument resumes the query from that key;
# previously it was unconditionally reset to nil, making the parameter dead.
def query(name, exclusive_start_key = nil)
  fd = options[:output] ? File.open(options[:output], 'w') : $stdout.dup # open output file or stdout
  count = 0
  begin
    loop do
      r = dynamodb_simple.query(
        'TableName' => name,
        'ExclusiveStartKey' => exclusive_start_key,
        'Select' => options[:count] ? 'COUNT' : 'ALL_ATTRIBUTES',
        'KeyConditionExpression' => "#{options[:hash_key]} = :hash_key_value",
        'ExpressionAttributeValues' => { ":hash_key_value" => { S: options[:hash_key_value] } }
      )
      count += r.fetch('Count', 0)
      r.fetch('Items', []).each do |item|
        fd.puts JSON.generate(item)
      end
      exclusive_start_key = r['LastEvaluatedKey'] # nil once the query is complete
      break unless exclusive_start_key
    end
  ensure
    fd.close # close output even on error; $stdout.dup is safe to close
  end
  puts count if options[:count]
end
desc 'put_items NAME', 'puts json items into the table with NAME'
method_option :no_clobber, aliases: '-n', type: :boolean, default: false, desc: 'Do not overwrite existing items'
# Read newline-delimited JSON items from FILE (or piped stdin) and put each
# one into DynamoDB table NAME. With --no-clobber, items whose primary key
# already exists are skipped instead of overwritten.
# Returns [put_count, skip_count].
def put_items(name, file = nil)
  params = {'TableName' => name}
  ## set a condition not to overwrite items with existing primary key(s)
  if options[:no_clobber]
    keys = dynamodb.describe_table(table_name: name).table.key_schema.map(&:attribute_name)
    params.merge!('ConditionExpression' => keys.map{|key| "attribute_not_exists(#{key})"}.join(' AND '))
  end
  ## input data: explicit file wins, else piped stdin; fail with a clear
  ## message instead of crashing on `false.each_line` when neither is given
  io = (file && File.open(file)) || (!$stdin.tty? && $stdin)
  raise Thor::Error, 'no input: give a FILE argument or pipe items on stdin' unless io
  put_count = 0
  skip_count = 0
  begin
    io.each_line do |line|
      begin
        dynamodb_simple.put_item(params.merge('Item' => JSON.parse(line)))
        put_count += 1
      rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException #item key exists
        skip_count += 1
      end
    end
  ensure
    io.close if file # close the File we opened; leave $stdin alone
  end
  ## return counts
  [put_count, skip_count].tap do |put, skip|
    puts "put #{put} items, skipped #{skip} items"
  end
end
desc 'batch_write NAME', 'batch write items to table NAME'
# Write 25 fixture items ("store1".."store25") to table NAME with a single
# BatchWriteItem request, printing both the generated items and the response.
def batch_write(name)
  items = 1.upto(25).map do |i|
    {
      put_request: {
        item: {
          "store_id"     => "store#{i}",
          "object_id"    => "object#{i}",
          "object_value" => "value#{i}"
        }
      }
    }
  end
  p items
  response = dynamodb.batch_write_item(request_items: {name => items})
  p response
end
## see lambda_events.rb for subcommands
desc 'streams SUBCOMMANDS', 'subcommands for dynamodb streams'
subcommand 'streams', Streams
end
end |
module Awful
  class DynamoDB < Cli
    desc 'ls [PATTERN]', 'list dynamodb tables [matching PATTERN]'
    method_option :long, aliases: '-l', default: false, desc: 'Long listing'
    # List table names matching PATTERN; with --long, print name, status,
    # item count, byte size and creation time instead.
    # NOTE(review): list_tables is not paginated here (the SDK returns at
    # most 100 names per call), so tables beyond the first page are missed.
    def ls(name = /./)
      tables = dynamodb.list_tables.table_names.select do |table|
        table.match(name)
      end
      if options[:long]
        tables.map do |table|
          dynamodb.describe_table(table_name: table).table
        end.tap do |list|
          print_table list.map { |t| [ t.table_name, t.table_status, t.item_count, t.table_size_bytes, t.creation_date_time ] }
        end
      else
        tables.tap { |t| puts t }
      end
    end

    desc 'dump NAME', 'dump table with name'
    # Print the table description for NAME as YAML; returns the description.
    def dump(name)
      dynamodb.describe_table(table_name: name).table.tap do |table|
        puts YAML.dump(stringify_keys(table.to_hash))
      end
    end

    desc 'scan NAME', 'scan table with NAME'
    # Print every item as one JSON line, recursing while the scan response
    # carries a last_evaluated_key (i.e. more pages remain).
    def scan(name, start_key = nil)
      r = dynamodb.scan(table_name: name, exclusive_start_key: start_key)
      puts r.items.map { |item| JSON.generate(item) }.join("\n")
      if r.last_evaluated_key # recurse if more data to get
        scan(name, r.last_evaluated_key)
      end
    end
  end
end
The create task can create a new table from a dump of an existing table
module Awful
class DynamoDB < Cli
desc 'ls [PATTERN]', 'list dynamodb tables [matching PATTERN]'
method_option :long, aliases: '-l', default: false, desc: 'Long listing'
# List table names matching PATTERN; with --long, print name, status,
# item count, byte size and creation time instead.
# NOTE(review): list_tables is not paginated here (at most 100 names per
# SDK call), so tables beyond the first page are missed.
def ls(name = /./)
  tables = dynamodb.list_tables.table_names.select do |table|
    table.match(name)
  end
  if options[:long]
    tables.map do |table|
      dynamodb.describe_table(table_name: table).table
    end.tap do |list|
      print_table list.map { |t| [ t.table_name, t.table_status, t.item_count, t.table_size_bytes, t.creation_date_time ] }
    end
  else
    tables.tap { |t| puts t }
  end
end
desc 'dump NAME', 'dump table with name'
# Print the table description for NAME as YAML; returns the description.
def dump(name)
  dynamodb.describe_table(table_name: name).table.tap do |table|
    puts YAML.dump(stringify_keys(table.to_hash))
  end
end
desc 'scan NAME', 'scan table with NAME'
# Print every item as one JSON line, recursing while the scan response
# carries a last_evaluated_key (i.e. more pages remain).
def scan(name, start_key = nil)
  r = dynamodb.scan(table_name: name, exclusive_start_key: start_key)
  puts r.items.map { |item| JSON.generate(item) }.join("\n")
  if r.last_evaluated_key # recurse if more data to get
    scan(name, r.last_evaluated_key)
  end
end
desc 'create NAME', 'create table with NAME'
# Create table NAME from a dumped table description (see `dump`), keeping
# only the keys create_table accepts.
def create(name, file = nil)
  opt = load_cfg(options, file)
  params = only_keys_matching(opt, %i[attribute_definitions key_schema])
  params[:table_name] = name
  params[:provisioned_throughput] = only_keys_matching(opt[:provisioned_throughput], %i[read_capacity_units write_capacity_units])
  ## secondary indexes are optional: a dump of a table without them has no
  ## such keys, and calling .map on nil raised NoMethodError here
  if opt[:local_secondary_indexes]
    params[:local_secondary_indexes] = opt[:local_secondary_indexes].map do |lsi|
      only_keys_matching(lsi, %i[index_name key_schema projection])
    end
  end
  if opt[:global_secondary_indexes]
    params[:global_secondary_indexes] = opt[:global_secondary_indexes].map do |gsi|
      only_keys_matching(gsi, %i[index_name key_schema projection]).tap do |g|
        ## GSIs carry their own throughput settings
        if gsi[:provisioned_throughput]
          g[:provisioned_throughput] = only_keys_matching(gsi[:provisioned_throughput], %i[read_capacity_units write_capacity_units])
        end
      end
    end
  end
  dynamodb.create_table(params)
end
end
end
|
module Awspec
  # Gem version (Semantic Versioning).
  VERSION = '0.79.2'
end
Bump up version number
module Awspec
  # Gem version (Semantic Versioning).
  VERSION = '0.80.0'
end
|
module Awspec
  # Gem version (Semantic Versioning).
  VERSION = '1.22.1'
end
Bump up version
module Awspec
  # Gem version (Semantic Versioning).
  VERSION = '1.23.0'
end
|
module Babosa
  module Version
    # Gem version string.
    STRING = '1.0.3'
  end
end
Bump version
module Babosa
  module Version
    # Gem version string.
    STRING = '1.0.4'
  end
end
|
require "timeout"
module Beetle
class Message
FORMAT_VERSION = 2
FLAG_REDUNDANT = 1
DEFAULT_TTL = 1.day
DEFAULT_HANDLER_TIMEOUT = 300.seconds
DEFAULT_HANDLER_EXECUTION_ATTEMPTS = 1
DEFAULT_HANDLER_EXECUTION_ATTEMPTS_DELAY = 10.seconds
DEFAULT_EXCEPTION_LIMIT = 0
attr_reader :server, :queue, :header, :body, :uuid, :data, :format_version, :flags, :expires_at
attr_reader :timeout, :delay, :attempts_limit, :exceptions_limit, :exception
def initialize(queue, header, body, opts = {})
@keys = {}
@queue = queue
@header = header
@body = body
setup(opts)
decode
end
# Capture handler-execution options, falling back to the class defaults.
# Invariant enforced on the last line: the attempts limit is always strictly
# greater than the exceptions limit, so the exceptions limit can trigger first.
def setup(opts)
  @server = opts[:server]
  @timeout = opts[:timeout] || DEFAULT_HANDLER_TIMEOUT
  @delay = opts[:delay] || DEFAULT_HANDLER_EXECUTION_ATTEMPTS_DELAY
  @attempts_limit = opts[:attempts] || DEFAULT_HANDLER_EXECUTION_ATTEMPTS
  @exceptions_limit = opts[:exceptions] || DEFAULT_EXCEPTION_LIMIT
  @attempts_limit = @exceptions_limit + 1 if @attempts_limit <= @exceptions_limit
end
# Extract message metadata. New-style messages carry format version, flags,
# expiry and uuid in the AMQP headers, and the raw body is the payload;
# old-style messages pack all of that into the body itself
# ("nnNA36A*": 16-bit version, 16-bit flags, 32-bit expiry timestamp,
# 36-char uuid, remainder is the payload).
def decode
  amqp_headers = header.properties
  h = amqp_headers[:headers]
  if h
    @format_version, @flags, @expires_at = h.values_at(:format_version, :flags, :expires_at)
    @uuid = amqp_headers[:message_id]
    @data = @body
  else
    @format_version, @flags, @expires_at, @uuid, @data = @body.unpack("nnNA36A*")
  end
end
def self.publishing_options(opts = {})
flags = 0
flags |= FLAG_REDUNDANT if opts.delete(:redundant)
expires_at = now + (opts.delete(:ttl) || DEFAULT_TTL).to_i
opts = opts.slice(*PUBLISHING_KEYS)
opts[:message_id] = generate_uuid.to_s
opts[:headers] = {
:format_version => FORMAT_VERSION,
:flags => flags,
:expires_at => expires_at
}
opts
end
# Encode +data+ in the legacy version-1 on-the-wire format ("nnNA36A*"):
# 16-bit format version, 16-bit flags, 32-bit expiry timestamp, 36-byte
# uuid, then the payload. The version field is hardcoded to 1 — this is
# the v1 encoder, so it must not track FORMAT_VERSION (which is now 2).
def self.encode_v1(data, opts = {})
  expires_at = now + (opts[:ttl] || DEFAULT_TTL).to_i
  flags = 0
  flags |= FLAG_REDUNDANT if opts[:redundant]
  [1, flags, expires_at, generate_uuid.to_s, data.to_s].pack("nnNA36A*")
end
def msg_id
@msg_id ||= "msgid:#{queue}:#{uuid}"
end
def now
Time.now.to_i
end
def self.now
Time.now.to_i
end
def expired?
@expires_at < now
end
def self.generate_uuid
UUID4R::uuid(1)
end
def redundant?
@flags & FLAG_REDUNDANT == FLAG_REDUNDANT
end
def set_timeout!
redis.set(key(:timeout), now + timeout)
end
def timed_out?
(t = redis.get(key(:timeout))) && t.to_i < now
end
def timed_out!
redis.set(key(:timeout), 0)
end
def completed?
redis.get(key(:status)) == "completed"
end
def completed!
redis.set(key(:status), "completed")
timed_out!
end
def delayed?
(t = redis.get(key(:delay))) && t.to_i > now
end
def set_delay!
redis.set(key(:delay), now + delay)
end
def attempts
redis.get(key(:attempts)).to_i
end
def increment_execution_attempts!
redis.incr(key(:attempts))
end
def attempts_limit_reached?
(limit = redis.get(key(:attempts))) && limit.to_i >= attempts_limit
end
def increment_exception_count!
redis.incr(key(:exceptions))
end
def exceptions_limit_reached?
redis.get(key(:exceptions)).to_i > exceptions_limit
end
# Atomically create the status/expires keys for this message via MSETNX.
# Returns true when the keys already existed, i.e. this message id has been
# seen before (duplicate delivery on the other broker or a redelivery).
def key_exists?
  old_message = 0 == redis.msetnx(key(:status) =>"incomplete", key(:expires) => @expires_at)
  if old_message
    logger.debug "Beetle: received duplicate message: #{key(:status)} on queue: #{@queue}"
  end
  old_message
end
def aquire_mutex!
if mutex = redis.setnx(key(:mutex), now)
logger.debug "Beetle: aquired mutex: #{msg_id}"
else
delete_mutex!
end
mutex
end
def delete_mutex!
redis.del(key(:mutex))
logger.debug "Beetle: deleted mutex: #{msg_id}"
end
def self.redis
@redis ||= Redis.new(:host => Beetle.config.redis_host, :db => Beetle.config.redis_db)
end
KEY_SUFFIXES = [:status, :ack_count, :timeout, :delay, :attempts, :exceptions, :mutex, :expires]
def key(suffix)
@keys[suffix] ||= self.class.key(msg_id, suffix)
end
def keys
self.class.keys(msg_id)
end
def self.key(msg_id, suffix)
"#{msg_id}:#{suffix}"
end
def self.keys(msg_id)
KEY_SUFFIXES.map{|suffix| key(msg_id, suffix)}
end
def self.msg_id(key)
key =~ /^(msgid:[^:]*:[-0-9a-f]*):.*$/ && $1
end
def self.garbage_collect_keys
keys = redis.keys("msgid:*:expires")
threshold = now + Beetle.config.gc_threshold
keys.each do |key|
expires_at = redis.get key
if expires_at && expires_at.to_i < threshold
msg_id = msg_id(key)
redis.del(keys(msg_id))
end
end
end
def process(handler)
logger.debug "Beetle: processing message #{msg_id}"
result = nil
begin
result = process_internal(handler)
handler.process_exception(@exception) if @exception
handler.process_failure(result) if result.failure?
rescue Exception => e
Beetle::reraise_expectation_errors!
logger.warn "Beetle: exception '#{e}' during processing of message #{msg_id}"
logger.warn "Beetle: backtrace: #{e.backtrace.join("\n")}"
result = RC::InternalError
end
result
end
private
# Decide what to do with this delivery and return an RC result code.
# Order matters: expiry first, then the first-delivery fast path, then the
# bookkeeping states for a message id that has been seen before.
def process_internal(handler)
  if expired?
    logger.warn "Beetle: ignored expired message (#{msg_id})!"
    ack!
    RC::Ancient
  elsif !key_exists?
    # first time we see this message id: run the handler
    set_timeout!
    run_handler!(handler)
  elsif completed?
    # another delivery already finished the work: just ack
    ack!
    RC::OK
  elsif delayed?
    logger.warn "Beetle: ignored delayed message (#{msg_id})!"
    RC::Delayed
  elsif !timed_out?
    # a handler elsewhere may still be running; wait for its timeout
    RC::HandlerNotYetTimedOut
  elsif attempts_limit_reached?
    ack!
    logger.warn "Beetle: reached the handler execution attempts limit: #{attempts_limit} on #{msg_id}"
    RC::AttemptsLimitReached
  elsif exceptions_limit_reached?
    ack!
    logger.warn "Beetle: reached the handler exceptions limit: #{exceptions_limit} on #{msg_id}"
    RC::ExceptionsLimitReached
  else
    # retry: refresh the timeout and run under the mutex so only one
    # consumer executes the handler at a time
    set_timeout!
    if aquire_mutex!
      run_handler!(handler)
    else
      RC::MutexLocked
    end
  end
end
# Execute the handler under the configured timeout and return an RC code.
# On exception, the error is stored in @exception (read later by #process)
# and the message is either given up on (limits reached -> ack) or armed
# for retry (delete mutex, clear timeout, set delay). The trailing
# completed!/ack! only run when no exception was raised, since every
# rescue branch returns explicitly.
def run_handler!(handler)
  increment_execution_attempts!
  begin
    Timeout::timeout(@timeout) { handler.call(self) }
  rescue Exception => @exception
    # NOTE: rescuing Exception (not StandardError) is deliberate here;
    # reraise_expectation_errors! lets test-framework errors through.
    Beetle::reraise_expectation_errors!
    increment_exception_count!
    if attempts_limit_reached?
      ack!
      logger.debug "Beetle: reached the handler execution attempts limit: #{attempts_limit} on #{msg_id}"
      return RC::AttemptsLimitReached
    elsif exceptions_limit_reached?
      ack!
      logger.debug "Beetle: reached the handler exceptions limit: #{exceptions_limit} on #{msg_id}"
      return RC::ExceptionsLimitReached
    else
      delete_mutex!
      timed_out!
      set_delay!
      logger.debug "Beetle: message handler crashed on #{msg_id}"
      return RC::HandlerCrash
    end
  ensure
    ActiveRecord::Base.clear_active_connections! if defined?(ActiveRecord)
  end
  completed!
  ack!
  RC::OK
end
def redis
@redis ||= self.class.redis
end
def logger
@logger ||= self.class.logger
end
def self.logger
Beetle.config.logger
end
# Ack the AMQP message. For redundant messages (published on two brokers)
# the Redis bookkeeping keys are deleted only on the second ack
# (ack_count == 2); for non-redundant messages, immediately.
def ack!
  logger.debug "Beetle: ack! for message #{msg_id}"
  header.ack
  if !redundant? || redis.incr(key(:ack_count)) == 2
    redis.del(keys)
  end
end
end
end
The legacy encoding version hardcodes the protocol version to 1
require "timeout"
module Beetle
class Message
FORMAT_VERSION = 2
FLAG_REDUNDANT = 1
DEFAULT_TTL = 1.day
DEFAULT_HANDLER_TIMEOUT = 300.seconds
DEFAULT_HANDLER_EXECUTION_ATTEMPTS = 1
DEFAULT_HANDLER_EXECUTION_ATTEMPTS_DELAY = 10.seconds
DEFAULT_EXCEPTION_LIMIT = 0
attr_reader :server, :queue, :header, :body, :uuid, :data, :format_version, :flags, :expires_at
attr_reader :timeout, :delay, :attempts_limit, :exceptions_limit, :exception
def initialize(queue, header, body, opts = {})
@keys = {}
@queue = queue
@header = header
@body = body
setup(opts)
decode
end
def setup(opts)
@server = opts[:server]
@timeout = opts[:timeout] || DEFAULT_HANDLER_TIMEOUT
@delay = opts[:delay] || DEFAULT_HANDLER_EXECUTION_ATTEMPTS_DELAY
@attempts_limit = opts[:attempts] || DEFAULT_HANDLER_EXECUTION_ATTEMPTS
@exceptions_limit = opts[:exceptions] || DEFAULT_EXCEPTION_LIMIT
@attempts_limit = @exceptions_limit + 1 if @attempts_limit <= @exceptions_limit
end
def decode
amqp_headers = header.properties
h = amqp_headers[:headers]
if h
@format_version, @flags, @expires_at = h.values_at(:format_version, :flags, :expires_at)
@uuid = amqp_headers[:message_id]
@data = @body
else
@format_version, @flags, @expires_at, @uuid, @data = @body.unpack("nnNA36A*")
end
end
def self.publishing_options(opts = {})
flags = 0
flags |= FLAG_REDUNDANT if opts.delete(:redundant)
expires_at = now + (opts.delete(:ttl) || DEFAULT_TTL).to_i
opts = opts.slice(*PUBLISHING_KEYS)
opts[:message_id] = generate_uuid.to_s
opts[:headers] = {
:format_version => FORMAT_VERSION,
:flags => flags,
:expires_at => expires_at
}
opts
end
# Encode +data+ in the legacy v1 wire format ("nnNA36A*"). The version
# field is deliberately hardcoded to 1 (not FORMAT_VERSION, which is 2):
# this encoder must always emit protocol version 1.
def self.encode_v1(data, opts = {})
  expires_at = now + (opts[:ttl] || DEFAULT_TTL).to_i
  flags = 0
  flags |= FLAG_REDUNDANT if opts[:redundant]
  [1, flags, expires_at, generate_uuid.to_s, data.to_s].pack("nnNA36A*")
end
def msg_id
@msg_id ||= "msgid:#{queue}:#{uuid}"
end
def now
Time.now.to_i
end
def self.now
Time.now.to_i
end
def expired?
@expires_at < now
end
def self.generate_uuid
UUID4R::uuid(1)
end
def redundant?
@flags & FLAG_REDUNDANT == FLAG_REDUNDANT
end
def set_timeout!
redis.set(key(:timeout), now + timeout)
end
def timed_out?
(t = redis.get(key(:timeout))) && t.to_i < now
end
def timed_out!
redis.set(key(:timeout), 0)
end
def completed?
redis.get(key(:status)) == "completed"
end
def completed!
redis.set(key(:status), "completed")
timed_out!
end
def delayed?
(t = redis.get(key(:delay))) && t.to_i > now
end
def set_delay!
redis.set(key(:delay), now + delay)
end
def attempts
redis.get(key(:attempts)).to_i
end
def increment_execution_attempts!
redis.incr(key(:attempts))
end
def attempts_limit_reached?
(limit = redis.get(key(:attempts))) && limit.to_i >= attempts_limit
end
def increment_exception_count!
redis.incr(key(:exceptions))
end
def exceptions_limit_reached?
redis.get(key(:exceptions)).to_i > exceptions_limit
end
def key_exists?
old_message = 0 == redis.msetnx(key(:status) =>"incomplete", key(:expires) => @expires_at)
if old_message
logger.debug "Beetle: received duplicate message: #{key(:status)} on queue: #{@queue}"
end
old_message
end
def aquire_mutex!
if mutex = redis.setnx(key(:mutex), now)
logger.debug "Beetle: aquired mutex: #{msg_id}"
else
delete_mutex!
end
mutex
end
def delete_mutex!
redis.del(key(:mutex))
logger.debug "Beetle: deleted mutex: #{msg_id}"
end
def self.redis
@redis ||= Redis.new(:host => Beetle.config.redis_host, :db => Beetle.config.redis_db)
end
KEY_SUFFIXES = [:status, :ack_count, :timeout, :delay, :attempts, :exceptions, :mutex, :expires]
def key(suffix)
@keys[suffix] ||= self.class.key(msg_id, suffix)
end
def keys
self.class.keys(msg_id)
end
def self.key(msg_id, suffix)
"#{msg_id}:#{suffix}"
end
def self.keys(msg_id)
KEY_SUFFIXES.map{|suffix| key(msg_id, suffix)}
end
def self.msg_id(key)
key =~ /^(msgid:[^:]*:[-0-9a-f]*):.*$/ && $1
end
def self.garbage_collect_keys
keys = redis.keys("msgid:*:expires")
threshold = now + Beetle.config.gc_threshold
keys.each do |key|
expires_at = redis.get key
if expires_at && expires_at.to_i < threshold
msg_id = msg_id(key)
redis.del(keys(msg_id))
end
end
end
def process(handler)
logger.debug "Beetle: processing message #{msg_id}"
result = nil
begin
result = process_internal(handler)
handler.process_exception(@exception) if @exception
handler.process_failure(result) if result.failure?
rescue Exception => e
Beetle::reraise_expectation_errors!
logger.warn "Beetle: exception '#{e}' during processing of message #{msg_id}"
logger.warn "Beetle: backtrace: #{e.backtrace.join("\n")}"
result = RC::InternalError
end
result
end
private
def process_internal(handler)
if expired?
logger.warn "Beetle: ignored expired message (#{msg_id})!"
ack!
RC::Ancient
elsif !key_exists?
set_timeout!
run_handler!(handler)
elsif completed?
ack!
RC::OK
elsif delayed?
logger.warn "Beetle: ignored delayed message (#{msg_id})!"
RC::Delayed
elsif !timed_out?
RC::HandlerNotYetTimedOut
elsif attempts_limit_reached?
ack!
logger.warn "Beetle: reached the handler execution attempts limit: #{attempts_limit} on #{msg_id}"
RC::AttemptsLimitReached
elsif exceptions_limit_reached?
ack!
logger.warn "Beetle: reached the handler exceptions limit: #{exceptions_limit} on #{msg_id}"
RC::ExceptionsLimitReached
else
set_timeout!
if aquire_mutex!
run_handler!(handler)
else
RC::MutexLocked
end
end
end
def run_handler!(handler)
increment_execution_attempts!
begin
Timeout::timeout(@timeout) { handler.call(self) }
rescue Exception => @exception
Beetle::reraise_expectation_errors!
increment_exception_count!
if attempts_limit_reached?
ack!
logger.debug "Beetle: reached the handler execution attempts limit: #{attempts_limit} on #{msg_id}"
return RC::AttemptsLimitReached
elsif exceptions_limit_reached?
ack!
logger.debug "Beetle: reached the handler exceptions limit: #{exceptions_limit} on #{msg_id}"
return RC::ExceptionsLimitReached
else
delete_mutex!
timed_out!
set_delay!
logger.debug "Beetle: message handler crashed on #{msg_id}"
return RC::HandlerCrash
end
ensure
ActiveRecord::Base.clear_active_connections! if defined?(ActiveRecord)
end
completed!
ack!
RC::OK
end
def redis
@redis ||= self.class.redis
end
def logger
@logger ||= self.class.logger
end
def self.logger
Beetle.config.logger
end
def ack!
logger.debug "Beetle: ack! for message #{msg_id}"
header.ack
if !redundant? || redis.incr(key(:ack_count)) == 2
redis.del(keys)
end
end
end
end
|
module EventMachine
  module Protocols
    # EventMachine client connection for the BERT-RPC protocol.
    #
    # = Example
    #EM.run{
    #  svc = EM::Protocols::BERTRPC.connect('localhost', 9999)
    #
    #  svc.call.calc.add(1, 2)
    #  svc.callback{ |res|
    #    p(res)
    #  }
    # }
    class BERTRPC < EventMachine::Connection
      include EventMachine::Deferrable
      include ::BERTRPC::Encodes
      # Describes one pending RPC (:call or :cast). The server module is
      # selected via method_missing, e.g. `req.calc` builds a Mod for "calc".
      class Request
        attr_accessor :kind, :options
        def initialize(svc, kind, options)
          @svc = svc
          @kind = kind
          @options = options
        end
        def method_missing(cmd, *args)
          ::BERTRPC::Mod.new(@svc, self, cmd)
        end
      end
      # Open a connection to the BERT-RPC server.
      # NOTE(review): the +timeout+ argument is accepted but ignored —
      # confirm whether a connect timeout should be applied.
      def self.connect(host, port, timeout = nil)
        EM.connect(host, port, self)
      end
      def post_init
        super
        @connected = EM::DefaultDeferrable.new
      end
      def connection_completed
        super
        @connected.succeed
      end
      # Fire the Deferrable callbacks with the decoded response.
      # NOTE(review): Deferrable succeeds only once per object, so callbacks
      # will not re-fire for a second response on the same connection —
      # multiple requests per connection appear unsupported.
      def dispatch_response
        succeed(@response)
      end
      # NOTE(review): assumes the complete 4-byte-length-prefixed BERP frame
      # arrives in a single receive_data chunk; a fragmented TCP read would
      # raise ProtocolError. Buffering by the length header would be needed
      # for robustness.
      def receive_data(bert_response)
        raise ::BERTRPC::ProtocolError.new(::BERTRPC::ProtocolError::NO_HEADER) unless bert_response.length > 4
        len = bert_response.slice!(0..3).unpack('N').first # just here to strip the length header
        raise ::BERTRPC::ProtocolError.new(::BERTRPC::ProtocolError::NO_DATA) unless bert_response.length > 0
        @response = decode_bert_response(bert_response)
        dispatch_response
      end
      # Build a request that expects a reply.
      def call(options = nil)
        verify_options(options)
        Request.new(self, :call, options)
      end
      # Build a fire-and-forget request.
      def cast(options = nil)
        verify_options(options)
        Request.new(self, :cast, options)
      end
      # Only the :cache => [:validation, String] option is recognized.
      def verify_options(options)
        if options
          if cache = options[:cache]
            unless cache[0] == :validation && cache[1].is_a?(String)
              raise ::BERTRPC::InvalidOption.new("Valid :cache args are [:validation, String]")
            end
          else
            raise ::BERTRPC::InvalidOption.new("Valid options are :cache")
          end
        end
      end
    end
  end
end
Fixed example in bertrpc.rb, but multiple requests are still broken
module EventMachine
module Protocols
# = Example
#EM.run{
# svc = EM::Protocols::BERTRPC.connect('localhost', 9999)
#
# svc.call.calc.add(1, 2)
# svc.callback{ |res|
# p(res)
# }
# }
class BERTRPC < EventMachine::Connection
include EventMachine::Deferrable
include ::BERTRPC::Encodes
class Request
attr_accessor :kind, :options
def initialize(svc, kind, options)
@svc = svc
@kind = kind
@options = options
end
def method_missing(cmd, *args)
::BERTRPC::Mod.new(@svc, self, cmd)
end
end
def self.connect(host, port, timeout = nil)
EM.connect(host, port, self)
end
def post_init
super
@connected = EM::DefaultDeferrable.new
end
def connection_completed
super
@connected.succeed
end
def dispatch_response
succeed(@response)
end
def receive_data(bert_response)
raise ::BERTRPC::ProtocolError.new(::BERTRPC::ProtocolError::NO_HEADER) unless bert_response.length > 4
len = bert_response.slice!(0..3).unpack('N').first # just here to strip the length header
raise ::BERTRPC::ProtocolError.new(::BERTRPC::ProtocolError::NO_DATA) unless bert_response.length > 0
@response = decode_bert_response(bert_response)
dispatch_response
end
def call(options = nil)
verify_options(options)
Request.new(self, :call, options)
end
def cast(options = nil)
verify_options(options)
Request.new(self, :cast, options)
end
def verify_options(options)
if options
if cache = options[:cache]
unless cache[0] == :validation && cache[1].is_a?(String)
raise ::BERTRPC::InvalidOption.new("Valid :cache args are [:validation, String]")
end
else
raise ::BERTRPC::InvalidOption.new("Valid options are :cache")
end
end
end
end
end
end
|
module Bipbip
  # Gem version (Semantic Versioning).
  VERSION = '0.6.23'
end
bumped version
module Bipbip
  # Gem version (Semantic Versioning).
  VERSION = '0.6.24'
end
|
# encoding: ascii-8bit
require 'bitcoin'
class Bitcoin::Script
OP_0 = 0
OP_FALSE = 0
OP_1 = 81
OP_TRUE = 81
OP_2 = 0x52
OP_3 = 0x53
OP_4 = 0x54
OP_5 = 0x55
OP_6 = 0x56
OP_7 = 0x57
OP_8 = 0x58
OP_9 = 0x59
OP_10 = 0x5a
OP_11 = 0x5b
OP_12 = 0x5c
OP_13 = 0x5d
OP_14 = 0x5e
OP_15 = 0x5f
OP_16 = 0x60
OP_PUSHDATA0 = 0
OP_PUSHDATA1 = 76
OP_PUSHDATA2 = 77
OP_PUSHDATA4 = 78
OP_PUSHDATA_INVALID = 238 # 0xEE
OP_NOP = 97
OP_DUP = 118
OP_HASH160 = 169
OP_EQUAL = 135
OP_VERIFY = 105
OP_EQUALVERIFY = 136
OP_CHECKSIG = 172
OP_CHECKSIGVERIFY = 173
OP_CHECKMULTISIG = 174
OP_CHECKMULTISIGVERIFY = 175
OP_TOALTSTACK = 107
OP_FROMALTSTACK = 108
OP_TUCK = 125
OP_SWAP = 124
OP_BOOLAND = 154
OP_ADD = 147
OP_SUB = 148
OP_GREATERTHANOREQUAL = 162
OP_DROP = 117
OP_HASH256 = 170
OP_SHA256 = 168
OP_SHA1 = 167
OP_RIPEMD160 = 166
OP_NOP1 = 176
OP_NOP2 = 177
OP_NOP3 = 178
OP_NOP4 = 179
OP_NOP5 = 180
OP_NOP6 = 181
OP_NOP7 = 182
OP_NOP8 = 183
OP_NOP9 = 184
OP_NOP10 = 185
OP_CODESEPARATOR = 171
OP_MIN = 163
OP_MAX = 164
OP_2OVER = 112
OP_2SWAP = 114
OP_IFDUP = 115
OP_DEPTH = 116
OP_1NEGATE = 79
OP_WITHIN = 165
OP_NUMEQUAL = 156
OP_NUMEQUALVERIFY = 157
OP_LESSTHAN = 159
OP_LESSTHANOREQUAL = 161
OP_GREATERTHAN = 160
OP_NOT = 145
OP_0NOTEQUAL = 146
OP_ABS = 144
OP_1ADD = 139
OP_1SUB = 140
OP_NEGATE = 143
OP_BOOLOR = 155
OP_NUMNOTEQUAL = 158
OP_RETURN = 106
OP_OVER = 120
OP_IF = 99
OP_NOTIF = 100
OP_ELSE = 103
OP_ENDIF = 104
OP_PICK = 121
OP_SIZE = 130
OP_VER = 98
OP_ROLL = 122
OP_ROT = 123
OP_2DROP = 109
OP_2DUP = 110
OP_3DUP = 111
OP_NIP = 119
OP_CAT = 126
OP_SUBSTR = 127
OP_LEFT = 128
OP_RIGHT = 129
OP_INVERT = 131
OP_AND = 132
OP_OR = 133
OP_XOR = 134
OP_2MUL = 141
OP_2DIV = 142
OP_MUL = 149
OP_DIV = 150
OP_MOD = 151
OP_LSHIFT = 152
OP_RSHIFT = 153
OP_INVALIDOPCODE = 0xff
OPCODES = Hash[*constants.grep(/^OP_/).map{|i| [const_get(i), i.to_s] }.flatten]
OPCODES[0] = "0"
OPCODES[81] = "1"
OPCODES_ALIAS = {
"OP_TRUE" => OP_1,
"OP_FALSE" => OP_0,
"OP_EVAL" => OP_NOP1,
"OP_CHECKHASHVERIFY" => OP_NOP2,
}
DISABLED_OPCODES = [
OP_CAT, OP_SUBSTR, OP_LEFT, OP_RIGHT, OP_INVERT,
OP_AND, OP_OR, OP_XOR, OP_2MUL, OP_2DIV, OP_MUL,
OP_DIV, OP_MOD, OP_LSHIFT, OP_RSHIFT
]
OP_2_16 = (82..96).to_a
OPCODES_PARSE_BINARY = {}
OPCODES.each{|k,v| OPCODES_PARSE_BINARY[k] = v }
OP_2_16.each{|i| OPCODES_PARSE_BINARY[i] = (OP_2_16.index(i)+2).to_s }
OPCODES_PARSE_STRING = {}
OPCODES.each{|k,v| OPCODES_PARSE_STRING[v] = k }
OPCODES_ALIAS.each{|k,v| OPCODES_PARSE_STRING[k] = v }
2.upto(16).each{|i| OPCODES_PARSE_STRING["OP_#{i}"] = OP_2_16[i-2] }
2.upto(16).each{|i| OPCODES_PARSE_STRING["#{i}" ] = OP_2_16[i-2] }
[1,2,4].each{|i| OPCODES_PARSE_STRING.delete("OP_PUSHDATA#{i}") }
attr_reader :raw, :chunks, :debug
# create a new script. +input_script+ is the raw binary scriptSig; when
# +previous_output_script+ is given the two are joined with an
# OP_CODESEPARATOR in between, so input + output evaluate as one script.
def initialize(input_script, previous_output_script=nil)
  @raw = if previous_output_script
    input_script + [ Bitcoin::Script::OP_CODESEPARATOR ].pack("C") + previous_output_script
  else
    input_script
  end
  # main stack, alt stack (OP_TOALTSTACK) and the OP_IF execution-state stack
  @stack, @stack_alt, @exec_stack = [], [], []
  @chunks = parse(@raw)
  @last_codeseparator_index = 0
  @do_exec = true
end

# NOTE(review): monkey-patches core String so parsed pushdata chunks can
# remember which OP_PUSHDATA encoding (and declared length) they came
# from, letting non-minimal pushes serialize back byte-for-byte.
class ::String
  attr_accessor :bitcoin_pushdata
  attr_accessor :bitcoin_pushdata_length
end
# parse raw script +bytes+ into chunks: opcodes stay Integers, pushdata
# becomes Strings (tagged with their original OP_PUSHDATA* encoding so
# serialization round-trips non-minimal pushes).
# On any parse error the whole script becomes a single chunk tagged
# OP_PUSHDATA_INVALID and @parse_invalid is set (#run then returns false).
def parse(bytes, offset=0)
  program = bytes.unpack("C*")
  chunks = []
  until program.empty?
    opcode = program.shift
    if (opcode > 0) && (opcode < OP_PUSHDATA1)
      # direct push: the opcode byte is the data length itself
      len, tmp = opcode, program[0]
      chunks << program.shift(len).pack("C*")
      # 0x16 = 22 due to OP_2_16 from_string parsing
      if len == 1 && tmp <= 22
        chunks.last.bitcoin_pushdata = OP_PUSHDATA0
        chunks.last.bitcoin_pushdata_length = len
      else
        raise "invalid OP_PUSHDATA0" if len != chunks.last.bytesize
      end
    elsif (opcode == OP_PUSHDATA1)
      len = program.shift(1)[0]
      chunks << program.shift(len).pack("C*")
      # non-minimal encodings are tagged so they serialize back unchanged
      unless len > OP_PUSHDATA1 && len <= 0xff
        chunks.last.bitcoin_pushdata = OP_PUSHDATA1
        chunks.last.bitcoin_pushdata_length = len
      else
        raise "invalid OP_PUSHDATA1" if len != chunks.last.bytesize
      end
    elsif (opcode == OP_PUSHDATA2)
      len = program.shift(2).pack("C*").unpack("v")[0]
      chunks << program.shift(len).pack("C*")
      unless len > 0xff && len <= 0xffff
        chunks.last.bitcoin_pushdata = OP_PUSHDATA2
        chunks.last.bitcoin_pushdata_length = len
      else
        raise "invalid OP_PUSHDATA2" if len != chunks.last.bytesize
      end
    elsif (opcode == OP_PUSHDATA4)
      len = program.shift(4).pack("C*").unpack("V")[0]
      chunks << program.shift(len).pack("C*")
      unless len > 0xffff # && len <= 0xffffffff
        chunks.last.bitcoin_pushdata = OP_PUSHDATA4
        chunks.last.bitcoin_pushdata_length = len
      else
        raise "invalid OP_PUSHDATA4" if len != chunks.last.bytesize
      end
    else
      chunks << opcode
    end
  end
  chunks
rescue Exception => ex
  # NOTE(review): deliberately rescues Exception (not StandardError) so
  # any malformed script is marked invalid instead of aborting the caller.
  # bail out! #run returns false but serialization roundtrips still create the right payload.
  @parse_invalid = true
  c = bytes.unpack("C*").pack("C*")
  c.bitcoin_pushdata = OP_PUSHDATA_INVALID
  c.bitcoin_pushdata_length = c.bytesize
  chunks = [ c ]
end
# string representation of the script: opcode names separated by spaces,
# pushdata as hex (prefixed with "<pushdata_opcode>:<length>:" when the
# chunk was tagged with a non-minimal encoding by #parse).
def to_string(chunks=nil)
  string = ""
  (chunks || @chunks).each.with_index{|i,idx|
    string << " " unless idx == 0
    string << case i
    when Fixnum # NOTE(review): Fixnum was removed in Ruby 3.2; needs Integer on modern rubies
      if opcode = OPCODES_PARSE_BINARY[i]
        opcode
      else
        "(opcode-#{i})" # unknown opcode escape, understood by .binary_from_string
      end
    when String
      if i.bitcoin_pushdata
        "#{i.bitcoin_pushdata}:#{i.bitcoin_pushdata_length}:".force_encoding('binary') + i.unpack("H*")[0]
      else
        i.unpack("H*")[0]
      end
    end
  }
  string
end
# serialize the chunks back into a raw binary script.
def to_binary(chunks=nil)
  (chunks || @chunks).map{|chunk|
    case chunk
    when Fixnum; [chunk].pack("C*") # NOTE(review): Fixnum removed in Ruby 3.2; needs Integer on modern rubies
    when String; self.class.pack_pushdata(chunk)
    end
  }.join
end
alias :to_payload :to_binary
# serialize the script with the given +drop_signatures+ chunks and all
# OP_CODESEPARATORs removed; everything up to the last executed
# OP_CODESEPARATOR is discarded. Used to build the signature-hash script.
def to_binary_without_signatures(drop_signatures, chunks=nil)
  buf = []
  (chunks || @chunks).each.with_index{|chunk,idx|
    if chunk == OP_CODESEPARATOR and idx <= @last_codeseparator_index
      buf.clear # restart after each already-executed codeseparator
    elsif chunk == OP_CODESEPARATOR
      # skip: codeseparators never appear in the serialized result
    elsif drop_signatures.none?{|e| e == chunk }
      buf << chunk
    end
  }
  to_binary(buf)
end
# Adds opcode (OP_0, OP_1, ... OP_CHECKSIG etc.) to the script.
# +opcode+ must be an Integer in the range [0x00, 0xff].
# NOTE: checks Integer instead of the legacy Fixnum, which was removed in
# Ruby 3.2; behavior is unchanged for all opcode-sized values.
# Returns self.
def append_opcode(opcode)
  raise "Opcode should be an Integer" if !opcode.is_a?(Integer)
  if opcode >= OP_0 && opcode <= 0xff
    @chunks << opcode
  else
    raise "Opcode should be within [0x00, 0xff]"
  end
  self
end
# Adds a raw binary string to the script as pushdata. Serialization picks
# the most compact encoding unless the string carries explicit pushdata
# metadata added by this class (see Script.pack_pushdata).
# Returns self.
def append_pushdata(pushdata_string)
  unless pushdata_string.is_a?(String)
    raise "Pushdata should be a string"
  end
  @chunks.push(pushdata_string)
  self
end
# serialize a single pushdata chunk. Honors any original encoding that
# #parse recorded on the string (bitcoin_pushdata / _length accessors from
# the ::String monkeypatch); otherwise picks the most compact OP_PUSHDATA
# form for the data size.
def self.pack_pushdata(data)
  size = data.bytesize
  if data.bitcoin_pushdata
    size = data.bitcoin_pushdata_length
    pack_pushdata_align(data.bitcoin_pushdata, size, data)
  else
    head = if size < OP_PUSHDATA1
      [size].pack("C") # direct push: length byte only
    elsif size <= 0xff
      [OP_PUSHDATA1, size].pack("CC")
    elsif size <= 0xffff
      [OP_PUSHDATA2, size].pack("Cv")
    #elsif size <= 0xffffffff
    else
      [OP_PUSHDATA4, size].pack("CV")
    end
    head + data
  end
end
# Serializes +data+ using the explicitly requested pushdata encoding
# (+pushdata+ is one of the OP_PUSHDATA* constants), even when a more
# compact form exists. +len+ is trusted as-is so non-minimal pushes
# round-trip. OP_PUSHDATA_INVALID returns the raw bytes untouched.
def self.pack_pushdata_align(pushdata, len, data)
  return data if pushdata == OP_PUSHDATA_INVALID
  header =
    case pushdata
    when OP_PUSHDATA1 then [OP_PUSHDATA1, len].pack("CC")
    when OP_PUSHDATA2 then [OP_PUSHDATA2, len].pack("Cv")
    when OP_PUSHDATA4 then [OP_PUSHDATA4, len].pack("CV")
    else [len].pack("C") # OP_PUSHDATA0: single length byte
    end
  header + data
end
# script object of a string representation (see .binary_from_string)
def self.from_string(script_string)
  new(binary_from_string(script_string))
end

# raised by .binary_from_string for unknown OP_* names
class ScriptOpcodeError < StandardError; end
# raw script binary of a string representation (inverse of #to_string).
# Tokens may be opcode names, "(opcode-N)" escapes, "push:len:hex"
# non-minimal pushes, bare integers, or hex-encoded pushdata.
def self.binary_from_string(script_string)
  buf = ""
  script_string.split(" ").each{|i|
    i = if opcode = OPCODES_PARSE_STRING[i]
      opcode
    else
      case i
      when /OP_PUSHDATA/ # skip
      when /OP_(.+)$/; raise ScriptOpcodeError, "#{i} not defined!"
      when /\(opcode\-(\d+)\)/; $1.to_i
      when "(opcode"; # skip # fix invalid opcode parsing
      when /^(\d+)\)/; $1.to_i # fix invalid opcode parsing
      when /(\d+):(\d+):(.+)?/
        # explicit "pushdata_opcode:length:hex" form for non-minimal pushes
        pushdata, len, data = $1.to_i, $2.to_i, $3
        pack_pushdata_align(pushdata, len, [data].pack("H*"))
      else
        # anything else is treated as hex pushdata
        data = [i].pack("H*")
        pack_pushdata(data)
      end
    end
    buf << if i.is_a?(Fixnum) # NOTE(review): Fixnum removed in Ruby 3.2; needs Integer on modern rubies
      i < 256 ? [i].pack("C") : [OpenSSL::BN.new(i.to_s,10).to_hex].pack("H*")
    else
      i
    end if i
  }
  buf
end
# Whether the script has been marked invalid. Normalizes the internal
# flag to +false+ when it was never set.
def invalid?
  @script_invalid = false if @script_invalid.nil?
  @script_invalid
end
# run the script. +check_callback+ is called for OP_CHECKSIG operations.
# Returns true when evaluation succeeds with a truthy top stack item.
def run(block_timestamp=Time.now.to_i, &check_callback)
  return false if @parse_invalid
  #p [to_string, block_timestamp, is_p2sh?]
  @script_invalid = true if @raw.bytesize > 10_000 # script size limit
  @last_codeseparator_index = 0
  if block_timestamp >= 1333238400 # Pay to Script Hash (BIP 0016)
    return pay_to_script_hash(check_callback) if is_p2sh?
  end
  @debug = []
  @chunks.each.with_index{|chunk,idx|
    break if invalid?
    @chunk_last_index = idx
    # record a stack snapshot for debugging
    @debug << @stack.map{|i| i.unpack("H*") rescue i}
    # execution is suspended inside any unexecuted OP_IF branch
    @do_exec = @exec_stack.count(false) == 0 ? true : false
    #p [@stack, @do_exec]
    case chunk
    when Fixnum # NOTE(review): Fixnum removed in Ruby 3.2; needs Integer on modern rubies
      if DISABLED_OPCODES.include?(chunk)
        @script_invalid = true
        @debug << "DISABLED_#{OPCODES[chunk]}"
        break
      end
      # outside an executed branch only the flow-control opcodes still run
      next @debug.pop unless (@do_exec || (OP_IF <= chunk && chunk <= OP_ENDIF))
      case chunk
      when *OPCODES_METHOD.keys
        m = method( n=OPCODES_METHOD[chunk] )
        @debug << n.to_s.upcase
        (m.arity == 1) ? m.call(check_callback) : m.call # invoke opcode method
      when *OP_2_16
        # OP_2..OP_16 push their small-integer value
        @stack << OP_2_16.index(chunk) + 2
        @debug << "OP_#{chunk-80}"
      else
        name = OPCODES[chunk] || chunk
        puts "Bitcoin::Script: opcode #{name} unkown or not implemented\n#{to_string.inspect}"
        raise "opcode #{name} unkown or not implemented"
      end
    when String
      if @do_exec
        @debug << "PUSH DATA #{chunk.unpack("H*")[0]}"
        @stack << chunk
      else
        @debug.pop
      end
    end
  }
  @debug << @stack.map{|i| i.unpack("H*") rescue i } #if @do_exec
  if @script_invalid
    @stack << 0
    @debug << "INVALID TRANSACTION"
  end
  @debug << "RESULT"
  return false if @stack.empty?
  return false if [0, ''].include?(@stack.pop)
  true
end
# Marks the script as invalid. Returns nil so opcode handlers can write
# `return invalid` to abort.
def invalid
  @script_invalid = true
  nil
end
# rebuild +script_pubkey+ with every chunk whose string token appears in
# +drop_signatures+ removed. Returns the raw binary script.
def self.drop_signatures(script_pubkey, drop_signatures)
  script = new(script_pubkey).to_string.split(" ").delete_if{|c| drop_signatures.include?(c) }.join(" ")
  script_pubkey = binary_from_string(script)
end
# pay_to_script_hash: https://en.bitcoin.it/wiki/BIP_0016
#
# <sig> {<pub> OP_CHECKSIG} | OP_HASH160 <script_hash> OP_EQUAL
#
# Verifies the serialized inner script hashes to <script_hash>, then runs
# the remaining stack items followed by the deserialized inner script.
def pay_to_script_hash(check_callback)
  return false if @chunks.size < 4
  *rest, script, _, script_hash, _ = @chunks
  script = rest.pop if script == OP_CODESEPARATOR
  script, script_hash = cast_to_string(script), cast_to_string(script_hash)
  return false unless Bitcoin.hash160(script.unpack("H*")[0]) == script_hash.unpack("H*")[0]
  script = self.class.new(to_binary(rest) + script).inner_p2sh!(script)
  result = script.run(&check_callback)
  @debug = script.debug
  result
end

# mark this script as the inner script of a p2sh redemption, remembering
# the serialized inner script code for sighash computation. Returns self.
def inner_p2sh!(script=nil); @inner_p2sh = true; @inner_script_code = script; self; end
def inner_p2sh?; @inner_p2sh; end

# get the inner p2sh script (nil unless its hash160 matches)
def inner_p2sh_script
  return nil if @chunks.size < 4
  *rest, script, _, script_hash, _ = @chunks
  script = rest.pop if script == OP_CODESEPARATOR
  script, script_hash = cast_to_string(script), cast_to_string(script_hash)
  return nil unless Bitcoin.hash160(script.unpack("H*")[0]) == script_hash.unpack("H*")[0]
  script
end
# Checks for the BIP-0016 pay-to-script-hash output pattern:
#   OP_HASH160 <20-byte script hash> OP_EQUAL
# Always false while evaluating the inner script of a p2sh redemption.
def is_pay_to_script_hash?
  return false if @inner_p2sh
  hash = @chunks[-2]
  return false unless hash.is_a?(String) && hash.bytesize == 20
  @chunks.size >= 3 && @chunks[-3] == OP_HASH160 && @chunks[-1] == OP_EQUAL
end
alias :is_p2sh? :is_pay_to_script_hash?
# check if script is in one of the recognized standard formats
def is_standard?
  [:is_pubkey?, :is_hash160?, :is_multisig?, :is_p2sh?, :is_op_return?].any? { |check| send(check) }
end
# is this a pubkey output script: <pubkey> OP_CHECKSIG
def is_pubkey?
  return false unless @chunks.size == 2
  pubkey = @chunks[0]
  # NOTE: a String chunk can never equal OP_RETURN (an Integer); the last
  # comparison is kept from the original contract.
  @chunks[1] == OP_CHECKSIG && pubkey && pubkey.is_a?(String) && pubkey != OP_RETURN
end
alias :is_send_to_ip? :is_pubkey?

# is this a hash160 (address) output script:
#   OP_DUP OP_HASH160 <20-byte hash> OP_EQUALVERIFY OP_CHECKSIG
def is_hash160?
  return false unless @chunks.size == 5
  frame = @chunks[0..1] + @chunks[-2..-1]
  hash = @chunks[2]
  frame == [OP_DUP, OP_HASH160, OP_EQUALVERIFY, OP_CHECKSIG] &&
    hash.is_a?(String) && hash.bytesize == 20
end
# is this a multisig output script: <m> <pubkey>... <n> OP_CHECKMULTISIG
# NOTE: checks Integer instead of the legacy Fixnum constant, which was
# removed in Ruby 3.2; behavior is unchanged for opcode-sized values.
# Also uses && instead of the low-precedence `and` keyword.
def is_multisig?
  return false if @chunks.size < 4 || !@chunks[-2].is_a?(Integer)
  @chunks[-1] == OP_CHECKMULTISIG && get_multisig_pubkeys.all?{|c| c.is_a?(String) }
end
# is this an OP_RETURN (data carrier) script: OP_RETURN [<data>]
def is_op_return?
  @chunks.size <= 2 && @chunks[0] == OP_RETURN
end
# get the recognized standard type of this script: :hash160, :pubkey,
# :multisig, :p2sh, :op_return or :unknown. Checks run in the same order
# as the original implementation.
def type
  return :hash160   if is_hash160?
  return :pubkey    if is_pubkey?
  return :multisig  if is_multisig?
  return :p2sh      if is_p2sh?
  return :op_return if is_op_return?
  :unknown
end
# get the public key (hex) for this pubkey script; nil when not a pubkey
# script (unless the script is a single chunk, which is returned as-is)
def get_pubkey
  return @chunks[0].unpack("H*")[0] if @chunks.size == 1
  is_pubkey? ? @chunks[0].unpack("H*")[0] : nil
end

# get the base58 address for this pubkey script
def get_pubkey_address
  Bitcoin.pubkey_to_address(get_pubkey)
end

# get the hash160 (hex) for this hash160, p2sh or pubkey script
def get_hash160
  return @chunks[2..-3][0].unpack("H*")[0] if is_hash160?
  return @chunks[-2].unpack("H*")[0] if is_p2sh?
  return Bitcoin.hash160(get_pubkey) if is_pubkey?
end

# get the base58 address for this hash160 script
def get_hash160_address
  Bitcoin.hash160_to_address(get_hash160)
end

# get the public keys (binary chunks) for this multisig script;
# @chunks[-2] holds the OP_N opcode, so n = opcode - 80
def get_multisig_pubkeys
  1.upto(@chunks[-2] - 80).map{|i| @chunks[i] }
end
# get the pubkey addresses for this multisig script, silently skipping
# any pubkeys that fail EC validation
def get_multisig_addresses
  get_multisig_pubkeys.map{|pub|
    begin
      Bitcoin::Key.new(nil, pub.unpack("H*")[0]).addr
    rescue OpenSSL::PKey::ECError, OpenSSL::PKey::EC::Point::Error
    end
  }.compact
end

# get the base58 p2sh address for this p2sh script
def get_p2sh_address
  Bitcoin.hash160_to_p2sh_address(get_hash160)
end

# get the data (hex) possibly included in an OP_RETURN script, or nil
def get_op_return_data
  return nil unless is_op_return?
  cast_to_string(@chunks[1]).unpack("H*")[0] if @chunks[1]
end

# get all addresses this script corresponds to (if possible)
def get_addresses
  return [get_pubkey_address] if is_pubkey?
  return [get_hash160_address] if is_hash160?
  return get_multisig_addresses if is_multisig?
  return [get_p2sh_address] if is_p2sh?
  []
end

# get single address, or first for multisig script
def get_address
  addrs = get_addresses
  addrs.is_a?(Array) ? addrs[0] : addrs
end
# generate pubkey output script for given +pubkey+ (hex string).
# returns a raw binary script of the form:
#  <pubkey> OP_CHECKSIG
def self.to_pubkey_script(pubkey)
  pk = [pubkey].pack("H*")
  # 0xAC = OP_CHECKSIG, packed instead of a "\xAC" string literal so the
  # result is BINARY-encoded regardless of the file's source encoding
  # (the literal only worked under the file-wide ascii-8bit magic comment).
  [pk.bytesize].pack("C") + pk + [0xAC].pack("C")
end
# generate hash160 (pay-to-pubkey-hash) output script for given +hash160+
# hex string. returns a raw binary script of the form:
#  OP_DUP OP_HASH160 <hash160> OP_EQUALVERIFY OP_CHECKSIG
def self.to_hash160_script(hash160)
  return nil unless hash160
  # 76 a9 14 = DUP HASH160 push-20 ; 88 ac = EQUALVERIFY CHECKSIG
  ["76a914#{hash160}88ac"].pack("H*")
end
# generate p2sh output script for given +p2sh+ hash160 hex string.
# returns a raw binary script of the form:
#  OP_HASH160 <p2sh> OP_EQUAL
def self.to_p2sh_script(p2sh)
  return nil unless p2sh
  # a9 14 = HASH160 push-20 ; 87 = EQUAL
  ["a914#{p2sh}87"].pack("H*")
end
# generate hash160 or p2sh output script, depending on the type of the
# given +address+. see #to_hash160_script and #to_p2sh_script.
# returns nil for unrecognized address types.
def self.to_address_script(address)
  hash160 = Bitcoin.hash160_from_address(address)
  case Bitcoin.address_type(address)
  when :hash160; to_hash160_script(hash160)
  when :p2sh; to_p2sh_script(hash160)
  end
end
# generate multisig output script for given +pubkeys+ (hex strings),
# expecting +m+ signatures. returns a raw binary script of the form:
#  <m> <pubkey> [<pubkey> ...] <n_pubkeys> OP_CHECKMULTISIG
def self.to_multisig_script(m, *pubkeys)
  pubs = pubkeys.map{|pk| p = [pk].pack("H*"); [p.bytesize].pack("C") + p }
  # 0xAE = OP_CHECKMULTISIG, packed instead of a "\xAE" string literal so
  # the result is BINARY-encoded regardless of the file's source encoding
  # (the literal only worked under the file-wide ascii-8bit magic comment).
  [ [80 + m.to_i].pack("C"), *pubs, [80 + pubs.size].pack("C"), [0xAE].pack("C") ].join
end
# generate OP_RETURN output script carrying optional +data+ (hex string).
# returns a raw binary script of the form:
#  OP_RETURN [<data>]
def self.to_op_return_script(data = nil)
  return "\x6A" unless data
  payload = [data].pack("H*")
  "\x6A" + [payload.bytesize].pack("C") + payload
end
# generate input script sig spending a pubkey output with given
# +signature+ and optional binary +pubkey+.
# returns a raw binary script sig of the form:
#  <signature + hashtype> [<pubkey>]
# raises when +pubkey+ is given but not a binary compressed/uncompressed key.
def self.to_pubkey_script_sig(signature, pubkey)
  hash_type = "\x01" # SIGHASH_ALL
  sig_push = [signature.bytesize + 1].pack("C") + signature + hash_type
  return sig_push unless pubkey
  expected_size =
    case pubkey[0]
    when "\x04" then 65         # uncompressed key prefix
    when "\x02", "\x03" then 33 # compressed key prefixes
    end
  if !expected_size || pubkey.bytesize != expected_size
    raise "pubkey is not in binary form"
  end
  sig_push + [pubkey.bytesize].pack("C") + pubkey
end
# generate p2sh multisig output script for given +args+
# (m, pubkey1_hex, pubkey2_hex, ...).
# returns the p2sh output script, and the redeem script needed to spend it.
# see #to_multisig_script for the redeem script, and #to_p2sh_script for the p2sh script.
def self.to_p2sh_multisig_script(*args)
  redeem_script = to_multisig_script(*args)
  p2sh_script = to_p2sh_script(Bitcoin.hash160(redeem_script.hth))
  return p2sh_script, redeem_script
end

# alias for #to_pubkey_script_sig
def self.to_signature_pubkey_script(*a)
  to_pubkey_script_sig(*a)
end
# generate input script sig spending a multisig output script.
# a SIGHASH_ALL hashtype byte is appended to each signature.
# returns a raw binary script sig of the form:
#  OP_0 <sig> [<sig> ...]
def self.to_multisig_script_sig(*sigs)
  sigs.map!{|s| s + "\x01" }
  from_string("0 #{sigs.map{|s|s.unpack('H*')[0]}.join(' ')}").raw
end
# generate input script sig spending a p2sh-multisig output script.
# +sigs+ may be passed either as one array of signatures or as individual
# signature arguments (generalized: previously only the array form worked
# because only sigs[0] was iterated). A SIGHASH_ALL byte is appended to
# each signature; the redeem script is always pushed with OP_PUSHDATA1,
# matching the original serialization even for short scripts.
# returns a raw binary script sig of the form:
#  OP_0 <sig> [<sig> ...] <redeem_script>
def self.to_p2sh_multisig_script_sig(redeem_script, *sigs)
  all_sigs = ""
  sigs.flatten.each do |sig|
    full_sig = sig + "\x01"
    all_sigs += [full_sig.bytesize].pack("C*") + full_sig
  end
  push = [OP_PUSHDATA1].pack("C*")
  script_len = [redeem_script.bytesize].pack("C*")
  "\x00" + all_sigs + push + script_len + redeem_script
end
# the number of signatures required to spend this multisig script
# (the m in m-of-n); false if this is not a multisig script.
def get_signatures_required
  return false unless is_multisig?
  @chunks[0] - 80
end
# This matches CScript::GetSigOpCount(bool fAccurate).
# Counts signature operations in the script. In accurate mode a
# CHECKMULTISIG preceded by OP_1..OP_16 counts as that many sigops
# (only used in P2SH scripts); otherwise every multisig counts as 20.
# Note: this does not cover the P2SH redeem script, which is to be
# unserialized and checked explicitly when validating blocks.
# NOTE: checks Integer instead of the legacy Fixnum constant (removed in
# Ruby 3.2); behavior is unchanged for opcode-sized values.
def sigops_count_accurate(is_accurate)
  count = 0
  last_opcode = nil
  @chunks.each do |chunk| # pushdata or opcode
    if chunk == OP_CHECKSIG || chunk == OP_CHECKSIGVERIFY
      count += 1
    elsif chunk == OP_CHECKMULTISIG || chunk == OP_CHECKMULTISIGVERIFY
      if is_accurate && last_opcode.is_a?(Integer) && last_opcode >= OP_1 && last_opcode <= OP_16
        count += ::Bitcoin::Script.decode_OP_N(last_opcode)
      else
        count += 20
      end
    end
    last_opcode = chunk
  end
  count
end
# This method applies to a scriptSig that is an input for a p2sh output:
# it counts (in accurate mode) the sigops of the last data element the
# scriptSig pushes onto the stack — the redeem script.
# Bitcoind has a somewhat special way to return counts for invalid input
# scripts: 0 when an opcode can't be parsed or when it is over OP_16;
# any OP_{N} opcode also resets the candidate to 0-length data.
# See CScript::GetSigOpCount(const CScript& scriptSig) in bitcoind.
# NOTE: checks Integer instead of the legacy Fixnum constant (removed in
# Ruby 3.2); behavior is unchanged.
def sigops_count_for_p2sh
  return 0 if @chunks.size == 0
  data = nil
  @chunks.each do |chunk|
    case chunk
    when Integer
      data = ""
      return 0 if chunk > OP_16
    when String
      data = chunk
    end
  end
  return 0 if data == ""
  ::Bitcoin::Script.new(data).sigops_count_accurate(true)
end
# Converts OP_0 and OP_1..OP_16 opcode bytes into the integers 0..16.
# Returns nil for any other value.
# NOTE: checks Integer instead of the legacy Fixnum constant (removed in
# Ruby 3.2); behavior is unchanged for opcode values.
def self.decode_OP_N(opcode)
  return 0 if opcode == OP_0
  if opcode.is_a?(Integer) && opcode >= OP_1 && opcode <= OP_16
    opcode - (OP_1 - 1)
  else
    nil
  end
end
## OPCODES
# Opcode handler methods, each operating on @stack / @stack_alt / @exec_stack.

# OP_NOP and the reserved OP_NOP1..OP_NOP10 do nothing.
def op_nop; end
def op_nop1; end
def op_nop2; end
def op_nop3; end
def op_nop4; end
def op_nop5; end
def op_nop6; end
def op_nop7; end
def op_nop8; end
def op_nop9; end
def op_nop10; end
# OP_DUP: duplicates the top stack item. Items that cannot be dup'ed
# are pushed as-is.
def op_dup
  top = @stack[-1]
  @stack << (top.dup rescue top)
end
# OP_SHA256: replaces the top stack item with its SHA-256 digest.
def op_sha256
  @stack << Digest::SHA256.digest(pop_string)
end

# OP_SHA1: replaces the top stack item with its SHA-1 digest.
def op_sha1
  @stack << Digest::SHA1.digest(pop_string)
end

# OP_HASH160: SHA-256 followed by RIPEMD-160 (bitcoin's HASH160).
def op_hash160
  @stack << Digest::RMD160.digest(Digest::SHA256.digest(pop_string))
end

# OP_RIPEMD160: replaces the top stack item with its RIPEMD-160 digest.
def op_ripemd160
  @stack << Digest::RMD160.digest(pop_string)
end

# OP_HASH256: double SHA-256 (bitcoin's HASH256).
def op_hash256
  @stack << Digest::SHA256.digest(Digest::SHA256.digest(pop_string))
end
# OP_TOALTSTACK: moves the top of the main stack onto the alt stack.
def op_toaltstack
  @stack_alt.push(@stack.pop)
end

# OP_FROMALTSTACK: moves the top of the alt stack back onto the main stack.
def op_fromaltstack
  @stack.push(@stack_alt.pop)
end

# OP_TUCK: copies the top item and inserts it below the second-to-top item.
def op_tuck
  top, second = @stack[-1], @stack[-2]
  @stack[-2..-1] = [top, second, top]
end

# OP_SWAP: swaps the top two stack items (no-op with fewer than two items).
def op_swap
  if @stack[-2]
    @stack[-1], @stack[-2] = @stack[-2], @stack[-1]
  end
end
# OP_BOOLAND: pushes 1 if both inputs are non-zero, else 0.
def op_booland
  x, y = pop_int(2)
  @stack << (x != 0 && y != 0 ? 1 : 0)
end

# OP_BOOLOR: pushes 1 if either input is non-zero, else 0.
def op_boolor
  x, y = pop_int(2)
  @stack << (x.zero? && y.zero? ? 0 : 1)
end

# OP_ADD: pushes a + b.
def op_add
  x, y = pop_int(2)
  @stack << x + y
end

# OP_SUB: pushes a - b.
def op_sub
  x, y = pop_int(2)
  @stack << x - y
end

# OP_LESSTHAN: pushes 1 if a < b, else 0.
def op_lessthan
  x, y = pop_int(2)
  @stack << (x < y ? 1 : 0)
end

# OP_LESSTHANOREQUAL: pushes 1 if a <= b, else 0.
def op_lessthanorequal
  x, y = pop_int(2)
  @stack << (x <= y ? 1 : 0)
end

# OP_GREATERTHAN: pushes 1 if a > b, else 0.
def op_greaterthan
  x, y = pop_int(2)
  @stack << (x > y ? 1 : 0)
end

# OP_GREATERTHANOREQUAL: pushes 1 if a >= b, else 0.
def op_greaterthanorequal
  x, y = pop_int(2)
  @stack << (x >= y ? 1 : 0)
end
# OP_NOT: 0 becomes 1; any other input becomes 0.
def op_not
  @stack << (pop_int.zero? ? 1 : 0)
end

# OP_0NOTEQUAL: 0 stays 0; any other input becomes 1.
def op_0notequal
  @stack << (pop_int.zero? ? 0 : 1)
end

# OP_ABS: pushes the absolute value of the input.
def op_abs
  @stack << pop_int.abs
end

# OP_2DIV: arithmetic shift right by one. Listed in DISABLED_OPCODES.
def op_2div
  @stack << (pop_int >> 1)
end

# OP_2MUL: shift left by one. Listed in DISABLED_OPCODES.
def op_2mul
  @stack << (pop_int << 1)
end

# OP_1ADD: pushes input + 1.
def op_1add
  @stack << pop_int + 1
end

# OP_1SUB: pushes input - 1.
def op_1sub
  @stack << pop_int - 1
end

# OP_NEGATE: pushes the input with its sign flipped.
def op_negate
  @stack << -pop_int
end
# OP_DROP: removes the top stack item.
def op_drop
  @stack.pop
end

# OP_EQUAL: pushes 1 if the inputs are exactly equal, 0 otherwise.
# NOTE(review): operands go through pop_int, so equality is numeric —
# byte strings are cast to integers before comparing.
def op_equal
  #a, b = @stack.pop(2)
  a, b = pop_int(2)
  @stack << (a == b ? 1 : 0)
end

# OP_VERIFY: marks transaction as invalid if top stack value is not true.
# True is removed, but false is not.
# NOTE(review): a successful verify resets @script_invalid to false,
# which clears any earlier invalid mark — preserved as-is; confirm intended.
def op_verify
  res = pop_int
  if res == 0
    @stack << res
    @script_invalid = true # raise 'transaction invalid' ?
  else
    @script_invalid = false
  end
end

# OP_EQUALVERIFY: same as OP_EQUAL, but runs OP_VERIFY afterward.
def op_equalverify
  op_equal; op_verify
end
# OP_0 / OP_FALSE: pushes an empty byte string (the canonical false value).
def op_0
  @stack << ""
end

# OP_1 / OP_TRUE: pushes the number 1.
def op_1
  @stack << 1
end

# OP_MIN: pushes the smaller of the top two numeric items.
def op_min
  x, y = pop_int(2)
  @stack << (x <= y ? x : y)
end

# OP_MAX: pushes the larger of the top two numeric items.
def op_max
  x, y = pop_int(2)
  @stack << (x >= y ? x : y)
end
# OP_2OVER: copies the pair of items two spaces back in the stack to the
# top (the second push sees the stack already grown by one, so [-4] picks
# the right element both times).
def op_2over
  2.times { @stack << @stack[-4] }
end

# OP_2SWAP: swaps the top two pairs of items.
def op_2swap
  top_pair = @stack.pop(2)
  lower_pair = @stack.pop(2)
  @stack.concat(top_pair).concat(lower_pair)
end
# OP_IFDUP: duplicates the top stack item only when it is non-zero.
def op_ifdup
  top = @stack.last
  @stack << top unless cast_to_bignum(top).zero?
end

# OP_1NEGATE: pushes -1.
def op_1negate
  @stack.push(-1)
end

# OP_DEPTH: pushes the current number of stack items.
def op_depth
  @stack.push(@stack.size)
end
# OP_WITHIN: pushes 1 if x lies in [min, max) (left-inclusive), else 0.
# Stack order (bottom to top): x, min, max.
def op_within
  x, lower, upper = pop_int(3)
  @stack << (lower <= x && x < upper ? 1 : 0)
end

# OP_NUMEQUAL: pushes 1 if the two numbers are equal, else 0.
def op_numequal
  x, y = pop_int(2)
  @stack << (x == y ? 1 : 0)
end

# OP_NUMNOTEQUAL: pushes 1 if the two numbers differ, else 0.
def op_numnotequal
  x, y = pop_int(2)
  @stack << (x == y ? 0 : 1)
end

# OP_RETURN: marks the transaction as invalid.
def op_return
  @script_invalid = true
  nil
end
# OP_OVER: copies the second-to-top stack item to the top
# (no-op when there is no such item).
def op_over
  second = @stack[-2]
  @stack.push(second) if second
end
# OP_IF: if the top stack value is not 0, the following statements are
# executed. The top stack value is removed.
# NOTE(review): only the exact value 1 counts as true here (pop_int == 1);
# bitcoind casts any non-zero value to true — confirm before changing.
def op_if
  value = false
  if @do_exec
    return if @stack.size < 1
    value = pop_int == 1 ? true : false
  end
  @exec_stack << value
end

# OP_NOTIF: like OP_IF but executes the branch when the top value IS 0.
def op_notif
  value = false
  if @do_exec
    return if @stack.size < 1
    value = pop_int == 1 ? false : true
  end
  @exec_stack << value
end

# OP_ELSE: toggles the current branch — it executes iff the matching
# OP_IF / OP_NOTIF / previous OP_ELSE branch did not.
def op_else
  return if @exec_stack.empty?
  @exec_stack[-1] = !@exec_stack[-1]
end

# OP_ENDIF: ends an if/else block.
def op_endif
  return if @exec_stack.empty?
  @exec_stack.pop
end
# OP_PICK: copies the item n back in the stack to the top
# (n is popped first; no-op when the item does not exist).
def op_pick
  depth = pop_int
  item = @stack[-(depth + 1)]
  @stack << item if item
end

# OP_ROLL: moves the item n back in the stack to the top
# (n is popped first; no-op when the item does not exist).
def op_roll
  depth = pop_int
  idx = -(depth + 1)
  item = @stack[idx]
  if item
    @stack.delete_at(idx)
    @stack << item
  end
end
# OP_ROT: rotates the top three items to the left (third-from-top moves
# to the top). No-op with fewer than three items.
def op_rot
  return if @stack.size < 3
  third = @stack.delete_at(-3)
  @stack << third
end

# OP_2DROP: removes the top two stack items.
def op_2drop
  @stack.pop(2)
end

# OP_2DUP: duplicates the top two stack items.
def op_2dup
  pair = @stack[-2..-1]
  @stack.concat(pair) if pair
end

# OP_3DUP: duplicates the top three stack items.
def op_3dup
  triple = @stack[-3..-1]
  @stack.concat(triple) if triple
end

# OP_NIP: removes the second-to-top stack item.
def op_nip
  @stack.delete_at(-2)
end
# Returns the length of the input string.
def op_size
item = @stack[-1]
size = case item
when String; item.bytesize
when Numeric; OpenSSL::BN.new(item.to_s).to_mpi.size - 4
end
@stack << size
end
# OP_VER: invalidates the transaction, but only when executed; inside an
# unexecuted OP_IF branch it is a no-op.
def op_ver
  return unless @do_exec
  invalid
end
# pop one item (or +count+ items) off the stack, cast to Integer(s)
def pop_int(count=nil)
  return cast_to_bignum(@stack.pop) unless count
  @stack.pop(count).map{|i| cast_to_bignum(i) }
end

# pop one item (or +count+ items) off the stack, cast to String(s)
def pop_string(count=nil)
  return cast_to_string(@stack.pop) unless count
  @stack.pop(count).map{|i| cast_to_string(i) }
end

# cast a stack item to an Integer. Byte strings are interpreted as an
# OpenSSL MPI: a big-endian 4-byte length header is prepended and the
# little-endian payload reversed. A nil item marks the script invalid
# and yields 0.
def cast_to_bignum(buf)
  return (invalid; 0) unless buf
  case buf
  when Numeric; buf
  when String; OpenSSL::BN.new([buf.bytesize].pack("N") + buf.reverse, 0).to_i
  else; raise TypeError, 'cast_to_bignum: failed to cast: %s (%s)' % [buf, buf.class]
  end
end

# cast a stack item to a String. Numbers are serialized via OpenSSL MPI
# with the 4-byte header stripped. A nil item marks the script invalid
# and yields "".
def cast_to_string(buf)
  return (invalid; "") unless buf
  case buf
  when Numeric; OpenSSL::BN.new(buf.to_s).to_s(0)[4..-1]
  when String; buf;
  else; raise TypeError, 'cast_to_string: failed to cast: %s (%s)' % [buf, buf.class]
  end
end
# OP_NUMEQUALVERIFY: same as OP_NUMEQUAL, but runs OP_VERIFY afterward.
def op_numequalverify
  op_numequal; op_verify
end

# All of the signature checking words will only match signatures
# to the data after the most recently-executed OP_CODESEPARATOR.
def op_codeseparator
  @codehash_start = @chunks.size - @chunks.reverse.index(OP_CODESEPARATOR)
  @last_codeseparator_index = @chunk_last_index
end

# script string and its hash160, covering the chunks from the last
# codeseparator up to (but not including) the given +opcode+.
# Presumably an OP_CHECKHASHVERIFY (BIP-17 style) helper — confirm usage.
def codehash_script(opcode)
  # CScript scriptCode(pbegincodehash, pend);
  script = to_string(@chunks[(@codehash_start||0)...@chunks.size-@chunks.reverse.index(opcode)])
  checkhash = Bitcoin.hash160(Bitcoin::Script.binary_from_string(script).unpack("H*")[0])
  [script, checkhash]
end
# do a CHECKSIG operation on the current stack,
# asking +check_callback+ to do the actual signature verification.
# This is used by Protocol::Tx#verify_input_signature.
# Pushes 1 on success, 0 on failure; with no callback (tests) always 1.
def op_checksig(check_callback)
  return invalid if @stack.size < 2
  pubkey = cast_to_string(@stack.pop)
  #return (@stack << 0) unless Bitcoin::Script.is_canonical_pubkey?(pubkey) # only for isStandard
  drop_sigs = [ cast_to_string(@stack[-1]) ] # the signature gets dropped from the sighash script
  signature = cast_to_string(@stack.pop)
  #return (@stack << 0) unless Bitcoin::Script.is_canonical_signature?(signature) # only for isStandard
  return (@stack << 0) if signature == ""
  sig, hash_type = parse_sig(signature)
  subscript = sighash_subscript(drop_sigs)
  if check_callback == nil # for tests
    @stack << 1
  else # real signature check callback
    @stack <<
    ((check_callback.call(pubkey, sig, hash_type, subscript) == true) ? 1 : 0)
  end
end

# the script serialization covered by the signature hash: the inner p2sh
# script code when present, otherwise this script, minus the given
# signatures and any codeseparators.
def sighash_subscript(drop_sigs)
  if inner_p2sh? && @inner_script_code
    ::Bitcoin::Script.new(@inner_script_code).to_binary_without_signatures(drop_sigs)
  else
    to_binary_without_signatures(drop_sigs)
  end
end

# Same as OP_CHECKSIG, but OP_VERIFY is executed afterward.
def op_checksigverify(check_callback)
  op_checksig(check_callback)
  op_verify
end
# do a CHECKMULTISIG operation on the current stack,
# asking +check_callback+ to do the actual signature verification.
#
# CHECKMULTISIG does a m-of-n signatures verification on scripts of the form:
#  0 <sig1> <sig2> | 2 <pub1> <pub2> 2 OP_CHECKMULTISIG
#  0 <sig1> <sig2> | 2 <pub1> <pub2> <pub3> 3 OP_CHECKMULTISIG
#  0 <sig1> <sig2> <sig3> | 3 <pub1> <pub2> <pub3> 3 OP_CHECKMULTISIG
#
# see https://en.bitcoin.it/wiki/BIP_0011 for details.
# see https://github.com/bitcoin/bitcoin/blob/master/src/script.cpp#L931
#
# TODO: validate signature order
# TODO: take global opcode count
def op_checkmultisig(check_callback)
  return invalid if @stack.size < 1
  n_pubkeys = pop_int
  return invalid unless (0..20).include?(n_pubkeys) # at most 20 keys
  #return invalid if (nOpCount += n_pubkeys) > 201
  return invalid if @stack.size < n_pubkeys
  pubkeys = pop_string(n_pubkeys)
  return invalid if @stack.size < 1
  n_sigs = pop_int
  return invalid if n_sigs < 0 || n_sigs > n_pubkeys
  return invalid if @stack.size < n_sigs
  sigs = pop_string(n_sigs)
  drop_sigs = sigs.dup
  # Bitcoin-core removes an extra item from the stack (the well-known
  # CHECKMULTISIG off-by-one, reproduced for consensus compatibility)
  @stack.pop
  subscript = sighash_subscript(drop_sigs)
  success = true
  # each signature must match some remaining pubkey, consuming keys in
  # order; once fewer keys than signatures remain, verification fails
  while success && n_sigs > 0
    sig, pub = sigs.pop, pubkeys.pop
    unless sig && sig.size > 0
      success = false
      break
    end
    signature, hash_type = parse_sig(sig)
    if pub.size > 0 && check_callback.call(pub, signature, hash_type, subscript)
      n_sigs -= 1
    else
      sigs << sig
    end
    n_pubkeys -= 1
    success = false if n_sigs > n_pubkeys
  end
  @stack << (success ? 1 : 0)
end

# Same as OP_CHECKMULTISIG, but OP_VERIFY is executed afterward.
def op_checkmultisigverify(check_callback)
  op_checkmultisig(check_callback)
  op_verify
end
# op_eval: https://en.bitcoin.it/wiki/BIP_0012
# the BIP was never accepted and must be handled as old OP_NOP1
# NOTE(review): re-defines the op_nop1 from further up; both are empty,
# so the duplicate definition is harmless.
def op_nop1
end

# opcode byte => handler method symbol, built by matching the op_*
# methods defined above against the OPCODES name table
# (OP_0/OP_1 are special-cased since their table entries are "0"/"1").
OPCODES_METHOD = Hash[*instance_methods.grep(/^op_/).map{|m|
  [ (OPCODES.find{|k,v| v == m.to_s.upcase }.first rescue nil), m ]
}.flatten]
OPCODES_METHOD[0] = :op_0
OPCODES_METHOD[81] = :op_1
# Checks whether +pubkey+ (binary) is canonically encoded: 65 bytes with
# an 0x04 prefix (uncompressed) or 33 bytes with an 0x02/0x03 prefix
# (compressed).
def self.is_canonical_pubkey?(pubkey)
  size = pubkey.bytesize
  return false if size < 33 # too short for any valid encoding
  case pubkey[0]
  when "\x04" then size == 65          # uncompressed
  when "\x02", "\x03" then size == 33  # compressed
  else false # neither compressed nor uncompressed prefix
  end
end
# Signature hash type flags carried in the byte appended to DER signatures.
SIGHASH_TYPE = { all: 1, none: 2, single: 3, anyonecanpay: 128 }

# Checks basic canonical-form rules for +sig+ (a DER signature with a
# trailing hashtype byte). Only a subset of bitcoind's checks is
# implemented here.
def self.is_canonical_signature?(sig)
  size = sig.bytesize
  return false if size < 9   # Non-canonical signature: too short
  return false if size > 73  # Non-canonical signature: too long
  bytes = sig.unpack("C*")
  hash_type = bytes[-1] & (~(SIGHASH_TYPE[:anyonecanpay]))
  # hashtype (with the ANYONECANPAY bit masked off) must be ALL..SINGLE
  return false unless (SIGHASH_TYPE[:all]..SIGHASH_TYPE[:single]).include?(hash_type)
  return false unless bytes[0] == 0x30           # DER sequence tag
  return false unless bytes[1] == bytes.size - 3 # DER length marker
  # TODO: add/port rest from bitcoind
  true
end
private

# Splits a signature blob into the DER signature proper and the trailing
# hashtype byte (e.g. SIGHASH_ALL = 1).
def parse_sig(sig)
  hash_type = sig[-1].unpack("C")[0]
  sig = sig[0...-1]
  return sig, hash_type
end
end
Add support for multisig signers to sign separately
# encoding: ascii-8bit
require 'bitcoin'
class Bitcoin::Script
OP_0 = 0
OP_FALSE = 0
OP_1 = 81
OP_TRUE = 81
OP_2 = 0x52
OP_3 = 0x53
OP_4 = 0x54
OP_5 = 0x55
OP_6 = 0x56
OP_7 = 0x57
OP_8 = 0x58
OP_9 = 0x59
OP_10 = 0x5a
OP_11 = 0x5b
OP_12 = 0x5c
OP_13 = 0x5d
OP_14 = 0x5e
OP_15 = 0x5f
OP_16 = 0x60
OP_PUSHDATA0 = 0
OP_PUSHDATA1 = 76
OP_PUSHDATA2 = 77
OP_PUSHDATA4 = 78
OP_PUSHDATA_INVALID = 238 # 0xEE
OP_NOP = 97
OP_DUP = 118
OP_HASH160 = 169
OP_EQUAL = 135
OP_VERIFY = 105
OP_EQUALVERIFY = 136
OP_CHECKSIG = 172
OP_CHECKSIGVERIFY = 173
OP_CHECKMULTISIG = 174
OP_CHECKMULTISIGVERIFY = 175
OP_TOALTSTACK = 107
OP_FROMALTSTACK = 108
OP_TUCK = 125
OP_SWAP = 124
OP_BOOLAND = 154
OP_ADD = 147
OP_SUB = 148
OP_GREATERTHANOREQUAL = 162
OP_DROP = 117
OP_HASH256 = 170
OP_SHA256 = 168
OP_SHA1 = 167
OP_RIPEMD160 = 166
OP_NOP1 = 176
OP_NOP2 = 177
OP_NOP3 = 178
OP_NOP4 = 179
OP_NOP5 = 180
OP_NOP6 = 181
OP_NOP7 = 182
OP_NOP8 = 183
OP_NOP9 = 184
OP_NOP10 = 185
OP_CODESEPARATOR = 171
OP_MIN = 163
OP_MAX = 164
OP_2OVER = 112
OP_2SWAP = 114
OP_IFDUP = 115
OP_DEPTH = 116
OP_1NEGATE = 79
OP_WITHIN = 165
OP_NUMEQUAL = 156
OP_NUMEQUALVERIFY = 157
OP_LESSTHAN = 159
OP_LESSTHANOREQUAL = 161
OP_GREATERTHAN = 160
OP_NOT = 145
OP_0NOTEQUAL = 146
OP_ABS = 144
OP_1ADD = 139
OP_1SUB = 140
OP_NEGATE = 143
OP_BOOLOR = 155
OP_NUMNOTEQUAL = 158
OP_RETURN = 106
OP_OVER = 120
OP_IF = 99
OP_NOTIF = 100
OP_ELSE = 103
OP_ENDIF = 104
OP_PICK = 121
OP_SIZE = 130
OP_VER = 98
OP_ROLL = 122
OP_ROT = 123
OP_2DROP = 109
OP_2DUP = 110
OP_3DUP = 111
OP_NIP = 119
OP_CAT = 126
OP_SUBSTR = 127
OP_LEFT = 128
OP_RIGHT = 129
OP_INVERT = 131
OP_AND = 132
OP_OR = 133
OP_XOR = 134
OP_2MUL = 141
OP_2DIV = 142
OP_MUL = 149
OP_DIV = 150
OP_MOD = 151
OP_LSHIFT = 152
OP_RSHIFT = 153
OP_INVALIDOPCODE = 0xff
OPCODES = Hash[*constants.grep(/^OP_/).map{|i| [const_get(i), i.to_s] }.flatten]
OPCODES[0] = "0"
OPCODES[81] = "1"
OPCODES_ALIAS = {
"OP_TRUE" => OP_1,
"OP_FALSE" => OP_0,
"OP_EVAL" => OP_NOP1,
"OP_CHECKHASHVERIFY" => OP_NOP2,
}
DISABLED_OPCODES = [
OP_CAT, OP_SUBSTR, OP_LEFT, OP_RIGHT, OP_INVERT,
OP_AND, OP_OR, OP_XOR, OP_2MUL, OP_2DIV, OP_MUL,
OP_DIV, OP_MOD, OP_LSHIFT, OP_RSHIFT
]
OP_2_16 = (82..96).to_a
OPCODES_PARSE_BINARY = {}
OPCODES.each{|k,v| OPCODES_PARSE_BINARY[k] = v }
OP_2_16.each{|i| OPCODES_PARSE_BINARY[i] = (OP_2_16.index(i)+2).to_s }
OPCODES_PARSE_STRING = {}
OPCODES.each{|k,v| OPCODES_PARSE_STRING[v] = k }
OPCODES_ALIAS.each{|k,v| OPCODES_PARSE_STRING[k] = v }
2.upto(16).each{|i| OPCODES_PARSE_STRING["OP_#{i}"] = OP_2_16[i-2] }
2.upto(16).each{|i| OPCODES_PARSE_STRING["#{i}" ] = OP_2_16[i-2] }
[1,2,4].each{|i| OPCODES_PARSE_STRING.delete("OP_PUSHDATA#{i}") }
SIGHASH_TYPE = { all: 1, none: 2, single: 3, anyonecanpay: 128 }
attr_reader :raw, :chunks, :debug
# create a new script. +bytes+ is typically input_script + output_script
#
# When +previous_output_script+ is given, the two scripts are joined with an
# OP_CODESEPARATOR byte so both halves run as one program (legacy
# verification style).
def initialize(input_script, previous_output_script=nil)
  @raw = if previous_output_script
    input_script + [ Bitcoin::Script::OP_CODESEPARATOR ].pack("C") + previous_output_script
  else
    input_script
  end
  # main stack, alt stack, and the stack of nested OP_IF execution flags
  @stack, @stack_alt, @exec_stack = [], [], []
  @chunks = parse(@raw)
  @last_codeseparator_index = 0
  @do_exec = true
end
# NOTE(review): monkey-patches core String. Parsed pushdata chunks are tagged
# with the pushdata opcode they came from so serialization can round-trip
# non-minimal encodings. This leaks two accessors into every String in the
# process — a Refinement would be the modern alternative.
class ::String
  attr_accessor :bitcoin_pushdata
  attr_accessor :bitcoin_pushdata_length
end
# parse raw script
#
# Splits +bytes+ into an array of chunks: Integers for opcodes and binary
# Strings for pushdata payloads. Non-minimal pushdata encodings are recorded
# on the chunk (see the ::String monkey-patch) so to_binary can reproduce
# the exact original bytes.
def parse(bytes, offset=0)
  program = bytes.unpack("C*")
  chunks = []
  until program.empty?
    opcode = program.shift
    # direct push: opcode byte itself is the data length (1..75)
    if (opcode > 0) && (opcode < OP_PUSHDATA1)
      len, tmp = opcode, program[0]
      chunks << program.shift(len).pack("C*")
      # 0x16 = 22 due to OP_2_16 from_string parsing
      if len == 1 && tmp <= 22
        chunks.last.bitcoin_pushdata = OP_PUSHDATA0
        chunks.last.bitcoin_pushdata_length = len
      else
        raise "invalid OP_PUSHDATA0" if len != chunks.last.bytesize
      end
    elsif (opcode == OP_PUSHDATA1)
      len = program.shift(1)[0]
      chunks << program.shift(len).pack("C*")
      # non-canonical length (could have used a shorter encoding) — tag the
      # chunk so serialization round-trips; canonical lengths are validated.
      unless len > OP_PUSHDATA1 && len <= 0xff
        chunks.last.bitcoin_pushdata = OP_PUSHDATA1
        chunks.last.bitcoin_pushdata_length = len
      else
        raise "invalid OP_PUSHDATA1" if len != chunks.last.bytesize
      end
    elsif (opcode == OP_PUSHDATA2)
      len = program.shift(2).pack("C*").unpack("v")[0]   # 16-bit little-endian
      chunks << program.shift(len).pack("C*")
      unless len > 0xff && len <= 0xffff
        chunks.last.bitcoin_pushdata = OP_PUSHDATA2
        chunks.last.bitcoin_pushdata_length = len
      else
        raise "invalid OP_PUSHDATA2" if len != chunks.last.bytesize
      end
    elsif (opcode == OP_PUSHDATA4)
      len = program.shift(4).pack("C*").unpack("V")[0]   # 32-bit little-endian
      chunks << program.shift(len).pack("C*")
      unless len > 0xffff # && len <= 0xffffffff
        chunks.last.bitcoin_pushdata = OP_PUSHDATA4
        chunks.last.bitcoin_pushdata_length = len
      else
        raise "invalid OP_PUSHDATA4" if len != chunks.last.bytesize
      end
    else
      chunks << opcode
    end
  end
  chunks
# NOTE(review): rescue Exception is deliberate here — any unparseable script
# must not crash the node; it is kept as a single opaque chunk instead.
rescue Exception => ex
  # bail out! #run returns false but serialization roundtrips still create the right payload.
  @parse_invalid = true
  c = bytes.unpack("C*").pack("C*")
  c.bitcoin_pushdata = OP_PUSHDATA_INVALID
  c.bitcoin_pushdata_length = c.bytesize
  chunks = [ c ]
end
# string representation of the script
#
# Renders each chunk: opcodes via OPCODES_PARSE_BINARY (unknown ones as
# "(opcode-N)"), pushdata as hex, prefixed with "opcode:length:" when the
# chunk was parsed from a non-minimal pushdata encoding.
#
# Fix: match numeric chunks with Integer — Fixnum was removed in Ruby 3.2
# and Integer matches every value Fixnum did.
def to_string(chunks=nil)
  string = ""
  (chunks || @chunks).each.with_index{|i,idx|
    string << " " unless idx == 0
    string << case i
    when Integer
      if opcode = OPCODES_PARSE_BINARY[i]
        opcode
      else
        "(opcode-#{i})"
      end
    when String
      if i.bitcoin_pushdata
        "#{i.bitcoin_pushdata}:#{i.bitcoin_pushdata_length}:".force_encoding('binary') + i.unpack("H*")[0]
      else
        i.unpack("H*")[0]
      end
    end
  }
  string
end
# Serializes chunks back into a raw binary script: opcode chunks become
# single bytes, string chunks are wrapped in pushdata framing.
#
# Fix: Fixnum (removed in Ruby 3.2) -> Integer.
def to_binary(chunks=nil)
  (chunks || @chunks).map{|chunk|
    case chunk
    when Integer; [chunk].pack("C*")
    when String; self.class.pack_pushdata(chunk)
    end
  }.join
end
alias :to_payload :to_binary
# Serializes the script with all chunks listed in +drop_signatures+ removed
# and OP_CODESEPARATOR handling applied: everything up to (and including)
# the codeseparator most recently executed by #run is discarded, and any
# remaining codeseparators are skipped. Used to build the signature-hash
# subscript (see sighash_subscript).
def to_binary_without_signatures(drop_signatures, chunks=nil)
  buf = []
  (chunks || @chunks).each.with_index{|chunk,idx|
    if chunk == OP_CODESEPARATOR and idx <= @last_codeseparator_index
      buf.clear
    elsif chunk == OP_CODESEPARATOR
      # skip
    elsif drop_signatures.none?{|e| e == chunk }
      buf << chunk
    end
  }
  to_binary(buf)
end
# Adds opcode (OP_0, OP_1, ... OP_CHECKSIG etc.)
# Returns self.
#
# Fix: type-check against Integer — Fixnum was removed in Ruby 3.2, so the
# original check itself raised NameError on modern Ruby.
def append_opcode(opcode)
  raise "Opcode should be an Integer" unless opcode.is_a?(Integer)
  if opcode >= OP_0 && opcode <= 0xff
    @chunks << opcode
  else
    raise "Opcode should be within [0x00, 0xff]"
  end
  self
end
# Adds binary string as pushdata. Pushdata will be encoded in the most compact form
# (unless the string contains internal info about serialization that's added by Script class)
# Returns self.
def append_pushdata(pushdata_string)
  raise "Pushdata should be a string" unless pushdata_string.is_a?(String)
  @chunks.push(pushdata_string)
  self
end
# Wraps +data+ in pushdata framing. If the chunk was tagged during #parse
# (bitcoin_pushdata accessor on String), the original — possibly
# non-minimal — encoding is reproduced; otherwise the smallest encoding
# that fits the size is chosen.
def self.pack_pushdata(data)
  size = data.bytesize
  if data.bitcoin_pushdata
    size = data.bitcoin_pushdata_length
    pack_pushdata_align(data.bitcoin_pushdata, size, data)
  else
    head = if size < OP_PUSHDATA1
      [size].pack("C")                    # direct push, length in the opcode byte
    elsif size <= 0xff
      [OP_PUSHDATA1, size].pack("CC")
    elsif size <= 0xffff
      [OP_PUSHDATA2, size].pack("Cv")     # 16-bit little-endian length
    #elsif size <= 0xffffffff
    else
      [OP_PUSHDATA4, size].pack("CV")     # 32-bit little-endian length
    end
    head + data
  end
end
# Frames +data+ with a specific pushdata opcode and (possibly non-canonical)
# declared length +len+ — used to round-trip scripts exactly as parsed.
# OP_PUSHDATA_INVALID chunks are emitted verbatim with no framing.
def self.pack_pushdata_align(pushdata, len, data)
  case pushdata
  when OP_PUSHDATA1
    [OP_PUSHDATA1, len].pack("CC") + data
  when OP_PUSHDATA2
    [OP_PUSHDATA2, len].pack("Cv") + data
  when OP_PUSHDATA4
    [OP_PUSHDATA4, len].pack("CV") + data
  when OP_PUSHDATA_INVALID
    data
  else # OP_PUSHDATA0
    [len].pack("C") + data
  end
end
# script object of a string representation
def self.from_string(script_string)
  new(binary_from_string(script_string))
end

# Raised when the string form names an OP_* opcode this class does not know.
class ScriptOpcodeError < StandardError; end

# raw script binary of a string representation
#
# Each whitespace-separated token is either an opcode name, a decimal number
# (OP_2..OP_16 / "(opcode-N)" forms), a "pushdata:length:hex" triple, or
# bare hex data that gets minimal pushdata framing.
#
# Fix: Fixnum (removed in Ruby 3.2) -> Integer.
def self.binary_from_string(script_string)
  buf = ""
  script_string.split(" ").each{|i|
    i = if opcode = OPCODES_PARSE_STRING[i]
      opcode
    else
      case i
      when /OP_PUSHDATA/ # skip
      when /OP_(.+)$/; raise ScriptOpcodeError, "#{i} not defined!"
      when /\(opcode\-(\d+)\)/; $1.to_i
      when "(opcode"; # skip # fix invalid opcode parsing
      when /^(\d+)\)/; $1.to_i # fix invalid opcode parsing
      when /(\d+):(\d+):(.+)?/
        pushdata, len, data = $1.to_i, $2.to_i, $3
        pack_pushdata_align(pushdata, len, [data].pack("H*"))
      else
        data = [i].pack("H*")
        pack_pushdata(data)
      end
    end
    buf << if i.is_a?(Integer)
      # opcodes above 255 are encoded via OpenSSL big numbers (rare/legacy path)
      i < 256 ? [i].pack("C") : [OpenSSL::BN.new(i.to_s,10).to_hex].pack("H*")
    else
      i
    end if i
  }
  buf
end
# Whether execution has flagged this script invalid.
# Lazily initializes the flag to false on first query.
def invalid?
  @script_invalid = false unless @script_invalid
  @script_invalid
end
# run the script. +check_callback+ is called for OP_CHECKSIG operations
#
# Interprets the parsed chunks: opcodes dispatch to op_* methods, pushdata
# chunks are pushed onto the stack. Returns true when the script leaves a
# truthy top-of-stack value. Collects a step-by-step trace in @debug.
#
# Fixes: Fixnum (removed in Ruby 3.2) -> Integer; "unkown" typo in the
# unimplemented-opcode messages.
def run(block_timestamp=Time.now.to_i, &check_callback)
  return false if @parse_invalid

  #p [to_string, block_timestamp, is_p2sh?]
  @script_invalid = true if @raw.bytesize > 10_000   # consensus size limit

  @last_codeseparator_index = 0

  if block_timestamp >= 1333238400 # Pay to Script Hash (BIP 0016)
    return pay_to_script_hash(check_callback) if is_p2sh?
  end

  @debug = []
  @chunks.each.with_index{|chunk,idx|
    break if invalid?

    @chunk_last_index = idx
    @debug << @stack.map{|i| i.unpack("H*") rescue i}

    # skip opcodes inside a false OP_IF branch (flow-control ops still run)
    @do_exec = @exec_stack.count(false) == 0 ? true : false
    #p [@stack, @do_exec]

    case chunk
    when Integer
      if DISABLED_OPCODES.include?(chunk)
        @script_invalid = true
        @debug << "DISABLED_#{OPCODES[chunk]}"
        break
      end

      next @debug.pop unless (@do_exec || (OP_IF <= chunk && chunk <= OP_ENDIF))

      case chunk
      when *OPCODES_METHOD.keys
        m = method( n=OPCODES_METHOD[chunk] )
        @debug << n.to_s.upcase
        # invoke opcode method; arity 1 means it takes the checksig callback
        (m.arity == 1) ? m.call(check_callback) : m.call
      when *OP_2_16
        @stack << OP_2_16.index(chunk) + 2
        @debug << "OP_#{chunk-80}"
      else
        name = OPCODES[chunk] || chunk
        puts "Bitcoin::Script: opcode #{name} unknown or not implemented\n#{to_string.inspect}"
        raise "opcode #{name} unknown or not implemented"
      end
    when String
      if @do_exec
        @debug << "PUSH DATA #{chunk.unpack("H*")[0]}"
        @stack << chunk
      else
        @debug.pop
      end
    end
  }
  @debug << @stack.map{|i| i.unpack("H*") rescue i } #if @do_exec

  if @script_invalid
    @stack << 0
    @debug << "INVALID TRANSACTION"
  end

  @debug << "RESULT"
  return false if @stack.empty?
  return false if [0, ''].include?(@stack.pop)
  true
end
# Flags the script as invalid and returns nil (so opcode handlers can
# `return invalid` as an early bail-out).
def invalid
  @script_invalid = true
  nil
end
# Rebuilds +script_pubkey+ with every chunk listed in +drop_signatures+
# removed (round-trips through the string form).
def self.drop_signatures(script_pubkey, drop_signatures)
  script = new(script_pubkey).to_string.split(" ").delete_if{|c| drop_signatures.include?(c) }.join(" ")
  script_pubkey = binary_from_string(script)
end

# pay_to_script_hash: https://en.bitcoin.it/wiki/BIP_0016
#
# <sig> {<pub> OP_CHECKSIG} | OP_HASH160 <script_hash> OP_EQUAL
#
# Verifies the serialized script matches the hash committed in the output,
# then executes <rest> + the inner script as a fresh Script.
def pay_to_script_hash(check_callback)
  return false if @chunks.size < 4
  *rest, script, _, script_hash, _ = @chunks
  script = rest.pop if script == OP_CODESEPARATOR
  script, script_hash = cast_to_string(script), cast_to_string(script_hash)

  return false unless Bitcoin.hash160(script.unpack("H*")[0]) == script_hash.unpack("H*")[0]
  script = self.class.new(to_binary(rest) + script).inner_p2sh!(script)
  result = script.run(&check_callback)
  @debug = script.debug
  result
end
# Marks this script as the inner script of a P2SH redemption; stores the
# serialized inner script for sighash_subscript. Returns self.
def inner_p2sh!(script=nil); @inner_p2sh = true; @inner_script_code = script; self; end

# True when this instance was built by pay_to_script_hash.
def inner_p2sh?; @inner_p2sh; end

# get the inner p2sh script
#
# Same extraction/verification as pay_to_script_hash, but returns the raw
# serialized inner script (or nil) instead of executing it.
def inner_p2sh_script
  return nil if @chunks.size < 4
  *rest, script, _, script_hash, _ = @chunks
  script = rest.pop if script == OP_CODESEPARATOR
  script, script_hash = cast_to_string(script), cast_to_string(script_hash)

  return nil unless Bitcoin.hash160(script.unpack("H*")[0]) == script_hash.unpack("H*")[0]
  script
end
# True for the standard P2SH pattern: OP_HASH160 <20-byte hash> OP_EQUAL.
# Inner scripts of a P2SH redemption never count as P2SH themselves.
def is_pay_to_script_hash?
  return false if @inner_p2sh
  return false unless @chunks[-2].is_a?(String)
  @chunks.size >= 3 && @chunks[-3] == OP_HASH160 &&
    @chunks[-2].bytesize == 20 && @chunks[-1] == OP_EQUAL
end
alias :is_p2sh? :is_pay_to_script_hash?

# check if script is in one of the recognized standard formats
def is_standard?
  is_pubkey? || is_hash160? || is_multisig? || is_p2sh? || is_op_return?
end

# is this a pubkey script
# Pattern: <pubkey> OP_CHECKSIG
def is_pubkey?
  return false if @chunks.size != 2
  (@chunks[1] == OP_CHECKSIG) && @chunks[0] && (@chunks[0].is_a?(String)) && @chunks[0] != OP_RETURN
end
alias :is_send_to_ip? :is_pubkey?

# is this a hash160 (address) script
# Pattern: OP_DUP OP_HASH160 <20-byte hash> OP_EQUALVERIFY OP_CHECKSIG
def is_hash160?
  return false if @chunks.size != 5
  (@chunks[0..1] + @chunks[-2..-1]) ==
    [OP_DUP, OP_HASH160, OP_EQUALVERIFY, OP_CHECKSIG] &&
    @chunks[2].is_a?(String) && @chunks[2].bytesize == 20
end
# is this a multisig script
# Pattern: <m> <pubkey>... <n> OP_CHECKMULTISIG (n encoded as OP_1..OP_16).
#
# Fix: Fixnum (removed in Ruby 3.2) -> Integer for the n-count check.
def is_multisig?
  return false if @chunks.size < 4 || !@chunks[-2].is_a?(Integer)
  @chunks[-1] == OP_CHECKMULTISIG and get_multisig_pubkeys.all?{|c| c.is_a?(String) }
end
# is this an op_return script
# Pattern: OP_RETURN [<data>] — at most one optional data push.
def is_op_return?
  @chunks[0] == OP_RETURN && @chunks.size <= 2
end
# get type of this tx
# Returns one of :hash160, :pubkey, :multisig, :p2sh, :op_return, :unknown.
def type
  if is_hash160?;     :hash160
  elsif is_pubkey?;   :pubkey
  elsif is_multisig?; :multisig
  elsif is_p2sh?;     :p2sh
  elsif is_op_return?;:op_return
  else;               :unknown
  end
end

# get the public key for this pubkey script (hex), or nil when the script
# is not a pubkey script. A single-chunk script is treated as a bare pubkey.
def get_pubkey
  return @chunks[0].unpack("H*")[0] if @chunks.size == 1
  is_pubkey? ? @chunks[0].unpack("H*")[0] : nil
end

# get the pubkey address for this pubkey script
def get_pubkey_address
  Bitcoin.pubkey_to_address(get_pubkey)
end

# get the hash160 (hex) for this hash160, p2sh or pubkey script.
# Returns nil for other script types (implicit).
def get_hash160
  return @chunks[2..-3][0].unpack("H*")[0]  if is_hash160?
  return @chunks[-2].unpack("H*")[0]        if is_p2sh?
  return Bitcoin.hash160(get_pubkey)        if is_pubkey?
end

# get the hash160 address for this hash160 script
def get_hash160_address
  Bitcoin.hash160_to_address(get_hash160)
end

# get the public keys for this multisig script
# n is encoded as OP_1..OP_16, hence the `- 80`.
def get_multisig_pubkeys
  1.upto(@chunks[-2] - 80).map{|i| @chunks[i] }
end

# get the pubkey addresses for this multisig script
# Pubkeys that fail EC validation are silently dropped.
def get_multisig_addresses
  get_multisig_pubkeys.map{|pub|
    begin
      Bitcoin::Key.new(nil, pub.unpack("H*")[0]).addr
    rescue OpenSSL::PKey::ECError, OpenSSL::PKey::EC::Point::Error
    end
  }.compact
end

# get the p2sh address for this p2sh script
def get_p2sh_address
  Bitcoin.hash160_to_p2sh_address(get_hash160)
end

# get the data possibly included in an OP_RETURN script (hex, or nil)
def get_op_return_data
  return nil unless is_op_return?
  cast_to_string(@chunks[1]).unpack("H*")[0] if @chunks[1]
end

# get all addresses this script corresponds to (if possible)
def get_addresses
  return [get_pubkey_address]  if is_pubkey?
  return [get_hash160_address] if is_hash160?
  return get_multisig_addresses if is_multisig?
  return [get_p2sh_address] if is_p2sh?
  []
end

# get single address, or first for multisig script
def get_address
  addrs = get_addresses
  addrs.is_a?(Array) ? addrs[0] : addrs
end
# generate pubkey tx script for given +pubkey+ (hex). returns a raw binary
# script of the form:
#  <pubkey> OP_CHECKSIG
#
# Fix: emit OP_CHECKSIG (0xAC) via pack instead of the literal "\xAC",
# which is only valid when the source file forces ASCII-8BIT encoding;
# under UTF-8 the join raised Encoding::CompatibilityError.
def self.to_pubkey_script(pubkey)
  pk = [pubkey].pack("H*")
  [pk.bytesize].pack("C") + pk + [0xAC].pack("C")
end
# generate hash160 tx for given +hash160+ (hex). returns a raw binary script:
#  OP_DUP OP_HASH160 <hash160> OP_EQUALVERIFY OP_CHECKSIG
def self.to_hash160_script(hash160)
  return nil unless hash160
  # 76 = OP_DUP, a9 = OP_HASH160, 14 = push 20 bytes, 88 = OP_EQUALVERIFY, ac = OP_CHECKSIG
  ["76a914#{hash160}88ac"].pack("H*")
end

# generate p2sh output script for given +p2sh+ hash160 (hex). returns a raw
# binary script:
#  OP_HASH160 <p2sh> OP_EQUAL
def self.to_p2sh_script(p2sh)
  return nil unless p2sh
  # a9 = OP_HASH160, 14 = push 20 bytes, 87 = OP_EQUAL
  ["a914#{p2sh}87"].pack("H*")
end
# generate hash160 or p2sh output script, depending on the type of the given +address+.
# see #to_hash160_script and #to_p2sh_script.
# Returns nil for unrecognized address types (implicit).
def self.to_address_script(address)
  hash160 = Bitcoin.hash160_from_address(address)
  case Bitcoin.address_type(address)
  when :hash160; to_hash160_script(hash160)
  when :p2sh;    to_p2sh_script(hash160)
  end
end
# generate multisig output script for given +pubkeys+ (hex), expecting +m+
# signatures. returns a raw binary script of the form:
#  <m> <pubkey> [<pubkey> ...] <n_pubkeys> OP_CHECKMULTISIG
#
# m and n are encoded as OP_1..OP_16 (0x50 + n), so both must be <= 16.
#
# Fix: emit OP_CHECKMULTISIG (0xAE) via pack instead of the literal "\xAE",
# which is only valid under an ASCII-8BIT source encoding; under UTF-8 the
# join raised Encoding::CompatibilityError.
def self.to_multisig_script(m, *pubkeys)
  pubs = pubkeys.map{|pk|p=[pk].pack("H*"); [p.bytesize].pack("C") + p}
  [ [80 + m.to_i].pack("C"), *pubs, [80 + pubs.size].pack("C"), [0xAE].pack("C")].join
end
# generate OP_RETURN output script with given +data+ (hex). returns a raw
# binary script of the form:
#  OP_RETURN <data>
# NOTE(review): the length byte is packed with "C", so data over 255 bytes
# would be truncated — callers are expected to stay within relay limits.
def self.to_op_return_script(data = nil)
  return "\x6A" unless data
  payload = [data].pack("H*")
  "\x6A" + [payload.bytesize].pack("C") + payload
end
# generate input script sig spending a pubkey output with given +signature+
# and +pubkey+ (binary). returns a raw binary script sig of the form:
#  <signature> [<pubkey>]
# The SIGHASH_ALL byte (0x01) is appended to the signature push.
def self.to_pubkey_script_sig(signature, pubkey)
  hash_type = "\x01"
  sig_push = [signature.bytesize + 1].pack("C") + signature + hash_type
  return sig_push unless pubkey

  # binary pubkeys start 0x04 (uncompressed, 65 bytes) or 0x02/0x03 (compressed, 33 bytes)
  expected_size = case pubkey[0]
                  when "\x04"         then 65
                  when "\x02", "\x03" then 33
                  end
  raise "pubkey is not in binary form" if expected_size.nil? || pubkey.bytesize != expected_size

  sig_push + [pubkey.bytesize].pack("C") + pubkey
end
# generate p2sh multisig output script for given +args+.
# returns the p2sh output script, and the redeem script needed to spend it.
# see #to_multisig_script for the redeem script, and #to_p2sh_script for the p2sh script.
def self.to_p2sh_multisig_script(*args)
  redeem_script = to_multisig_script(*args)
  p2sh_script = to_p2sh_script(Bitcoin.hash160(redeem_script.hth))
  return p2sh_script, redeem_script
end

# alias for #to_pubkey_script_sig
def self.to_signature_pubkey_script(*a)
  to_pubkey_script_sig(*a)
end
# generate input script sig spending a multisig output script.
# returns a raw binary script sig of the form:
#  OP_0 <sig> [<sig> ...]
# NOTE: mutates each element of +sigs+ by appending the SIGHASH_ALL byte.
def self.to_multisig_script_sig(*sigs)
  sigs.map!{|s| s + "\x01" }
  from_string("0 #{sigs.map{|s|s.unpack('H*')[0]}.join(' ')}").raw
end

# take a multisig script sig (or p2sh multisig script sig) and add
# another signature to it after the OP_0. Used to sign a tx by
# multiple parties. Signatures must be in the same order as the
# pubkeys in the output script being redeemed.
# Mutates +script_sig+ in place (String#insert).
def self.add_sig_to_multisig_script_sig(sig, script_sig)
  sig += [SIGHASH_TYPE[:all]].pack("C*")
  sig_len = [sig.bytesize].pack("C*")
  script_sig.insert(1, sig_len + sig)
end
# generate input script sig spending a p2sh-multisig output script.
# returns a raw binary script sig of the form:
#  OP_0 <sig> [<sig> ...] <redeem_script>
# NOTE(review): the redeem script is always framed with OP_PUSHDATA1 and a
# single length byte, so redeem scripts over 255 bytes would serialize
# incorrectly — confirm callers stay below that.
def self.to_p2sh_multisig_script_sig(redeem_script, *sigs)
  partial_script = [OP_0].pack("C*")
  sigs[0].reverse_each { |sig| partial_script = add_sig_to_multisig_script_sig(sig, partial_script) }

  push = [OP_PUSHDATA1].pack("C*")
  script_len = [redeem_script.bytesize].pack("C*")
  full_script_sig = partial_script + push + script_len + redeem_script
end

# Number of signatures required by this multisig script (m), or false when
# the script is not multisig. m is encoded as OP_1..OP_16, hence `- 80`.
def get_signatures_required
  return false unless is_multisig?
  @chunks[0] - 80
end
# This matches CScript::GetSigOpCount(bool fAccurate)
# Note: this does not cover P2SH script which is to be unserialized
# and checked explicitly when validating blocks.
#
# Fix: Fixnum (removed in Ruby 3.2) -> Integer.
def sigops_count_accurate(is_accurate)
  count = 0
  last_opcode = nil
  @chunks.each do |chunk| # pushdate or opcode
    if chunk == OP_CHECKSIG || chunk == OP_CHECKSIGVERIFY
      count += 1
    elsif chunk == OP_CHECKMULTISIG || chunk == OP_CHECKMULTISIGVERIFY
      # Accurate mode counts exact number of pubkeys required (not signatures, but pubkeys!). Only used in P2SH scripts.
      # Inaccurate mode counts every multisig as 20 signatures.
      if is_accurate && last_opcode && last_opcode.is_a?(Integer) && last_opcode >= OP_1 && last_opcode <= OP_16
        count += ::Bitcoin::Script.decode_OP_N(last_opcode)
      else
        count += 20
      end
    end
    last_opcode = chunk
  end
  count
end
# This method applies to script_sig that is an input for p2sh output.
# Bitcoind has somewhat special way to return count for invalid input scripts:
# it returns 0 when the opcode can't be parsed or when it's over OP_16.
# Also, if the OP_{N} is used anywhere it's treated as 0-length data.
# See CScript::GetSigOpCount(const CScript& scriptSig) in bitcoind.
#
# Fix: Fixnum (removed in Ruby 3.2) -> Integer.
def sigops_count_for_p2sh
  # This is a pay-to-script-hash scriptPubKey;
  # get the last item that the scriptSig
  # pushes onto the stack:
  return 0 if @chunks.size == 0

  data = nil
  @chunks.each do |chunk|
    case chunk
    when Integer
      data = ""
      return 0 if chunk > OP_16
    when String
      data = chunk
    end
  end
  return 0 if data == ""

  ::Bitcoin::Script.new(data).sigops_count_accurate(true)
end
# Converts OP_{0,1,2,...,16} into 0, 1, 2, ..., 16.
# Returns nil for other opcodes.
#
# Fix: Fixnum (removed in Ruby 3.2) -> Integer; drop stray semicolon.
def self.decode_OP_N(opcode)
  return 0 if opcode == OP_0
  if opcode.is_a?(Integer) && opcode >= OP_1 && opcode <= OP_16
    opcode - (OP_1 - 1)
  else
    nil
  end
end
## OPCODES

# Does nothing
# The reserved OP_NOP1..OP_NOP10 opcodes are all no-ops in this interpreter.
def op_nop; end
def op_nop1; end
def op_nop2; end
def op_nop3; end
def op_nop4; end
def op_nop5; end
def op_nop6; end
def op_nop7; end
def op_nop8; end
def op_nop9; end
def op_nop10; end
# Duplicates the top stack item (a shallow copy when the item supports #dup,
# the item itself otherwise — e.g. for immutable values).
def op_dup
  top = @stack[-1]
  copy = begin
    top.dup
  rescue StandardError
    top
  end
  @stack << copy
end
# The input is hashed using SHA-256.
def op_sha256
  buf = pop_string
  @stack << Digest::SHA256.digest(buf)
end

# The input is hashed using SHA-1.
def op_sha1
  buf = pop_string
  @stack << Digest::SHA1.digest(buf)
end

# The input is hashed twice: first with SHA-256 and then with RIPEMD-160.
def op_hash160
  buf = pop_string
  @stack << Digest::RMD160.digest(Digest::SHA256.digest(buf))
end

# The input is hashed using RIPEMD-160.
def op_ripemd160
  buf = pop_string
  @stack << Digest::RMD160.digest(buf)
end

# The input is hashed two times with SHA-256.
def op_hash256
  buf = pop_string
  @stack << Digest::SHA256.digest(Digest::SHA256.digest(buf))
end
# Moves the top of the main stack onto the alt stack.
def op_toaltstack
  @stack_alt.push(@stack.pop)
end

# Moves the top of the alt stack back onto the main stack.
def op_fromaltstack
  @stack.push(@stack_alt.pop)
end

# Inserts a copy of the top item below the second-to-top item
# (a b -> b a b).
def op_tuck
  @stack.insert(-3, @stack[-1])
end

# Swaps the two topmost items. No-op when the second-to-top slot is
# missing or nil (matches the original guard).
def op_swap
  return unless @stack[-2]
  @stack[-1], @stack[-2] = @stack[-2], @stack[-1]
end
# Arithmetic/boolean opcodes. All operands are popped via pop_int, which
# casts byte strings to integers; results are pushed as Ruby integers.

# If both a and b are not 0, the output is 1. Otherwise 0.
def op_booland
  a, b = pop_int(2)
  @stack << (![a,b].any?{|n| n == 0 } ? 1 : 0)
end

# If a or b is not 0, the output is 1. Otherwise 0.
def op_boolor
  a, b = pop_int(2)
  @stack << ( (a != 0 || b != 0) ? 1 : 0 )
end

# a is added to b.
def op_add
  a, b = pop_int(2)
  @stack << a + b
end

# b is subtracted from a.
def op_sub
  a, b = pop_int(2)
  @stack << a - b
end

# Returns 1 if a is less than b, 0 otherwise.
def op_lessthan
  a, b = pop_int(2)
  @stack << (a < b ? 1 : 0)
end

# Returns 1 if a is less than or equal to b, 0 otherwise.
def op_lessthanorequal
  a, b = pop_int(2)
  @stack << (a <= b ? 1 : 0)
end

# Returns 1 if a is greater than b, 0 otherwise.
def op_greaterthan
  a, b = pop_int(2)
  @stack << (a > b ? 1 : 0)
end

# Returns 1 if a is greater than or equal to b, 0 otherwise.
def op_greaterthanorequal
  a, b = pop_int(2)
  @stack << (a >= b ? 1 : 0)
end

# If the input is 0 or 1, it is flipped. Otherwise the output will be 0.
def op_not
  a = pop_int
  @stack << (a == 0 ? 1 : 0)
end

# Returns 0 if the input is 0, 1 otherwise.
def op_0notequal
  a = pop_int
  @stack << (a != 0 ? 1 : 0)
end

# The input is made positive.
def op_abs
  a = pop_int
  @stack << a.abs
end

# The input is divided by 2. Currently disabled.
def op_2div
  a = pop_int
  @stack << (a >> 1)
end

# The input is multiplied by 2. Currently disabled.
def op_2mul
  a = pop_int
  @stack << (a << 1)
end

# 1 is added to the input.
def op_1add
  a = pop_int
  @stack << (a + 1)
end

# 1 is subtracted from the input.
def op_1sub
  a = pop_int
  @stack << (a - 1)
end

# The sign of the input is flipped.
def op_negate
  a = pop_int
  @stack << -a
end

# Removes the top stack item.
def op_drop
  @stack.pop
end

# Returns 1 if the inputs are exactly equal, 0 otherwise.
# NOTE(review): compares numerically via pop_int rather than byte-wise —
# confirm this matches the intended OP_EQUAL semantics for all encodings.
def op_equal
  #a, b = @stack.pop(2)
  a, b = pop_int(2)
  @stack << (a == b ? 1 : 0)
end
# Marks transaction as invalid if top stack value is not true. True is removed, but false is not.
# NOTE(review): the else-branch resets @script_invalid to false, clearing any
# previously set invalid flag — confirm this is intentional.
def op_verify
  res = pop_int
  if res == 0
    @stack << res
    @script_invalid = true # raise 'transaction invalid' ?
  else
    @script_invalid = false
  end
end

# Same as OP_EQUAL, but runs OP_VERIFY afterward.
def op_equalverify
  op_equal; op_verify
end

# An empty array of bytes is pushed onto the stack.
def op_0
  @stack << "" # []
end

# The number 1 is pushed onto the stack. Same as OP_TRUE
def op_1
  @stack << 1
end
# Returns the smaller of a and b.
def op_min
  @stack << pop_int(2).min
end

# Returns the larger of a and b.
def op_max
  @stack << pop_int(2).max
end

# Copies the pair of items two spaces back in the stack to the front.
def op_2over
  @stack << @stack[-4]
  @stack << @stack[-4]
end

# Swaps the top two pairs of items.
def op_2swap
  p1 = @stack.pop(2)
  p2 = @stack.pop(2)
  @stack += p1 += p2
end

# If the input is true, duplicate it.
def op_ifdup
  if cast_to_bignum(@stack.last) != 0
    @stack << @stack.last
  end
end

# The number -1 is pushed onto the stack.
def op_1negate
  @stack << -1
end

# Puts the number of stack items onto the stack.
def op_depth
  @stack << @stack.size
end
# Returns 1 if x is within the specified range (left-inclusive), 0 otherwise.
# Stack order (top first): max, min, x.
def op_within
  bn1, bn2, bn3 = pop_int(3)
  @stack << ( (bn2 <= bn1 && bn1 < bn3) ? 1 : 0 )
end

# Returns 1 if the numbers are equal, 0 otherwise.
def op_numequal
  a, b = pop_int(2)
  @stack << (a == b ? 1 : 0)
end

# Returns 1 if the numbers are not equal, 0 otherwise.
def op_numnotequal
  a, b = pop_int(2)
  @stack << (a != b ? 1 : 0)
end

# Marks transaction as invalid.
def op_return
  @script_invalid = true; nil
end

# Copies the second-to-top stack item to the top. No-op when absent.
def op_over
  item = @stack[-2]
  @stack << item if item
end
# If the top stack value is not 0, the statements are executed. The top stack value is removed.
# Pushes the branch condition onto @exec_stack; #run consults that stack to
# decide whether subsequent opcodes execute.
# NOTE(review): treats only the exact value 1 as true; bitcoind treats any
# nonzero value as true — confirm intended.
def op_if
  value = false
  if @do_exec
    return if @stack.size < 1
    value = pop_int == 1 ? true : false
  end
  @exec_stack << value
end

# If the top stack value is 0, the statements are executed. The top stack value is removed.
def op_notif
  value = false
  if @do_exec
    return if @stack.size < 1
    value = pop_int == 1 ? false : true
  end
  @exec_stack << value
end

# If the preceding OP_IF or OP_NOTIF or OP_ELSE was not executed then these statements are and if the preceding OP_IF or OP_NOTIF or OP_ELSE was executed then these statements are not.
def op_else
  return if @exec_stack.empty?
  @exec_stack[-1] = !@exec_stack[-1]
end

# Ends an if/else block.
def op_endif
  return if @exec_stack.empty?
  @exec_stack.pop
end
# The item n back in the stack is copied to the top.
# (n itself is popped first; out-of-range n is a silent no-op.)
def op_pick
  pos = pop_int
  item = @stack[-(pos+1)]
  @stack << item if item
end

# The item n back in the stack is moved to the top.
def op_roll
  pos = pop_int
  idx = -(pos+1)
  item = @stack[idx]
  if item
    @stack.delete_at(idx)
    @stack << item if item
  end
end
# The top three items on the stack are rotated to the left
# (a b c -> b c a). No-op when fewer than three items are present.
def op_rot
  return if @stack.size < 3
  @stack << @stack.delete_at(-3)
end

# Removes the top two stack items.
def op_2drop
  @stack.pop(2)
end

# Duplicates the top two stack items (no-op when fewer than two exist).
def op_2dup
  pair = @stack[-2..-1]
  @stack.concat(pair) if pair
end

# Duplicates the top three stack items (no-op when fewer than three exist).
def op_3dup
  triple = @stack[-3..-1]
  @stack.concat(triple) if triple
end

# Removes the second-to-top stack item.
def op_nip
  @stack.slice!(-2)
end
# Returns the length of the input string.
def op_size
item = @stack[-1]
size = case item
when String; item.bytesize
when Numeric; OpenSSL::BN.new(item.to_s).to_mpi.size - 4
end
@stack << size
end
# Transaction is invalid unless occuring in an unexecuted OP_IF branch
def op_ver
  invalid if @do_exec
end

# Pops one value (or +count+ values) off the stack, cast to integers.
def pop_int(count=nil)
  return cast_to_bignum(@stack.pop) unless count
  @stack.pop(count).map{|i| cast_to_bignum(i) }
end

# Pops one value (or +count+ values) off the stack, cast to binary strings.
def pop_string(count=nil)
  return cast_to_string(@stack.pop) unless count
  @stack.pop(count).map{|i| cast_to_string(i) }
end
# Casts a stack item to an integer. Script numbers are little-endian byte
# strings, so the buffer is reversed and wrapped in OpenSSL's MPI format
# (4-byte big-endian length prefix) for decoding. A nil item flags the
# script invalid and yields 0.
def cast_to_bignum(buf)
  return (invalid; 0) unless buf
  case buf
  when Numeric; buf
  when String; OpenSSL::BN.new([buf.bytesize].pack("N") + buf.reverse, 0).to_i
  else; raise TypeError, 'cast_to_bignum: failed to cast: %s (%s)' % [buf, buf.class]
  end
end

# Casts a stack item to a binary string. Numbers are encoded via OpenSSL's
# MPI serialization with the 4-byte length header stripped. A nil item
# flags the script invalid and yields "".
def cast_to_string(buf)
  return (invalid; "") unless buf
  case buf
  when Numeric; OpenSSL::BN.new(buf.to_s).to_s(0)[4..-1]
  when String; buf;
  else; raise TypeError, 'cast_to_string: failed to cast: %s (%s)' % [buf, buf.class]
  end
end
# Same as OP_NUMEQUAL, but runs OP_VERIFY afterward.
def op_numequalverify
  op_numequal; op_verify
end

# All of the signature checking words will only match signatures
# to the data after the most recently-executed OP_CODESEPARATOR.
def op_codeseparator
  @codehash_start = @chunks.size - @chunks.reverse.index(OP_CODESEPARATOR)
  @last_codeseparator_index = @chunk_last_index
end

# Returns [script-string, hash160-of-script] for the chunk range between
# the last executed codeseparator and the given +opcode+ (counted from the
# end). Used by the never-activated OP_CHECKHASHVERIFY (BIP 17) path.
def codehash_script(opcode)
  # CScript scriptCode(pbegincodehash, pend);
  script    = to_string(@chunks[(@codehash_start||0)...@chunks.size-@chunks.reverse.index(opcode)])
  checkhash = Bitcoin.hash160(Bitcoin::Script.binary_from_string(script).unpack("H*")[0])
  [script, checkhash]
end
# do a CHECKSIG operation on the current stack,
# asking +check_callback+ to do the actual signature verification.
# This is used by Protocol::Tx#verify_input_signature
#
# Pops <pubkey> then <signature>; pushes 1 on success, 0 on failure.
# With no callback (tests), always pushes 1.
def op_checksig(check_callback)
  return invalid if @stack.size < 2
  pubkey = cast_to_string(@stack.pop)
  #return (@stack << 0) unless Bitcoin::Script.is_canonical_pubkey?(pubkey) # only for isStandard
  drop_sigs = [ cast_to_string(@stack[-1]) ]

  signature = cast_to_string(@stack.pop)
  #return (@stack << 0) unless Bitcoin::Script.is_canonical_signature?(signature) # only for isStandard
  return (@stack << 0) if signature == ""

  sig, hash_type = parse_sig(signature)

  subscript = sighash_subscript(drop_sigs)

  if check_callback == nil # for tests
    @stack << 1
  else # real signature check callback
    @stack <<
      ((check_callback.call(pubkey, sig, hash_type, subscript) == true) ? 1 : 0)
  end
end

# Script bytes the signature hash commits to: the inner redeem script for
# P2SH, otherwise this script with the signatures themselves stripped.
def sighash_subscript(drop_sigs)
  if inner_p2sh? && @inner_script_code
    ::Bitcoin::Script.new(@inner_script_code).to_binary_without_signatures(drop_sigs)
  else
    to_binary_without_signatures(drop_sigs)
  end
end

# Same as OP_CHECKSIG, but OP_VERIFY is executed afterward.
def op_checksigverify(check_callback)
  op_checksig(check_callback)
  op_verify
end
# do a CHECKMULTISIG operation on the current stack,
# asking +check_callback+ to do the actual signature verification.
#
# CHECKMULTISIG does a m-of-n signatures verification on scripts of the form:
#  0 <sig1> <sig2> | 2 <pub1> <pub2> 2 OP_CHECKMULTISIG
#  0 <sig1> <sig2> | 2 <pub1> <pub2> <pub3> 3 OP_CHECKMULTISIG
#  0 <sig1> <sig2> <sig3> | 3 <pub1> <pub2> <pub3> 3 OP_CHECKMULTISIG
#
# see https://en.bitcoin.it/wiki/BIP_0011 for details.
# see https://github.com/bitcoin/bitcoin/blob/master/src/script.cpp#L931
#
# TODO: validate signature order
# TODO: take global opcode count
def op_checkmultisig(check_callback)
  return invalid if @stack.size < 1
  n_pubkeys = pop_int
  return invalid unless (0..20).include?(n_pubkeys)
  #return invalid if (nOpCount += n_pubkeys) > 201
  return invalid if @stack.size < n_pubkeys
  pubkeys = pop_string(n_pubkeys)

  return invalid if @stack.size < 1
  n_sigs = pop_int
  return invalid if n_sigs < 0 || n_sigs > n_pubkeys
  return invalid if @stack.size < n_sigs
  sigs = pop_string(n_sigs)
  drop_sigs = sigs.dup

  # Bitcoin-core removes an extra item from the stack
  # (the famous off-by-one OP_CHECKMULTISIG dummy element)
  @stack.pop

  subscript = sighash_subscript(drop_sigs)

  # Walk pubkeys right-to-left, matching each signature in order; once the
  # remaining pubkeys can no longer satisfy the remaining sigs, fail.
  success = true
  while success && n_sigs > 0
    sig, pub = sigs.pop, pubkeys.pop
    unless sig && sig.size > 0
      success = false
      break
    end
    signature, hash_type = parse_sig(sig)
    if pub.size > 0 && check_callback.call(pub, signature, hash_type, subscript)
      n_sigs -= 1
    else
      sigs << sig
    end
    n_pubkeys -= 1
    success = false if n_sigs > n_pubkeys
  end

  @stack << (success ? 1 : 0)
end

# Same as OP_CHECKMULTISIG, but OP_VERIFY is executed afterward.
def op_checkmultisigverify(check_callback)
  op_checkmultisig(check_callback)
  op_verify
end
# op_eval: https://en.bitcoin.it/wiki/BIP_0012
# the BIP was never accepted and must be handled as old OP_NOP1
# NOTE(review): re-defines the op_nop1 declared earlier in this class —
# harmless since both bodies are empty, but one definition could be dropped.
def op_nop1
end

# opcode byte => opcode handler method symbol, built by reflecting over the
# op_* instance methods defined above and matching their upcased names
# against OPCODES. Must be defined after all op_* methods.
OPCODES_METHOD = Hash[*instance_methods.grep(/^op_/).map{|m|
  [ (OPCODES.find{|k,v| v == m.to_s.upcase }.first rescue nil), m ]
}.flatten]
OPCODES_METHOD[0]  = :op_0
OPCODES_METHOD[81] = :op_1
def self.is_canonical_pubkey?(pubkey)
return false if pubkey.bytesize < 33 # "Non-canonical public key: too short"
case pubkey[0]
when "\x04"
return false if pubkey.bytesize != 65 # "Non-canonical public key: invalid length for uncompressed key"
when "\x02", "\x03"
return false if pubkey.bytesize != 33 # "Non-canonical public key: invalid length for compressed key"
else
return false # "Non-canonical public key: compressed nor uncompressed"
end
true
end
# Partial canonical-signature check (DER framing + hashtype byte); the
# full bitcoind rule set is not yet ported (see TODO).
def self.is_canonical_signature?(sig)
  return false if sig.bytesize < 9 # Non-canonical signature: too short
  return false if sig.bytesize > 73 # Non-canonical signature: too long

  s = sig.unpack("C*")

  # last byte is the hashtype; mask off the ANYONECANPAY flag before checking
  hash_type = s[-1] & (~(SIGHASH_TYPE[:anyonecanpay]))
  return false if hash_type < SIGHASH_TYPE[:all] || hash_type > SIGHASH_TYPE[:single] # Non-canonical signature: unknown hashtype byte

  return false if s[0] != 0x30 # Non-canonical signature: wrong type
  return false if s[1] != s.size-3 # Non-canonical signature: wrong length marker

  # TODO: add/port rest from bitcoind
  true
end
private

# Splits a script signature into [DER signature body, hashtype byte].
# The hashtype is the final byte of the blob.
def parse_sig(sig)
  hash_type = sig[-1].unpack("C").first
  [sig[0...-1], hash_type]
end
end
|
# Gem version constant for Bobkit.
module Bobkit
  VERSION = "0.0.2"
end
Bump Bobkit version from 0.0.2 to 0.0.3.
# Gem version constant for Bobkit.
module Bobkit
  VERSION = "0.0.3"
end
# Gem version constant for Botvac.
module Botvac
  VERSION = "0.1.1"
end
Bump Botvac version from 0.1.1 to 0.1.2.
# Gem version constant for Botvac.
module Botvac
  VERSION = "0.1.2"
end
|
module Brief
class Document
include Brief::Document::Rendering
include Brief::Document::FrontMatter
include Brief::Document::Templating
include Brief::Document::Attachments
include Brief::Document::SourceMap
# NOTE(review): empty stub — accepts content/frontmatter/block but builds
# nothing and returns nil. Either implement or remove; callers currently
# get nil back.
def self.from_contents(content, frontmatter, &block)
end

attr_accessor :path, :content, :frontmatter, :raw_content, :options
# Builds a document either from a hash of frontmatter attributes (when
# +path+ responds to #key? and no options are given) or from a file path.
# File-backed documents read their raw content and frontmatter eagerly;
# otherwise content may be supplied via options[:contents].
def initialize(path, options = {})
  if path.respond_to?(:key?) && options.empty?
    @frontmatter = path.to_mash
  else
    @path = Pathname(path)
  end

  @options = options.to_mash

  if @path && self.path.exist?
    @raw_content = self.path.read
    load_frontmatter
  elsif options[:contents]
    @raw_content = options[:contents]
  end
end
# Identity accessor so mixins can uniformly reference `document`.
def document
  self
end
# Returns a fresh Document built from the same path and options.
#
# Fix: the original called bare +new+, which is not defined on an instance
# and raised NoMethodError; instantiate via the class instead.
def clone
  self.class.new(path, options)
end
# Best-effort document title: explicit frontmatter title, else the text of
# the first <h1>, else the path basename with extensions stripped.
#
# Fix: NodeSet#text returns "" (which is truthy) when no <h1> exists, so
# the original `||` chain could never fall through to the filename;
# treat an empty heading as missing.
def title
  explicit = data && data.title
  return explicit if explicit

  heading = css('h1:first-of-type').text
  return heading unless heading.to_s.empty?

  path.to_s.split("/").last.gsub(/\..*/, '')
end
# Human-readable identity: model class plus briefcase-relative path.
def to_s
  "#{ model_class }.at_path(#{relative_path})"
end

def inspect
  "#{ model_class }.at_path(#{relative_path})"
end

# Path relative to the briefcase docs directory (absolute when no briefcase).
def relative_path
  briefcase.present? ? path.relative_path_from(briefcase.docs_path) : path
end
# MD5 of the in-memory content ("" when @content is nil).
def content_hash
  Digest::MD5.hexdigest(String(@content))
end

# MD5 of whatever is currently on disk at +path+.
def file_hash
  on_disk = path.read
  Digest::MD5.hexdigest(on_disk.to_s)
end

# True when the in-memory content no longer matches the file on disk.
def content_stale?
  file_hash != content_hash
end
# Replaces the raw content directly and remembers it was set manually
# (see #save, which then writes raw content instead of re-serializing).
def raw=(val)
  @raw_set = true
  @raw_content = val
end

# True when raw content was assigned via #raw= rather than read from disk.
def set_raw?
  @raw_set ? true : false
end
# Writes the document to disk: raw content verbatim when it was set via
# #raw=, otherwise frontmatter + content re-serialized. Reloads state after.
# NOTE(review): #save and #save! are identical except for the open mode
# ('w' vs 'w+') — consider collapsing them.
def save
  if set_raw?
    file_contents = raw_content
  else
    file_contents = combined_data_and_content
  end

  path.open('w') {|fh| fh.write(file_contents) }

  refresh!
end

# Clears all cached state and re-reads content and frontmatter from disk.
def refresh!
  @content = nil
  @raw_content = path.read
  @frontmatter = nil
  @raw_frontmatter = nil
  @refreshing = true
  @content_hash = nil
  load_frontmatter
  true
end

def save!
  if set_raw?
    file_contents = raw_content
  else
    file_contents = combined_data_and_content
  end

  path.open('w+') {|fh| fh.write(file_contents) }

  refresh!
end

# YAML frontmatter followed by a "---" divider and the content body;
# bare content when there is no frontmatter data.
def combined_data_and_content
  return content if data.nil? || data.empty?
  frontmatter.to_hash.to_yaml + "---\n\n#{ content }"
end
# Alias for the parsed frontmatter mash.
def data
  frontmatter
end

def include_attachments?
  attachments.length > 0
end

# Attachments declared in frontmatter (always an Array).
def attachments
  Array(data.attachments)
end

# Associates this document with a briefcase after validating that the
# document path actually lives under the briefcase docs directory.
def in_briefcase(briefcase)
  @briefcase_root = briefcase.root

  unless Brief::Util.ensure_child_path(briefcase.docs_path, path)
    raise 'Invalid document path'
  end

  self
end

# The owning briefcase, falling back to the default case.
def briefcase
  (@briefcase_root && Brief.cases[@briefcase_root.basename.to_s]) || Brief.case(true)
end
def has_sections?
model_class.section_mappings.length > 0
end
def section_headings
sections.keys
end
def sections_data
section_headings.reduce({}) do |memo, heading|
section = sections.send(heading)
items = section.items rescue nil
memo[heading] = items if items
memo
end
end
def sections
mappings = model_class.section_mappings
@sections = {}.to_mash
mappings.each do |name, mapping|
fragment = css("section[data-heading='#{name}']").first
@sections[name.parameterize.downcase.underscore] = Brief::Document::Section.new(name, fragment, mapping)
end
@sections
end
def content= value
@content = value
end
def content
if @content.nil? && path && path.exist?
@content = path.read
end
@content || generate_content
end
# Shortcut for querying the rendered HTML by css selectors.
#
# This will allow for model data attributes to be pulled from the
# document contents.
#
# Returns a Nokogiri::HTML::Element
def css(*args, &block)
parser.send(:css, *args, &block)
end
# Returns a Nokogiri::HTML::Element
def at(*args, &block)
parser.send(:at, *args, &block)
end
def extract_content(*args)
options = args.extract_options!
args = options.delete(:args) if options.is_a?(Hash) && options.key?(:args)
case
when options.empty? && args.length == 1 && args.first.is_a?(String)
results = css(args.first)
results = results.first if results.length > 1 && args.first.match(/:first-of-type/)
results.try(:text).to_s
else
binding.pry
end
end
def relative_path_identifier
if Brief.case
path.relative_path_from(Brief.case.root)
else
path.to_s
end
end
def extension
path.extname
end
def model_attributes
(data || {}).to_hash
.merge(path: path, document: self)
.reverse_merge(type: document_type)
end
def to_model
model_class.try(:new, model_attributes)
end
def exist?
path && path.exist?
end
def model_class
case
when @model_class
@model_class
when briefcase
briefcase.model_class_for(self)
when data && data.type
Brief::Model.for_type(data.type)
when parent_folder_name.length > 0
Brief::Model.for_folder_name(parent_folder_name)
else
raise 'Could not determine the model class to use for this document. Specify the type, or put it in a folder that maps to the correct type.'
end
end
def document_type
options.fetch(:type) { document_type! }
end
def document_type!
existing = data && data.type
return existing if existing
parent_folder_name.try(:singularize)
end
def parent_folder_name
path.parent.basename.to_s.downcase
end
# Each model class tracks the instances of the models created
# and ensures that there is a 1-1 relationship between a document path
# and the model.
def model_instance_registered?
model_class && model_class.models.any? do |model|
model.path == path
end
end
def respond_to?(*args)
method = args.first
super || (data && data.respond_to?(method)) || (data && data.key?(method))
end
# The structure analyzer of the document is responsible for grouping
# the content under the headings by wrapping them in divs, and creating
# relationships between the nodes. This is what lets us provide an easy
# iteration API on top of the parsed document
def structure
@structure_analyzer ||= Brief::Document::Structure.new(fragment, raw_content.lines.to_a)
end
# The Parser wraps the rendered HTML in a nokogiri element so we can easily manipulate it.
# Prior to doing so, we use the structure analyzer to build more metadata into the markup
def parser
@parser ||= begin
structure.prescan
structure.create_wrappers.tap do |f|
transformer_for(f).all
end
end
end
# The transformer is responsible for performing content modifications
# on the rendered document. This is useful for supporting extensions that
# are driven by the markdown language.
#
# TODO: This is hidden behind a feature flag, and requires the document
# to have metadata that specifies transform = true
def transformer_for(doc_fragment=nil)
doc_fragment ||= fragment
@transformer ||= Brief::Document::Transformer.new(doc_fragment, self)
end
def fragment
@fragment ||= Nokogiri::HTML.fragment(to_raw_html)
end
def type
document_type
end
def method_missing(meth, *args, &block)
if data.respond_to?(meth)
data.send(meth, *args, &block)
else
super
end
end
end
end
Add a convenience method for getting a refreshed document
module Brief
class Document
include Brief::Document::Rendering
include Brief::Document::FrontMatter
include Brief::Document::Templating
include Brief::Document::Attachments
include Brief::Document::SourceMap
def self.from_contents(content, frontmatter, &block)
end
attr_accessor :path, :content, :frontmatter, :raw_content, :options
def initialize(path, options = {})
if path.respond_to?(:key?) && options.empty?
@frontmatter = path.to_mash
else
@path = Pathname(path)
end
@options = options.to_mash
if @path && self.path.exist?
@raw_content = self.path.read
load_frontmatter
elsif options[:contents]
@raw_content = options[:contents]
end
end
def document
self
end
def clone
new(path, options)
end
def title
  # Preference order: explicit frontmatter title, first <h1> in the rendered
  # document, then the file's basename with its extensions stripped.
  explicit = data && data.title
  return explicit if explicit

  # BUG FIX: NodeSet#text returns "" (never nil) when no <h1> matches, so the
  # old `|| basename` fallback was unreachable. Check emptiness explicitly.
  heading = css('h1:first-of-type').text
  return heading unless heading.empty?

  path.to_s.split("/").last.gsub(/\..*/, '')
end
def to_s
"#{ model_class }.at_path(#{relative_path})"
end
def inspect
"#{ model_class }.at_path(#{relative_path})"
end
def relative_path
briefcase.present? ? path.relative_path_from(briefcase.docs_path) : path
end
def content_hash
Digest::MD5.hexdigest(@content.to_s)
end
def file_hash
Digest::MD5.hexdigest(path.read.to_s)
end
def content_stale?
content_hash != file_hash
end
def raw= val
@raw_set = true
@raw_content = val
#document.load_frontmatter
@raw_content
end
def set_raw?
!!@raw_set
end
def save
if set_raw?
file_contents = raw_content
else
file_contents = combined_data_and_content
end
path.open('w') {|fh| fh.write(file_contents) }
refresh!
end
def refresh!
@content = nil
@raw_content = path.read
@frontmatter = nil
@raw_frontmatter = nil
@refreshing = true
@content_hash = nil
load_frontmatter
true
end
# Convenience accessor: re-reads the document state from disk via #refresh!
# and returns self so calls can be chained, e.g. `doc.refreshed.content`.
def refreshed
  refresh!
  self
end
def save!
if set_raw?
file_contents = raw_content
else
file_contents = combined_data_and_content
end
path.open('w+') {|fh| fh.write(file_contents) }
refresh!
end
def combined_data_and_content
return content if data.nil? || data.empty?
frontmatter.to_hash.to_yaml + "---\n\n#{ content }"
end
def data
frontmatter
end
def include_attachments?
attachments.length > 0
end
def attachments
Array(data.attachments)
end
def in_briefcase(briefcase)
@briefcase_root = briefcase.root
unless Brief::Util.ensure_child_path(briefcase.docs_path, path)
raise 'Invalid document path'
end
self
end
def briefcase
(@briefcase_root && Brief.cases[@briefcase_root.basename.to_s]) || Brief.case(true)
end
def has_sections?
model_class.section_mappings.length > 0
end
def section_headings
sections.keys
end
def sections_data
section_headings.reduce({}) do |memo, heading|
section = sections.send(heading)
items = section.items rescue nil
memo[heading] = items if items
memo
end
end
def sections
mappings = model_class.section_mappings
@sections = {}.to_mash
mappings.each do |name, mapping|
fragment = css("section[data-heading='#{name}']").first
@sections[name.parameterize.downcase.underscore] = Brief::Document::Section.new(name, fragment, mapping)
end
@sections
end
def content= value
@content = value
end
def content
if @content.nil? && path && path.exist?
@content = path.read
end
@content || generate_content
end
# Shortcut for querying the rendered HTML by css selectors.
#
# This will allow for model data attributes to be pulled from the
# document contents.
#
# Returns a Nokogiri::HTML::Element
def css(*args, &block)
parser.send(:css, *args, &block)
end
# Returns a Nokogiri::HTML::Element
def at(*args, &block)
parser.send(:at, *args, &block)
end
def extract_content(*args)
options = args.extract_options!
args = options.delete(:args) if options.is_a?(Hash) && options.key?(:args)
case
when options.empty? && args.length == 1 && args.first.is_a?(String)
results = css(args.first)
results = results.first if results.length > 1 && args.first.match(/:first-of-type/)
results.try(:text).to_s
else
binding.pry
end
end
def relative_path_identifier
if Brief.case
path.relative_path_from(Brief.case.root)
else
path.to_s
end
end
def extension
path.extname
end
def model_attributes
(data || {}).to_hash
.merge(path: path, document: self)
.reverse_merge(type: document_type)
end
def to_model
model_class.try(:new, model_attributes)
end
def exist?
path && path.exist?
end
def model_class
case
when @model_class
@model_class
when briefcase
briefcase.model_class_for(self)
when data && data.type
Brief::Model.for_type(data.type)
when parent_folder_name.length > 0
Brief::Model.for_folder_name(parent_folder_name)
else
raise 'Could not determine the model class to use for this document. Specify the type, or put it in a folder that maps to the correct type.'
end
end
def document_type
options.fetch(:type) { document_type! }
end
def document_type!
existing = data && data.type
return existing if existing
parent_folder_name.try(:singularize)
end
def parent_folder_name
path.parent.basename.to_s.downcase
end
# Each model class tracks the instances of the models created
# and ensures that there is a 1-1 relationship between a document path
# and the model.
def model_instance_registered?
model_class && model_class.models.any? do |model|
model.path == path
end
end
def respond_to?(*args)
method = args.first
super || (data && data.respond_to?(method)) || (data && data.key?(method))
end
# The structure analyzer of the document is responsible for grouping
# the content under the headings by wrapping them in divs, and creating
# relationships between the nodes. This is what lets us provide an easy
# iteration API on top of the parsed document
def structure
@structure_analyzer ||= Brief::Document::Structure.new(fragment, raw_content.lines.to_a)
end
# The Parser wraps the rendered HTML in a nokogiri element so we can easily manipulate it.
# Prior to doing so, we use the structure analyzer to build more metadata into the markup
def parser
@parser ||= begin
structure.prescan
structure.create_wrappers.tap do |f|
transformer_for(f).all
end
end
end
# The transformer is responsible for performing content modifications
# on the rendered document. This is useful for supporting extensions that
# are driven by the markdown language.
#
# TODO: This is hidden behind a feature flag, and requires the document
# to have metadata that specifies transform = true
def transformer_for(doc_fragment=nil)
doc_fragment ||= fragment
@transformer ||= Brief::Document::Transformer.new(doc_fragment, self)
end
def fragment
@fragment ||= Nokogiri::HTML.fragment(to_raw_html)
end
def type
document_type
end
def method_missing(meth, *args, &block)
if data.respond_to?(meth)
data.send(meth, *args, &block)
else
super
end
end
end
end
|
require 'httparty'
require 'json'
require 'rest-client'
require 'orderedhash'
require 'net/http/post/multipart'
require 'brightcove-api/version'
module Brightcove
class API
include HTTParty
disable_rails_query_string_format
DEFAULT_HEADERS = {
'User-Agent' => "brightcove-api gem #{VERSION}"
}
headers(DEFAULT_HEADERS)
READ_API_URL = 'http://api.brightcove.com/services/library'
WRITE_API_URL = 'http://api.brightcove.com/services/post'
attr_accessor :read_api_url
attr_accessor :write_api_url
attr_accessor :token
# RestClient POST timeout for reading conection
attr_accessor :timeout
# RestClient POST timeout for opening connection
attr_accessor :open_timeout
# Initialize with your API token
def initialize(token, read_api_url = READ_API_URL, write_api_url = WRITE_API_URL)
@token = token
@read_api_url = read_api_url
@write_api_url = write_api_url
@timeout = nil
@open_timeout = nil
end
def debug(location = $stderr)
self.class.debug_output(location)
end
def set_http_headers(http_headers = {})
http_headers.merge!(DEFAULT_HEADERS)
headers(http_headers)
end
def set_timeout(timeout)
default_timeout(timeout)
end
def build_query_from_options(api_method, options = {})
# normalize options to a hash
unless options.respond_to?(:merge!)
options = CGI.parse(options)
end
options.merge!({:command => api_method, :token => @token})
options.merge!({:format => :xml}) if options.key?(:output) && 'mrss'.eql?(options[:output])
{ :query => options }
end
# Call Brightcove using a particular API method, api_method.
# The options parameter can be either a query string or a hash. In either case, it is where
# you can add any parameters appropriate for the API call. If a query string, it will be
# normalized to a hash via CGI.parse.
def get(api_method, options = {})
self.class.get(@read_api_url, build_query_from_options(api_method, options))
end
# Post to Brightcove using a particular API method, api_method. The parameters hash is where you add all the required parameters appropriate for the API call.
def post(api_method, parameters = {})
parameters.merge!({"token" => @token})
body = {}
body.merge!({:method => api_method})
body.merge!({:params => parameters})
self.class.post(@write_api_url, {:body => {:json => JSON.generate(body)}})
end
def post_file(api_method, file, parameters = {})
parameters.merge!({"token" => @token})
body = {}
body.merge!({:method => api_method})
body.merge!({:params => parameters})
# Brightcove requires that the JSON-RPC call absolutely
# be the first part of a multi-part POST like create_video.
if RUBY_VERSION >= '1.9'
payload = {}
else
payload = OrderedHash.new
end
payload[:json] = body.to_json
payload[:file] = File.new(file, 'rb')
execution_payload = {
:method => :post,
:url => @write_api_url,
:payload => payload,
:content_type => :json,
:accept => :json,
:multipart => true
}
execution_payload[:timeout] = @timeout if @timeout
execution_payload[:open_timeout] = @open_timeout if @open_timeout
response = RestClient::Request.execute(execution_payload)
JSON.parse(response)
end
def post_file_streaming(api_method, upload_file, content_type, parameters)
parameters.merge!({"token" => @token})
body = {}
body.merge!({:method => api_method})
body.merge!({:params => parameters})
# Brightcove requires that the JSON-RPC call absolutely
# be the first part of a multi-part POST like create_video.
if RUBY_VERSION >= '1.9'
payload = {}
else
payload = OrderedHash.new
end
url = URI.parse(@write_api_url)
response = nil
File.open(upload_file) do |file|
payload[:json] = body.to_json
payload[:file] = UploadIO.new(file, content_type)
request = Net::HTTP::Post::Multipart.new(url.path, payload)
response = Net::HTTP.start(url.host, url.port) do |http|
http.read_timeout = @timeout if @timeout
http.open_timeout = @open_timeout if @open_timeout
http.request(request)
end
end
JSON.parse(response.body)
end
end
end
Add post_io_streaming method to submit stream from any IO handle
require 'httparty'
require 'json'
require 'rest-client'
require 'orderedhash'
require 'net/http/post/multipart'
require 'brightcove-api/version'
module Brightcove
class API
include HTTParty
disable_rails_query_string_format
DEFAULT_HEADERS = {
'User-Agent' => "brightcove-api gem #{VERSION}"
}
headers(DEFAULT_HEADERS)
READ_API_URL = 'http://api.brightcove.com/services/library'
WRITE_API_URL = 'http://api.brightcove.com/services/post'
attr_accessor :read_api_url
attr_accessor :write_api_url
attr_accessor :token
# RestClient POST timeout for reading conection
attr_accessor :timeout
# RestClient POST timeout for opening connection
attr_accessor :open_timeout
# Initialize with your API token
def initialize(token, read_api_url = READ_API_URL, write_api_url = WRITE_API_URL)
@token = token
@read_api_url = read_api_url
@write_api_url = write_api_url
@timeout = nil
@open_timeout = nil
end
def debug(location = $stderr)
self.class.debug_output(location)
end
# Registers extra HTTP headers for all subsequent API calls.
# DEFAULT_HEADERS intentionally win the merge so the gem's User-Agent is
# always preserved (same precedence as the old merge!), but the caller's
# hash is no longer mutated.
def set_http_headers(http_headers = {})
  # `headers` is an HTTParty class-level macro (cf. #debug, which calls
  # self.class.debug_output), so it must be invoked on the class.
  self.class.headers(http_headers.merge(DEFAULT_HEADERS))
end
# Sets the HTTParty request timeout (seconds) for all subsequent calls.
def set_timeout(timeout)
  # `default_timeout` is defined by HTTParty at the class level; calling it
  # unqualified on the instance raises NoMethodError (cf. #debug, which
  # correctly uses self.class.).
  self.class.default_timeout(timeout)
end
# Builds the HTTParty :query payload for an API call.
# +options+ may be a query string or a hash; query strings are normalized
# into a hash via CGI.parse before the API parameters are layered on.
def build_query_from_options(api_method, options = {})
  options = CGI.parse(options) unless options.respond_to?(:merge!)
  options[:command] = api_method
  options[:token] = @token
  # MRSS output is only served as XML, so force the XML response format.
  options[:format] = :xml if options.key?(:output) && options[:output] == 'mrss'
  { :query => options }
end
# Call Brightcove using a particular API method, api_method.
# The options parameter can be either a query string or a hash. In either case, it is where
# you can add any parameters appropriate for the API call. If a query string, it will be
# normalized to a hash via CGI.parse.
def get(api_method, options = {})
self.class.get(@read_api_url, build_query_from_options(api_method, options))
end
# Post to Brightcove using a particular API method, api_method. The parameters hash is where you add all the required parameters appropriate for the API call.
def post(api_method, parameters = {})
parameters.merge!({"token" => @token})
body = {}
body.merge!({:method => api_method})
body.merge!({:params => parameters})
self.class.post(@write_api_url, {:body => {:json => JSON.generate(body)}})
end
def post_file(api_method, file, parameters = {})
parameters.merge!({"token" => @token})
body = {}
body.merge!({:method => api_method})
body.merge!({:params => parameters})
# Brightcove requires that the JSON-RPC call absolutely
# be the first part of a multi-part POST like create_video.
if RUBY_VERSION >= '1.9'
payload = {}
else
payload = OrderedHash.new
end
payload[:json] = body.to_json
payload[:file] = File.new(file, 'rb')
execution_payload = {
:method => :post,
:url => @write_api_url,
:payload => payload,
:content_type => :json,
:accept => :json,
:multipart => true
}
execution_payload[:timeout] = @timeout if @timeout
execution_payload[:open_timeout] = @open_timeout if @open_timeout
response = RestClient::Request.execute(execution_payload)
JSON.parse(response)
end
# Backwards-compatible wrapper: opens the file at +upload_file+ and delegates
# to #post_io_streaming. File.open with a block returns the block's value, so
# this returns the parsed JSON response and guarantees the handle is closed
# even if the upload raises.
def post_file_streaming(api_method, upload_file, content_type, parameters)
  File.open(upload_file) { |file| post_io_streaming(api_method, file, content_type, parameters) }
end
def post_io_streaming(api_method, file, content_type, parameters)
parameters.merge!({"token" => @token})
body = {}
body.merge!({:method => api_method})
body.merge!({:params => parameters})
# Brightcove requires that the JSON-RPC call absolutely
# be the first part of a multi-part POST like create_video.
if RUBY_VERSION >= '1.9'
payload = {}
else
payload = OrderedHash.new
end
url = URI.parse(@write_api_url)
response = nil
payload[:json] = body.to_json
payload[:file] = UploadIO.new(file, content_type)
request = Net::HTTP::Post::Multipart.new(url.path, payload)
response = Net::HTTP.start(url.host, url.port) do |http|
http.read_timeout = @timeout if @timeout
http.open_timeout = @open_timeout if @open_timeout
http.request(request)
end
JSON.parse(response.body)
end
end
end |
module Brivo
module API
module HTTP
MAX_RETRIES = 3
API_URL = -'https://api.brivo.com/v1/api'
PAGE_SIZE = 100
def set_access_token
uri = URI.parse("https://auth.brivo.com/oauth/token?grant_type=password&username=#{username}&password=#{password}")
authorization_code = Base64.strict_encode64("#{client_id}:#{secret}")
res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |https|
req = Net::HTTP::Post.new(uri)
req['Content-Type'] = 'application/json'
req['Authorization'] = "Basic #{authorization_code}"
req['api-key'] = api_key
https.request(req)
end
response = JSON.parse res.body
@access_token = response['access_token']
@access_token_expiry = monotonic_time + (response['expires_in'] - 2)
end
def http_request path, method: :get, params: {}, offset: nil
attempts = 0
begin
attempts += 1
uri = "#{API_URL}/#{path}"
parsed_uri = URI.parse(uri)
if offset
if parsed_uri.query.nil?
parsed_uri.query = "pageSize=#{PAGE_SIZE}&offset=#{offset}"
else
parsed_uri.query = "#{parsed_uri.query}&pageSize=#{PAGE_SIZE}&offset=#{offset}"
end
end
http_methods = {
get: Net::HTTP::Get,
post: Net::HTTP::Post,
put: Net::HTTP::Put,
delete: Net::HTTP::Delete
}
response = Net::HTTP.start(parsed_uri.host, parsed_uri.port, use_ssl: true) do |https|
request = http_methods[method].new(parsed_uri)
request.body = params.to_json
set_access_token if monotonic_time > @access_token_expiry
request['Content-Type'] = 'application/json'
request['Authorization'] = "bearer #{@access_token}"
request['api-key'] = api_key
https.request(request)
end
# http://apidocs.brivo.com/#response-codes
case response.code.to_i
when 200
qq JSON.parse(response.body)
JSON.parse(response.body)
when 204
true
when 400
raise Brivo::BadRequest
when 401
raise Brivo::Unauthorized
when 403
raise Brivo::Forbidden
when 415
raise Brivo::UnsupportedMediaType
when 503
raise Brivo::ServiceUnavailable
when 596
raise Brivo::ServiceNotFound
else
raise Brivo::UnkownResponse
end
rescue StandardError
if attempts > MAX_RETRIES
raise
else
retry
end
end
end
private
def monotonic_time
Process.clock_gettime(Process::CLOCK_MONOTONIC)
end
end
end
end
Remove qq debugging
module Brivo
module API
module HTTP
MAX_RETRIES = 3
API_URL = -'https://api.brivo.com/v1/api'
PAGE_SIZE = 100
# Exchanges the configured username/password (OAuth "password" grant) for a
# Brivo access token and caches it along with its expiry time.
# NOTE(review): the credentials travel in the URL query string, where they
# can land in proxy/server logs — consider moving them to the POST body.
def set_access_token
  uri = URI.parse("https://auth.brivo.com/oauth/token?grant_type=password&username=#{username}&password=#{password}")
  # HTTP Basic credentials (client_id:secret) identify the OAuth client.
  authorization_code = Base64.strict_encode64("#{client_id}:#{secret}")
  res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |https|
    req = Net::HTTP::Post.new(uri)
    req['Content-Type'] = 'application/json'
    req['Authorization'] = "Basic #{authorization_code}"
    req['api-key'] = api_key
    https.request(req)
  end
  response = JSON.parse res.body
  @access_token = response['access_token']
  # Expire 2 seconds early so a token is never used mid-flight after expiry;
  # the monotonic clock keeps wall-clock adjustments from skewing this.
  @access_token_expiry = monotonic_time + (response['expires_in'] - 2)
end
# Performs an authenticated request against the Brivo API and decodes the
# response. +path+ is appended to API_URL; +params+ is serialized as the
# JSON request body. When +offset+ is given, pageSize/offset pagination
# parameters are appended to the query string. Any StandardError (network
# failure, mapped API error, JSON parse error) triggers a retry; after
# MAX_RETRIES + 1 failed attempts the last error propagates.
def http_request path, method: :get, params: {}, offset: nil
  attempts = 0
  begin
    attempts += 1
    uri = "#{API_URL}/#{path}"
    parsed_uri = URI.parse(uri)
    if offset
      # Preserve any query string that was already embedded in +path+.
      if parsed_uri.query.nil?
        parsed_uri.query = "pageSize=#{PAGE_SIZE}&offset=#{offset}"
      else
        parsed_uri.query = "#{parsed_uri.query}&pageSize=#{PAGE_SIZE}&offset=#{offset}"
      end
    end
    http_methods = {
      get: Net::HTTP::Get,
      post: Net::HTTP::Post,
      put: Net::HTTP::Put,
      delete: Net::HTTP::Delete
    }
    response = Net::HTTP.start(parsed_uri.host, parsed_uri.port, use_ssl: true) do |https|
      request = http_methods[method].new(parsed_uri)
      request.body = params.to_json
      # Refresh the token lazily, right before attaching it to the request.
      set_access_token if monotonic_time > @access_token_expiry
      request['Content-Type'] = 'application/json'
      request['Authorization'] = "bearer #{@access_token}"
      request['api-key'] = api_key
      https.request(request)
    end
    # http://apidocs.brivo.com/#response-codes
    case response.code.to_i
    when 200
      JSON.parse(response.body)
    when 204
      true
    when 400
      raise Brivo::BadRequest
    when 401
      raise Brivo::Unauthorized
    when 403
      raise Brivo::Forbidden
    when 415
      raise Brivo::UnsupportedMediaType
    when 503
      raise Brivo::ServiceUnavailable
    when 596
      raise Brivo::ServiceNotFound
    else
      # NOTE(review): constant name is misspelled ("Unkown"); renaming it
      # would break rescuers elsewhere, so it is left as-is here.
      raise Brivo::UnkownResponse
    end
  rescue StandardError
    # The mapped API errors above are StandardErrors too, so they are also
    # retried before finally propagating.
    if attempts > MAX_RETRIES
      raise
    else
      retry
    end
  end
end
private
def monotonic_time
Process.clock_gettime(Process::CLOCK_MONOTONIC)
end
end
end
end
|
module Bronto
VERSION = "0.0.9"
end
Update to version 0.1.0.
module Bronto
  # Gem release version (bumped from 0.0.9 to 0.1.0).
  VERSION = "0.1.0"
end
|
# encoding: utf-8
module Bullet
VERSION = "2.0.0.rc3"
end
Bumping version to 2.0.0
# encoding: utf-8
module Bullet
VERSION = "2.0.0"
end
|
Add Buoys::Renderer class
module Buoys
  # Renders a breadcrumb trail ("buoys") for a given key: the chain of links
  # from the root buoy down to the buoy registered under +key+.
  class Renderer
    # context - rendering context forwarded to each Buoy.
    # key     - identifier of the buoy to render; nil renders nothing.
    # args    - extra arguments forwarded to the matched buoy.
    def initialize(context, key, *args)
      @context, @key, @args = context, key, args
      # Lazily load buoy definition files on first use in this process.
      Buoys::Loader.load_buoys_files if Buoys::Loader.buoys.keys.empty?
    end
    # Returns the full list of links, root first; [] when no key was given.
    def render
      return [] unless @key
      buoy = Buoys::Buoy.new(@context, @key, @args)
      build_links(buoy)
    end
    private
    # Prepends ancestor links and flags the final link as the current page.
    # NOTE(review): assumes the matched buoy yields at least one link —
    # links.last would be nil otherwise; confirm against Buoy#links.
    def build_links(buoy)
      links = buoy.links.dup
      links.unshift *collect_previous_links(buoy)
      links.last.mark_as_current!
      links
    end
    # Walks the `previous` chain, accumulating ancestor links root-first.
    def collect_previous_links(buoy)
      links = []
      while buoy = buoy.previous
        links.unshift *buoy.links
      end
      links
    end
  end
end
|
module CellectClient
ConnectionError = Class.new(StandardError)
MAX_TRIES = 1
def self.add_seen(session, workflow_id, user_id, subject_id)
RequestToHost.new(session, workflow_id)
.request(:add_seen, subject_id: subject_id, user_id: user_id)
end
def self.load_user(session, workflow_id, user_id)
RequestToHost.new(session, workflow_id)
.request(:load_user, user_id: user_id)
end
def self.remove_subject(subject_id, workflow_id, group_id)
RequestToAll.new(workflow_id)
.request(:remove_subject, subject_id, group_id: group_id)
end
def self.get_subjects(session, workflow_id, user_id, group_id, limit)
RequestToHost.new(session, workflow_id)
.request(:get_subjects, group_id: group_id, user_id: user_id, limit: limit)
end
def set_client_params(params)
set_host_param(params)
set_workflow_param(params)
end
class Request
attr_reader :workflow_id
def initialize(workflow_id)
@workflow_id = workflow_id
end
def request(action, *params)
tries ||= MAX_TRIES
Cellect::Client.connection.send(action, *params)
rescue StandardError => e
raise ConnectionError, e if tries <= 0
tries -= 1
yield if block_given?
retry
end
end
class RequestToAll < Request
def request(action, *params)
case params
when Hash
params[:workflow_id] = workflow_id
when Array
params.last[:workflow_id] = workflow_id if params.last.is_a? Hash
end
super action, *params
end
end
class RequestToHost < Request
def initialize(session, workflow_id)
@session = session
super workflow_id
end
def request(action, params={})
params[:host] = host
params[:workflow_id] = workflow_id
super(action, params) { params[:host] = reset_host }
end
def host
@host ||= if (h = @session[workflow_id]) && Cellect::Client.host_exists?(h)
h
else
choose_host
end
end
def reset_host
@host = choose_host
end
def choose_host
@session[workflow_id] = Cellect::Client.choose_host
end
end
end
Add new relic instrumentation to cellect client
module CellectClient
ConnectionError = Class.new(StandardError)
MAX_TRIES = 1
def self.add_seen(session, workflow_id, user_id, subject_id)
RequestToHost.new(session, workflow_id)
.request(:add_seen, subject_id: subject_id, user_id: user_id)
end
def self.load_user(session, workflow_id, user_id)
RequestToHost.new(session, workflow_id)
.request(:load_user, user_id: user_id)
end
def self.remove_subject(subject_id, workflow_id, group_id)
RequestToAll.new(workflow_id)
.request(:remove_subject, subject_id, group_id: group_id)
end
def self.get_subjects(session, workflow_id, user_id, group_id, limit)
RequestToHost.new(session, workflow_id)
.request(:get_subjects, group_id: group_id, user_id: user_id, limit: limit)
end
singleton_class.class_eval do
include ::NewRelic::Agent::MethodTracer
%i(add_seen load_user remove_subject get_subjects).each do |client_method|
add_method_tracer client_method, "cellect/#{client_method}"
end
end
class Request
attr_reader :workflow_id
def initialize(workflow_id)
@workflow_id = workflow_id
end
def request(action, *params)
tries ||= MAX_TRIES
Cellect::Client.connection.send(action, *params)
rescue StandardError => e
raise ConnectionError, e if tries <= 0
tries -= 1
yield if block_given?
retry
end
end
# Variant of Request that injects the workflow_id into the trailing options
# hash before delegating to Request#request.
class RequestToAll < Request
  def request(action, *params)
    # NOTE(review): `params` comes from a splat and is therefore always an
    # Array, so the Hash branch below can never match; only the Array branch
    # ever runs.
    case params
    when Hash
      params[:workflow_id] = workflow_id
    when Array
      params.last[:workflow_id] = workflow_id if params.last.is_a? Hash
    end
    super action, *params
  end
end
class RequestToHost < Request
def initialize(session, workflow_id)
@session = session
super workflow_id
end
def request(action, params={})
params[:host] = host
params[:workflow_id] = workflow_id
super(action, params) { params[:host] = reset_host }
end
def host
@host ||= if (h = @session[workflow_id]) && Cellect::Client.host_exists?(h)
h
else
choose_host
end
end
def reset_host
@host = choose_host
end
def choose_host
@session[workflow_id] = Cellect::Client.choose_host
end
end
end
|
require 'rest_client'
require 'json'
module ChargeBee
module Rest
def self.request(method, url, env, params=nil)
raise APIError.new('No environment configured.') unless env
api_key = env.api_key
headers = {}
if(ChargeBee.verify_ca_certs?)
ssl_opts = {
:verify_ssl => OpenSSL::SSL::VERIFY_PEER,
:ssl_ca_file => ChargeBee.ca_cert_path
}
else
ssl_opts = {
:verify_ssl => false
}
end
case method.to_s.downcase.to_sym
when :get, :head, :delete
headers = { :params => params }.merge(headers)
payload = nil
else
payload = params
end
headers = {
"User-Agent" => "Chargebee-Ruby-Client",
:accept => :json
}.merge(headers)
opts = {
:method => method,
:url => env.api_url(url),
:user => api_key,
:headers => headers,
:payload => payload,
:open_timeout => 50,
:timeout => 100
}.merge(ssl_opts)
begin
response = RestClient::Request.execute(opts)
rescue Exception => e
case(e)
when SocketError
raise APIError.new("Error while connecting to chargebee. If you see this repeatedly, contact us at support@chargebee.com")
when RestClient::ExceptionWithResponse
if rcode = e.http_code and rbody = e.http_body
raise handle_for_error(e, rcode, rbody)
else
raise APIError.new(e.message)
end
when RestClient::Exception
raise APIError.new("Unexpected error received: #{e.message}", e.http_code, e.http_body)
else
raise APIError.new(e.message)
end
end
rbody = response.body
rcode = response.code
begin
resp = JSON.parse(rbody)
rescue JSON::ParserError
raise APIError.new("Invalid response object from API", rcode, rbody)
end
resp = Util.symbolize_keys(resp)
resp
end
# Converts an HTTP error response into an APIError. A 204 means the API
# returned no body at all; otherwise the body is expected to be a JSON
# error object whose symbolized keys are attached to the raised APIError.
def self.handle_for_error(e, rcode=nil, rbody=nil)
  if(rcode == 204)
    raise APIError.new("No response returned by the chargebee api", rcode)
  end
  begin
    error_obj = JSON.parse(rbody)
    error_obj = Util.symbolize_keys(error_obj)
  rescue JSON::ParserError
    # BUG FIX: the parse failure class is JSON::ParserError; the previous
    # JSON::ParseError constant does not exist, so a malformed body raised
    # NameError instead of the intended APIError.
    raise APIError.new("Invalid JSON response #{rbody.inspect} received with HTTP response code #{rcode}", rcode, rbody)
  end
  raise APIError.new(error_obj.to_s, rcode, rbody, error_obj)
end
end
end
Passing library version along User-Agent header
Signed-off-by: rraman <843cbacc61c8fe45886819ff1516e2e179374496@chargebee.com>
require 'rest_client'
require 'json'
module ChargeBee
module Rest
def self.request(method, url, env, params=nil)
raise APIError.new('No environment configured.') unless env
api_key = env.api_key
headers = {}
if(ChargeBee.verify_ca_certs?)
ssl_opts = {
:verify_ssl => OpenSSL::SSL::VERIFY_PEER,
:ssl_ca_file => ChargeBee.ca_cert_path
}
else
ssl_opts = {
:verify_ssl => false
}
end
case method.to_s.downcase.to_sym
when :get, :head, :delete
headers = { :params => params }.merge(headers)
payload = nil
else
payload = params
end
user_agent = "Chargebee-Ruby-Client v#{ChargeBee::VERSION}"
headers = {
"User-Agent" => user_agent,
:accept => :json
}.merge(headers)
opts = {
:method => method,
:url => env.api_url(url),
:user => api_key,
:headers => headers,
:payload => payload,
:open_timeout => 50,
:timeout => 100
}.merge(ssl_opts)
begin
response = RestClient::Request.execute(opts)
rescue Exception => e
case(e)
when SocketError
raise APIError.new("Error while connecting to chargebee. If you see this repeatedly, contact us at support@chargebee.com")
when RestClient::ExceptionWithResponse
if rcode = e.http_code and rbody = e.http_body
raise handle_for_error(e, rcode, rbody)
else
raise APIError.new(e.message)
end
when RestClient::Exception
raise APIError.new("Unexpected error received: #{e.message}", e.http_code, e.http_body)
else
raise APIError.new(e.message)
end
end
rbody = response.body
rcode = response.code
begin
resp = JSON.parse(rbody)
rescue JSON::ParserError
raise APIError.new("Invalid response object from API", rcode, rbody)
end
resp = Util.symbolize_keys(resp)
resp
end
# Converts an HTTP error response into an APIError. A 204 means the API
# returned no body at all; otherwise the body is expected to be a JSON
# error object whose symbolized keys are attached to the raised APIError.
def self.handle_for_error(e, rcode=nil, rbody=nil)
  if(rcode == 204)
    raise APIError.new("No response returned by the chargebee api", rcode)
  end
  begin
    error_obj = JSON.parse(rbody)
    error_obj = Util.symbolize_keys(error_obj)
  rescue JSON::ParserError
    # BUG FIX: the parse failure class is JSON::ParserError; the previous
    # JSON::ParseError constant does not exist, so a malformed body raised
    # NameError instead of the intended APIError.
    raise APIError.new("Invalid JSON response #{rbody.inspect} received with HTTP response code #{rcode}", rcode, rbody)
  end
  raise APIError.new(error_obj.to_s, rcode, rbody, error_obj)
end
end
end |
module Uno
# Core Uno game state machine: manages players, the draw/discard piles and
# turn order. States: :waiting_to_start -> :waiting_for_player_to_move
# -> :game_over.
class Game
  attr_reader :state
  attr_reader :discard_pile
  attr_reader :draw_pile
  attr_reader :players

  # The player whose turn it currently is.
  def current_player
    @players[@current_player_index]
  end

  # The player who moves after the current one (wraps around the table).
  def next_player
    next_player_index = (@current_player_index + 1) % players.count
    @players[next_player_index]
  end

  def initialize
    @state = :waiting_to_start
    @deck = Deck.generate
    @players = []
  end

  # Shuffles, deals 7 cards to each player and flips the first discard.
  # Raises GameHasStartedError / NotEnoughPlayersError on misuse.
  def start
    raise GameHasStartedError unless @state == :waiting_to_start
    raise NotEnoughPlayersError unless players.count >= 2
    @deck.shuffle!
    @discard_pile = [@deck.pop]
    @draw_pile = @deck
    # Fixed: +shuffle+ returns a new array and the result was discarded, so
    # the seating order was never actually randomized; +shuffle!+ mutates
    # the players array in place.
    @players.shuffle!
    @current_player_index = 0
    @players.each do |player|
      player.empty_hand!
      7.times do
        player.put_card_in_hand @draw_pile.pop
      end
    end
    @state = :waiting_for_player_to_move
  end

  def add_player(player)
    raise GameIsOverError if @state == :game_over
    raise GameHasStartedError unless @state == :waiting_to_start
    @players << player unless players.include?(player)
  end

  # Plays +card_played+ from +player+'s hand onto the discard pile and
  # applies any special-card effects (reverse, draw-two, skip).
  def play(player, card_played)
    raise GameIsOverError if @state == :game_over
    raise GameHasNotStartedError unless @state == :waiting_for_player_to_move
    raise NotPlayersTurnError unless player == current_player
    raise PlayerDoesNotHaveThatCardError unless player.has_card?(card_played)
    raise InvalidMoveError unless Rules.card_can_be_played?(card_played, discard_pile)
    # Take the card from the player and put it on top of the discard pile
    @discard_pile.push current_player.take_card_from_hand(card_played)
    @state = :game_over if current_player.hand.size == 0
    # Apply any special actions the card demands
    @players = @players.reverse if Rules.play_is_reversed?(card_played, @players.count)
    2.times {next_player.put_card_in_hand @draw_pile.pop} if Rules.next_player_must_draw_two?(card_played)
    skip_next_player if Rules.next_player_is_skipped?(card_played, @players.count)
    move_to_next_player
  end

  # The player draws a card instead of playing one; play then passes on.
  def skip(player)
    raise NotPlayersTurnError if player != current_player
    current_player.put_card_in_hand @draw_pile.pop
    move_to_next_player
  end

  private

  def skip_next_player
    move_to_next_player
  end

  def move_to_next_player
    @current_player_index = ((@current_player_index + 1) % @players.length)
  end
end
end
Refactored some commonly used code into its own method
module Uno
# Core Uno game state machine (post-refactor version with the shared
# next_player_index helper). States: :waiting_to_start ->
# :waiting_for_player_to_move -> :game_over.
class Game
  attr_reader :state
  attr_reader :discard_pile
  attr_reader :draw_pile
  attr_reader :players

  # The player whose turn it currently is.
  def current_player
    @players[@current_player_index]
  end

  # The player who moves after the current one (wraps around the table).
  def next_player
    @players[next_player_index]
  end

  def initialize
    @state = :waiting_to_start
    @deck = Deck.generate
    @players = []
  end

  # Shuffles, deals 7 cards to each player and flips the first discard.
  # Raises GameHasStartedError / NotEnoughPlayersError on misuse.
  def start
    raise GameHasStartedError unless @state == :waiting_to_start
    raise NotEnoughPlayersError unless players.count >= 2
    @deck.shuffle!
    @discard_pile = [@deck.pop]
    @draw_pile = @deck
    # Fixed: +shuffle+ returns a new array and the result was discarded, so
    # the seating order was never actually randomized; +shuffle!+ mutates
    # the players array in place.
    @players.shuffle!
    @current_player_index = 0
    @players.each do |player|
      player.empty_hand!
      7.times do
        player.put_card_in_hand @draw_pile.pop
      end
    end
    @state = :waiting_for_player_to_move
  end

  def add_player(player)
    raise GameIsOverError if @state == :game_over
    raise GameHasStartedError unless @state == :waiting_to_start
    @players << player unless players.include?(player)
  end

  # Plays +card_played+ from +player+'s hand onto the discard pile and
  # applies any special-card effects (reverse, draw-two, skip).
  def play(player, card_played)
    raise GameIsOverError if @state == :game_over
    raise GameHasNotStartedError unless @state == :waiting_for_player_to_move
    raise NotPlayersTurnError unless player == current_player
    raise PlayerDoesNotHaveThatCardError unless player.has_card?(card_played)
    raise InvalidMoveError unless Rules.card_can_be_played?(card_played, discard_pile)
    # Take the card from the player and put it on top of the discard pile
    @discard_pile.push current_player.take_card_from_hand(card_played)
    @state = :game_over if current_player.hand.size == 0
    # Apply any special actions the card demands
    @players = @players.reverse if Rules.play_is_reversed?(card_played, @players.count)
    2.times {next_player.put_card_in_hand @draw_pile.pop} if Rules.next_player_must_draw_two?(card_played)
    skip_next_player if Rules.next_player_is_skipped?(card_played, @players.count)
    move_to_next_player
  end

  # The player draws a card instead of playing one; play then passes on.
  def skip(player)
    raise NotPlayersTurnError if player != current_player
    current_player.put_card_in_hand @draw_pile.pop
    move_to_next_player
  end

  private

  def skip_next_player
    move_to_next_player
  end

  def move_to_next_player
    @current_player_index = next_player_index
  end

  # Index of the next seat, wrapping at the table size.
  def next_player_index
    (@current_player_index + 1) % players.count
  end
end
end |
require 'faraday'
require 'faraday_middleware'
require_relative 'error'
require_relative 'version'
Dir[File.expand_path('../resources/*.rb', __FILE__)].each { |f| require f }
module Closeio
# HTTP client for the Close.com (Close.io) REST API. Endpoint behavior lives
# in the per-resource mixins below; this class supplies authentication,
# connection setup and verb/pagination helpers.
class Client
  include Closeio::Client::Activity
  include Closeio::Client::BulkAction
  include Closeio::Client::Contact
  include Closeio::Client::CustomField
  include Closeio::Client::EmailAccount
  include Closeio::Client::EmailTemplate
  include Closeio::Client::Event
  include Closeio::Client::IntegrationLink
  include Closeio::Client::Lead
  include Closeio::Client::LeadStatus
  include Closeio::Client::Opportunity
  include Closeio::Client::OpportunityStatus
  include Closeio::Client::Organization
  include Closeio::Client::Report
  include Closeio::Client::Sequence
  include Closeio::Client::SequenceSchedule
  include Closeio::Client::SequenceSubscription
  include Closeio::Client::SmartView
  include Closeio::Client::Task
  include Closeio::Client::User
  include Closeio::Client::Webhook

  attr_reader :api_key, :logger, :ca_file, :errors, :utc_offset

  # api_key    - Close API key, used as the basic-auth user (blank password).
  # logger     - when truthy, log requests/responses through Faraday.
  # ca_file    - optional CA certificate bundle for SSL verification.
  # errors     - when truthy, raise on API error responses.
  # utc_offset - value sent to the API in the X-TZ-Offset header.
  def initialize(api_key, logger = true, ca_file = nil, errors = false, utc_offset: 0)
    @api_key = api_key
    @logger = logger
    @ca_file = ca_file
    @errors = errors
    @utc_offset = utc_offset
  end

  # Each HTTP verb returns the parsed response body.
  def get(path, options = {})
    connection.get(path, options).body
  end

  def post(path, req_body)
    connection.post do |req|
      req.url(path)
      req.body = req_body
    end.body
  end

  def put(path, options = {})
    connection.put(path, options).body
  end

  def delete(path, options = {})
    connection.delete(path, options).body
  end

  # Fetches every page of +path+ by advancing the API's +_skip+ cursor until
  # it reports has_more=false, and returns one merged result hash.
  def paginate(path, options = {})
    results = []
    skip = 0
    res = nil
    loop do
      # Fixed: non-destructive merge — the old merge! leaked the internal
      # _skip cursor back into the caller's options hash.
      res = get(path, options.merge(_skip: skip))
      unless res['data'].nil? || res['data'].empty?
        results.push res['data']
        skip += res['data'].count
      end
      break unless res['has_more']
    end
    { has_more: false, total_results: res['total_results'], data: results.flatten }
  end

  private

  # Turns a Hash query into the Close "field:\"value\"" search syntax;
  # plain strings pass through untouched.
  def assemble_list_query(query, options)
    options[:query] = if query.respond_to? :map
                        query.map { |k, v| "#{k}:\"#{v}\"" }.join(' ')
                      else
                        query
                      end
    options
  end

  # Builds a fresh Faraday connection per call (no connection reuse).
  def connection
    Faraday.new(
      url: 'https://api.close.com/api/v1',
      headers: {
        accept: 'application/json',
        'User-Agent' => "closeio-ruby-gem/v#{Closeio::VERSION}",
        'X-TZ-Offset' => utc_offset.to_s
      },
      ssl: { ca_file: ca_file }
    ) do |conn|
      # Fixed: Faraday::Connection#basic_auth is deprecated (removed in
      # Faraday 2); use the basic_auth request middleware instead.
      conn.request :basic_auth, api_key, ''
      conn.request :json
      conn.response :logger if logger
      conn.response :json
      conn.use FaradayMiddleware::CloseioErrorHandler if errors
      conn.adapter Faraday.default_adapter
    end
  end
end
end
[#66] Fix faraday basic_auth warning in connection
require 'faraday'
require 'faraday_middleware'
require_relative 'error'
require_relative 'version'
Dir[File.expand_path('../resources/*.rb', __FILE__)].each { |f| require f }
module Closeio
# HTTP client for the Close.com (Close.io) REST API. Endpoint behavior lives
# in the per-resource mixins below; this class supplies authentication,
# connection setup and verb/pagination helpers.
class Client
  include Closeio::Client::Activity
  include Closeio::Client::BulkAction
  include Closeio::Client::Contact
  include Closeio::Client::CustomField
  include Closeio::Client::EmailAccount
  include Closeio::Client::EmailTemplate
  include Closeio::Client::Event
  include Closeio::Client::IntegrationLink
  include Closeio::Client::Lead
  include Closeio::Client::LeadStatus
  include Closeio::Client::Opportunity
  include Closeio::Client::OpportunityStatus
  include Closeio::Client::Organization
  include Closeio::Client::Report
  include Closeio::Client::Sequence
  include Closeio::Client::SequenceSchedule
  include Closeio::Client::SequenceSubscription
  include Closeio::Client::SmartView
  include Closeio::Client::Task
  include Closeio::Client::User
  include Closeio::Client::Webhook

  attr_reader :api_key, :logger, :ca_file, :errors, :utc_offset

  # api_key    - Close API key, used as the basic-auth user (blank password).
  # logger     - when truthy, log requests/responses through Faraday.
  # ca_file    - optional CA certificate bundle for SSL verification.
  # errors     - when truthy, raise on API error responses.
  # utc_offset - value sent to the API in the X-TZ-Offset header.
  def initialize(api_key, logger = true, ca_file = nil, errors = false, utc_offset: 0)
    @api_key = api_key
    @logger = logger
    @ca_file = ca_file
    @errors = errors
    @utc_offset = utc_offset
  end

  # Each HTTP verb returns the parsed response body.
  def get(path, options = {})
    connection.get(path, options).body
  end

  def post(path, req_body)
    connection.post do |req|
      req.url(path)
      req.body = req_body
    end.body
  end

  def put(path, options = {})
    connection.put(path, options).body
  end

  def delete(path, options = {})
    connection.delete(path, options).body
  end

  # Fetches every page of +path+ by advancing the API's +_skip+ cursor until
  # it reports has_more=false, and returns one merged result hash.
  def paginate(path, options = {})
    results = []
    skip = 0
    res = nil
    loop do
      # Fixed: non-destructive merge — the old merge! leaked the internal
      # _skip cursor back into the caller's options hash.
      res = get(path, options.merge(_skip: skip))
      unless res['data'].nil? || res['data'].empty?
        results.push res['data']
        skip += res['data'].count
      end
      break unless res['has_more']
    end
    { has_more: false, total_results: res['total_results'], data: results.flatten }
  end

  private

  # Turns a Hash query into the Close "field:\"value\"" search syntax;
  # plain strings pass through untouched.
  def assemble_list_query(query, options)
    options[:query] = if query.respond_to? :map
                        query.map { |k, v| "#{k}:\"#{v}\"" }.join(' ')
                      else
                        query
                      end
    options
  end

  # Builds a fresh Faraday connection per call (no connection reuse).
  def connection
    Faraday.new(
      url: 'https://api.close.com/api/v1',
      headers: {
        accept: 'application/json',
        'User-Agent' => "closeio-ruby-gem/v#{Closeio::VERSION}",
        'X-TZ-Offset' => utc_offset.to_s
      },
      ssl: { ca_file: ca_file }
    ) do |conn|
      conn.request :basic_auth, api_key, ''
      conn.request :json
      conn.response :logger if logger
      conn.response :json
      conn.use FaradayMiddleware::CloseioErrorHandler if errors
      conn.adapter Faraday.default_adapter
    end
  end
end
end
|
require 'thor'
module Jenkins2API
# Command module wraps all the cli commands
module Command
# Contains all the commands under +build+ namespace
# Contains all the commands under +build+ namespace
class Build < Jenkins2API::ThorCommand
desc 'slave-name JOB_NAME BUILD_ID', 'Get Node name for a specific build'
method_option :ec2id, default: false, type: :boolean
# Displays the name of the slave where the build was executed
def slave_name(name, build_id)
slave_name = client.build.slave_name(name, build_id)
if options[:ec2id]
# --ec2id: reduce the node name to its EC2 instance id ("i-..." token).
# NOTE(review): if the node name contains no "i-..." token, +match+
# returns nil and the chained .captures raises NoMethodError — confirm
# whether a friendlier error is wanted here.
slave_name = slave_name.match(/(i-[0-9a-zA-Z]+)/)
.captures
.first
end
puts slave_name
end
end
end
end
Added log command to build. (#8)
require 'thor'
module Jenkins2API
# Command module wraps all the cli commands
module Command
# Contains all the commands under +build+ namespace
# Contains all the commands under +build+ namespace
class Build < Jenkins2API::ThorCommand
desc 'slave-name JOB_NAME BUILD_ID', 'Get Node name for a specific build'
method_option :ec2id, default: false, type: :boolean
# Displays the name of the slave where the build was executed
def slave_name(name, build_id)
slave_name = client.build.slave_name(name, build_id)
if options[:ec2id]
# --ec2id: reduce the node name to its EC2 instance id ("i-..." token).
# NOTE(review): if the node name contains no "i-..." token, +match+
# returns nil and the chained .captures raises NoMethodError — confirm
# whether a friendlier error is wanted here.
slave_name = slave_name.match(/(i-[0-9a-zA-Z]+)/)
.captures
.first
end
puts slave_name
end
desc 'logs JOB_NAME BUILD_ID', 'Get the logs for a specific build'
# Retrieve logs for a specific job and join them by newline
def logs(name, build_id)
puts client.build.logtext_lines(name, build_id).join("\n")
end
end
end
end
|
require_relative 'configurations/error'
require_relative 'configurations/configuration'
require_relative 'configurations/configurable'
# Configurations provides a unified approach to do configurations with the flexibility to do everything
# from arbitrary configurations to type asserted configurations for your gem or any other ruby code.
# @version 1.4.0
# @author Beat Richartz
#
module Configurations
extend Configurable
# Version number of Configurations
#
VERSION = '1.4.0'
end
Version bump to 2.0.0.pre
require_relative 'configurations/error'
require_relative 'configurations/configuration'
require_relative 'configurations/configurable'
# Configurations provides a unified approach to do configurations with the flexibility to do everything
# from arbitrary configurations to type asserted configurations for your gem or any other ruby code.
# @version 2.0.0.pre
# @author Beat Richartz
#
module Configurations
extend Configurable
# Version number of Configurations
#
VERSION = '2.0.0.pre'
end
|
## lib/trollop.rb -- trollop command-line processing library
## Author:: William Morgan (mailto: wmorgan-trollop@masanjin.net)
## Copyright:: Copyright 2007 William Morgan
## License:: the same terms as ruby itself
require 'date'
module Trollop
VERSION = "2.0"
## Thrown by Parser in the event of a commandline error. Not needed if
## you're using the Trollop::options entry.
class CommandlineError < StandardError;
end
## Thrown by Parser if the user passes in '-h' or '--help'. Handled
## automatically by Trollop#options.
class HelpNeeded < StandardError;
end
## Thrown by Parser if the user passes in '-v' or '--version'. Handled
## automatically by Trollop#options.
class VersionNeeded < StandardError;
end
## Regex for floating point numbers (optional sign, decimals, exponent)
FLOAT_RE = /^-?((\d+(\.\d+)?)|(\.\d+))([eE][-+]?[\d]+)?$/
## Regex for parameters: tokens that look like options rather than values
PARAM_RE = /^-(-|\.$|[^\d\.])/
## The commandline parser. In typical usage, the methods in this class
## will be handled internally by Trollop::options. In this case, only the
## #opt, #banner and #version, #depends, and #conflicts methods will
## typically be called.
##
## If you want to instantiate this class yourself (for more complicated
## argument-parsing logic), call #parse to actually produce the output hash,
## and consider calling it from within
## Trollop::with_standard_exception_handling.
class Parser
## The set of values that indicate a flag option when passed as the
## +:type+ parameter of #opt.
FLAG_TYPES = [:flag, :bool, :boolean]
## The set of values that indicate a single-parameter (normal) option when
## passed as the +:type+ parameter of #opt.
##
## A value of +io+ corresponds to a readable IO resource, including
## a filename, URI, or the strings 'stdin' or '-'.
SINGLE_ARG_TYPES = [:int, :integer, :string, :double, :float, :io, :date]
## The set of values that indicate a multiple-parameter option (i.e., that
## takes multiple space-separated values on the commandline) when passed as
## the +:type+ parameter of #opt.
MULTI_ARG_TYPES = [:ints, :integers, :strings, :doubles, :floats, :ios, :dates]
## The complete set of legal values for the +:type+ parameter of #opt.
TYPES = FLAG_TYPES + SINGLE_ARG_TYPES + MULTI_ARG_TYPES
INVALID_SHORT_ARG_REGEX = /[\d-]/ #:nodoc:
## The values from the commandline that were not interpreted by #parse.
attr_reader :leftovers
## The complete configuration hashes for each option. (Mainly useful
## for testing.)
attr_reader :specs
attr_reader :order
## Initializes the parser, and instance-evaluates any block given.
def initialize *a, &b
@version = nil
@leftovers = []
@specs = {}
# Map long/short option strings to the option's symbolic name.
@long = {}
@short = {}
# Interleaved [:opt, name] / [:text, str] entries, in declaration order.
@order = []
@constraints = []
@stop_words = []
@stop_on_unknown = false
@help_formatter = nil
#instance_eval(&b) if b # can't take arguments
# Rebind the configuration block as a method so it runs with parser
# scope AND can still receive the extra arguments +a+.
cloaker(&b).bind(self).call(*a) if b
end
## Define an option. +name+ is the option name, a unique identifier
## for the option that you will use internally, which should be a
## symbol or a string. +desc+ is a string description which will be
## displayed in help messages.
##
## Takes the following optional arguments:
##
## [+:long+] Specify the long form of the argument, i.e. the form with two dashes. If unspecified, will be automatically derived based on the argument name by turning the +name+ option into a string, and replacing any _'s by -'s.
## [+:short+] Specify the short form of the argument, i.e. the form with one dash. If unspecified, will be automatically derived from +name+.
## [+:type+] Require that the argument take a parameter or parameters of type +type+. For a single parameter, the value can be a member of +SINGLE_ARG_TYPES+, or a corresponding Ruby class (e.g. +Integer+ for +:int+). For multiple-argument parameters, the value can be any member of +MULTI_ARG_TYPES+ constant. If unset, the default argument type is +:flag+, meaning that the argument does not take a parameter. The specification of +:type+ is not necessary if a +:default+ is given.
## [+:default+] Set the default value for an argument. Without a default value, the hash returned by #parse (and thus Trollop::options) will have a +nil+ value for this key unless the argument is given on the commandline. The argument type is derived automatically from the class of the default value given, so specifying a +:type+ is not necessary if a +:default+ is given. (But see below for an important caveat when +:multi+: is specified too.) If the argument is a flag, and the default is set to +true+, then if it is specified on the the commandline the value will be +false+.
## [+:required+] If set to +true+, the argument must be provided on the commandline.
## [+:multi+] If set to +true+, allows multiple occurrences of the option on the commandline. Otherwise, only a single instance of the option is allowed. (Note that this is different from taking multiple parameters. See below.)
##
## Note that there are two types of argument multiplicity: an argument
## can take multiple values, e.g. "--arg 1 2 3". An argument can also
## be allowed to occur multiple times, e.g. "--arg 1 --arg 2".
##
## Arguments that take multiple values should have a +:type+ parameter
## drawn from +MULTI_ARG_TYPES+ (e.g. +:strings+), or a +:default:+
## value of an array of the correct type (e.g. [String]). The
## value of this argument will be an array of the parameters on the
## commandline.
##
## Arguments that can occur multiple times should be marked with
## +:multi+ => +true+. The value of this argument will also be an array.
## In contrast with regular non-multi options, if not specified on
## the commandline, the default value will be [], not nil.
##
## These two attributes can be combined (e.g. +:type+ => +:strings+,
## +:multi+ => +true+), in which case the value of the argument will be
## an array of arrays.
##
## There's one ambiguous case to be aware of: when +:multi+: is true and a
## +:default+ is set to an array (of something), it's ambiguous whether this
## is a multi-value argument as well as a multi-occurrence argument.
## In thise case, Trollop assumes that it's not a multi-value argument.
## If you want a multi-value, multi-occurrence argument with a default
## value, you must specify +:type+ as well.
def opt name, desc="", opts={}
raise ArgumentError, "you already have an argument named '#{name}'" if @specs.member? name
## fill in :type
opts[:type] = # normalize
case opts[:type]
when :boolean, :bool;
:flag
when :integer;
:int
when :integers;
:ints
when :double;
:float
when :doubles;
:floats
when Class
case opts[:type].name
when 'TrueClass', 'FalseClass';
:flag
when 'String';
:string
when 'Integer';
:int
when 'Float';
:float
when 'IO';
:io
when 'Date';
:date
else
raise ArgumentError, "unsupported argument type '#{opts[:type].class.name}'"
end
when nil;
nil
else
raise ArgumentError, "unsupported argument type '#{opts[:type]}'" unless TYPES.include?(opts[:type])
opts[:type]
end
## for options with :multi => true, an array default doesn't imply
## a multi-valued argument. for that you have to specify a :type
## as well. (this is how we disambiguate an ambiguous situation;
## see the docs for Parser#opt for details.)
disambiguated_default = if opts[:multi] && opts[:default].is_a?(Array) && !opts[:type]
opts[:default].first
else
opts[:default]
end
type_from_default =
case disambiguated_default
when Integer;
:int
when Numeric;
:float
when TrueClass, FalseClass;
:flag
when String;
:string
when IO;
:io
when Date;
:date
when Array
if opts[:default].empty?
raise ArgumentError, "multiple argument type cannot be deduced from an empty array for '#{opts[:default][0].class.name}'"
end
case opts[:default][0] # the first element determines the types
when Integer;
:ints
when Numeric;
:floats
when String;
:strings
when IO;
:ios
when Date;
:dates
else
raise ArgumentError, "unsupported multiple argument type '#{opts[:default][0].class.name}'"
end
when nil;
nil
else
raise ArgumentError, "unsupported argument type '#{opts[:default].class.name}'"
end
raise ArgumentError, ":type specification and default type don't match (default type is #{type_from_default})" if opts[:type] && type_from_default && opts[:type] != type_from_default
opts[:type] = opts[:type] || type_from_default || :flag
## fill in :long
opts[:long] = opts[:long] ? opts[:long].to_s : name.to_s.gsub("_", "-")
opts[:long] = case opts[:long]
when /^--([^-].*)$/;
$1
when /^[^-]/;
opts[:long]
else
; raise ArgumentError, "invalid long option name #{opts[:long].inspect}"
end
raise ArgumentError, "long option name #{opts[:long].inspect} is already taken; please specify a (different) :long" if @long[opts[:long]]
## fill in :short
opts[:short] = opts[:short].to_s if opts[:short] unless opts[:short] == :none
opts[:short] = case opts[:short]
when /^-(.)$/;
$1
when nil, :none, /^.$/;
opts[:short]
else
raise ArgumentError, "invalid short option name '#{opts[:short].inspect}'"
end
if opts[:short]
raise ArgumentError, "short option name #{opts[:short].inspect} is already taken; please specify a (different) :short" if @short[opts[:short]]
raise ArgumentError, "a short option name can't be a number or a dash" if opts[:short] =~ INVALID_SHORT_ARG_REGEX
end
## fill in :default for flags
opts[:default] = false if opts[:type] == :flag && opts[:default].nil?
## autobox :default for :multi (multi-occurrence) arguments
opts[:default] = [opts[:default]] if opts[:default] && opts[:multi] && !opts[:default].is_a?(Array)
## fill in :multi
opts[:multi] ||= false
opts[:desc] ||= desc
@long[opts[:long]] = name
@short[opts[:short]] = name if opts[:short] && opts[:short] != :none
@specs[name] = opts
@order << [:opt, name]
end
## Sets the version string. If set, the user can request the version
## on the commandline. Should probably be of the form "<program name>
## <version number>".
# Gets (no argument) or sets (with argument) the version banner string
# shown when the user requests --version.
def version s=nil
  @version = s if s
  @version
end
## Adds text to the help display. Can be interspersed with calls to
## #opt to build a multi-section help page.
# Appends a free-text section to the help output, interleaved in
# declaration order with the options.
def banner s
  @order.push([:text, s])
end
alias :text :banner
# Installs a custom formatter object used when rendering help output.
def help_formatter(formatter)
@help_formatter = formatter
end
# The formatter installed via #help_formatter, or nil for the default.
def current_help_formatter
@help_formatter
end
## Marks two (or more!) options as requiring each other. Only handles
## undirected (i.e., mutual) dependencies. Directed dependencies are
## better modeled with Trollop::die.
# Records an undirected dependency constraint between the given options;
# raises ArgumentError for any name that was never declared with #opt.
def depends *syms
  syms.each do |sym|
    raise ArgumentError, "unknown option '#{sym}'" unless @specs[sym]
  end
  @constraints.push [:depends, syms]
end
## Marks two (or more!) options as conflicting.
# Records a mutual-exclusion constraint between the given options;
# raises ArgumentError for any name that was never declared with #opt.
def conflicts *syms
  syms.each do |sym|
    raise ArgumentError, "unknown option '#{sym}'" unless @specs[sym]
  end
  @constraints.push [:conflicts, syms]
end
## Defines a set of words which cause parsing to terminate when
## encountered, such that any options to the left of the word are
## parsed as usual, and options to the right of the word are left
## intact.
##
## A typical use case would be for subcommand support, where these
## would be set to the list of subcommands. A subsequent Trollop
## invocation would then be used to parse subcommand options, after
## shifting the subcommand off of ARGV.
# Records the words at which parsing terminates; accepts a flat list,
# an array, or any mix of both.
def stop_on *words
  @stop_words = words.flatten
end
## Similar to #stop_on, but stops on any unknown word when encountered
## (unless it is a parameter for an argument). This is useful for
## cases where you don't know the set of subcommands ahead of time,
## i.e., without first parsing the global options.
def stop_on_unknown
# Flag consulted inside #each_arg: any unrecognized token halts parsing.
@stop_on_unknown = true
end
## Parses the commandline. Typically called by Trollop::options,
## but you can call it directly if you need more control.
##
## throws CommandlineError, HelpNeeded, and VersionNeeded exceptions.
# Parses +cmdline+ against the declared specs and returns a hash of option
# values (plus "<name>_given" markers). Raises VersionNeeded / HelpNeeded /
# CommandlineError; NOTE(review): several syntax errors below print a
# colorized message and call Kernel#exit instead of raising — confirm that
# hard exit is intended for library use.
def parse cmdline=ARGV
vals = {}
required = {}
# Auto-register --version/--help unless the user declared them.
opt :version, 'Prints version and exits' if @version unless @specs[:version] || @long['version']
opt :help, "\x1B[38;5;222mShows help message for current command\x1B[0m" unless @specs[:help] || @long['help']
@specs.each do |sym, opts|
required[sym] = true if opts[:required]
vals[sym] = opts[:default]
vals[sym] = [] if opts[:multi] && !opts[:default] # multi arguments default to [], not nil
end
resolve_default_short_options!
## resolve symbols
given_args = {}
@leftovers = each_arg cmdline do |arg, params|
## handle --no- forms
arg, negative_given = if arg =~ /^--no-([^-]\S*)$/
["--#{$1}", true]
else
[arg, false]
end
sym = case arg
when /^-([^-])$/;
@short[$1]
when /^--([^-]\S*)$/;
@long[$1] || @long["no-#{$1}"]
else
# raise CommandlineError, "invalid argument syntax: '#{arg}'"
puts "\n \x1B[48;5;196m ERROR \x1B[0m \xe2\x80\x94 Invalid argument syntax: #{arg}\n\n"
exit
end
sym = nil if arg =~ /--no-/ # explicitly invalidate --no-no- arguments
# raise CommandlineError, "unknown argument '#{arg}'" unless sym
unless sym
puts "\n \x1B[48;5;196m ERROR \x1B[0m \xe2\x80\x94 Unknown flag: #{arg}\n\n"
exit
end
if given_args.include?(sym) && !@specs[sym][:multi]
# raise CommandlineError, "option '#{arg}' specified multiple times"
puts "\n \x1B[48;5;196m ERROR \x1B[0m \xe2\x80\x94 Flag specified multiple times: #{arg}\n\n"
exit
end
given_args[sym] ||= {}
given_args[sym][:arg] = arg
given_args[sym][:negative_given] = negative_given
given_args[sym][:params] ||= []
# The block returns the number of parameters taken.
num_params_taken = 0
unless params.nil?
if SINGLE_ARG_TYPES.include?(@specs[sym][:type])
given_args[sym][:params] << params[0, 1] # take the first parameter
num_params_taken = 1
elsif MULTI_ARG_TYPES.include?(@specs[sym][:type])
given_args[sym][:params] << params # take all the parameters
num_params_taken = params.size
end
end
num_params_taken
end
## check for version and help args
raise VersionNeeded if given_args.include? :version
raise HelpNeeded if given_args.include? :help
## check constraint satisfaction
@constraints.each do |type, syms|
constraint_sym = syms.find { |sym| given_args[sym] }
next unless constraint_sym
case type
when :depends
syms.each { |sym| raise CommandlineError, "--#{@specs[constraint_sym][:long]} requires --#{@specs[sym][:long]}" unless given_args.include? sym }
when :conflicts
syms.each { |sym| raise CommandlineError, "--#{@specs[constraint_sym][:long]} conflicts with --#{@specs[sym][:long]}" if given_args.include?(sym) && (sym != constraint_sym) }
end
end
required.each do |sym, val|
raise CommandlineError, "option --#{@specs[sym][:long]} must be specified" unless given_args.include? sym
end
## parse parameters
# given_args[sym][:params] is an array of parameter groups (one group per
# occurrence); the per-type branches below convert every string in place.
given_args.each do |sym, given_data|
arg, params, negative_given = given_data.values_at :arg, :params, :negative_given
opts = @specs[sym]
raise CommandlineError, "option '#{arg}' needs a parameter" if params.empty? && opts[:type] != :flag
vals["#{sym}_given".intern] = true # mark argument as specified on the commandline
case opts[:type]
when :flag
vals[sym] = (sym.to_s =~ /^no_/ ? negative_given : !negative_given)
when :int, :ints
vals[sym] = params.map { |pg| pg.map { |p| parse_integer_parameter p, arg } }
when :float, :floats
vals[sym] = params.map { |pg| pg.map { |p| parse_float_parameter p, arg } }
when :string, :strings
vals[sym] = params.map { |pg| pg.map { |p| p.to_s } }
when :io, :ios
vals[sym] = params.map { |pg| pg.map { |p| parse_io_parameter p, arg } }
when :date, :dates
vals[sym] = params.map { |pg| pg.map { |p| parse_date_parameter p, arg } }
end
# Unwrap the nested group arrays according to arity/multiplicity.
if SINGLE_ARG_TYPES.include?(opts[:type])
unless opts[:multi] # single parameter
vals[sym] = vals[sym][0][0]
else # multiple options, each with a single parameter
vals[sym] = vals[sym].map { |p| p[0] }
end
elsif MULTI_ARG_TYPES.include?(opts[:type]) && !opts[:multi]
vals[sym] = vals[sym][0] # single option, with multiple parameters
end
# else: multiple options, with multiple parameters
end
## modify input in place with only those
## arguments we didn't process
cmdline.clear
@leftovers.each { |l| cmdline << l }
## allow openstruct-style accessors
class << vals
def method_missing(m, *args)
self[m] || self[m.to_s]
end
end
vals
end
# Parses +param+ as a Date for option +arg+. Tries the optional Chronic
# natural-language parser first, falling back to Date.parse.
def parse_date_parameter param, arg #:nodoc:
begin
begin
# Chronic is an optional dependency; when the constant is missing the
# NameError is swallowed and +time+ stays nil.
time = Chronic.parse(param)
rescue NameError
# chronic is not available
end
time ? Date.new(time.year, time.month, time.day) : Date.parse(param)
rescue ArgumentError
raise CommandlineError, "option '#{arg}' needs a date"
end
end
## Print the help message to +stream+.
## Print the help message to +stream+: a left column of option forms and a
## right column of wrapped descriptions, interleaved with #banner text.
def educate stream=$stdout
width # hack: calculate it now; otherwise we have to be careful not to
# call this unless the cursor's at the beginning of a line.
left = {}
# Build the left-column label for every option: long form, optional
# --no- form for defaulted flags, short form, and a type placeholder.
@specs.each do |name, spec|
left[name] = "--#{spec[:long]}" +
(spec[:type] == :flag && spec[:default] ? ", --no-#{spec[:long]}" : "") +
(spec[:short] && spec[:short] != :none ? ", -#{spec[:short]}" : "") +
case spec[:type]
when :flag;
""
when :int;
" <i>"
when :ints;
" <i+>"
when :string;
" <s>"
when :strings;
" <s+>"
when :float;
" <f>"
when :floats;
" <f+>"
when :io;
" <filename/uri>"
when :ios;
" <filename/uri+>"
when :date;
" <date>"
when :dates;
" <date+>"
end
end
leftcol_width = left.values.map { |s| s.length }.max || 0
rightcol_start = leftcol_width + 6 # spaces
# Suppress the default header when the page starts with banner text.
unless @order.size > 0 && @order.first.first == :text
stream.puts "#@version\n" if @version
stream.puts "Options:"
end
@order.each do |what, opt|
if what == :text
stream.puts wrap(opt)
next
end
spec = @specs[opt]
stream.printf " %#{leftcol_width}s: ", left[opt]
# Append "(default: ...)" to the description, rendering the standard
# streams and arrays readably.
desc = spec[:desc] + begin
default_s = case spec[:default]
when $stdout;
"<stdout>"
when $stdin;
"<stdin>"
when $stderr;
"<stderr>"
when Array
spec[:default].join(", ")
else
spec[:default].to_s
end
if spec[:default]
if spec[:desc] =~ /\.$/
" (Default: #{default_s})"
else
" (default: #{default_s})"
end
else
""
end
end
stream.puts wrap(desc, :width => width - rightcol_start - 1, :prefix => rightcol_start)
end
end
# Terminal width in columns, memoized. Queries curses when stdout is a tty;
# falls back to 80 otherwise or when curses is unavailable or fails.
def width #:nodoc:
  @width ||= if $stdout.tty?
    begin
      require 'curses'
      Curses::init_screen
      x = Curses::cols
      Curses::close_screen
      x
    rescue ScriptError, StandardError
      # Fixed: was `rescue Exception`, which also swallowed SignalException
      # (Ctrl-C) and SystemExit; ScriptError still covers the LoadError
      # raised when the curses library is not installed.
      80
    end
  else
    80
  end
end
## Wraps +str+ to the terminal width: splits on newlines, wraps each line
## independently, and maps the empty string to [""].
def wrap str, opts={} # :nodoc:
  return [""] if str == ""
  str.split("\n").flat_map { |line| wrap_line(line, opts) }
end
## The per-parser version of Trollop::die (see that for documentation).
def die arg, msg
if msg
# +arg+ is an option symbol: report it with its long form.
$stderr.puts "Error: argument --#{@specs[arg][:long]} #{msg}."
else
# +arg+ is itself the complete error message.
$stderr.puts "Error: #{arg}."
end
$stderr.puts "Try --help for help."
# Hard process exit with failure status — callers never regain control.
exit(1)
end
private
## yield successive arg, parameter pairs
# Walks +args+, yielding (arg, params) for every option token and returning
# the array of tokens that were not consumed. The block must return the
# number of parameters it actually took so the cursor can be advanced.
def each_arg args
remains = []
i = 0
until i >= args.length
# A stop word terminates parsing; everything from here on is leftover.
if @stop_words.member? args[i]
remains += args[i .. -1]
return remains
end
case args[i]
when /^--$/ # arg terminator
remains += args[(i + 1) .. -1]
return remains
when /^--(\S+?)=(.*)$/ # long argument with equals
yield "--#{$1}", [$2]
i += 1
when /^--(\S+)$/ # long argument
params = collect_argument_parameters(args, i + 1)
unless params.empty?
num_params_taken = yield args[i], params
# A falsy return means the block took none of the collected params.
# NOTE(review): the subsequent `i += 1 + num_params_taken` assumes the
# block returns an Integer (as #parse's block does) — confirm no other
# caller yields nil here.
unless num_params_taken
if @stop_on_unknown
remains += args[i + 1 .. -1]
return remains
else
remains += params
end
end
i += 1 + num_params_taken
else # long argument no parameter
yield args[i], nil
i += 1
end
when /^-(\S+)$/ # one or more short arguments
# "-abc" is treated as "-a -b -c"; only the last short option in the
# cluster may consume parameters.
shortargs = $1.split(//)
shortargs.each_with_index do |a, j|
if j == (shortargs.length - 1)
params = collect_argument_parameters(args, i + 1)
unless params.empty?
num_params_taken = yield "-#{a}", params
unless num_params_taken
if @stop_on_unknown
remains += args[i + 1 .. -1]
return remains
else
remains += params
end
end
i += 1 + num_params_taken
else # argument no parameter
yield "-#{a}", nil
i += 1
end
else
yield "-#{a}", nil
end
end
else
# Plain word: either stop parsing entirely or record it as a leftover.
if @stop_on_unknown
remains += args[i .. -1]
return remains
else
remains << args[i]
i += 1
end
end
end
remains
end
# Parses +param+ as a decimal integer for option +arg+.
# Raises CommandlineError when the string is not entirely an integer.
def parse_integer_parameter param, arg
  # Fixed: \A..\z anchor the WHOLE string — the old /^\d+$/ matched
  # line-by-line, so values like "4\njunk" slipped through validation.
  # Also accept a leading minus: the tokenizer (PARAM_RE) deliberately
  # delivers negative numbers such as "-5" as parameters.
  raise CommandlineError, "option '#{arg}' needs an integer" unless param =~ /\A-?\d+\z/
  param.to_i
end
# Parses +param+ as a float for option +arg+; FLOAT_RE accepts optional
# sign, decimals and exponent notation.
def parse_float_parameter param, arg
raise CommandlineError, "option '#{arg}' needs a floating-point number" unless param =~ FLOAT_RE
param.to_f
end
# Resolves +param+ to a readable IO: $stdin for "stdin"/"-", otherwise a
# file or URL opened through open-uri.
def parse_io_parameter param, arg
case param
when /^(stdin|-)$/i;
$stdin
else
require 'open-uri'
begin
# SECURITY NOTE(review): Kernel#open executes a subprocess when the
# argument starts with "|" — dangerous if +param+ is attacker-controlled.
# Consider URI.open / File.open explicitly; confirm before changing.
open param
rescue SystemCallError => e
raise CommandlineError, "file or url for option '#{arg}' cannot be opened: #{e.message}"
end
end
end
## Gathers the contiguous run of tokens starting at +start_at+ that look
## like parameter values — stopping at the first option-like token
## (PARAM_RE), stop word, or end of input.
def collect_argument_parameters args, start_at
  params = []
  pos = start_at
  loop do
    token = args[pos]
    break if token.nil? || token =~ PARAM_RE || @stop_words.member?(token)
    params << token
    pos += 1
  end
  params
end
# Walks the options in declaration order and assigns each one without an
# explicit :short the first character of its long name that is neither
# invalid (digit/dash) nor already taken.
def resolve_default_short_options!
@order.each do |type, name|
next unless type == :opt
opts = @specs[name]
next if opts[:short]
c = opts[:long].split(//).find { |d| d !~ INVALID_SHORT_ARG_REGEX && !@short.member?(d) }
if c # found a character to use
opts[:short] = c
@short[c] = name
end
end
end
# Wraps a single line of text to :width columns, indenting continuation
# lines by :prefix spaces. Returns the array of wrapped lines.
def wrap_line str, opts={}
prefix = opts[:prefix] || 0
width = opts[:width] || (self.width - 1)
start = 0
ret = []
until start > str.length
nextt =
if start + width >= str.length
str.length
else
# Break at the last whitespace inside the window; if the only
# whitespace lies before the window, take the next one instead, and
# with no whitespace at all keep the remainder unbroken.
x = str.rindex(/\s/, start + width)
x = str.index(/\s/, start) if x && x < start
x || str.length
end
ret << (ret.empty? ? "" : " " * prefix) + str[start ... nextt]
start = nextt + 1
end
ret
end
## instance_eval but with ability to handle block arguments
## thanks to _why: http://redhanded.hobix.com/inspect/aBlockCostume.html
# Converts block +b+ into an UnboundMethod so it can later be bound to
# this parser and called with arguments (instance_eval cannot forward
# block parameters).  Works by temporarily defining the block as a
# method on the singleton class, capturing it, then removing it.
def cloaker &b
(
class << self;
self;
end).class_eval do
define_method :cloaker_, &b
meth = instance_method :cloaker_
remove_method :cloaker_
meth
end
end
end
## The easy, syntactic-sugary entry method into Trollop. Creates a Parser,
## passes the block to it, then parses +args+ with it, handling any errors or
## requests for help or version information appropriately (and then exiting).
## Modifies +args+ in place. Returns a hash of option values.
##
## The block passed in should contain zero or more calls to +opt+
## (Parser#opt), zero or more calls to +text+ (Parser#text), and
## probably a call to +version+ (Parser#version).
##
## The returned block contains a value for every option specified with
## +opt+. The value will be the value given on the commandline, or the
## default value if the option was not specified on the commandline. For
## every option specified on the commandline, a key "<option
## name>_given" will also be set in the hash.
##
## Example:
##
## require 'trollop'
## opts = Trollop::options do
## opt :monkey, "Use monkey mode" # a flag --monkey, defaulting to false
## opt :name, "Monkey name", :type => :string # a string --name <s>, defaulting to nil
## opt :num_limbs, "Number of limbs", :default => 4 # an integer --num-limbs <i>, defaulting to 4
## end
##
## ## if called with no arguments
## p opts # => {:monkey=>false, :name=>nil, :num_limbs=>4, :help=>false}
##
## ## if called with --monkey
## p opts # => {:monkey=>true, :name=>nil, :num_limbs=>4, :help=>false, :monkey_given=>true}
##
## See more examples at http://trollop.rubyforge.org.
# Entry point: builds a Parser from the given block, remembers it in
# @last_parser (used later by the module-level die), and parses +args+
# under the standard exception-handling wrapper.
def options args=ARGV, *a, &b
@last_parser = Parser.new(*a, &b)
with_standard_exception_handling(@last_parser) { @last_parser.parse args }
end
## If Trollop::options doesn't do quite what you want, you can create a Parser
## object and call Parser#parse on it. That method will throw CommandlineError,
## HelpNeeded and VersionNeeded exceptions when necessary; if you want to
## have these handled for you in the standard manner (e.g. show the help
## and then exit upon an HelpNeeded exception), call your code from within
## a block passed to this method.
##
## Note that this method will call System#exit after handling an exception!
##
## Usage example:
##
## require 'trollop'
## p = Trollop::Parser.new do
## opt :monkey, "Use monkey mode" # a flag --monkey, defaulting to false
## opt :goat, "Use goat mode", :default => true # a flag --goat, defaulting to true
## end
##
## opts = Trollop::with_standard_exception_handling p do
## o = p.parse ARGV
## raise Trollop::HelpNeeded if ARGV.empty? # show help screen
## o
## end
##
## Requires passing in the parser object.
## Runs the given block, translating Trollop's parse-time exceptions:
## CommandlineError is re-raised as a Convoy::UserError (instead of
## printing and exiting), HelpNeeded prints the help screen (via the
## registered formatter when one exists, #educate otherwise) and
## exits, and VersionNeeded prints the version string and exits.
def with_standard_exception_handling(parser)
  yield
rescue CommandlineError => e
  raise Convoy::UserError.new(e.message, e)
rescue HelpNeeded
  formatter = parser.current_help_formatter
  formatter ? formatter.print(parser) : parser.educate
  exit
rescue VersionNeeded
  puts parser.version
  exit
end
## Informs the user that their usage of 'arg' was wrong, as detailed by
## 'msg', and dies. Example:
##
## options do
## opt :volume, :default => 0.0
## end
##
## die :volume, "too loud" if opts[:volume] > 10.0
## die :volume, "too soft" if opts[:volume] < 0.1
##
## In the one-argument case, simply print that message, a notice
## about -h, and die. Example:
##
## options do
## opt :whatever # ...
## end
##
## Trollop::die "need at least one filename" if ARGV.empty?
## Module-level die: delegates to the parser created by the most
## recent Trollop::options call.  Refuses to run before options has
## been called, since there is no parser to report through.
def die arg, msg=nil
  raise ArgumentError, "Trollop::die can only be called after Trollop::options" unless @last_parser
  @last_parser.die arg, msg
end
module_function :options, :die, :with_standard_exception_handling
end # module
# Made it possible to use digits as short arguments.
## lib/trollop.rb -- trollop command-line processing library
## Author:: William Morgan (mailto: wmorgan-trollop@masanjin.net)
## Copyright:: Copyright 2007 William Morgan
## License:: the same terms as ruby itself
require 'date'
module Trollop
VERSION = "2.0"
## Thrown by Parser in the event of a commandline error. Not needed if
## you're using the Trollop::options entry.
class CommandlineError < StandardError; end

## Thrown by Parser if the user passes in '-h' or '--help'. Handled
## automatically by Trollop#options.
class HelpNeeded < StandardError; end

## Thrown by Parser if the user passes in '--version'. Handled
## automatically by Trollop#options.
class VersionNeeded < StandardError; end
## Regex for floating point numbers
FLOAT_RE = /^-?((\d+(\.\d+)?)|(\.\d+))([eE][-+]?[\d]+)?$/
## Regex for parameters
PARAM_RE = /^-(-|\.$|[^\d\.])/
## The commandline parser. In typical usage, the methods in this class
## will be handled internally by Trollop::options. In this case, only the
## #opt, #banner and #version, #depends, and #conflicts methods will
## typically be called.
##
## If you want to instantiate this class yourself (for more complicated
## argument-parsing logic), call #parse to actually produce the output hash,
## and consider calling it from within
## Trollop::with_standard_exception_handling.
class Parser
## The set of values that indicate a flag option when passed as the
## +:type+ parameter of #opt.
FLAG_TYPES = [:flag, :bool, :boolean]
## The set of values that indicate a single-parameter (normal) option when
## passed as the +:type+ parameter of #opt.
##
## A value of +io+ corresponds to a readable IO resource, including
## a filename, URI, or the strings 'stdin' or '-'.
SINGLE_ARG_TYPES = [:int, :integer, :string, :double, :float, :io, :date]
## The set of values that indicate a multiple-parameter option (i.e., that
## takes multiple space-separated values on the commandline) when passed as
## the +:type+ parameter of #opt.
MULTI_ARG_TYPES = [:ints, :integers, :strings, :doubles, :floats, :ios, :dates]
## The complete set of legal values for the +:type+ parameter of #opt.
TYPES = FLAG_TYPES + SINGLE_ARG_TYPES + MULTI_ARG_TYPES
INVALID_SHORT_ARG_REGEX = /[-]/ #:nodoc:
## The values from the commandline that were not interpreted by #parse.
attr_reader :leftovers
## The complete configuration hashes for each option. (Mainly useful
## for testing.)
attr_reader :specs
attr_reader :order
## Initializes the parser, and instance-evaluates any block given.
def initialize *a, &b
# version string; printed when --version is requested
@version = nil
# args not consumed by parsing (filled in by #parse)
@leftovers = []
# per-option configuration hashes, keyed by option name
@specs = {}
# long-name -> option-name lookup
@long = {}
# short-character -> option-name lookup
@short = {}
# interleaved [:opt, name] / [:text, str] entries, in declaration order
@order = []
# [:depends | :conflicts, syms] constraint tuples
@constraints = []
@stop_words = []
@stop_on_unknown = false
@help_formatter = nil
#instance_eval(&b) if b # can't take arguments
# bind the DSL block to this instance while still allowing block args
cloaker(&b).bind(self).call(*a) if b
end
## Define an option. +name+ is the option name, a unique identifier
## for the option that you will use internally, which should be a
## symbol or a string. +desc+ is a string description which will be
## displayed in help messages.
##
## Takes the following optional arguments:
##
## [+:long+] Specify the long form of the argument, i.e. the form with two dashes. If unspecified, will be automatically derived based on the argument name by turning the +name+ option into a string, and replacing any _'s by -'s.
## [+:short+] Specify the short form of the argument, i.e. the form with one dash. If unspecified, will be automatically derived from +name+.
## [+:type+] Require that the argument take a parameter or parameters of type +type+. For a single parameter, the value can be a member of +SINGLE_ARG_TYPES+, or a corresponding Ruby class (e.g. +Integer+ for +:int+). For multiple-argument parameters, the value can be any member of +MULTI_ARG_TYPES+ constant. If unset, the default argument type is +:flag+, meaning that the argument does not take a parameter. The specification of +:type+ is not necessary if a +:default+ is given.
## [+:default+] Set the default value for an argument. Without a default value, the hash returned by #parse (and thus Trollop::options) will have a +nil+ value for this key unless the argument is given on the commandline. The argument type is derived automatically from the class of the default value given, so specifying a +:type+ is not necessary if a +:default+ is given. (But see below for an important caveat when +:multi+: is specified too.) If the argument is a flag, and the default is set to +true+, then if it is specified on the the commandline the value will be +false+.
## [+:required+] If set to +true+, the argument must be provided on the commandline.
## [+:multi+] If set to +true+, allows multiple occurrences of the option on the commandline. Otherwise, only a single instance of the option is allowed. (Note that this is different from taking multiple parameters. See below.)
##
## Note that there are two types of argument multiplicity: an argument
## can take multiple values, e.g. "--arg 1 2 3". An argument can also
## be allowed to occur multiple times, e.g. "--arg 1 --arg 2".
##
## Arguments that take multiple values should have a +:type+ parameter
## drawn from +MULTI_ARG_TYPES+ (e.g. +:strings+), or a +:default:+
## value of an array of the correct type (e.g. [String]). The
## value of this argument will be an array of the parameters on the
## commandline.
##
## Arguments that can occur multiple times should be marked with
## +:multi+ => +true+. The value of this argument will also be an array.
## In contrast with regular non-multi options, if not specified on
## the commandline, the default value will be [], not nil.
##
## These two attributes can be combined (e.g. +:type+ => +:strings+,
## +:multi+ => +true+), in which case the value of the argument will be
## an array of arrays.
##
## There's one ambiguous case to be aware of: when +:multi+: is true and a
## +:default+ is set to an array (of something), it's ambiguous whether this
## is a multi-value argument as well as a multi-occurrence argument.
## In thise case, Trollop assumes that it's not a multi-value argument.
## If you want a multi-value, multi-occurrence argument with a default
## value, you must specify +:type+ as well.
## Declares a single option: validates and normalizes the spec hash
## (:type, :default, :long, :short, :multi), then registers it in
## @specs/@long/@short and records declaration order in @order.
## Raises ArgumentError on duplicate or malformed declarations.
## See the comment block above for the full user-facing contract.
def opt name, desc="", opts={}
raise ArgumentError, "you already have an argument named '#{name}'" if @specs.member? name
## fill in :type
opts[:type] = # normalize
case opts[:type]
when :boolean, :bool;
:flag
when :integer;
:int
when :integers;
:ints
when :double;
:float
when :doubles;
:floats
when Class
# Ruby classes are accepted as type designators and mapped to the
# corresponding symbolic type
case opts[:type].name
when 'TrueClass', 'FalseClass';
:flag
when 'String';
:string
when 'Integer';
:int
when 'Float';
:float
when 'IO';
:io
when 'Date';
:date
else
raise ArgumentError, "unsupported argument type '#{opts[:type].class.name}'"
end
when nil;
nil
else
raise ArgumentError, "unsupported argument type '#{opts[:type]}'" unless TYPES.include?(opts[:type])
opts[:type]
end
## for options with :multi => true, an array default doesn't imply
## a multi-valued argument. for that you have to specify a :type
## as well. (this is how we disambiguate an ambiguous situation;
## see the docs for Parser#opt for details.)
disambiguated_default = if opts[:multi] && opts[:default].is_a?(Array) && !opts[:type]
opts[:default].first
else
opts[:default]
end
# derive a type from the default's class when no explicit :type given
type_from_default =
case disambiguated_default
when Integer;
:int
when Numeric;
:float
when TrueClass, FalseClass;
:flag
when String;
:string
when IO;
:io
when Date;
:date
when Array
if opts[:default].empty?
raise ArgumentError, "multiple argument type cannot be deduced from an empty array for '#{opts[:default][0].class.name}'"
end
case opts[:default][0] # the first element determines the types
when Integer;
:ints
when Numeric;
:floats
when String;
:strings
when IO;
:ios
when Date;
:dates
else
raise ArgumentError, "unsupported multiple argument type '#{opts[:default][0].class.name}'"
end
when nil;
nil
else
raise ArgumentError, "unsupported argument type '#{opts[:default].class.name}'"
end
raise ArgumentError, ":type specification and default type don't match (default type is #{type_from_default})" if opts[:type] && type_from_default && opts[:type] != type_from_default
opts[:type] = opts[:type] || type_from_default || :flag
## fill in :long
opts[:long] = opts[:long] ? opts[:long].to_s : name.to_s.gsub("_", "-")
opts[:long] = case opts[:long]
when /^--([^-].*)$/;
$1
when /^[^-]/;
opts[:long]
else
; raise ArgumentError, "invalid long option name #{opts[:long].inspect}"
end
raise ArgumentError, "long option name #{opts[:long].inspect} is already taken; please specify a (different) :long" if @long[opts[:long]]
## fill in :short
opts[:short] = opts[:short].to_s if opts[:short] unless opts[:short] == :none
opts[:short] = case opts[:short]
when /^-(.)$/;
$1
when nil, :none, /^.$/;
opts[:short]
else
raise ArgumentError, "invalid short option name '#{opts[:short].inspect}'"
end
if opts[:short]
raise ArgumentError, "short option name #{opts[:short].inspect} is already taken; please specify a (different) :short" if @short[opts[:short]]
# note: INVALID_SHORT_ARG_REGEX only rejects '-' here, so digits ARE
# permitted as short names in this fork
raise ArgumentError, "a short option name can't be a number or a dash" if opts[:short] =~ INVALID_SHORT_ARG_REGEX
end
## fill in :default for flags
opts[:default] = false if opts[:type] == :flag && opts[:default].nil?
## autobox :default for :multi (multi-occurrence) arguments
opts[:default] = [opts[:default]] if opts[:default] && opts[:multi] && !opts[:default].is_a?(Array)
## fill in :multi
opts[:multi] ||= false
opts[:desc] ||= desc
@long[opts[:long]] = name
@short[opts[:short]] = name if opts[:short] && opts[:short] != :none
@specs[name] = opts
@order << [:opt, name]
end
## Sets the version string. If set, the user can request the version
## on the commandline. Should probably be of the form "<program name>
## <version number>".
## Combined getter/setter for the version string: assigns when +s+ is
## given, and always returns the current value.
def version s=nil
  @version = s if s
  @version
end
## Adds text to the help display. Can be interspersed with calls to
## #opt to build a multi-section help page.
## Appends free-form help text to the declaration ordering; it is
## interleaved with option entries when the help screen is rendered.
def banner s
  @order.push([:text, s])
end
alias text banner
## Registers a custom help formatter (anything responding to
## #print(parser)); when present it is used instead of #educate.
def help_formatter(fmt)
  @help_formatter = fmt
end

## The formatter registered via #help_formatter, or nil.
def current_help_formatter
  @help_formatter
end
## Marks two (or more!) options as requiring each other. Only handles
## undirected (i.e., mutual) dependencies. Directed dependencies are
## better modeled with Trollop::die.
## Marks two (or more!) options as mutually required.  Every named
## option must already have been declared with #opt.
def depends *syms
  syms.each do |sym|
    raise ArgumentError, "unknown option '#{sym}'" unless @specs[sym]
  end
  @constraints << [:depends, syms]
end

## Marks two (or more!) options as mutually exclusive.  Every named
## option must already have been declared with #opt.
def conflicts *syms
  syms.each do |sym|
    raise ArgumentError, "unknown option '#{sym}'" unless @specs[sym]
  end
  @constraints << [:conflicts, syms]
end
## Defines a set of words which cause parsing to terminate when
## encountered, such that any options to the left of the word are
## parsed as usual, and options to the right of the word are left
## intact.
##
## A typical use case would be for subcommand support, where these
## would be set to the list of subcommands. A subsequent Trollop
## invocation would then be used to parse subcommand options, after
## shifting the subcommand off of ARGV.
## Declares the words (e.g. subcommand names) at which parsing halts;
## the stop word and everything after it are left in the leftovers.
def stop_on *words
  @stop_words = words.flatten
end

## Similar to #stop_on, but stops on any unknown word when encountered
## (unless it is a parameter for an argument).  Useful when the set of
## subcommands is not known ahead of time.
def stop_on_unknown
  @stop_on_unknown = true
end
## Parses the commandline. Typically called by Trollop::options,
## but you can call it directly if you need more control.
##
## throws CommandlineError, HelpNeeded, and VersionNeeded exceptions.
## Parses +cmdline+ destructively: injects the automatic :version and
## :help options, seeds defaults, resolves short names, walks the
## arguments via #each_arg, enforces :depends/:conflicts/:required
## constraints, converts raw parameter strings to their declared
## types, and returns the vals hash (with openstruct-style accessors).
## Raises VersionNeeded / HelpNeeded / CommandlineError as appropriate.
def parse cmdline=ARGV
vals = {}
required = {}
opt :version, 'Prints version and exits' if @version unless @specs[:version] || @long['version']
opt :help, "\x1B[38;5;222mShows help message for current command\x1B[0m" unless @specs[:help] || @long['help']
@specs.each do |sym, opts|
required[sym] = true if opts[:required]
vals[sym] = opts[:default]
vals[sym] = [] if opts[:multi] && !opts[:default] # multi arguments default to [], not nil
end
resolve_default_short_options!
## resolve symbols
given_args = {}
@leftovers = each_arg cmdline do |arg, params|
## handle --no- forms
arg, negative_given = if arg =~ /^--no-([^-]\S*)$/
["--#{$1}", true]
else
[arg, false]
end
# NOTE(review): unlike stock trollop (commented-out raises below),
# this fork prints a colored error and exits instead of raising
# CommandlineError for syntax/unknown/duplicate argument errors.
sym = case arg
when /^-([^-])$/;
@short[$1]
when /^--([^-]\S*)$/;
@long[$1] || @long["no-#{$1}"]
else
# raise CommandlineError, "invalid argument syntax: '#{arg}'"
puts "\n \x1B[48;5;196m ERROR \x1B[0m \xe2\x80\x94 Invalid argument syntax: #{arg}\n\n"
exit
end
sym = nil if arg =~ /--no-/ # explicitly invalidate --no-no- arguments
# raise CommandlineError, "unknown argument '#{arg}'" unless sym
unless sym
puts "\n \x1B[48;5;196m ERROR \x1B[0m \xe2\x80\x94 Unknown flag: #{arg}\n\n"
exit
end
if given_args.include?(sym) && !@specs[sym][:multi]
# raise CommandlineError, "option '#{arg}' specified multiple times"
puts "\n \x1B[48;5;196m ERROR \x1B[0m \xe2\x80\x94 Flag specified multiple times: #{arg}\n\n"
exit
end
given_args[sym] ||= {}
given_args[sym][:arg] = arg
given_args[sym][:negative_given] = negative_given
given_args[sym][:params] ||= []
# The block returns the number of parameters taken.
num_params_taken = 0
unless params.nil?
if SINGLE_ARG_TYPES.include?(@specs[sym][:type])
given_args[sym][:params] << params[0, 1] # take the first parameter
num_params_taken = 1
elsif MULTI_ARG_TYPES.include?(@specs[sym][:type])
given_args[sym][:params] << params # take all the parameters
num_params_taken = params.size
end
end
num_params_taken
end
## check for version and help args
raise VersionNeeded if given_args.include? :version
raise HelpNeeded if given_args.include? :help
## check constraint satisfaction
@constraints.each do |type, syms|
constraint_sym = syms.find { |sym| given_args[sym] }
next unless constraint_sym
case type
when :depends
syms.each { |sym| raise CommandlineError, "--#{@specs[constraint_sym][:long]} requires --#{@specs[sym][:long]}" unless given_args.include? sym }
when :conflicts
syms.each { |sym| raise CommandlineError, "--#{@specs[constraint_sym][:long]} conflicts with --#{@specs[sym][:long]}" if given_args.include?(sym) && (sym != constraint_sym) }
end
end
required.each do |sym, val|
raise CommandlineError, "option --#{@specs[sym][:long]} must be specified" unless given_args.include? sym
end
## parse parameters
given_args.each do |sym, given_data|
arg, params, negative_given = given_data.values_at :arg, :params, :negative_given
opts = @specs[sym]
raise CommandlineError, "option '#{arg}' needs a parameter" if params.empty? && opts[:type] != :flag
vals["#{sym}_given".intern] = true # mark argument as specified on the commandline
# params is an array of parameter groups (one group per occurrence)
case opts[:type]
when :flag
vals[sym] = (sym.to_s =~ /^no_/ ? negative_given : !negative_given)
when :int, :ints
vals[sym] = params.map { |pg| pg.map { |p| parse_integer_parameter p, arg } }
when :float, :floats
vals[sym] = params.map { |pg| pg.map { |p| parse_float_parameter p, arg } }
when :string, :strings
vals[sym] = params.map { |pg| pg.map { |p| p.to_s } }
when :io, :ios
vals[sym] = params.map { |pg| pg.map { |p| parse_io_parameter p, arg } }
when :date, :dates
vals[sym] = params.map { |pg| pg.map { |p| parse_date_parameter p, arg } }
end
# collapse the nested array-of-arrays according to arity
if SINGLE_ARG_TYPES.include?(opts[:type])
unless opts[:multi] # single parameter
vals[sym] = vals[sym][0][0]
else # multiple options, each with a single parameter
vals[sym] = vals[sym].map { |p| p[0] }
end
elsif MULTI_ARG_TYPES.include?(opts[:type]) && !opts[:multi]
vals[sym] = vals[sym][0] # single option, with multiple parameters
end
# else: multiple options, with multiple parameters
end
## modify input in place with only those
## arguments we didn't process
cmdline.clear
@leftovers.each { |l| cmdline << l }
## allow openstruct-style accessors
# NOTE(review): method_missing without respond_to_missing? — so
# vals.respond_to?(:foo) stays false; also `self[m] || self[m.to_s]`
# makes a stored false fall through to the string key (usually nil).
class << vals
def method_missing(m, *args)
self[m] || self[m.to_s]
end
end
vals
end
## Parse +param+ into a Date for option +arg+.  Tries the Chronic
## natural-language parser when that gem is loaded, falling back to
## Date.parse; raises CommandlineError when neither understands it.
def parse_date_parameter param, arg #:nodoc:
  time =
    begin
      Chronic.parse(param)
    rescue NameError
      # chronic is not available
      nil
    end
  time ? Date.new(time.year, time.month, time.day) : Date.parse(param)
rescue ArgumentError
  raise CommandlineError, "option '#{arg}' needs a date"
end
## Print the help message to +stream+.
## Print the help message to +stream+: builds the left column
## ("--long, -s <type>") for every spec, sizes the columns, then emits
## interleaved banner text and wrapped option descriptions, appending
## each option's default value when one is set.
def educate stream=$stdout
width # hack: calculate it now; otherwise we have to be careful not to
# call this unless the cursor's at the beginning of a line.
left = {}
@specs.each do |name, spec|
# "--long" plus the --no- variant for defaulted flags, the short
# form when present, and a placeholder describing the value type
left[name] = "--#{spec[:long]}" +
(spec[:type] == :flag && spec[:default] ? ", --no-#{spec[:long]}" : "") +
(spec[:short] && spec[:short] != :none ? ", -#{spec[:short]}" : "") +
case spec[:type]
when :flag;
""
when :int;
" <i>"
when :ints;
" <i+>"
when :string;
" <s>"
when :strings;
" <s+>"
when :float;
" <f>"
when :floats;
" <f+>"
when :io;
" <filename/uri>"
when :ios;
" <filename/uri+>"
when :date;
" <date>"
when :dates;
" <date+>"
end
end
leftcol_width = left.values.map { |s| s.length }.max || 0
rightcol_start = leftcol_width + 6 # spaces
# suppress the default "Options:" header when the help page starts
# with custom banner text
unless @order.size > 0 && @order.first.first == :text
stream.puts "#@version\n" if @version
stream.puts "Options:"
end
@order.each do |what, opt|
if what == :text
stream.puts wrap(opt)
next
end
spec = @specs[opt]
stream.printf "  %#{leftcol_width}s:   ", left[opt]
desc = spec[:desc] + begin
default_s = case spec[:default]
when $stdout;
"<stdout>"
when $stdin;
"<stdin>"
when $stderr;
"<stderr>"
when Array
spec[:default].join(", ")
else
spec[:default].to_s
end
if spec[:default]
# capitalize "Default" when the description ends in a period
if spec[:desc] =~ /\.$/
" (Default: #{default_s})"
else
" (default: #{default_s})"
end
else
""
end
end
stream.puts wrap(desc, :width => width - rightcol_start - 1, :prefix => rightcol_start)
end
end
## Terminal width in columns, memoized in @width.  Queries curses when
## stdout is a tty; falls back to 80 columns when it is not, or when
## curses is unavailable or fails.
def width #:nodoc:
  @width ||= if $stdout.tty?
    begin
      require 'curses'
      Curses::init_screen
      x = Curses::cols
      Curses::close_screen
      x
    rescue LoadError, StandardError
      # curses missing or unusable; assume a standard 80-column
      # terminal.  (Was `rescue Exception`, which also swallowed
      # signals, SystemExit and NoMemoryError.)
      80
    end
  else
    80
  end
end
## Splits +str+ on newlines and word-wraps each line via #wrap_line,
## returning a flat array of output lines (the empty string yields a
## single blank line).
def wrap str, opts={} # :nodoc:
  return [""] if str == ""
  str.split("\n").flat_map { |line| wrap_line(line, opts) }
end
## The per-parser version of Trollop::die (see that for documentation).
## The per-parser version of Trollop::die (see that for documentation).
## Prints the error (option-specific when +msg+ is given, verbatim
## otherwise) plus a help hint to stderr, then exits with status 1.
def die arg, msg
  message = msg ? "Error: argument --#{@specs[arg][:long]} #{msg}." : "Error: #{arg}."
  $stderr.puts message
  $stderr.puts "Try --help for help."
  exit(1)
end
private
## yield successive arg, parameter pairs
## Walks +args+, yielding (option-string, params-array-or-nil) for
## each option-looking token and returning the array of unconsumed
## arguments.  Recognizes the "--" terminator, "--opt=val", plain long
## options, and clustered short options ("-xvf"); honors @stop_words
## and @stop_on_unknown.  The block must return the number of offered
## parameters it consumed, or nil/false for "unknown option".
def each_arg args
  remains = []
  i = 0
  until i >= args.length
    # a registered stop word ends processing; it and everything after
    # it are returned untouched
    if @stop_words.member? args[i]
      remains += args[i .. -1]
      return remains
    end
    case args[i]
    when /^--$/ # arg terminator
      remains += args[(i + 1) .. -1]
      return remains
    when /^--(\S+?)=(.*)$/ # long argument with equals
      yield "--#{$1}", [$2]
      i += 1
    when /^--(\S+)$/ # long argument
      params = collect_argument_parameters(args, i + 1)
      unless params.empty?
        num_params_taken = yield args[i], params
        unless num_params_taken
          if @stop_on_unknown
            remains += args[i + 1 .. -1]
            return remains
          else
            remains += params
          end
        end
        # advance past the option and whatever it consumed; when the
        # block declined (nil), the params were shunted into +remains+
        # above, so skip them too (previously `1 + nil` raised a
        # TypeError on this path)
        i += 1 + (num_params_taken || params.size)
      else # long argument no parameter
        yield args[i], nil
        i += 1
      end
    when /^-(\S+)$/ # one or more short arguments
      shortargs = $1.split(//)
      shortargs.each_with_index do |a, j|
        if j == (shortargs.length - 1)
          # only the final clustered short option may take parameters
          params = collect_argument_parameters(args, i + 1)
          unless params.empty?
            num_params_taken = yield "-#{a}", params
            unless num_params_taken
              if @stop_on_unknown
                remains += args[i + 1 .. -1]
                return remains
              else
                remains += params
              end
            end
            # same declined-params advance as the long-argument branch
            i += 1 + (num_params_taken || params.size)
          else # argument no parameter
            yield "-#{a}", nil
            i += 1
          end
        else
          yield "-#{a}", nil
        end
      end
    else
      # plain word: stop here or collect it as a leftover
      if @stop_on_unknown
        remains += args[i .. -1]
        return remains
      else
        remains << args[i]
        i += 1
      end
    end
  end
  remains
end
## Parse +param+ as an integer for option +arg+.
## Accepts an optional leading minus sign (e.g. "-5"), consistent with
## FLOAT_RE, which already permits negative floats; PARAM_RE does not
## treat "-5" as an option, so negative values do reach this method.
## Raises CommandlineError when +param+ is not a valid integer.
def parse_integer_parameter param, arg
  # \A/\z anchor the whole string; ^/$ are per-line anchors and would
  # accept strings containing embedded newlines.
  raise CommandlineError, "option '#{arg}' needs an integer" unless param =~ /\A-?\d+\z/
  param.to_i
end
## Convert +param+ to a Float for option +arg+; raises CommandlineError
## when the text does not match FLOAT_RE (optionally signed decimal,
## with optional exponent).
def parse_float_parameter param, arg
  if param =~ FLOAT_RE
    param.to_f
  else
    raise CommandlineError, "option '#{arg}' needs a floating-point number"
  end
end
## Resolve +param+ into a readable IO for option +arg+.
## Accepts "stdin" or "-" (case-insensitive) for $stdin, an
## http/https/ftp URL (opened via open-uri), or a local filename.
## Raises CommandlineError when the resource cannot be opened.
def parse_io_parameter param, arg
  case param
  when /^(stdin|-)$/i
    $stdin
  when %r{\A(?:https?|ftp)://}i
    # URI.open works on Ruby 3.x, where Kernel#open no longer
    # understands URLs.
    require 'open-uri'
    begin
      URI.open param
    rescue SystemCallError, OpenURI::HTTPError => e
      raise CommandlineError, "file or url for option '#{arg}' cannot be opened: #{e.message}"
    end
  else
    begin
      # File.open (not Kernel#open) so a param beginning with '|'
      # cannot spawn a subprocess.
      File.open param
    rescue SystemCallError => e
      raise CommandlineError, "file or url for option '#{arg}' cannot be opened: #{e.message}"
    end
  end
end
## Gather the parameters that follow an option, beginning at index
## +start_at+ in +args+.  Collection stops at the end of the list, at
## the next option-looking token (PARAM_RE), or at any registered stop
## word.  Returns the (possibly empty) array of collected tokens.
def collect_argument_parameters args, start_at
  collected = []
  args[start_at..-1].to_a.each do |tok|
    break if tok.nil? || tok =~ PARAM_RE || @stop_words.member?(tok)
    collected << tok
  end
  collected
end
## Assign an automatic one-character short form to every :opt entry
## that does not already have one.  The first character of the long
## name that is both legal (not matching INVALID_SHORT_ARG_REGEX) and
## still unused wins; an option may end up with no short form at all.
def resolve_default_short_options!
  @order.each do |kind, name|
    next unless kind == :opt
    spec = @specs[name]
    next if spec[:short]
    candidate = spec[:long].chars.find do |ch|
      ch !~ INVALID_SHORT_ARG_REGEX && !@short.member?(ch)
    end
    next unless candidate
    spec[:short] = candidate
    @short[candidate] = name
  end
end
## Greedily wrap a single line of text to +opts[:width]+ columns
## (defaults to terminal width - 1), breaking at whitespace when
## possible.  Continuation lines are preceded by +opts[:prefix]+
## spaces.  Returns an array of line fragments.
def wrap_line str, opts={}
prefix = opts[:prefix] || 0
width = opts[:width] || (self.width - 1)
start = 0
ret = []
until start > str.length
nextt =
if start + width >= str.length
# the remainder fits on one line
str.length
else
# break at the last whitespace within the width budget...
x = str.rindex(/\s/, start + width)
# ...or at the next whitespace after +start+ when rindex landed
# before the current fragment
x = str.index(/\s/, start) if x && x < start
# no whitespace at all: emit the rest unbroken
x || str.length
end
# the first fragment carries no prefix; continuations are indented
ret << (ret.empty? ? "" : " " * prefix) + str[start ... nextt]
start = nextt + 1
end
ret
end
## instance_eval but with ability to handle block arguments
## thanks to _why: http://redhanded.hobix.com/inspect/aBlockCostume.html
# Converts block +b+ into an UnboundMethod so it can later be bound to
# this parser and called with arguments (instance_eval cannot forward
# block parameters).  Works by temporarily defining the block as a
# method on the singleton class, capturing it, then removing it.
def cloaker &b
(
class << self;
self;
end).class_eval do
define_method :cloaker_, &b
meth = instance_method :cloaker_
remove_method :cloaker_
meth
end
end
end
## The easy, syntactic-sugary entry method into Trollop. Creates a Parser,
## passes the block to it, then parses +args+ with it, handling any errors or
## requests for help or version information appropriately (and then exiting).
## Modifies +args+ in place. Returns a hash of option values.
##
## The block passed in should contain zero or more calls to +opt+
## (Parser#opt), zero or more calls to +text+ (Parser#text), and
## probably a call to +version+ (Parser#version).
##
## The returned block contains a value for every option specified with
## +opt+. The value will be the value given on the commandline, or the
## default value if the option was not specified on the commandline. For
## every option specified on the commandline, a key "<option
## name>_given" will also be set in the hash.
##
## Example:
##
## require 'trollop'
## opts = Trollop::options do
## opt :monkey, "Use monkey mode" # a flag --monkey, defaulting to false
## opt :name, "Monkey name", :type => :string # a string --name <s>, defaulting to nil
## opt :num_limbs, "Number of limbs", :default => 4 # an integer --num-limbs <i>, defaulting to 4
## end
##
## ## if called with no arguments
## p opts # => {:monkey=>false, :name=>nil, :num_limbs=>4, :help=>false}
##
## ## if called with --monkey
## p opts # => {:monkey=>true, :name=>nil, :num_limbs=>4, :help=>false, :monkey_given=>true}
##
## See more examples at http://trollop.rubyforge.org.
# Entry point: builds a Parser from the given block, remembers it in
# @last_parser (used later by the module-level die), and parses +args+
# under the standard exception-handling wrapper.
def options args=ARGV, *a, &b
@last_parser = Parser.new(*a, &b)
with_standard_exception_handling(@last_parser) { @last_parser.parse args }
end
## If Trollop::options doesn't do quite what you want, you can create a Parser
## object and call Parser#parse on it. That method will throw CommandlineError,
## HelpNeeded and VersionNeeded exceptions when necessary; if you want to
## have these handled for you in the standard manner (e.g. show the help
## and then exit upon an HelpNeeded exception), call your code from within
## a block passed to this method.
##
## Note that this method will call System#exit after handling an exception!
##
## Usage example:
##
## require 'trollop'
## p = Trollop::Parser.new do
## opt :monkey, "Use monkey mode" # a flag --monkey, defaulting to false
## opt :goat, "Use goat mode", :default => true # a flag --goat, defaulting to true
## end
##
## opts = Trollop::with_standard_exception_handling p do
## o = p.parse ARGV
## raise Trollop::HelpNeeded if ARGV.empty? # show help screen
## o
## end
##
## Requires passing in the parser object.
## Runs the given block, translating Trollop's parse-time exceptions:
## CommandlineError is re-raised as a Convoy::UserError (instead of
## printing and exiting), HelpNeeded prints the help screen (via the
## registered formatter when one exists, #educate otherwise) and
## exits, and VersionNeeded prints the version string and exits.
def with_standard_exception_handling(parser)
  yield
rescue CommandlineError => e
  raise Convoy::UserError.new(e.message, e)
rescue HelpNeeded
  formatter = parser.current_help_formatter
  formatter ? formatter.print(parser) : parser.educate
  exit
rescue VersionNeeded
  puts parser.version
  exit
end
## Informs the user that their usage of 'arg' was wrong, as detailed by
## 'msg', and dies. Example:
##
## options do
## opt :volume, :default => 0.0
## end
##
## die :volume, "too loud" if opts[:volume] > 10.0
## die :volume, "too soft" if opts[:volume] < 0.1
##
## In the one-argument case, simply print that message, a notice
## about -h, and die. Example:
##
## options do
## opt :whatever # ...
## end
##
## Trollop::die "need at least one filename" if ARGV.empty?
## Module-level die: delegates to the parser created by the most
## recent Trollop::options call.  Refuses to run before options has
## been called, since there is no parser to report through.
def die arg, msg=nil
  raise ArgumentError, "Trollop::die can only be called after Trollop::options" unless @last_parser
  @last_parser.die arg, msg
end
module_function :options, :die, :with_standard_exception_handling
end # module
|
# Usage:
#   $ cat ~/.rvm/environments/(current ruby version) > ~/.bashrc-rvm
#   $ crontab -u (user) -e
#   SHELL=/bin/bash
#   BASH_ENV=/home/(user)/.bashrc-rvm
#   0 0 * * * /home/(user)/.rvm/rubies/(current ruby version)/bin/ruby /path/to/script/location.rb
#!/usr/bin/env ruby
require "./lib/SignalFlare.rb"

# CloudFlare credentials and the host whose DNS record should track this IP.
target_host = "test.parodybit.net"
cf_api_key  = "not_a_real_api_key"
cf_email    = "kirk@parodybit.net"

# Push the current public IP for the target host and echo the API response.
client = SignalFlare.new(cf_api_key, cf_email)
puts client.update_ip(target_host)
non-relative gem path
# Usage:
#   $ cat ~/.rvm/environments/(current ruby version) > ~/.bashrc-rvm
#   $ crontab -u (user) -e
#   SHELL=/bin/bash
#   BASH_ENV=/home/(user)/.bashrc-rvm
#   0 0 * * * /home/(user)/.rvm/rubies/(current ruby version)/bin/ruby /path/to/script/location.rb
#!/usr/bin/env ruby
require "SignalFlare"

# CloudFlare credentials and the host whose DNS record should track this IP.
target_host = "test.parodybit.net"
cf_api_key  = "not_a_real_api_key"
cf_email    = "kirk@parodybit.net"

# Push the current public IP for the target host and echo the API response.
client = SignalFlare.new(cf_api_key, cf_email)
puts client.update_ip(target_host)
|
more test code
# -*- coding: utf-8 -*-
# class A
# BAR = 1
# def self.bar
# puts BAR
# end
# end
# A.bar
# module A
# puts Object.constants
# end
# class A < Numeric
# def to_i; 1; end
# end
# puts A.new.to_int
# puts [1, 2, 3].map(&:to_s)
# puts [1.send(:to_s), 1.send('+', 2)]
# puts [1, 1, 2, '3'] - [1, '3']
# puts [1, '1'] - [1, '1']
# raise nil
# TypeError.new
# puts File.new("/tmp/nsemail.eml")
# a = "hi"
# puts a
# a.clear
# puts a
# DATA = "LOCAL"
# puts DATA.read()
# __END__
# blah blah blah, this is not parsed
# if 1
# then
# 1
# end
# $"
# $!
# @@class_var
# module M
# end
# class M::A
# MYCONST = "a"
# @@foo = "a"
# def initialize
# @foo = "a"
# end
# def get
# [@foo,
# @@foo,
# MYCONST]
# end
# end
# class M::B < M::A
# MYCONST = "b"
# @@foo = "b"
# def initialize
# @foo = "b"
# end
# end
# puts M::B.new.get
# puts M::A.new.get
# class A; end
# class B < A; @@foo = "b"; end
# class C < A; @@foo = "c"; end
# class B < A; puts @@foo; end
# class C < A; puts @@foo; end
# class A; @@foo = "a"; end
# class A; puts @@foo; end
# class B < A; puts @@foo; end
# class C < A; puts @@foo; end
# class B < A
# def get
# @@foo
# end
# end
# puts B.new.get
# class A
# def self.get
# @@foo
# end
# def get
# @@foo
# end
# @@foo = 'a'
# end
# class B < A
# def self.get
# @@foo
# end
# def get
# @@foo
# end
# end
# # puts [A.get, A.new.get, B.get]
# B.get
# class A
# @foo = 1
# puts @foo
# end
# class B
# puts @foo
# end
# class A
# puts @foo
# end
# puts 1
# class A; end
# class B < A
# @@foo = "B"
# def get; @@foo; end
# end
# class C < A
# @@foo = "C"
# def get; @@foo; end
# end
# puts [B.object_id, C.object_id]
# in_subclasses = [B.new.get, C.new.get]
# class A; @@foo = "A overrides all"; end
# puts in_subclasses + [B.new.get, C.new.get]
# a = 1
# class Fixnum
# def set
# @foo = "A"
# end
# def get
# @foo
# end
# end
# a.set
# # b = 1
# # puts b.get
# class A
# def self.get()
# puts 1
# end
# end
# class B < A; end
# # puts B.singleton_class.ancestors
# B.get
# Exception
# File
# puts Object.constants
# puts Exception.constants
# puts File.constants
# # Object::FOO
# # Object.const_missing :FOO
# class A
# def method_missing(name, *args, &block)
# puts name
# puts args
# puts block
# end
# end
# A.new.foo('blah', 2)
# OPT_TABLE = {}
# OPT_TABLE["a"] = "b"
# puts $LOADED_FEATURES
# require "test_req"
# puts $LOADED_FEATURES
# class A
# CONST = 5
# def self.m
# CONST
# end
# end
# puts A.m
# __LINE__ = 1
# puts __LINE__
# private
# def f(*a)
# puts "hi"
# end
# f "hello", "world"
# puts "\w"
# puts /\w/
# def f(h, *a)
# h
# end
# puts f "hello", "world"
# foo = "foo"
# a = <<EOF
# def #{foo}(*args)
# super(*fu_update_option(args, :verbose => true))
# end
# EOF
# puts a
# puts ENV
# puts Topaz::EnvironmentVariables
# class Foo
# def self.c
# @config ||= 'a'
# end
# end
# Foo.c
# puts [1,2,3].pop(2)
# puts "abc"[1]
# h = {}
# h[:option] = 1
# puts h[:option]
# puts "\012" == "\n"
# puts "\0" == "\x0"
# ?\012
# puts "\342\234\224" == "✔"
# def broken
# each {
# saved_env = 1
# each { saved_env }
# each { saved_env }
# }
# end
# if nil
# puts "shouldn't print"
# elsif nil
# puts "shouldn't print"
# else
# puts "should print"
# end
# self.each do end
# a = [1]
# a[1] = 2
# puts a[0, -1]
# [][0..-2]
# begin
# Process.exit(1)
# rescue SystemExit => e
# puts e.success?
# puts e.status
# end
# Process.exit(1)
# puts Dir.home
# exec(["/usr/bin/du", "du"], "-s")
# at_exit do
# end
# [65] * 3
# "now's the time".split(/ /)
# puts [65].pack("C")
# class A
# CONST = "a"
# def get
# CONST
# end
# end
# class B < A
# CONST = "b"
# end
# puts A.new.get == B.new.get # => should return true
# class X
# class Y
# end
# end
# class A < X
# module M; end
# include M
# puts Y.object_id
# class Y < X::Y
# puts self.object_id
# include M
# end
# puts Y.object_id
# end
# require 'pp'
# puts $>
# puts $stdout
# pp false
# pp true
# pp 1
# pp Kernel
# pp Symbol
# pp []
# pp (1..2)
# pp({"1" => "2"})
# Thread.current["a"]
# class Bar
# module M
# end
# end
# class Y
# FRIZZ = 1
# end
# class X < Y
# module M
# FOO = 5
# end
# class Foo < Bar
# def f
# M::FOO
# end
# end
# end
# class X::Foo
# def g
# M::FOO
# end
# end
# puts X::Foo.new.f
# puts X::Foo.new.g
# for i in []; end
# puts i
# i = 5
# puts i
# for i in [1] do
# bbb = "hello"
# puts bbb
# end
# puts bbb
# puts i
# j = 5
# puts j
# [1].each {|j| puts j }
# puts j
# i = 0
# for i, *rest, b in [1, 2, 3] do
# bbb = "hello"
# end
# puts i.inspect, rest.inspect, b.inspect, bbb.inspect
# puts ([1,2,3].each {|i, *b| puts i.inspect, b.inspect }).inspect
# class A
# def hello
# end
# undef_method :hello
# end
# A.new.hello
# a = []
# i = 0
# for a[i], i in [[1, 1], [2, 2]]; end
# puts a, i
# def foo(&block)
# for block, in [[1, 1], [2, 2]]; end
# block
# end
# puts foo
# foo = 1
# puts defined? foo
# a = 1
# puts defined?(foo.bar),
# defined?(a)
# puts defined?(Foo::FAS),
# defined?(Object)
# puts defined?($aaa),
# defined?($")
# puts defined?(a = 1)
# puts defined?(x[y]),
# defined?([]),
# defined?([][1])
# puts defined?({}),
# defined?({a: 2}[:a])
# puts defined?(for i in a; puts 1; end)
# puts defined?(Object.new(b))
# puts defined?(Object.new(1))
# class A
# def each
# [1, 2, 3]
# end
# end
# puts defined? i
# for i in A.new; end
# puts defined? i
# puts i
# for a in []; end
# puts defined?(a)
# a = 1
# puts defined?(a)
# for Const in []; end; puts Const
# 2 ** 'hallo'
# instance_eval("")
# puts `echo 1`
# a = "a"
# a.freeze
# a.taint
# puts 'hello'.gsub("he", "ha")
# puts 'hello'.gsub(/(.)/, "ha")
# puts 'hello'.gsub(/(.)/, "ha\\1ho")
# puts 'hello'.gsub(/(.)/) { |e| e + "1" }
# puts "hello\n".chomp
# load "fileutils"
# puts FileUtils
# puts $LOADED_FEATURES
# def []=(idx, value)
# @foo = [idx, value]
# end
# def [](idx)
# @foo
# end
# self[1] = 2
# self[1]
# puts @foo
# replacements = [1, 2]
# puts 'helloo'.gsub("l", Hash.new { |h, k| replacements.pop() })
# puts a
# NOTE(review): parser-experiment fixture, NOT valid standard Ruby --
# `constrain:(...)` needs a custom parser, and `foo`/`enable` are undefined
# here. Presumably input for the DSL/parser tests above; kept verbatim.
a = 1
b = 1
c = foo(a + b == 1)
c = constrain:(a + b == 1)
c2 = constrain:(a.area == 1)
c.enable()
|
# Test.rb
#
# Description: Implements replacive fuzzing by testing a set of URLs with
#              attack vectors and determines if a vulnerability occurred on
#              the target.
#
# Author: Peter Mikitsh pam3961
# Author: Akshay Karnawat
require 'mechanize'
require 'net/http'
require 'timeout'
# Replacive fuzzer: appends attack vectors to target URLs and reports
# unexpected response codes or timeouts.
class Test
  # urls:      string array of absolute URL paths to test
  # vectors:   string array of replacive vectors to append to urls
  # authAgent: optional 'Mechanize' agent (if authentication is used)
  # random:    forwarded fuzzing option (currently unused here)
  # timeout:   per-request timeout in seconds
  def self.test(urls, vectors, authAgent, random, timeout)
    puts "Testing Vectors..."
    # reuse the authenticated agent when provided, otherwise start a fresh one
    agent = authAgent ? authAgent : Mechanize.new
    urls.each do |url|
      vectors.each do |vector|
        Test.replaciveFuzz(url, vector, agent, timeout)
      end
    end
  end

  # Builds the attack URL by appending the vector to the base URL.
  def self.createAttackURL(url, vector)
    return url + vector;
  end

  # Fetches the attacked URL, reporting unexpected response codes and timeouts.
  def self.replaciveFuzz(url, vector, agent, timeout)
    begin
      puts "Testing #{vector} on #{url}"
      # Fix: honor the caller-supplied timeout; the original hard-coded 5
      # seconds and silently ignored the `timeout` parameter. Fall back to 5
      # when no timeout is given.
      Timeout.timeout(timeout || 5) { agent.get(Test.createAttackURL(url, vector)) }
    rescue Mechanize::ResponseCodeError => e
      puts "\t#{e.response_code} Unexcepted response code for url #{url} with vector #{vector}."
    rescue Timeout::Error
      puts "Timeout error for url #{url} with vector #{vector}."
    end
  end
end
print statement if response body contains vector
# Test.rb
#
# Description: Implements replacive fuzzing by testing a set of URLs with
#              attack vectors and determines if a vulnerability occurred on
#              the target.
#
# Author: Peter Mikitsh pam3961
# Author: Akshay Karnawat
require 'mechanize'
require 'net/http'
require 'timeout'
# Replacive fuzzer: appends attack vectors to target URLs and flags possible
# vulnerabilities (bad status, timeout, or vector echoed in the response body).
class Test
  # urls:      string array of absolute URL paths to test
  # vectors:   string array of replacive vectors to append to urls
  # authAgent: optional 'Mechanize' agent (if authentication is used)
  # random:    forwarded fuzzing option (currently unused here)
  # timeout:   per-request timeout in seconds
  def self.test(urls, vectors, authAgent, random, timeout)
    puts "Testing Vectors..."
    # reuse the authenticated agent when provided, otherwise start a fresh one
    agent = authAgent ? authAgent : Mechanize.new
    urls.each do |url|
      vectors.each do |vector|
        Test.replaciveFuzz(url, vector, agent, timeout)
      end
    end
  end

  # Builds the attack URL by appending the vector to the base URL.
  def self.createAttackURL(url, vector)
    return url + vector;
  end

  # Fetches the attacked URL and reports possible vulnerabilities.
  def self.replaciveFuzz(url, vector, agent, timeout)
    begin
      puts "Testing #{vector} on #{url}"
      # Fix 1: honor the caller-supplied timeout (the original hard-coded 5s).
      page = Timeout.timeout(timeout || 5) { agent.get(Test.createAttackURL(url, vector)) }
      # Fix 2: inspect the body of the page we actually fetched. The original
      # read `agent.body` even after a rescued failure, where no page (or a
      # stale one) was present.
      if page && page.body.include?(vector)
        puts "\t Possible vulnerability identified. The response body contains the attack vector. Vector: #{vector} Url: #{url}"
      end
    rescue Mechanize::ResponseCodeError => e
      puts "\t Possible vulnerability identified. #{e.response_code} Unexcepted response code for url #{url} with vector #{vector}."
    rescue Timeout::Error
      puts "\t Possible vulnerability identified. Timeout error for url #{url} with vector #{vector}."
    end
  end
end
|
# Admin screen for editing a petition's text details.
class Admin::PetitionDetailsController < Admin::AdminController
  respond_to :html

  before_action :fetch_petition

  # Renders the petition-details edit form.
  def show
  end

  # Updates the petition's details; re-renders the form when validation fails.
  def update
    if @petition.update_attributes(petition_params)
      flash[:notice] = 'Petition has been successfully updated'
      redirect_to [:admin, @petition]
    else
      render :show
    end
  end

  private

  def fetch_petition
    # Fix: the original line was missing the closing parenthesis (syntax error).
    @petition = Petition.find(params[:petition_id])
    #raise ActiveRecord::RecordNotFound unless @petition.in_todo_list?
  end

  def petition_params
    params.require(:petition).permit(:action, :background, :additional_details)
  end
end
Fix syntax error
# Admin screen for editing a petition's text details.
class Admin::PetitionDetailsController < Admin::AdminController
  respond_to :html

  before_action :fetch_petition

  # Renders the petition-details edit form.
  def show
  end

  # Persists edits to the petition's details; re-renders the form on failure.
  def update
    unless @petition.update_attributes(petition_params)
      render :show
      return
    end
    flash[:notice] = 'Petition has been successfully updated'
    redirect_to [:admin, @petition]
  end

  private

  # Loads the petition being edited from the nested route param.
  def fetch_petition
    @petition = Petition.find(params[:petition_id])
    #raise ActiveRecord::RecordNotFound unless @petition.in_todo_list?
  end

  # Strong-parameter whitelist for petition detail fields.
  def petition_params
    params.require(:petition).permit(:action, :background, :additional_details)
  end
end
|
# Mixin providing a per-session Redis namespace
# ("redis_session_store:<session id>:<suffix>").
module WithRedisSessionStore
  private

  # Returns a Redis::Namespace scoped to the current session and +suffix+;
  # yields it first when a block is given.
  def redis_session_store(suffix)
    raise 'session_id is not found' unless current_session_id
    redis = Redis::Namespace.new(['redis_session_store', current_session_id, suffix].join(':'))
    yield(redis) if block_given?
    redis
  end

  # Fix: Rails sessions are lazy -- a fresh session has no id until it is
  # written to, so the original `session.id` check raised for first-time
  # visitors. Force a write-load so an id is always assigned.
  # NOTE(review): relies on the private session-store API `load_for_write!`.
  def current_session_id
    session.send(:load_for_write!) unless session.id
    session.id
  end
end
Initialize session id
# Mixin providing a per-session Redis namespace
# ("redis_session_store:<session id>:<suffix>").
module WithRedisSessionStore
  private

  # Returns a Redis::Namespace scoped to the current session and +suffix+;
  # yields it first when a block is given. Raises if no session id can be
  # established.
  def redis_session_store(suffix)
    raise 'session_id is not found' unless current_session_id
    redis = Redis::Namespace.new(['redis_session_store', current_session_id, suffix].join(':'))
    yield(redis) if block_given?
    redis
  end

  # Rails sessions are lazily materialized: a fresh session has no id until it
  # is written to. Force a write-load so an id is always available.
  # NOTE(review): `load_for_write!` is a private session-store API -- confirm
  # it still exists when upgrading Rails.
  def current_session_id
    session.send(:load_for_write!) unless session.id
    session.id
  end
end
|
# Handles OTP (one-time password) challenges during Devise sign-in: shows the
# token form, validates submitted tokens, and refreshes trusted credentials.
class DeviseOtp::CredentialsController < DeviseController
  helper_method :new_session_path

  # NOTE(review): `prepend_before_filter` is the legacy (pre-Rails 5) callback
  # spelling -- presumably this app targets an older Rails; confirm before upgrading.
  prepend_before_filter :authenticate_scope!, :only => [:get_refresh, :set_refresh]
  prepend_before_filter :require_no_authentication, :only => [ :show, :update ]

  #
  # show a request for the OTP token
  #
  def show
    @challenge = params[:challenge]
    @recovery = (params[:recovery] == 'true') && recovery_enabled?

    if @challenge.nil?
      redirect_to :root
    else
      self.resource = resource_class.find_valid_otp_challenge(@challenge)
      if resource.nil?
        # unknown or expired challenge: leak nothing, bounce to the root page
        redirect_to :root
      elsif @recovery
        # recovery-code flow: the view shows how many recovery codes remain
        @recovery_count = resource.otp_recovery_counter
        render :show
      else
        render :show
      end
    end
  end

  #
  # signs the resource in, if the OTP token is valid and the user has a valid challenge
  #
  def update
    resource = resource_class.find_valid_otp_challenge(params[resource_name][:challenge])
    recovery = (params[resource_name][:recovery] == 'true') && recovery_enabled?
    token = params[resource_name][:token]

    if token.blank?
      # no token submitted: send the user back to the challenge form
      otp_set_flash_message(:alert, :token_blank)
      redirect_to otp_credential_path_for(resource_name, :challenge => params[resource_name][:challenge],
                                          :recovery => recovery)
    elsif resource.nil?
      # challenge expired or never existed: force a fresh sign-in
      otp_set_flash_message(:alert, :otp_session_invalid)
      redirect_to new_session_path(resource_name)
    else
      if resource.otp_challenge_valid? && resource.validate_otp_token(params[resource_name][:token], recovery)
        # token accepted: complete the Devise sign-in and rotate credentials
        set_flash_message(:success, :signed_in) if is_navigational_format?
        sign_in(resource_name, resource)
        otp_refresh_credentials_for(resource)
        respond_with resource, :location => after_sign_in_path_for(resource)
      else
        otp_set_flash_message :alert, :token_invalid
        redirect_to new_session_path(resource_name)
      end
    end
  end

  #
  # displays the request for a credentials refresh
  #
  def get_refresh
    ensure_resource!
    render :refresh
  end

  #
  # lets the user through if the refresh is valid
  #
  def set_refresh
    ensure_resource!

    # I am sure there's a much better way
    # Password re-check first; then an OTP token check only when OTP is enabled.
    if resource.valid_password?(params[resource_name][:refresh_password])
      if resource.otp_enabled?
        if resource.validate_otp_token(params[resource_name][:token])
          done_valid_refresh
        else
          failed_refresh
        end
      else
        done_valid_refresh
      end
    else
      failed_refresh
    end
  end

  private

  # Refresh succeeded: renew OTP credentials and return the user whence they came.
  def done_valid_refresh
    otp_refresh_credentials_for(resource)
    otp_set_flash_message :success, :valid_refresh if is_navigational_format?
    respond_with resource, :location => otp_fetch_refresh_return_url
  end

  # Refresh failed: flash an alert and re-render the refresh form.
  def failed_refresh
    otp_set_flash_message :alert, :invalid_refresh
    render :refresh
  end
end
Update credentials_controller.rb
# Handles OTP (one-time password) challenges during Devise sign-in: shows the
# token form, validates submitted tokens, and refreshes trusted credentials.
class DeviseOtp::CredentialsController < DeviseController
  helper_method :new_session_path

  # NOTE(review): `prepend_before_filter` is the legacy (pre-Rails 5) callback
  # spelling -- presumably this app targets an older Rails; confirm before upgrading.
  prepend_before_filter :authenticate_scope!, :only => [:get_refresh, :set_refresh]
  prepend_before_filter :require_no_authentication, :only => [ :show, :update ]

  #
  # show a request for the OTP token
  #
  def show
    @challenge = params[:challenge]
    @recovery = (params[:recovery] == 'true') && recovery_enabled?

    if @challenge.nil?
      redirect_to :root
    else
      self.resource = resource_class.find_valid_otp_challenge(@challenge)
      if resource.nil?
        # unknown or expired challenge: leak nothing, bounce to the root page
        redirect_to :root
      elsif @recovery
        # recovery-code flow: the view shows how many recovery codes remain
        @recovery_count = resource.otp_recovery_counter
        render :show
      else
        render :show
      end
    end
  end

  #
  # signs the resource in, if the OTP token is valid and the user has a valid challenge
  #
  def update
    resource = resource_class.find_valid_otp_challenge(params[resource_name][:challenge])
    recovery = (params[resource_name][:recovery] == 'true') && recovery_enabled?
    token = params[resource_name][:token]

    if token.blank?
      # no token submitted: send the user back to the challenge form
      otp_set_flash_message(:alert, :token_blank)
      redirect_to otp_credential_path_for(resource_name, :challenge => params[resource_name][:challenge],
                                          :recovery => recovery)
    elsif resource.nil?
      # challenge expired or never existed: force a fresh sign-in
      otp_set_flash_message(:alert, :otp_session_invalid)
      redirect_to new_session_path(resource_name)
    else
      if resource.otp_challenge_valid? && resource.validate_otp_token(params[resource_name][:token], recovery)
        # token accepted: complete the Devise sign-in and rotate credentials
        set_flash_message(:success, :signed_in) if is_navigational_format?
        sign_in(resource_name, resource)
        otp_refresh_credentials_for(resource)
        respond_with resource, :location => after_sign_in_path_for(resource)
      else
        otp_set_flash_message :alert, :token_invalid
        redirect_to new_session_path(resource_name)
      end
    end
  end

  #
  # displays the request for a credentials refresh
  #
  def get_refresh
    ensure_resource!
    render :refresh
  end

  #
  # lets the user through if the refresh is valid
  #
  def set_refresh
    ensure_resource!

    # I am sure there's a much better way
    # The password re-check is skipped when `otp_authentication_after_sign_in`
    # is set on the controller class -- presumably a devise-otp configuration
    # flag; confirm where it is declared. NOTE(review): the low-precedence `or`
    # works here only because this is a bare `if` condition; prefer `||`.
    if self.class.otp_authentication_after_sign_in or resource.valid_password?(params[resource_name][:refresh_password])
      if resource.otp_enabled?
        if resource.validate_otp_token(params[resource_name][:token])
          done_valid_refresh
        else
          failed_refresh
        end
      else
        done_valid_refresh
      end
    else
      failed_refresh
    end
  end

  private

  # Refresh succeeded: renew OTP credentials and return the user whence they came.
  def done_valid_refresh
    otp_refresh_credentials_for(resource)
    otp_set_flash_message :success, :valid_refresh if is_navigational_format?
    respond_with resource, :location => otp_fetch_refresh_return_url
  end

  # Refresh failed: flash an alert and re-render the refresh form.
  def failed_refresh
    otp_set_flash_message :alert, :invalid_refresh
    render :refresh
  end
end
|
# -*- encoding : utf-8 -*-
# Manages a fireman's availability slots within the current station.
class FiremanAvailabilitiesController < BackController
  before_action :load_fireman

  # Lists availabilities; JSON responses are filtered to the requested window.
  def index
    respond_to do |format|
      format.html
      format.json do
        start_date = DateTime.parse(params[:start])
        end_date = DateTime.parse(params[:end])
        @availabilities = @fireman.fireman_availabilities.where(:availability => start_date..end_date)
      end
    end
  end

  # Creates a single availability slot (JSON only).
  def create
    @fireman_availability = @station.fireman_availabilities.new(fireman_availability_params)

    respond_to do |format|
      if @fireman_availability.save
        format.json { render json: @fireman_availability }
      else
        format.json { render json: @fireman_availability.errors, status: :unprocessable_entity }
      end
    end
  end

  # Bulk-creates availabilities for the given date.
  def create_all
    FiremanAvailability::create_all(@station,
                                    @fireman.id,
                                    params[:fireman_availability][:availability])
    head :ok
  end

  # Deletes a single availability, mapping AR errors to HTTP statuses.
  def destroy
    respond_to do |format|
      format.json do
        begin
          @station.fireman_availabilities.destroy(params[:id])
          render :nothing => true, :status => :ok
        rescue ActiveRecord::RecordNotDestroyed
          render :nothing => true, :status => :unprocessable_entity
        rescue ActiveRecord::RecordNotFound
          render :nothing => true, :status => :not_found
        end
      end
    end
  end

  # Bulk-deletes availabilities for the given date.
  def destroy_all
    FiremanAvailability::destroy_all(@fireman.id,
                                     params[:fireman_availability][:availability])
    head :ok
  end

  private

  # Loads the fireman scoped to the current station; only active firemen may
  # manage availabilities. Fix: an unknown id previously let RecordNotFound
  # escape this before_action (500/404 page); now it redirects with an error.
  def load_fireman
    @fireman = @station.firemen.find(params[:fireman_id])
    if @fireman.status != Fireman::STATUS['Actif']
      flash[:error] = "La disponibilité n'est disponible que pour les hommes actifs."
      redirect_to(fireman_path(@fireman))
    end
  rescue ActiveRecord::RecordNotFound
    flash[:error] = "La personne n'existe pas."
    redirect_to(firemen_path)
  end

  # Strong parameters; the availability string is parsed to a DateTime first.
  def fireman_availability_params
    params[:fireman_availability][:availability] = DateTime.parse(params[:fireman_availability][:availability])
    params.require(:fireman_availability).permit(:availability, :fireman_id)
  end
end
Rescue missing fireman for availabilities.
# -*- encoding : utf-8 -*-
# Manages a fireman's availability slots within the current station.
class FiremanAvailabilitiesController < BackController
  before_action :load_fireman

  # Lists availabilities; JSON responses are filtered to the requested window
  # (params[:start]..params[:end], parsed as DateTimes).
  def index
    respond_to do |format|
      format.html
      format.json do
        start_date = DateTime.parse(params[:start])
        end_date = DateTime.parse(params[:end])
        @availabilities = @fireman.fireman_availabilities.where(:availability => start_date..end_date)
      end
    end
  end

  # Creates a single availability slot (JSON only).
  def create
    @fireman_availability = @station.fireman_availabilities.new(fireman_availability_params)

    respond_to do |format|
      if @fireman_availability.save
        format.json { render json: @fireman_availability }
      else
        format.json { render json: @fireman_availability.errors, status: :unprocessable_entity }
      end
    end
  end

  # Bulk-creates availabilities for the given date.
  def create_all
    FiremanAvailability::create_all(@station,
                                    @fireman.id,
                                    params[:fireman_availability][:availability])
    head :ok
  end

  # Deletes a single availability, mapping AR errors to HTTP statuses.
  def destroy
    respond_to do |format|
      format.json do
        begin
          @station.fireman_availabilities.destroy(params[:id])
          render :nothing => true, :status => :ok
        rescue ActiveRecord::RecordNotDestroyed
          render :nothing => true, :status => :unprocessable_entity
        rescue ActiveRecord::RecordNotFound
          render :nothing => true, :status => :not_found
        end
      end
    end
  end

  # Bulk-deletes availabilities for the given date.
  def destroy_all
    FiremanAvailability::destroy_all(@fireman.id,
                                     params[:fireman_availability][:availability])
    head :ok
  end

  private

  # Loads the fireman scoped to the current station; only active firemen may
  # manage availabilities. Unknown ids redirect back with an error instead of
  # letting RecordNotFound escape the before_action.
  def load_fireman
    @fireman = @station.firemen.find(params[:fireman_id])
    if @fireman.status != Fireman::STATUS['Actif']
      flash[:error] = "La disponibilité n'est disponible que pour les hommes actifs."
      redirect_to(fireman_path(@fireman))
    end
  rescue ActiveRecord::RecordNotFound
    flash[:error] = "La personne n'existe pas."
    redirect_to(firemen_path)
  end

  # Strong parameters; the availability string is parsed to a DateTime first.
  def fireman_availability_params
    params[:fireman_availability][:availability] = DateTime.parse(params[:fireman_availability][:availability])
    params.require(:fireman_availability).permit(:availability, :fireman_id)
  end
end
|
require_dependency "application_controller"
# Runtime (student-facing) and authoring actions for Lightweight Activities.
class LightweightActivitiesController < ApplicationController
  # TODO: We use "run key", "session key" and "response key" for the same bit of data here. Refactor to fix.
  before_filter :set_activity, :except => [:index, :new, :create]
  before_filter :set_run_key, :only => [:summary, :show, :preview, :resubmit_answers, :single_page]
  before_filter :set_sequence, :only => [:summary, :show, :single_page]
  before_filter :enable_js_logger, :only => [:summary, :show, :preview, :single_page]

  layout :set_layout

  # Paginated community/official listings, filtered by the current user's filter.
  def index
    @filter = CollectionFilter.new(current_user, LightweightActivity, params[:filter] || {})
    @community_activities = @filter.collection.includes(:user,:changed_by,:portal_publications).community
    @official_activities = @filter.collection.includes(:user,:changed_by,:portal_publications).official
    @community_activities = @community_activities.paginate(:page => params['community_page_number'], :per_page => 10)
    @official_activities = @official_activities.paginate(:page => params['official_page_number'], :per_page => 10)
  end

  # These are the runtime (student-facing) actions, show and summary

  def show # show index
    authorize! :read, @activity
    if params[:print]
      redirect_to activity_single_page_with_response_path(@activity, @run.key, request.query_parameters) and return
    end
    if params[:response_key]
      # canonicalize: the run key lives in @run, not in the URL
      redirect_to sequence_activity_path(@run.sequence, @activity, request.query_parameters) and return if @run.sequence
      redirect_to activity_path(@activity, request.query_parameters) and return
    end
    @run.increment_run_count!
    if @activity.layout == LightweightActivity::LAYOUT_SINGLE_PAGE
      redirect_to activity_single_page_with_response_path(@activity, @run.key) and return
    end
    if @run.last_page && !@run.last_page.is_hidden && !params[:show_index]
      # TODO: If the Page isn't in this activity... Then we need to log that as an error,
      # and do the best we can to get back to the right page...
      if @activity != @run.last_page.lightweight_activity
        Rails.logger.error("Page has wrong activity or vice versa")
        Rails.logger.error("Page: #{@run.last_page.id} wrong activity: #{@activity.id} right activity: #{@run.last_page.lightweight_activity.id}")
        @activity = @run.last_page.lightweight_activity
      end
      redirect_to page_with_response_path(@activity.id, @run.last_page.id, @run.key) and return
    end
    setup_show
  end

  def preview
    # This is "show" but it clears answers first
    authorize! :update, @activity # Authors only
    @run.clear_answers
    if @activity.layout == LightweightActivity::LAYOUT_SINGLE_PAGE
      setup_single_page_show
      @labbook_is_under_interactive = true
      render :single_page, :locals => {:print => params[:print]}
    else
      setup_show
      render :show
    end
  end

  def single_page
    authorize! :read, @activity
    if !params[:response_key]
      redirect_to activity_single_page_with_response_path(@activity, @session_key) and return
    end
    setup_single_page_show
    # the authorization needs to be after the setup method so that at least the @theme instance variable
    # is set, so the theme of the unauthorized_run page remains the same.
    begin
      authorize! :access, @run
    rescue
      user_id_mismatch()
      render 'runs/unauthorized_run'
      return
    end
    @labbook_is_under_interactive = true
  end

  # Printable blank version of the activity (no run data).
  def print_blank
    authorize! :read, @activity
    @run = Run.new()
    setup_show
  end

  def summary
    authorize! :read, @activity
    current_theme
    current_project
    if !params[:response_key]
      redirect_to summary_with_response_path(@activity, @session_key) and return
    end
    @answers = @activity.answers(@run)
  end

  # The remaining actions are all authoring actions.

  def new
    @activity = LightweightActivity.new()
    authorize! :create, @activity
  end

  def create
    @activity = LightweightActivity.new(params[:lightweight_activity])
    authorize! :create, @activity
    @activity.user = current_user
    @activity.changed_by = current_user
    if @activity.save
      flash[:notice] = "Lightweight Activity #{@activity.name} was created."
      redirect_to edit_activity_path(@activity)
    else
      flash[:warning] = "There was a problem creating the new Lightweight Activity."
      render :new
    end
  end

  def edit
    authorize! :update, @activity
    @editor_mode = @activity.editor_mode
    # admins may override the editor mode per-request
    if params[:mode] && current_user.admin?
      @editor_mode = case params[:mode]
                     when "itsi" then LightweightActivity::ITSI_EDITOR_MODE
                     else LightweightActivity::STANDARD_EDITOR_MODE
                     end
    end
    # Data assigned to `gon` variable will be available for JavaScript code in `window.gon` object.
    # this is used in both the itsi editor and in the standard editor to show the published activity
    gon.ITSIEditor = ITSIAuthoring::Editor.new(@activity).to_json
    if @editor_mode == LightweightActivity::ITSI_EDITOR_MODE
      render :itsi_edit
    else
      render :edit
    end
  end

  def update
    authorize! :update, @activity
    update_activity_changed_by
    respond_to do |format|
      if @activity.update_attributes(params[:lightweight_activity])
        format.json { render json: @activity }
        format.html {
          flash[:notice] = "Activity #{@activity.name} was updated."
          redirect_to edit_activity_path(@activity)
        }
      else
        format.json { render json: @activity.errors, :status => :unprocessable_entity }
        format.html {
          # I'd like to use the activity name here, but what if that's what's the invalid update?
          flash[:warning] = "There was a problem updating your activity."
          redirect_to edit_activity_path(@activity)
        }
      end
    end
  end

  def destroy
    authorize! :destroy, @activity
    # NOTE(review): `delete` skips AR callbacks and dependent associations --
    # confirm `destroy` isn't wanted here before changing behavior.
    if @activity.delete
      flash[:notice] = "Activity #{@activity.name} was deleted."
      redirect_to activities_path
    else
      flash[:warning] = "There was a problem deleting activity #{@activity.name}."
      redirect_to edit_activity_path(@activity)
    end
  end

  # FIXME this should really be something other than a GET since it has side effects
  def duplicate
    authorize! :duplicate, @activity
    @new_activity = @activity.duplicate(current_user)
    unless @new_activity.valid?
      flash[:warning] = "<p>The duplicated activity had validation issues:</p> #{@new_activity.errors} <p>Work carefully with the new activity.</p>"
    end
    # Fix: ActiveRecord's option is `:validate => false`; the original passed
    # `:validations => false`, which AR ignores, so invalid copies still ran
    # validations and failed to save.
    if @new_activity.save(:validate => false) # In case the old activity was invalid
      # check if we should publish this new activity somewhere
      json_response = nil
      if params['add_to_portal']
        req_url = "#{request.protocol}#{request.host_with_port}"
        # this might take a little time so it might be better do this in the background
        response = @new_activity.portal_publish(current_user,params['add_to_portal'],req_url)
        begin
          json_response = JSON.parse(response.body)
        rescue
          # best effort: a malformed portal response just skips the redirect below
        end
      end
      if params['redirect_on_success'] && json_response && json_response.has_key?("activity_id")
        # Fix: use non-bang `sub` -- `sub!` returns nil when the placeholder is
        # absent, which made this redirect_to(nil).
        redirect_to params['redirect_on_success'].sub(':activity_id', json_response['activity_id'].to_s)
      else
        redirect_to edit_activity_path(@new_activity)
      end
    else
      flash[:warning] = "Copy failed"
      redirect_to activities_path
    end
  end

  def show_status
    @message = params[:message] || ''
    respond_to do |format|
      format.js { render :json => { :html => render_to_string('export')}, :content_type => 'text/json' }
      format.html
    end
  end

  def export
    authorize! :export, @activity
    lightweight_activity_json = @activity.export.to_json
    send_data lightweight_activity_json, type: :json, disposition: "attachment", filename: "#{@activity.name}_version_1.json"
  end

  def move_up
    authorize! :update, @activity
    @page = @activity.pages.find(params[:id])
    @page.move_higher
    update_activity_changed_by
    redirect_to :back
  end

  def move_down
    authorize! :update, @activity
    @page = @activity.pages.find(params[:id])
    @page.move_lower
    update_activity_changed_by
    redirect_to :back
  end

  def reorder_pages
    authorize! :update, @activity
    params[:item_interactive_page].each do |p|
      # Format: item_interactive_page[]=1&item_interactive_page[]=3&item_interactive_page[]=11&item_interactive_page[]=12&item_interactive_page[]=13&item_interactive_page[]=21&item_interactive_page[]=20&item_interactive_page[]=2
      page = @activity.pages.find(p)
      # If we move everything to the bottom in order, the first one should be at the top
      page.move_to_bottom
    end
    update_activity_changed_by
    # Respond with 200
    if request.xhr?
      respond_with_nothing
    else
      redirect_to edit_activity_path(@activity)
    end
  end

  def resubmit_answers
    authorize! :manage, :all
    if !params[:response_key]
      # If we don't know the run, we can't do this.
      redirect_to summary_with_response_path(@activity, @session_key) and return
    end
    answers = @activity.answers(@run)
    answers.each { |a| a.mark_dirty }
    # Kick off a resubmit. Fix: guard against an empty answer set, which
    # previously raised NoMethodError on `nil.send_to_portal`.
    answers.last.send_to_portal if answers.last
    flash[:notice] = "#{answers.length} #{'answer'.pluralize(answers.length)} requeued for submission."
    redirect_to :back
  end

  private

  # Resolves @activity from the request, preferring the nested-route
  # :activity_id over :id, and scoping the lookup to the sequence when given.
  def set_activity
    id = params[:activity_id] || params[:id]
    if params[:sequence_id]
      sequence = Sequence.find(params[:sequence_id])
      @activity = sequence.activities.find(id)
    else
      @activity = LightweightActivity.find(id)
    end
  end

  # Picks the layout per action: runtime layouts for student-facing pages,
  # the application layout for authoring.
  def set_layout
    case params[:action]
    when 'show'
      return 'runtime'
    when 'preview'
      return 'runtime'
    when 'print_blank'
      return 'print_blank'
    when 'single_page'
      return 'runtime'
    when 'summary'
      return 'summary'
    else
      return 'application'
    end
  end

  # Shared setup for the runtime views.
  def setup_show
    current_theme
    current_project
    @pages = @activity.pages
  end

  def setup_single_page_show
    setup_show
    setup_global_interactive_state_data
  end
end
make default id lookup more clear
require_dependency "application_controller"
class LightweightActivitiesController < ApplicationController
# TODO: We use "run key", "session key" and "response key" for the same bit of data here. Refactor to fix.
before_filter :set_activity, :except => [:index, :new, :create]
before_filter :set_run_key, :only => [:summary, :show, :preview, :resubmit_answers, :single_page]
before_filter :set_sequence, :only => [:summary, :show, :single_page]
before_filter :enable_js_logger, :only => [:summary, :show, :preview, :single_page]
layout :set_layout
def index
@filter = CollectionFilter.new(current_user, LightweightActivity, params[:filter] || {})
@community_activities = @filter.collection.includes(:user,:changed_by,:portal_publications).community
@official_activities = @filter.collection.includes(:user,:changed_by,:portal_publications).official
@community_activities = @community_activities.paginate(:page => params['community_page_number'], :per_page => 10)
@official_activities = @official_activities.paginate(:page => params['official_page_number'], :per_page => 10)
end
# These are the runtime (student-facing) actions, show and summary
def show # show index
authorize! :read, @activity
if params[:print]
redirect_to activity_single_page_with_response_path(@activity, @run.key, request.query_parameters) and return
end
if params[:response_key]
redirect_to sequence_activity_path(@run.sequence, @activity, request.query_parameters) and return if @run.sequence
redirect_to activity_path(@activity, request.query_parameters) and return
end
@run.increment_run_count!
if @activity.layout == LightweightActivity::LAYOUT_SINGLE_PAGE
redirect_to activity_single_page_with_response_path(@activity, @run.key) and return
end
if @run.last_page && !@run.last_page.is_hidden && !params[:show_index]
# TODO: If the Page isn't in this activity... Then we need to log that as an error,
# and do the best we can to get back to the right page...
if @activity != @run.last_page.lightweight_activity
Rails.logger.error("Page has wrong activity or vice versa")
Rails.logger.error("Page: #{@run.last_page.id} wrong activity: #{@activity.id} right activity: #{@run.last_page.lightweight_activity.id}")
@activity = @run.last_page.lightweight_activity
end
redirect_to page_with_response_path(@activity.id, @run.last_page.id, @run.key) and return
end
setup_show
end
def preview
# This is "show" but it clears answers first
authorize! :update, @activity # Authors only
@run.clear_answers
if @activity.layout == LightweightActivity::LAYOUT_SINGLE_PAGE
setup_single_page_show
@labbook_is_under_interactive = true
render :single_page, :locals => {:print => params[:print]}
else
setup_show
render :show
end
end
# Single-page runtime view (all pages' content rendered at once).
# Redirects to a URL carrying the session key when no response key is given,
# so answers attach to a run.
def single_page
  authorize! :read, @activity
  if !params[:response_key]
    redirect_to activity_single_page_with_response_path(@activity, @session_key) and return
  end
  setup_single_page_show
  # the authorization needs to be after the setup method so that at least the @theme instance variable
  # is set, so the theme of the unauthorized_run page remains the same.
  begin
    authorize! :access, @run
  rescue CanCan::AccessDenied
    # Was a bare `rescue`, which also swallowed unrelated StandardErrors
    # raised by authorize! and misreported them as authorization failures.
    user_id_mismatch()
    render 'runs/unauthorized_run'
    return
  end
  @labbook_is_under_interactive = true
end
# Printable blank copy of the activity: a fresh, unsaved Run means no
# answers are rendered.
def print_blank
  authorize! :read, @activity
  @run = Run.new()
  setup_show
end
# Summary of a student's answers for this run.
def summary
  authorize! :read, @activity
  current_theme
  current_project
  # Without a response key we can't identify the run; redirect to a URL
  # that carries the session's key.
  if !params[:response_key]
    redirect_to summary_with_response_path(@activity, @session_key) and return
  end
  @answers = @activity.answers(@run)
end
# The remaining actions are all authoring actions.
# Author-facing: builds a blank activity for the new-activity form.
def new
  @activity = LightweightActivity.new
  authorize! :create, @activity
end
# Creates an activity owned (and marked as last changed) by the current user.
def create
  @activity = LightweightActivity.new(params[:lightweight_activity])
  authorize! :create, @activity
  @activity.user = current_user
  @activity.changed_by = current_user
  if @activity.save
    flash[:notice] = "Lightweight Activity #{@activity.name} was created."
    redirect_to edit_activity_path(@activity)
  else
    flash[:warning] = "There was a problem creating the new Lightweight Activity."
    render :new
  end
end
# Author-facing edit form. Admins may override the activity's editor mode
# via ?mode=itsi; anything else falls back to the standard editor.
def edit
  authorize! :update, @activity
  @editor_mode = @activity.editor_mode
  if params[:mode] && current_user.admin?
    @editor_mode = case params[:mode]
      when "itsi" then LightweightActivity::ITSI_EDITOR_MODE
      else LightweightActivity::STANDARD_EDITOR_MODE
    end
  end
  # Data assigned to `gon` variable will be available for JavaScript code in `window.gon` object.
  # this is used in both the itsi editor and in the standard editor to show the published activity
  gon.ITSIEditor = ITSIAuthoring::Editor.new(@activity).to_json
  if @editor_mode == LightweightActivity::ITSI_EDITOR_MODE
    render :itsi_edit
  else
    render :edit
  end
end
# Updates the activity. JSON responses serve inline editors; HTML responses
# redirect back to the edit form with a flash either way.
def update
  authorize! :update, @activity
  update_activity_changed_by
  respond_to do |format|
    # NOTE(review): update_attributes is the pre-Rails-5 API; rename to
    # `update` if the app moves to a Rails version that drops it.
    if @activity.update_attributes(params[:lightweight_activity])
      format.json { render json: @activity }
      format.html {
        flash[:notice] = "Activity #{@activity.name} was updated."
        redirect_to edit_activity_path(@activity)
      }
    else
      format.json { render json: @activity.errors, :status => :unprocessable_entity }
      format.html {
        # I'd like to use the activity name here, but what if that's what's the invalid update?
        flash[:warning] = "There was a problem updating your activity."
        redirect_to edit_activity_path(@activity)
      }
    end
  end
end
# Deletes the activity.
def destroy
  authorize! :destroy, @activity
  # `destroy` (not `delete`) so ActiveRecord callbacks and :dependent
  # cleanup on associations run. `delete` also always returned a truthy
  # value, which made the failure branch below unreachable.
  if @activity.destroy
    flash[:notice] = "Activity #{@activity.name} was deleted."
    redirect_to activities_path
  else
    flash[:warning] = "There was a problem deleting activity #{@activity.name}."
    redirect_to edit_activity_path(@activity)
  end
end
# FIXME this should really be something other than a GET since it has side effects
# Copies the activity for the current user, optionally publishing the copy
# to a portal and redirecting to a caller-supplied URL template.
def duplicate
  authorize! :duplicate, @activity
  @new_activity = @activity.duplicate(current_user)
  unless @new_activity.valid?
    flash[:warning] = "<p>The duplicated activity had validation issues:</p> #{@new_activity.errors} <p>Work carefully with the new activity.</p>"
  end
  if @new_activity.save(:validations => false) # In case the old activity was invalid
    # check if we should publish this new activity somewhere
    json_response = nil
    if params['add_to_portal']
      req_url = "#{request.protocol}#{request.host_with_port}"
      # this might take a little time so it might be better do this in the background
      response = @new_activity.portal_publish(current_user, params['add_to_portal'], req_url)
      begin
        json_response = JSON.parse(response.body)
      rescue JSON::ParserError
        # Publishing is best effort: an unparsable body only means we can't
        # redirect to the portal copy below. (Was a bare rescue.)
      end
    end
    if params['redirect_on_success'] && json_response && json_response.has_key?("activity_id")
      # `sub` (not `sub!`): sub! returns nil when ':activity_id' is absent
      # from the template, which redirected to nil and raised.
      redirect_to params['redirect_on_success'].sub(':activity_id', json_response['activity_id'].to_s)
    else
      redirect_to edit_activity_path(@new_activity)
    end
  else
    flash[:warning] = "Copy failed"
    redirect_to activities_path
  end
end
# Renders a status message; JS requests get the rendered 'export' template
# wrapped in JSON, HTML requests get the default template.
def show_status
  @message = params[:message] || ''
  respond_to do |format|
    format.js { render :json => { :html => render_to_string('export')}, :content_type => 'text/json' }
    format.html
  end
end
# Downloads the activity's export JSON as a file attachment.
def export
  authorize! :export, @activity
  lightweight_activity_json = @activity.export.to_json
  send_data lightweight_activity_json, type: :json, disposition: "attachment", filename: "#{@activity.name}_version_1.json"
end
# Moves a page one position higher in the activity's page order, then
# returns to the referring page.
def move_up
  authorize! :update, @activity
  @page = @activity.pages.find(params[:id])
  @page.move_higher
  update_activity_changed_by
  redirect_to :back
end
# Moves a page one position lower in the activity's page order, then
# returns to the referring page.
def move_down
  authorize! :update, @activity
  @page = @activity.pages.find(params[:id])
  @page.move_lower
  update_activity_changed_by
  redirect_to :back
end
# Reorders the activity's pages to match the id order in
# params[:item_interactive_page].
# Format: item_interactive_page[]=1&item_interactive_page[]=3&item_interactive_page[]=11&item_interactive_page[]=12&item_interactive_page[]=13&item_interactive_page[]=21&item_interactive_page[]=20&item_interactive_page[]=2
def reorder_pages
  authorize! :update, @activity
  params[:item_interactive_page].each do |p|
    page = @activity.pages.find(p)
    # If we move everything to the bottom in order, the first one should be at the top
    page.move_to_bottom
  end
  update_activity_changed_by
  # Respond with 200
  if request.xhr?
    respond_with_nothing
  else
    redirect_to edit_activity_path(@activity)
  end
end
# Admin-only: marks every answer in this run dirty and requeues submission
# to the portal.
def resubmit_answers
  authorize! :manage, :all
  if !params[:response_key]
    # If we don't know the run, we can't do this.
    redirect_to summary_with_response_path(@activity, @session_key) and return
  end
  answers = @activity.answers(@run)
  answers.each { |a| a.mark_dirty }
  # Kick off a resubmit. Guard against a run with no answers, where
  # `answers.last` is nil and the old code raised NoMethodError.
  answers.last.send_to_portal if answers.last
  flash[:notice] = "#{answers.length} #{'answer'.pluralize(answers.length)} requeued for submission."
  redirect_to :back
end
private
# Loads @activity from :activity_id (nested routes) or :id, scoping the
# lookup to the owning sequence when :sequence_id is present.
def set_activity
  id = params[:activity_id] || params[:id]
  @activity =
    if params[:sequence_id]
      Sequence.find(params[:sequence_id]).activities.find(id)
    else
      LightweightActivity.find(id)
    end
end
# Picks the Rails layout name for the current action; authoring actions
# fall through to the default 'application' layout.
def set_layout
  case params[:action]
  when 'show', 'preview', 'single_page' then 'runtime'
  when 'print_blank'                    then 'print_blank'
  when 'summary'                        then 'summary'
  else 'application'
  end
end
# Shared setup for runtime views: theme, project, and the activity's pages.
def setup_show
  current_theme
  current_project
  @pages = @activity.pages
end
# setup_show plus the global interactive state the single-page view needs.
def setup_single_page_show
  setup_show
  setup_global_interactive_state_data
end
end
|
require 'json'
# JSON API for "bullets" — short annotations attached to comments — and
# their revision history.
#
# NOTE: the compact `class Reflect::X` form does not put Reflect in the
# constant-lookup scope, so the model constants below must be fully
# qualified (Reflect::ReflectBulletRevision etc.); the bare names failed
# to resolve.
class Reflect::ReflectBulletController < ApplicationController
  respond_to :json

  # Returns every bullet revision for the requested comment ids, keyed as
  # data[comment_id][bullet_id] => bullet hash.
  def index
    comments = JSON.parse(params[:comments].gsub('\\',''))
    data = {}
    db_bullets = Reflect::ReflectBulletRevision.all( :conditions => { :comment_id => comments } )
    db_bullets.each do |db_bullet_rev|
      db_bullet = db_bullet_rev.bullet
      response = db_bullet.response
      res = response.nil? ? nil : {
        :id => response.response_id.to_s,
        :rev => response.id.to_s,
        :ts => response.created_at.to_s,
        :u => response.user.name,
        :sig => response.signal.to_s,
        :txt => response.text
      }
      highlights = []
      db_bullet_rev.highlights.each do |hi|
        highlights.push( hi.element_id )
      end
      bullet = {
        :id => db_bullet_rev.bullet_id.to_s,
        :ts => db_bullet_rev.created_at.to_s,
        :u => db_bullet_rev.user.nil? ? 'Anonymous' : db_bullet_rev.user.name,
        :txt => db_bullet_rev.text,
        :rev => db_bullet_rev.id.to_s,
        :highlights => highlights,
        :response => res
      }
      if !data.key?(db_bullet.comment_id.to_s)
        data[db_bullet.comment_id.to_s] = {db_bullet_rev.bullet_id.to_s => bullet}
      else
        data[db_bullet.comment_id.to_s][db_bullet_rev.bullet_id.to_s] = bullet
      end
    end
    render :json => data.to_json
  end

  # Creates a bullet (denied to the comment's own author).
  def create
    if has_permission?('add', current_user)
      json_response = add_or_update()
    else
      json_response = ''.to_json
    end
    render :json => json_response
  end

  # Adds a new revision to an existing bullet.
  def update
    cur_bullet, bullet_rev = get_current_bullet
    if has_permission?('add', current_user, cur_bullet, bullet_rev)
      json_response = add_or_update( cur_bullet, bullet_rev )
    else
      json_response = ''.to_json
    end
    render :json => json_response
  end

  # Deletes a bullet (admins and the bullet's author only).
  def destroy
    cur_bullet, bullet_rev = get_current_bullet
    if has_permission?('delete', current_user, cur_bullet, bullet_rev)
      cur_bullet.destroy
    end
    render :json => ''.to_json
  end

  protected

  # Looks up the revision for params[:bullet_id]; raises when unknown.
  def get_current_bullet
    bullet_rev = Reflect::ReflectBulletRevision.find_by_bullet_id(params[:bullet_id])
    if bullet_rev
      bullet_obj = bullet_rev.bullet
    else
      raise 'Could not find bullet with that id'
    end
    return bullet_obj, bullet_rev
  end

  # Permission rules:
  #   * 'delete' is allowed only to admins and the bullet's author
  #   * 'add' is denied to the comment's author
  # NOTE(review): a 'delete' check with a nil user still calls user.id and
  # would raise — verify anonymous users can never reach destroy.
  def has_permission?(verb, user, cur_bullet= nil, bullet_rev = nil)
    comment = Comment.find(params[:comment_id])
    commentAuthor = comment.user
    if bullet_rev.nil?
      bulletAuthor = user
    else
      bulletAuthor = bullet_rev.user
    end
    if current_user.nil?
      userLevel = -1
    else
      userLevel = user.admin ? 1 : 0
    end
    denied = ( # only admins and bullet authors can delete bullets
      verb == 'delete' \
      && bulletAuthor.id != user.id \
      && userLevel < 1
    ) \
    || ( # commenters can't add bullets to their comment
      verb == 'add' \
      && userLevel > -1 && commentAuthor.id == user.id
    )
    return !denied
  end

  # Shared create/update implementation; returns the JSON string the caller
  # renders. With bullet_obj/bullet_rev given, adds a revision to an
  # existing bullet; otherwise creates a new bullet.
  def add_or_update( bullet_obj = nil, bullet_rev = nil)
    user = current_user
    comment_id = params[:comment_id].to_i
    text = params[:text]
    # Empty text: bail out early. The old code rendered "{}" here and then
    # fell through, saving an empty revision and double-rendering when the
    # caller rendered the return value.
    return {}.to_json if text == ''
    modify = !bullet_obj.nil?
    new_rev = Reflect::ReflectBulletRevision.new(
      :comment_id => comment_id,
      :user => user,
      :text => text
    )
    if modify
      new_rev.bullet_id = bullet_obj.id
      bullet_rev.active = false
    else
      bullet_obj = Reflect::ReflectBullet.new(
        :comment_id => comment_id
      )
      bullet_obj.save
      new_rev.bullet_id = bullet_obj.id
      new_rev.notify_parties
    end
    new_rev.save
    if params.key?(:highlights)
      highlights = JSON.parse(params[:highlights].gsub('\\',''))
      highlights.each do |hi|
        Reflect::ReflectHighlight.create(
          :bullet_id => bullet_obj.id,
          :bullet_rev => new_rev.id,
          :element_id => hi
        )
      end
    end
    return {:insert_id => bullet_obj.id, :rev_id => new_rev.id, :u => user.nil? ? 'Anonymous' : user.name}.to_json
  end
end
fixing Reflect namespacing error
Former-commit-id: 48a1c8b6ff9d10b92a26a9752b964ef6b299a987 [formerly 442d1ff0269b54308a1d2e2f55251351328e4c8e] [formerly 536b3c9b3dd3f6b17436df7ec3cf8d85f591475d [formerly 536b3c9b3dd3f6b17436df7ec3cf8d85f591475d [formerly 3af3844cc3eb262dfee0c6559858ab3ba58e7e80]]]
Former-commit-id: 135046e14e67d6b331b0d61e8c7dcb5445009dac [formerly 51c8e9c9268596ae92175e82c6ac75676a5be46a]
Former-commit-id: 5c0888ac80b4b0973e74e3bcdbcd285e6b5aa466
Former-commit-id: 43cea2d105a3aaa08c5f79eeb9edf25815ca6537
require 'json'
# JSON API for "bullets" — short annotations attached to comments — and
# their revision history.
class Reflect::ReflectBulletController < ApplicationController
  respond_to :json

  # Returns every bullet revision for the requested comment ids, keyed as
  # data[comment_id][bullet_id] => bullet hash.
  def index
    comments = JSON.parse(params[:comments].gsub('\\',''))
    data = {}
    db_bullets = Reflect::ReflectBulletRevision.all( :conditions => { :comment_id => comments } )
    db_bullets.each do |db_bullet_rev|
      db_bullet = db_bullet_rev.bullet
      response = db_bullet.response
      res = response.nil? ? nil : {
        :id => response.response_id.to_s,
        :rev => response.id.to_s,
        :ts => response.created_at.to_s,
        :u => response.user.name,
        :sig => response.signal.to_s,
        :txt => response.text
      }
      highlights = []
      db_bullet_rev.highlights.each do |hi|
        highlights.push( hi.element_id )
      end
      bullet = {
        :id => db_bullet_rev.bullet_id.to_s,
        :ts => db_bullet_rev.created_at.to_s,
        :u => db_bullet_rev.user.nil? ? 'Anonymous' : db_bullet_rev.user.name,
        :txt => db_bullet_rev.text,
        :rev => db_bullet_rev.id.to_s,
        :highlights => highlights,
        :response => res
      }
      if !data.key?(db_bullet.comment_id.to_s)
        data[db_bullet.comment_id.to_s] = {db_bullet_rev.bullet_id.to_s => bullet}
      else
        data[db_bullet.comment_id.to_s][db_bullet_rev.bullet_id.to_s] = bullet
      end
    end
    render :json => data.to_json
  end

  # Creates a bullet (denied to the comment's own author).
  def create
    if has_permission?('add', current_user)
      json_response = add_or_update()
    else
      json_response = ''.to_json
    end
    render :json => json_response
  end

  # Adds a new revision to an existing bullet.
  def update
    cur_bullet, bullet_rev = get_current_bullet
    if has_permission?('add', current_user, cur_bullet, bullet_rev)
      json_response = add_or_update( cur_bullet, bullet_rev )
    else
      json_response = ''.to_json
    end
    render :json => json_response
  end

  # Deletes a bullet (admins and the bullet's author only).
  def destroy
    cur_bullet, bullet_rev = get_current_bullet
    if has_permission?('delete', current_user, cur_bullet, bullet_rev)
      cur_bullet.destroy
    end
    render :json => ''.to_json
  end

  protected

  # Looks up the revision for params[:bullet_id]; raises when unknown.
  def get_current_bullet
    bullet_rev = Reflect::ReflectBulletRevision.find_by_bullet_id(params[:bullet_id])
    if bullet_rev
      bullet_obj = bullet_rev.bullet
    else
      raise 'Could not find bullet with that id'
    end
    return bullet_obj, bullet_rev
  end

  # Permission rules:
  #   * 'delete' is allowed only to admins and the bullet's author
  #   * 'add' is denied to the comment's author
  # NOTE(review): a 'delete' check with a nil user still calls user.id and
  # would raise — verify anonymous users can never reach destroy.
  def has_permission?(verb, user, cur_bullet= nil, bullet_rev = nil)
    comment = Comment.find(params[:comment_id])
    commentAuthor = comment.user
    if bullet_rev.nil?
      bulletAuthor = user
    else
      bulletAuthor = bullet_rev.user
    end
    if current_user.nil?
      userLevel = -1
    else
      userLevel = user.admin ? 1 : 0
    end
    denied = ( # only admins and bullet authors can delete bullets
      verb == 'delete' \
      && bulletAuthor.id != user.id \
      && userLevel < 1
    ) \
    || ( # commenters can't add bullets to their comment
      verb == 'add' \
      && userLevel > -1 && commentAuthor.id == user.id
    )
    return !denied
  end

  # Shared create/update implementation; returns the JSON string the caller
  # renders. With bullet_obj/bullet_rev given, adds a revision to an
  # existing bullet; otherwise creates a new bullet.
  def add_or_update( bullet_obj = nil, bullet_rev = nil)
    user = current_user
    comment_id = params[:comment_id].to_i
    text = params[:text]
    # Empty text: bail out early. The old code rendered "{}" here and then
    # fell through, saving an empty revision and double-rendering when the
    # caller rendered the return value.
    return {}.to_json if text == ''
    modify = !bullet_obj.nil?
    new_rev = Reflect::ReflectBulletRevision.new(
      :comment_id => comment_id,
      :user => user,
      :text => text
    )
    if modify
      new_rev.bullet_id = bullet_obj.id
      bullet_rev.active = false
    else
      bullet_obj = Reflect::ReflectBullet.new(
        :comment_id => comment_id
      )
      bullet_obj.save
      new_rev.bullet_id = bullet_obj.id
      new_rev.notify_parties
    end
    new_rev.save
    if params.key?(:highlights)
      highlights = JSON.parse(params[:highlights].gsub('\\',''))
      highlights.each do |hi|
        Reflect::ReflectHighlight.create(
          :bullet_id => bullet_obj.id,
          :bullet_rev => new_rev.id,
          :element_id => hi
        )
      end
    end
    return {:insert_id => bullet_obj.id, :rev_id => new_rev.id, :u => user.nil? ? 'Anonymous' : user.name}.to_json
  end
end
|
module Spree
  # Extends Spree's OrdersController#populate so ad-hoc option values and
  # product customizations from the cart form reach the order populator.
  OrdersController.class_eval do
    include ProductCustomizations
    include AdHocUtils
    # the inbound variant is determined either from products[pid]=vid or variants[master_vid], depending on whether or not the product has_variants, or not
    #
    # Currently, we are assuming the inbound ad_hoc_option_values and customizations apply to the entire inbound product/variant 'group', as more surgery
    # needs to occur in the cart partial for this to be done 'right'
    #
    # Adds a new item to the order (creating a new order if none already exists)
    def populate
      populator = Spree::OrderPopulator.new(current_order(create_order_if_necessary: true), current_currency)
      # ad_hoc_option_value_ids / product_customizations come from the
      # helper modules included above.
      flexi_hash = {ad_hoc_option_value_ids: ad_hoc_option_value_ids, product_customizations: product_customizations}
      if populator.populate(params.slice(:products, :variants, :quantity).merge(flexi_hash))
        current_order.ensure_updated_shipments
        fire_event('spree.cart.add')
        fire_event('spree.order.contents_changed')
        respond_with(@order) do |format|
          format.html { redirect_to cart_path }
        end
      else
        # Surface the populator's validation errors and return the user to
        # the page they came from.
        flash[:error] = populator.errors.full_messages.join(" ")
        redirect_to :back
      end
    end
  end
end
changes in controller
module Spree
  # Extends Spree's OrdersController#populate so ad-hoc option values and
  # product customizations from the cart form reach the order populator.
  OrdersController.class_eval do
    include ProductCustomizations
    include AdHocUtils
    # the inbound variant is determined either from products[pid]=vid or variants[master_vid], depending on whether or not the product has_variants, or not
    #
    # Currently, we are assuming the inbound ad_hoc_option_values and customizations apply to the entire inbound product/variant 'group', as more surgery
    # needs to occur in the cart partial for this to be done 'right'
    #
    # Adds a new item to the order (creating a new order if none already exists)
    def populate
      populator = Spree::OrderPopulator.new(current_order(create_order_if_necessary: true), current_currency)
      # ad_hoc_option_value_ids / product_customizations come from the
      # helper modules included above.
      flexi_hash = {ad_hoc_option_value_ids: ad_hoc_option_value_ids, product_customizations: product_customizations}
      if populator.populate(params.slice(:products, :variants, :quantity).merge(flexi_hash))
        current_order.ensure_updated_shipments
        respond_with(@order) do |format|
          format.html { redirect_to cart_path }
        end
      else
        # Surface the populator's validation errors and return the user to
        # the page they came from.
        flash[:error] = populator.errors.full_messages.join(" ")
        redirect_to :back
      end
    end
  end
end
|
module Spree
  # Callback endpoints for the Redsys payment gateway: a server-to-server
  # notification (redsys_notify) and the customer's browser return
  # (redsys_confirm).
  class RedsysCallbacksController < Spree::BaseController
    # The gateway POSTs here without a CSRF token.
    skip_before_filter :verify_authenticity_token
    #ssl_required

    # Receive a direct notification from the gateway
    def redsys_notify
      @order ||= Spree::Order.find_by_number!(params[:order_id])
      notify_acknowledge = acknowledgeSignature(redsys_credentials(payment_method))
      if notify_acknowledge
        #TODO add source to payment
        unless @order.state == "complete"
          order_upgrade
        end
        payment_upgrade(params, true)
        @payment = Spree::Payment.find_by_order_id(@order)
        @payment.complete!
      else
        payment_upgrade(params, false)
      end
      render :nothing => true
    end

    # Handle the incoming user
    def redsys_confirm
      @order ||= Spree::Order.find_by_number!(params[:order_id])
      unless @order.state == "complete"
        order_upgrade()
        payment_upgrade(params, false)
      end
      # Unset the order id as it's completed.
      session[:order_id] = nil #deprecated from 2.3
      flash.notice = Spree.t(:order_processed_successfully)
      flash['order_completed'] = true
      redirect_to order_path(@order)
    end

    # Gateway credentials read from the payment method's preferences.
    def redsys_credentials (payment_method)
      {
        :terminal_id => payment_method.preferred_terminal_id,
        :commercial_id => payment_method.preferred_commercial_id,
        :secret_key => payment_method.preferred_secret_key,
        :key_type => payment_method.preferred_key_type
      }
    end

    # Records a payment against the order. The response fields live inside
    # the base64 Ds_MerchantParameters blob, not as top-level params — the
    # old code read params['Ds_Response']/params['Ds_AuthorisationCode'],
    # which are never present, so those columns were always blank.
    def payment_upgrade (params, no_risky)
      decodec = decode_Merchant_Parameters
      payment = @order.payments.create!({:amount => @order.total,
                                         :payment_method => payment_method,
                                         :response_code => decodec['Ds_Response'].to_s,
                                         :avs_response => decodec['Ds_AuthorisationCode'].to_s})
      payment.started_processing!
      @order.update(:considered_risky => 0) if no_risky
    end

    # The payment method from params, falling back to the Redsys integration.
    def payment_method
      @payment_method ||= Spree::PaymentMethod.find(params[:payment_method_id])
      @payment_method ||= Spree::PaymentMethod.find_by_type("Spree::BillingIntegration::redsysPayment")
    end

    # Forces the order into the completed state outside the state machine.
    def order_upgrade
      @order.update(:state => "complete", :considered_risky => 1, :completed_at => Time.now)
      # Since we dont rely on state machine callback, we just explicitly call this method for spree_store_credits
      if @order.respond_to?(:consume_users_credit, true)
        @order.send(:consume_users_credit)
      end
      @order.finalize!
    end

    protected

    # Decodes the base64url-encoded JSON blob the gateway sends.
    def decode_Merchant_Parameters
      jsonrec = Base64.urlsafe_decode64(params[:Ds_MerchantParameters])
      JSON.parse(jsonrec)
    end

    # Recomputes the notification signature: derive a per-order key by
    # 3DES-encrypting the order id with the shared secret, then HMAC-SHA256
    # the raw merchant parameters and base64url-encode the digest.
    def create_MerchantSignature_Notif(key)
      keyDecoded=Base64.decode64(key)
      # Get the order id (field-name casing differs between messages).
      orderrec = (decode_Merchant_Parameters['Ds_Order'].blank?)? decode_Merchant_Parameters['DS_ORDER'] : decode_Merchant_Parameters['Ds_Order']
      key3des=des3key(keyDecoded, orderrec)
      hmac=hmac(key3des,params[:Ds_MerchantParameters])
      sign=Base64.urlsafe_encode64(hmac)
    end

    # True only when the notification is well-formed, the signature matches
    # AND the response code reports success.
    def acknowledgeSignature(credentials = nil)
      return false if(params[:Ds_SignatureVersion].blank? ||
        params[:Ds_MerchantParameters].blank? ||
        params[:Ds_Signature].blank?)
      #HMAC_SHA256_V1
      return false if(params[:Ds_SignatureVersion] != credentials[:key_type])
      decodec = decode_Merchant_Parameters
      create_Signature = create_MerchantSignature_Notif(credentials[:secret_key])
      msg =
        "REDSYS_NOTIFY: " +
        "----- order_TS: " + decodec['Ds_Order'].to_s +
        "----- order_Number: " + @order.number +
        "----- Signature: " + create_Signature.to_s.upcase +
        " ---- Ds_Signature " + params[:Ds_Signature].to_s.upcase +
        " ---- RESULT " + ((create_Signature.to_s.upcase == params[:Ds_Signature].to_s.upcase)? 'OK' : 'KO')
      Rails.logger.info "#{msg}"
      responseCode = decodec['Ds_Response'].to_i
      # Per the Redsys docs, OK codes are 0000..0099 plus 900 for refunds.
      # The old check (`> 99 || == 900`) rejected refunds and accepted the
      # 100..899 failure codes — inverted on both counts.
      return false if (responseCode > 99 && responseCode != 900)
      create_Signature.to_s.upcase == params[:Ds_Signature].to_s.upcase
    end

    # Derives the per-order 3DES key: zero-pad the order id to the 8-byte
    # block size and encrypt with des-ede3-cbc.
    def des3key(key,message)
      block_length = 8
      cipher = OpenSSL::Cipher::Cipher.new("des-ede3-cbc")
      cipher.padding = 0
      cipher.encrypt
      cipher.key = key
      message += "\0" until message.bytesize % block_length == 0
      ciphertext = cipher.update(message)
      ciphertext << cipher.final
      ciphertext
    end

    # HMAC-SHA256 of message under key.
    def hmac(key,message)
      hash = OpenSSL::HMAC.digest('sha256', key, message)
    end
  end
end
Added some log messages. Bug in insert some spree_payments' fields
module Spree
  # Callback endpoints for the Redsys payment gateway: a server-to-server
  # notification (redsys_notify) and the customer's browser return
  # (redsys_confirm).
  class RedsysCallbacksController < Spree::BaseController
    # The gateway POSTs here without a CSRF token.
    skip_before_filter :verify_authenticity_token
    #ssl_required

    # Receive a direct notification from the gateway
    def redsys_notify
      @order ||= Spree::Order.find_by_number!(params[:order_id])
      notify_acknowledge = acknowledgeSignature(redsys_credentials(payment_method))
      if notify_acknowledge
        #TODO add source to payment
        unless @order.state == "complete"
          order_upgrade
        end
        payment_upgrade(params, true)
        @payment = Spree::Payment.find_by_order_id(@order)
        @payment.complete!
      else
        payment_upgrade(params, false)
      end
      render :nothing => true
    end

    # Handle the incoming user
    def redsys_confirm
      @order ||= Spree::Order.find_by_number!(params[:order_id])
      unless @order.state == "complete"
        order_upgrade()
        payment_upgrade(params, false)
      end
      # Unset the order id as it's completed.
      session[:order_id] = nil #deprecated from 2.3
      flash.notice = Spree.t(:order_processed_successfully)
      flash['order_completed'] = true
      redirect_to order_path(@order)
    end

    # Gateway credentials read from the payment method's preferences.
    def redsys_credentials (payment_method)
      {
        :terminal_id => payment_method.preferred_terminal_id,
        :commercial_id => payment_method.preferred_commercial_id,
        :secret_key => payment_method.preferred_secret_key,
        :key_type => payment_method.preferred_key_type
      }
    end

    # Records a payment against the order using the response fields decoded
    # from the Ds_MerchantParameters blob.
    def payment_upgrade (params, no_risky)
      decodec = decode_Merchant_Parameters
      payment = @order.payments.create!({:amount => @order.total,
                                         :payment_method => payment_method,
                                         :response_code => decodec['Ds_Response'].to_s,
                                         :avs_response => decodec['Ds_AuthorisationCode'].to_s})
      payment.started_processing!
      @order.update(:considered_risky => 0) if no_risky
    end

    # The payment method from params, falling back to the Redsys integration.
    def payment_method
      @payment_method ||= Spree::PaymentMethod.find(params[:payment_method_id])
      @payment_method ||= Spree::PaymentMethod.find_by_type("Spree::BillingIntegration::redsysPayment")
    end

    # Forces the order into the completed state outside the state machine.
    def order_upgrade
      @order.update(:state => "complete", :considered_risky => 1, :completed_at => Time.now)
      # Since we dont rely on state machine callback, we just explicitly call this method for spree_store_credits
      if @order.respond_to?(:consume_users_credit, true)
        @order.send(:consume_users_credit)
      end
      @order.finalize!
    end

    protected

    # Decodes the base64url-encoded JSON blob the gateway sends.
    def decode_Merchant_Parameters
      jsonrec = Base64.urlsafe_decode64(params[:Ds_MerchantParameters])
      JSON.parse(jsonrec)
    end

    # Recomputes the notification signature: derive a per-order key by
    # 3DES-encrypting the order id with the shared secret, then HMAC-SHA256
    # the raw merchant parameters and base64url-encode the digest.
    def create_MerchantSignature_Notif(key)
      keyDecoded=Base64.decode64(key)
      # Get the order id (field-name casing differs between messages).
      orderrec = (decode_Merchant_Parameters['Ds_Order'].blank?)? decode_Merchant_Parameters['DS_ORDER'] : decode_Merchant_Parameters['Ds_Order']
      key3des=des3key(keyDecoded, orderrec)
      hmac=hmac(key3des,params[:Ds_MerchantParameters])
      sign=Base64.urlsafe_encode64(hmac)
    end

    # True only when the notification is well-formed, the signature matches
    # AND the response code reports success.
    def acknowledgeSignature(credentials = nil)
      return false if(params[:Ds_SignatureVersion].blank? ||
        params[:Ds_MerchantParameters].blank? ||
        params[:Ds_Signature].blank?)
      #HMAC_SHA256_V1
      return false if(params[:Ds_SignatureVersion] != credentials[:key_type])
      decodec = decode_Merchant_Parameters
      Rails.logger.debug "JSON Decodec: #{decodec}"
      create_Signature = create_MerchantSignature_Notif(credentials[:secret_key])
      msg =
        "REDSYS_NOTIFY: " +
        " ---- Ds_Response: " + decodec['Ds_Response'].to_s +
        " ---- order_TS: " + decodec['Ds_Order'].to_s +
        " ---- order_Number: " + @order.number +
        " ---- Signature: " + create_Signature.to_s.upcase +
        " ---- Ds_Signature " + params[:Ds_Signature].to_s.upcase +
        " ---- RESULT " + ((create_Signature.to_s.upcase == params[:Ds_Signature].to_s.upcase)? 'OK' : 'KO')
      Rails.logger.info "#{msg}"
      res=create_Signature.to_s.upcase == params[:Ds_Signature].to_s.upcase
      responseCode=decodec['Ds_Response'].to_i
      Rails.logger.debug "Ds_ResponseInt: #{responseCode}"
      # Check the signature before the response code: a forged request with
      # an error Ds_Response must not be trusted just because it "failed".
      # Per the Redsys docs OK codes are 0000..0099 plus 900 for refunds.
      return false if (responseCode > 99 && responseCode!=900)
      res
    end

    # Derives the per-order 3DES key: zero-pad the order id to the 8-byte
    # block size and encrypt with des-ede3-cbc.
    def des3key(key,message)
      block_length = 8
      # OpenSSL::Cipher.new: the Cipher::Cipher alias is deprecated and
      # removed in newer openssl releases.
      cipher = OpenSSL::Cipher.new("des-ede3-cbc")
      cipher.padding = 0
      cipher.encrypt
      cipher.key = key
      message += "\0" until message.bytesize % block_length == 0
      ciphertext = cipher.update(message)
      ciphertext << cipher.final
      ciphertext
    end

    # HMAC-SHA256 of message under key.
    def hmac(key,message)
      OpenSSL::HMAC.digest('sha256', key, message)
    end
  end
end
|
Complete View class skeleton
# Skeleton of the game's view layer. Each hook is an intentional no-op stub
# that will render one part of the UI once implemented.
class View
  def initialize; end

  def render_player_names; end

  def initialize_game_render; end

  def render_scores; end

  def render_track; end

  def render_question; end

  def render_answer; end

  def render_winner; end

  def render_wrong_answer; end
end
|
#!/usr/bin/ruby -w
# -*- encoding: utf-8 -*-
require './grammar'
module Grammar
# Base class for dictionary words.
#
#   text          - the word itself
#   gram_props    - array of grammar-property strings (inflection hints)
#   general_props - hash of semantic options collected by Word.parse
#   frequency     - non-negative weight (defaults to 100)
class Word
  attr_reader :text, :gram_props, :frequency

  def initialize(text,gram_props=[],general_props={},frequency=100)
    gram_props ||= []
    @text,@frequency,@gram_props,@general_props=text,frequency,gram_props,general_props
    unless gram_props.respond_to?(:each) && gram_props.respond_to?(:size)
      raise "expect gram_props to behave like an array but got #{gram_props.inspect}"
    end
    unless general_props.respond_to?(:keys)
      raise "expect general props to behave like a hash but got #{general_props.inspect}"
    end
    if !gram_props.empty? && !gram_props[0].kind_of?(String)
      raise "gram_props should be an array of strings"
    end
    if frequency < 0
      raise "invalid frequency for #{text}: #{frequency}"
    end
  end

  # Total order: by text, then class name, then grammar props, then frequency.
  def <=>(other)
    res = @text <=> other.text
    res = self.class.name <=> other.class.name if (res == 0)
    res = @gram_props <=> other.gram_props if (res == 0)
    res = @frequency <=> other.frequency if (res == 0)
    res
  end

  # returns an Enumerable collection of all applicable grammar forms
  def all_forms
    return [{}]
  end

  # Base words are not inflected; subclasses consult the grammar.
  def inflect(grammar,form)
    return @text
  end

  # Reads a semantic option collected at parse time (nil when absent).
  def get_property(prop_name)
    @general_props[prop_name]
  end

  protected
  attr_reader :general_props

  # Parses the option part of a dictionary line. Recognised SEMANTIC-style
  # options are accumulated into global_props; every other token is handed
  # to the block (or warned about when no block is given).
  # global_props - hash where read options will be stored
  # block - will receive split params to parse
  def self.parse(line,global_props,&block)
    line.strip! if line
    if line && !line.empty?
      semantic_opts = {'SEMANTIC'=>:semantic,
        'ONLY_WITH'=>:only_with, 'NOT_WITH'=>:not_with,
        'ONLY_WITH_W'=>:only_with_word, 'NOT_WITH_W'=>:not_with_word,
        'TAKES_ONLY'=>:takes_only, 'TAKES_NO'=>:takes_no,
        'TAKES_ONLY_W'=>:takes_only_word, 'TAKES_NO_W'=>:takes_no_word}
      escaped = []
      last_e = -1
      # ignore whitespaces inside brackets by escaping what's inside
      line.gsub!(/\([^)]+\)/) { |match| last_e +=1; escaped[last_e] = match; "$#{last_e}" }
      line.split(/\s+/).each do |part|
        catch(:process_next_part) do
          # restore the escaped bracket contents
          part.gsub!(/\$(\d+)/) { escaped[$1.to_i] }
          semantic_opts.each_pair do |string,name|
            if part =~ /^#{string}\(([^)]+)\)$/
              global_props[name] ||= []
              global_props[name] += $1.split(/, */)
              throw :process_next_part
            end
          end
          if block_given?
            block.call(part)
          else
            puts "warn: unknown option #{part}"
          end
        end
      end
    end
  end
end
# A noun with gender, number, person, animacy and optional attributes.
class Noun < Word
  attr_reader :animate,:gender, :number, :person,:attributes
  # Maps the dictionary's one-letter gender codes to grammar constants.
  STRING2GENDER = {'m'=>MASCULINE,'n'=>NEUTER,'f'=>FEMININE}

  def initialize(text,gram_props,frequency,gender,general_props={},number=SINGULAR,person=3,animate=true,attributes=[],suffix=nil)
    super(text,gram_props,general_props,frequency)
    raise NounError, "invalid gender #{gender}" unless(GENDERS.include?(gender))
    raise NounError, "invalid number #{number}" unless(NUMBERS.include?(number))
    raise NounError, "invalid person #{person}" unless([1,2,3].include?(person))
    raise NounError, "not allowed to have more than 1 attribute" if attributes.size > 1
    @gender,@number,@person,@animate,@attributes,@suffix = gender,number,person,animate,attributes,suffix
  end

  # Parses a noun dictionary line. Options: m/f/n gender codes, Pl, nan
  # (inanimate), PERSON(n), SUFFIX(text), NO_ADJ, ONLY_SUBJ, ONLY_OBJ,
  # OBJ_FREQ(n) and ATTR(prep,case)/ATTR(case).
  def Noun.parse(text,gram_props,frequency,line)
    gender,number,person,animate,suffix = MASCULINE,SINGULAR,3,true,nil
    general_props = {}
    attributes = []
    Word.parse(line,general_props) do |part|
      case part
        when /^([mfn])$/ then gender = STRING2GENDER[$1]
        when 'Pl' then number = PLURAL
        when 'nan' then animate = false
        when /^PERSON\(([^)]*)\)/
          person = Integer($1.strip)
        when /^SUFFIX\(([^)]+)\)$/
          suffix = $1
        when 'NO_ADJ' then general_props[:no_adjective] = true
        when 'ONLY_SUBJ' then general_props[:only_subj] = true
        when 'ONLY_OBJ' then general_props[:only_obj] = true
        when /^OBJ_FREQ/
          unless part =~ /^OBJ_FREQ\((\d+)\)$/
            raise NounError, "illegal format of OBJ_FREQ in #{line}"
          end
          general_props[:obj_freq] = $1.to_i
        when /^ATTR\(([^)]+)\)$/
          opts = $1
          object_case, preposition = nil, nil
          case opts
            when /^([^,]+),(\d+)$/
              preposition = $1.strip
              object_case = Integer($2)
            when /^\d+$/
              object_case = Integer(opts)
            else
              raise ParseError, "wrong option format for #{line}: '#{part}'"
          end
          attributes << NounObject.new(object_case, preposition)
        else puts "warn: unknown option #{part}"
      end
    end
    Noun.new(text,gram_props,frequency,gender,general_props,number,person,animate,attributes,suffix)
  rescue GramObjectError, NounError, ArgumentError
    raise ParseError, "cannot parse '#{line}': #{$!.message}"
  end

  # returns an Enumerable collection of all applicable grammar forms
  def all_forms
    retval = []
    [1,2].each do |number|
      CASES.each do |gram_case|
        retval << {:case => gram_case, :number => number}
      end
    end
    retval
  end

  # Inflects via the grammar, defaulting the form's number to the noun's own
  # and appending the fixed suffix when present.
  def inflect(grammar,form)
    form[:number] ||= @number
    inflected = grammar.inflect_noun(text,form,*gram_props)
    inflected += ' ' + @suffix if (@suffix)
    inflected
  end

  def to_s
    "Noun(#{text} n=#{number})"
  end

  private
  class NounError < RuntimeError
  end
end
# Abstract base class for a grammatical object in a sentence (the
# complement a word takes). Subclasses flip the flag matching their kind.
class GramObject
  def initialize
    @is_noun = false
    @is_adjective = false
    @is_infinitive = false
  end

  # True for noun complements.
  def is_noun?
    @is_noun
  end

  # True for adjective complements.
  def is_adjective?
    @is_adjective
  end

  # True for infinitive complements.
  def is_infinitive?
    @is_infinitive
  end
end
# Common exception thrown by GramObject descendants when given invalid data.
class GramObjectError < RuntimeError; end
# Noun complement: a required grammatical case plus an optional preposition.
class NounObject < GramObject
  attr_reader :case, :preposition

  def initialize(noun_case, preposition=nil)
    super()
    # Validate before touching state so a bad case can't leave a
    # half-initialised object behind (the old code assigned first).
    raise GramObjectError, "invalid case: #{noun_case}" unless CASES.include? noun_case
    @case, @preposition, @is_noun = noun_case, preposition, true
  end
end
# Adjective complement marker.
class AdjectiveObject < GramObject
  def initialize
    super
    @is_adjective = true
  end
end
# Infinitive complement with an optional introducing preposition.
class InfinitiveObject < GramObject
  attr_reader :preposition

  def initialize(preposition=nil)
    super
    @preposition = preposition
    @is_infinitive = true
  end
end
# A verb with optional reflexivity, complements (objects) and a fixed suffix.
class Verb < Word
  attr_reader :objects, :reflexive

  def initialize(text,gram_props,frequency,general_props={},reflexive=false,
    objects=[],suffix=nil)
    super(text,gram_props,general_props,frequency)
    @reflexive,@objects,@suffix = reflexive,objects,suffix
  end

  # Parses a verb dictionary line. Options: REFL/REFLEX/REFLEXIVE,
  # INF(prep), ADJ, SUFFIX(text) and OBJ(prep,case)/OBJ(case).
  def Verb.parse(text,gram_props,frequency,line)
    reflexive, suffix = false, nil
    objects = []
    general_props = {}
    Word.parse(line,general_props) do |part|
      case part
        when /^REFL(?:EXIVE|EX)?$/ then reflexive = true
        when /^INF(?:\(([^)]+)\))?$/
          objects << InfinitiveObject.new($1)
        when 'ADJ' then objects << AdjectiveObject.new
        when /^SUFFIX\(([^)]+)\)$/
          suffix = $1
        when /^OBJ\(([^)]+)\)$/
          opts = $1
          object_case, preposition = nil, nil
          case opts
            when /^([^,]+),(\d+)$/
              preposition = $1.strip
              object_case = Integer($2)
            when /^\d+$/
              object_case = Integer(opts)
            else
              raise ParseError, "wrong option format for #{line}: '#{part}'"
          end
          objects << NounObject.new(object_case, preposition)
        else
          puts "warn: unknown option '#{part}' for '#{text}'"
      end
    end
    Verb.new(text,gram_props,frequency,general_props,reflexive,
      objects,suffix)
  rescue VerbError, GramObjectError => e
    raise ParseError, e.message
  end

  # Inflects via the grammar and appends the fixed suffix when present.
  def inflect(grammar,form)
    inflected = grammar.inflect_verb(text,form,@reflexive,*gram_props)
    inflected += ' ' + @suffix if (@suffix)
    inflected
  end

  # returns an Enumerable collection of all applicable grammar forms
  def all_forms
    retval = []
    [1,2].each do |number|
      [1,2,3].each do |person|
        retval << {:person => person, :number => number}
      end
    end
    retval << {:infinitive =>1 }
    retval
  end

  def to_s
    result = "Verb(#{text}"
    result += ' reflexive' if reflexive;
    result += ')'
    result
  end

  private
  class VerbError < RuntimeError
  end
end
# An adverb entry from the dictionary. Adverbs accept only the generic
# word-level options handled by Word.parse.
class Adverb < Word
  def initialize(text,gram_props,frequency,general_props={})
    super(text,gram_props,general_props,frequency)
  end

  # Parses the remainder of a dictionary line into a new Adverb.
  def self.parse(text,gram_props,frequency,line)
    props = {}
    Word.parse(line, props)
    new(text, gram_props, frequency, props)
  end

  def to_s
    "Adverb(#{text})"
  end
end
# A catch-all word that is never inflected. It carries no grammar
# properties and accepts no dictionary options.
class Other < Word
  def initialize(text,gram_props,frequency)
    super(text,gram_props,{},frequency)
  end

  # Rejects any grammar properties or trailing options on the line.
  def self.parse(text,gram_props,frequency,line)
    unless gram_props.empty?
      raise ParseError, "does not expect any grammar properties for other but got '#{gram_props}'"
    end
    if line && line =~ /\w/
      raise ParseError, "does not expect other properties for other but got '#{line}'"
    end
    new(text, gram_props, frequency)
  end

  def to_s
    "OtherWord(#{text})"
  end
end
# An adjective entry from the dictionary.
class Adjective < Word
  attr_reader :attributes, :double
  # double     - set by the DOUBLE and POSS dictionary options
  # attributes - at most one NounObject attribute
  def initialize(text,gram_props,frequency,double=false,attributes=[],general_props={})
    super(text,gram_props,general_props,frequency)
    if attributes.size > 1
      raise AdjectiveError, "not allowed to have more than 1 attribute"
    end
    @attributes,@double=attributes,double
  end
  # Parses adjective-specific options from a dictionary line.
  # Recognised options: NOT_AS_OBJ, DOUBLE, POSS, ATTR(case) and
  # ATTR(preposition,case). Raises ParseError on malformed input.
  def Adjective.parse(text,gram_props,frequency,line)
    general_props = {}
    double = false
    attributes = []
    Word.parse(line,general_props) do |part|
      case part
      when 'NOT_AS_OBJ' then general_props[:not_as_object] = true
      when 'DOUBLE' then double = true
      when 'POSS' then double = true
      when /^ATTR\(([^)]+)\)$/
        opts = $1
        object_case, preposition = nil, nil
        case opts
        when /^([^,]+),(\d+)$/
          # "preposition,case" form
          preposition = $1.strip
          object_case = Integer($2)
        when /^\d+$/
          # bare case number
          object_case = Integer(opts)
        else
          raise ParseError, "wrong option format for #{line}: '#{part}'"
        end
        attributes << NounObject.new(object_case, preposition)
      else puts "warn: unknown option #{part}"
      end
    end
    Adjective.new(text,gram_props,frequency,double,attributes,general_props)
  rescue GramObjectError, AdjectiveError => e
    raise ParseError, e.message
  end
  # returns an Enumerable collection of all applicable grammar forms.
  # The animate/inanimate distinction only applies to masculine forms,
  # so other genders get a single form per case/number instead of two
  # duplicates (previously :animate was emitted for every gender).
  def all_forms
    retval = []
    GENDERS.each do |gender|
      [1,2].each do |number|
        CASES.each do |gram_case|
          form = {:case => gram_case, :number => number, :gender => gender}
          if gender == MASCULINE
            retval << form.merge({:animate => true})
            retval << form.merge({:animate => false})
          else
            retval << form
          end
        end
      end
    end
    retval
  end
  def inflect(grammar,form)
    return grammar.inflect_adjective(text,form,*gram_props)
  end
  def to_s
    "Adjective(#{text})"
  end
private
  # raised internally on invalid adjective construction; converted to ParseError
  class AdjectiveError < RuntimeError
  end
end
# Namespace-style utility class; never instantiated.
class Words
  private_class_method :new

  # Returns the Word subclass that parses the given speech-part constant.
  def self.get_class(speech_part)
    case speech_part
    when NOUN      then Noun
    when VERB      then Verb
    when ADJECTIVE then Adjective
    when ADVERB    then Adverb
    when OTHER     then Other
    else raise "unknown speech part: #{speech_part}"
    end
  end
end
end
Remove the :animate variant for non-masculine genders in Adjective#all_forms
#!/usr/bin/ruby -w
# -*- encoding: utf-8 -*-
require './grammar'
module Grammar
# Base class for every dictionary entry.
class Word
  attr_reader :text, :gram_props, :frequency
  # text          - the word itself
  # gram_props    - array of grammar-property strings for the inflection engine
  # general_props - hash of word-level options (semantic restrictions etc.)
  # frequency     - non-negative relative frequency weight
  def initialize(text,gram_props=[],general_props={},frequency=100)
    gram_props ||= []
    @text,@frequency,@gram_props,@general_props=text,frequency,gram_props,general_props
    # duck-typed validation: only require array-like / hash-like behaviour
    unless gram_props.respond_to?(:each) && gram_props.respond_to?(:size)
      raise "expect gram_props to behave like an array but got #{gram_props.inspect}"
    end
    unless general_props.respond_to?(:keys)
      raise "expect general props to behave like a hash but got #{general_props.inspect}"
    end
    if !gram_props.empty? && !gram_props[0].kind_of?(String)
      raise "gram_props should be an array of strings"
    end
    if frequency < 0
      raise "invalid frequency for #{text}: #{frequency}"
    end
  end
  # Orders words by text, then class name, then properties, then frequency.
  def <=>(other)
    res = @text <=> other.text
    res = self.class.name <=> other.class.name if (res == 0)
    res = @gram_props <=> other.gram_props if (res == 0)
    res = @frequency <=> other.frequency if (res == 0)
    res
  end
  # returns an Enumerable collection of all applicable grammar forms;
  # the base class has a single, empty form
  def all_forms
    return [{}]
  end
  # Base words are not inflected; subclasses override this.
  def inflect(grammar,form)
    return @text
  end
  # Reads a word-level option collected by parse (nil when absent).
  def get_property(prop_name)
    @general_props[prop_name]
  end
protected
  attr_reader :general_props
  # global_props - hash where read options will be stored
  # block - will receive split params to parse
  def self.parse(line,global_props,&block)
    line.strip! if line
    if line && !line.empty?
      # option keyword -> key under which its arguments are accumulated
      semantic_opts = {'SEMANTIC'=>:semantic,
        'ONLY_WITH'=>:only_with, 'NOT_WITH'=>:not_with,
        'ONLY_WITH_W'=>:only_with_word, 'NOT_WITH_W'=>:not_with_word,
        'TAKES_ONLY'=>:takes_only, 'TAKES_NO'=>:takes_no,
        'TAKES_ONLY_W'=>:takes_only_word, 'TAKES_NO_W'=>:takes_no_word}
      escaped = []
      last_e = -1
      # ignore whitespaces inside brackets by escaping what's inside
      # (each "(...)" group is replaced by a "$<index>" placeholder)
      line.gsub!(/\([^)]+\)/) { |match| last_e +=1; escaped[last_e] = match; "$#{last_e}" }
      line.split(/\s+/).each do |part|
        catch(:process_next_part) do
          # restore the escaped "(...)" text before interpreting the part
          part.gsub!(/\$(\d+)/) { escaped[$1.to_i] }
          semantic_opts.each_pair do |string,name|
            if part =~ /^#{string}\(([^)]+)\)$/
              global_props[name] ||= []
              global_props[name] += $1.split(/, */)
              # handled here; skip the caller's block for this part
              throw :process_next_part
            end
          end
          # anything not recognised above is delegated to the subclass
          if block_given?
            block.call(part)
          else
            puts "warn: unknown option #{part}"
          end
        end
      end
    end
  end
end
# A noun entry from the dictionary.
class Noun < Word
  attr_reader :animate,:gender, :number, :person,:attributes
  # dictionary shorthand letter -> gender constant
  STRING2GENDER = {'m'=>MASCULINE,'n'=>NEUTER,'f'=>FEMININE}
  # gender/number/person/animate - grammatical features of the noun
  # attributes - at most one NounObject attribute attached to the noun
  # suffix     - optional text appended after every inflected form
  def initialize(text,gram_props,frequency,gender,general_props={},number=SINGULAR,person=3,animate=true,attributes=[],suffix=nil)
    super(text,gram_props,general_props,frequency)
    raise NounError, "invalid gender #{gender}" unless(GENDERS.include?(gender))
    raise NounError, "invalid number #{number}" unless(NUMBERS.include?(number))
    raise NounError, "invalid person #{person}" unless([1,2,3].include?(person))
    raise NounError, "not allowed to have more than 1 attribute" if attributes.size > 1
    @gender,@number,@person,@animate,@attributes,@suffix = gender,number,person,animate,attributes,suffix
  end
  # Parses noun-specific options from a dictionary line.
  # Recognised options: m/f/n (gender), Pl, nan (inanimate), PERSON(n),
  # SUFFIX(text), NO_ADJ, ONLY_SUBJ, ONLY_OBJ, OBJ_FREQ(n) and
  # ATTR(case) / ATTR(preposition,case). Raises ParseError on bad input.
  def Noun.parse(text,gram_props,frequency,line)
    # defaults: masculine singular 3rd-person animate, no suffix
    gender,number,person,animate,suffix = MASCULINE,SINGULAR,3,true,nil
    general_props = {}
    attributes = []
    Word.parse(line,general_props) do |part|
      case part
      when /^([mfn])$/ then gender = STRING2GENDER[$1]
      when 'Pl' then number = PLURAL
      when 'nan' then animate = false
      when /^PERSON\(([^)]*)\)/
        person = Integer($1.strip)
      when /^SUFFIX\(([^)]+)\)$/
        suffix = $1
      when 'NO_ADJ' then general_props[:no_adjective] = true
      when 'ONLY_SUBJ' then general_props[:only_subj] = true
      when 'ONLY_OBJ' then general_props[:only_obj] = true
      when /^OBJ_FREQ/
        unless part =~ /^OBJ_FREQ\((\d+)\)$/
          raise NounError, "illegal format of OBJ_FREQ in #{line}"
        end
        general_props[:obj_freq] = $1.to_i
      when /^ATTR\(([^)]+)\)$/
        opts = $1
        object_case, preposition = nil, nil
        case opts
        when /^([^,]+),(\d+)$/
          # "preposition,case" form
          preposition = $1.strip
          object_case = Integer($2)
        when /^\d+$/
          # bare case number
          object_case = Integer(opts)
        else
          raise ParseError, "wrong option format for #{line}: '#{part}'"
        end
        attributes << NounObject.new(object_case, preposition)
      else puts "warn: unknown option #{part}"
      end
    end
    Noun.new(text,gram_props,frequency,gender,general_props,number,person,animate,attributes,suffix)
  rescue GramObjectError, NounError, ArgumentError
    raise ParseError, "cannot parse '#{line}': #{$!.message}"
  end
  # returns an Enumerable collection of all applicable grammar forms
  # (every case/number combination)
  def all_forms
    retval = []
    [1,2].each do |number|
      CASES.each do |gram_case|
        retval << {:case => gram_case, :number => number}
      end
    end
    retval
  end
  # Inflects the noun for the given form, appending the suffix if any.
  def inflect(grammar,form)
    # default to the noun's own number when the form does not force one
    form[:number] ||= @number
    inflected = grammar.inflect_noun(text,form,*gram_props)
    inflected += ' ' + @suffix if (@suffix)
    inflected
  end
  def to_s
    "Noun(#{text} n=#{number})"
  end
private
  # raised internally on invalid noun construction; converted to ParseError
  class NounError < RuntimeError
  end
end
# Abstract base class for a grammatical object attached to a word in a
# sentence. Concrete subclasses flip exactly one of the three flags.
class GramObject
  def initialize
    @is_noun = false
    @is_adjective = false
    @is_infinitive = false
  end

  # True when the object is a noun phrase.
  def is_noun?
    @is_noun
  end

  # True when the object is an adjective.
  def is_adjective?
    @is_adjective
  end

  # True when the object is an infinitive clause.
  def is_infinitive?
    @is_infinitive
  end
end
# Common exception thrown by GramObject descendants when given invalid
# data (e.g. an unknown grammatical case).
class GramObjectError < RuntimeError
end
# A noun-phrase object of a word, in a given grammatical case and
# optionally governed by a preposition.
class NounObject < GramObject
  attr_reader :case, :preposition

  # noun_case   - one of the grammatical cases listed in CASES
  # preposition - optional preposition string, or nil
  #
  # Raises GramObjectError for an unknown case. Validation now happens
  # BEFORE any state is assigned, so a failed construction leaves no
  # partially initialised object behind (the original assigned first).
  def initialize(noun_case, preposition=nil)
    super()
    raise GramObjectError, "invalid case: #{noun_case}" unless CASES.include?(noun_case)
    @case, @preposition, @is_noun = noun_case, preposition, true
  end
end
# Marker object: the word takes an adjective as its object
# (see GramObject#is_adjective?).
class AdjectiveObject < GramObject
  def initialize
    super()
    # GramObject starts every flag as false; flip only this one.
    @is_adjective = true
  end
end
# Marker object: the word takes an infinitive clause as its object,
# optionally introduced by a preposition.
class InfinitiveObject < GramObject
  attr_reader :preposition

  # preposition - optional preposition string, or nil
  def initialize(preposition=nil)
    super()
    @is_infinitive = true
    @preposition = preposition
  end
end
# A verb entry from the dictionary.
class Verb < Word
  attr_reader :objects, :reflexive
  # text          - infinitive form of the verb
  # gram_props    - grammar properties forwarded to the inflection engine
  # frequency     - relative frequency weight
  # general_props - word-level options collected by Word.parse
  # reflexive     - true when the verb is used with a reflexive pronoun
  # objects       - list of GramObject instances describing allowed objects
  # suffix        - optional text appended after every inflected form
  def initialize(text,gram_props,frequency,general_props={},reflexive=false,
    objects=[],suffix=nil)
    super(text,gram_props,general_props,frequency)
    @reflexive,@objects,@suffix = reflexive,objects,suffix
  end
  # Parses verb-specific options from a dictionary line.
  # Recognised options: REFL/REFLEX/REFLEXIVE, INF or INF(prep), ADJ,
  # SUFFIX(text), OBJ(case) and OBJ(preposition,case).
  # Raises ParseError on malformed input.
  def Verb.parse(text,gram_props,frequency,line)
    reflexive, suffix = false, nil
    objects = []
    general_props = {}
    Word.parse(line,general_props) do |part|
      case part
      when /^REFL(?:EXIVE|EX)?$/ then reflexive = true
      when /^INF(?:\(([^)]+)\))?$/
        # the preposition inside the parentheses is optional ($1 may be nil)
        objects << InfinitiveObject.new($1)
      when 'ADJ' then objects << AdjectiveObject.new
      when /^SUFFIX\(([^)]+)\)$/
        suffix = $1
      when /^OBJ\(([^)]+)\)$/
        opts = $1
        object_case, preposition = nil, nil
        case opts
        when /^([^,]+),(\d+)$/
          # "preposition,case" form
          preposition = $1.strip
          object_case = Integer($2)
        when /^\d+$/
          # bare case number
          object_case = Integer(opts)
        else
          raise ParseError, "wrong option format for #{line}: '#{part}'"
        end
        objects << NounObject.new(object_case, preposition)
      else
        puts "warn: unknown option '#{part}' for '#{text}'"
      end
    end
    Verb.new(text,gram_props,frequency,general_props,reflexive,
      objects,suffix)
  rescue VerbError, GramObjectError => e
    # re-raise construction errors as parse errors for the caller
    raise ParseError, e.message
  end
  # Inflects the verb text for the given form, appending the suffix if any.
  def inflect(grammar,form)
    inflected = grammar.inflect_verb(text,form,@reflexive,*gram_props)
    inflected += ' ' + @suffix if (@suffix)
    inflected
  end
  # returns an Enumerable collection of all applicable grammar forms
  # (every person/number combination plus the infinitive)
  def all_forms
    retval = []
    [1,2].each do |number|
      [1,2,3].each do |person|
        retval << {:person => person, :number => number}
      end
    end
    retval << {:infinitive =>1 }
    retval
  end
  def to_s
    result = "Verb(#{text}"
    result += ' reflexive' if reflexive;
    result += ')'
    result
  end
private
  # raised internally on invalid verb construction; converted to ParseError
  class VerbError < RuntimeError
  end
end
# An adverb entry from the dictionary. Adverbs accept only the generic
# word-level options handled by Word.parse.
class Adverb < Word
  def initialize(text,gram_props,frequency,general_props={})
    super(text,gram_props,general_props,frequency)
  end

  # Parses the remainder of a dictionary line into a new Adverb.
  def self.parse(text,gram_props,frequency,line)
    props = {}
    Word.parse(line, props)
    new(text, gram_props, frequency, props)
  end

  def to_s
    "Adverb(#{text})"
  end
end
# A catch-all word that is never inflected. It carries no grammar
# properties and accepts no dictionary options.
class Other < Word
  def initialize(text,gram_props,frequency)
    super(text,gram_props,{},frequency)
  end

  # Rejects any grammar properties or trailing options on the line.
  def self.parse(text,gram_props,frequency,line)
    unless gram_props.empty?
      raise ParseError, "does not expect any grammar properties for other but got '#{gram_props}'"
    end
    if line && line =~ /\w/
      raise ParseError, "does not expect other properties for other but got '#{line}'"
    end
    new(text, gram_props, frequency)
  end

  def to_s
    "OtherWord(#{text})"
  end
end
# An adjective entry from the dictionary.
class Adjective < Word
  attr_reader :attributes, :double
  # double     - set by the DOUBLE and POSS dictionary options
  # attributes - at most one NounObject attribute
  def initialize(text,gram_props,frequency,double=false,attributes=[],general_props={})
    super(text,gram_props,general_props,frequency)
    if attributes.size > 1
      raise AdjectiveError, "not allowed to have more than 1 attribute"
    end
    @attributes,@double=attributes,double
  end
  # Parses adjective-specific options from a dictionary line.
  # Recognised options: NOT_AS_OBJ, DOUBLE, POSS, ATTR(case) and
  # ATTR(preposition,case). Raises ParseError on malformed input.
  def Adjective.parse(text,gram_props,frequency,line)
    general_props = {}
    double = false
    attributes = []
    Word.parse(line,general_props) do |part|
      case part
      when 'NOT_AS_OBJ' then general_props[:not_as_object] = true
      when 'DOUBLE' then double = true
      when 'POSS' then double = true
      when /^ATTR\(([^)]+)\)$/
        opts = $1
        object_case, preposition = nil, nil
        case opts
        when /^([^,]+),(\d+)$/
          # "preposition,case" form
          preposition = $1.strip
          object_case = Integer($2)
        when /^\d+$/
          # bare case number
          object_case = Integer(opts)
        else
          raise ParseError, "wrong option format for #{line}: '#{part}'"
        end
        attributes << NounObject.new(object_case, preposition)
      else puts "warn: unknown option #{part}"
      end
    end
    Adjective.new(text,gram_props,frequency,double,attributes,general_props)
  rescue GramObjectError, AdjectiveError => e
    raise ParseError, e.message
  end
  # returns an Enumerable collection of all applicable grammar forms.
  # The animate/inanimate distinction only applies to masculine forms,
  # so other genders get a single form per case/number.
  def all_forms
    retval = []
    GENDERS.each do |gender|
      [1,2].each do |number|
        CASES.each do |gram_case|
          form = {:case => gram_case, :number => number, :gender => gender}
          if gender == MASCULINE
            retval << form.merge({:animate => true})
            retval << form.merge({:animate => false})
          else
            retval << form
          end
        end
      end
    end
    retval
  end
  def inflect(grammar,form)
    return grammar.inflect_adjective(text,form,*gram_props)
  end
  def to_s
    "Adjective(#{text})"
  end
private
  # raised internally on invalid adjective construction; converted to ParseError
  class AdjectiveError < RuntimeError
  end
end
# Namespace-style utility class; never instantiated.
class Words
  private_class_method :new

  # Returns the Word subclass that parses the given speech-part constant.
  def self.get_class(speech_part)
    case speech_part
    when NOUN      then Noun
    when VERB      then Verb
    when ADJECTIVE then Adjective
    when ADVERB    then Adverb
    when OTHER     then Other
    else raise "unknown speech part: #{speech_part}"
    end
  end
end
end
|
# Homebrew formula for GNU PSPP 0.8.4, a free replacement for SPSS.
class Pspp < Formula
  homepage "https://www.gnu.org/software/pspp/"
  url "https://ftpmirror.gnu.org/pspp/pspp-0.8.4.tar.gz"
  mirror "https://ftp.gnu.org/gnu/pspp/pspp-0.8.4.tar.gz"
  sha256 "bfcc3b0e98a5e2d44b4f3383e52c1a26b7eacf5bf419786f49fa16d00bc1c52c"
  revision 1

  option "without-check", "Skip running the PSPP test suite"
  option "without-gui", "Build without gui support"

  depends_on "pkg-config" => :build
  depends_on "gsl"
  depends_on "glib"
  depends_on "gettext"
  depends_on "readline"
  depends_on "libxml2"
  depends_on "cairo"
  depends_on "pango"
  depends_on "postgresql" => :optional

  # GUI builds additionally pull in the GTK+ stack.
  if build.with? "gui"
    depends_on "gtk+"
    depends_on "gtksourceview"
    depends_on "freetype"
    depends_on "fontconfig"
  end

  # Test-suite fixes embedded after __END__ (see the DATA section below).
  patch :DATA

  def install
    args = ["--disable-rpath"]
    args << "--without-libpq" if build.without? "postgresql"
    args << "--without-gui" if build.without? "gui"
    system "./configure", "--prefix=#{prefix}", *args
    system "make"
    # the upstream test suite is slow; skippable via --without-check
    system "make", "check" if build.with? "check"
    system "make", "install"
  end

  test do
    system "#{bin}/pspp", "--version"
  end
end
__END__
diff --git a/tests/language/stats/frequencies.at b/tests/language/stats/frequencies.at
index d321e57..90d54f5 100644
--- a/tests/language/stats/frequencies.at
+++ b/tests/language/stats/frequencies.at
@@ -180,7 +180,7 @@ frequencies /x
])
# Cannot use the CSV driver for this because it does not output charts
# at all.
-AT_CHECK([pspp -O format=pdf frequencies.sps], [0], [ignore])
+AT_CHECK([pspp -O format=pdf frequencies.sps], [0], [ignore], [ignore])
AT_CLEANUP
# Tests for a bug which crashed PSPP when the median and a histogram
diff --git a/tests/language/data-io/get-data-psql.at b/tests/language/data-io/get-data-psql.at
index 692de09..ea9b222 100644
--- a/tests/language/data-io/get-data-psql.at
+++ b/tests/language/data-io/get-data-psql.at
@@ -12,7 +12,7 @@ m4_define([INIT_PSQL],
PGHOST="$socket_dir"
export PGHOST
AT_CHECK([initdb -A trust], [0], [ignore])
- AT_CHECK([pg_ctl start -w -o "-k $socket_dir -h ''"], [0], [ignore])
+ AT_CHECK([pg_ctl start -w -o "-k $socket_dir -h 'example.com'"], [0], [ignore])
trap 'CLEANUP_PSQL' 0
AT_CHECK([createdb -h "$socket_dir" -p $PG_PORT $PG_DBASE],
[0], [ignore], [ignore])
pspp: add 0.8.4_1 bottle.
# Homebrew formula for GNU PSPP 0.8.4, a free replacement for SPSS.
class Pspp < Formula
  homepage "https://www.gnu.org/software/pspp/"
  url "https://ftpmirror.gnu.org/pspp/pspp-0.8.4.tar.gz"
  mirror "https://ftp.gnu.org/gnu/pspp/pspp-0.8.4.tar.gz"
  sha256 "bfcc3b0e98a5e2d44b4f3383e52c1a26b7eacf5bf419786f49fa16d00bc1c52c"
  revision 1

  # Pre-built binary bottles for this revision.
  bottle do
    root_url "https://homebrew.bintray.com/bottles-science"
    sha256 "de81367499dae0031cbab3a3421dc2817f884a1e7ba92a69aa6707e00ef9d63e" => :yosemite
    sha256 "78e70c08f4f537fafd7c0f297926bb17776c89acc3b1318125b94e9398090db4" => :mavericks
    sha256 "a66ac66e4dbf4426ef2ecb849b9f02462db0f0715a5726dae0111adb01303c31" => :mountain_lion
  end

  option "without-check", "Skip running the PSPP test suite"
  option "without-gui", "Build without gui support"

  depends_on "pkg-config" => :build
  depends_on "gsl"
  depends_on "glib"
  depends_on "gettext"
  depends_on "readline"
  depends_on "libxml2"
  depends_on "cairo"
  depends_on "pango"
  depends_on "postgresql" => :optional

  # GUI builds additionally pull in the GTK+ stack.
  if build.with? "gui"
    depends_on "gtk+"
    depends_on "gtksourceview"
    depends_on "freetype"
    depends_on "fontconfig"
  end

  # Test-suite fixes embedded after __END__ (see the DATA section below).
  patch :DATA

  def install
    args = ["--disable-rpath"]
    args << "--without-libpq" if build.without? "postgresql"
    args << "--without-gui" if build.without? "gui"
    system "./configure", "--prefix=#{prefix}", *args
    system "make"
    # the upstream test suite is slow; skippable via --without-check
    system "make", "check" if build.with? "check"
    system "make", "install"
  end

  test do
    system "#{bin}/pspp", "--version"
  end
end
__END__
diff --git a/tests/language/stats/frequencies.at b/tests/language/stats/frequencies.at
index d321e57..90d54f5 100644
--- a/tests/language/stats/frequencies.at
+++ b/tests/language/stats/frequencies.at
@@ -180,7 +180,7 @@ frequencies /x
])
# Cannot use the CSV driver for this because it does not output charts
# at all.
-AT_CHECK([pspp -O format=pdf frequencies.sps], [0], [ignore])
+AT_CHECK([pspp -O format=pdf frequencies.sps], [0], [ignore], [ignore])
AT_CLEANUP
# Tests for a bug which crashed PSPP when the median and a histogram
diff --git a/tests/language/data-io/get-data-psql.at b/tests/language/data-io/get-data-psql.at
index 692de09..ea9b222 100644
--- a/tests/language/data-io/get-data-psql.at
+++ b/tests/language/data-io/get-data-psql.at
@@ -12,7 +12,7 @@ m4_define([INIT_PSQL],
PGHOST="$socket_dir"
export PGHOST
AT_CHECK([initdb -A trust], [0], [ignore])
- AT_CHECK([pg_ctl start -w -o "-k $socket_dir -h ''"], [0], [ignore])
+ AT_CHECK([pg_ctl start -w -o "-k $socket_dir -h 'example.com'"], [0], [ignore])
trap 'CLEANUP_PSQL' 0
AT_CHECK([createdb -h "$socket_dir" -p $PG_PORT $PG_DBASE],
[0], [ignore], [ignore])
|
# Homebrew formula for GNU PSPP 0.10.2, a free replacement for SPSS.
class Pspp < Formula
  desc "Statistical analysis of sampled data (FOSS SPSS clone)"
  homepage "https://www.gnu.org/software/pspp/"
  # Fetch over HTTPS: the GNU mirror service supports it, the fallback
  # mirror already uses it, and earlier revisions of this formula did too.
  url "https://ftpmirror.gnu.org/pspp/pspp-0.10.2.tar.gz"
  mirror "https://ftp.gnu.org/gnu/pspp/pspp-0.10.2.tar.gz"
  sha256 "f77cacae6948689a60f1a5808a5d2e183c1cd0847c7fc6142646c63814c0daa9"

  # Pre-built binary bottles.
  bottle do
    sha256 "bc080b73c3eec57444db490c9bed2a78378d282818716de6f9379e5b0744ff14" => :el_capitan
    sha256 "e3f0e23ec13b11c61b46e88946c99737f9da99a5c59776816edbd076cbb91b19" => :yosemite
    sha256 "4ea5e8f5ed67e4b8ff74168385be5efe8b1c83fc51d07ac439a3be58bab5a9bf" => :mavericks
  end

  option "without-test", "Skip running the PSPP test suite"
  option "without-gui", "Build without GUI support"
  deprecated_option "without-check" => "without-test"

  depends_on "pkg-config" => :build
  depends_on "texinfo" => :build
  depends_on "gsl"
  depends_on "glib"
  depends_on "gettext"
  depends_on "readline"
  depends_on "libxml2"
  depends_on "cairo"
  depends_on "pango"
  depends_on "postgresql" => :optional

  # GUI builds additionally pull in the GTK+ stack.
  if build.with? "gui"
    depends_on "gtk+"
    depends_on "gtksourceview3"
    depends_on "freetype"
    depends_on "fontconfig"
  end

  def install
    args = ["--disable-rpath"]
    args << "--without-libpq" if build.without? "postgresql"
    args << "--without-gui" if build.without? "gui"
    args << "--without-perl-module" # not built by default but tests run for it
    system "./configure", "--prefix=#{prefix}", *args
    system "make"
    # the upstream test suite is slow; skippable via --without-test
    system "make", "check" if build.with? "test"
    system "make", "install"
  end

  test do
    system "#{bin}/pspp", "--version"
  end
end
pspp: update 0.10.2 bottle.
# Homebrew formula for GNU PSPP 0.10.2, a free replacement for SPSS.
class Pspp < Formula
  desc "Statistical analysis of sampled data (FOSS SPSS clone)"
  homepage "https://www.gnu.org/software/pspp/"
  # Fetch over HTTPS: the GNU mirror service supports it, the fallback
  # mirror already uses it, and earlier revisions of this formula did too.
  url "https://ftpmirror.gnu.org/pspp/pspp-0.10.2.tar.gz"
  mirror "https://ftp.gnu.org/gnu/pspp/pspp-0.10.2.tar.gz"
  sha256 "f77cacae6948689a60f1a5808a5d2e183c1cd0847c7fc6142646c63814c0daa9"

  # Pre-built binary bottles.
  bottle do
    sha256 "29155939c2351f285697bf17bce5f25ea9c2704116bb9d633759ae05e0923eff" => :el_capitan
    sha256 "40577dce7827cd38caf880a7812ed2245d7e16de4c1fdd8daa2366360f308c23" => :yosemite
    sha256 "b667543806facb540c2ebadd90ad66d6c644f5651c50f585ca3ada036730f32b" => :mavericks
  end

  option "without-test", "Skip running the PSPP test suite"
  option "without-gui", "Build without GUI support"
  deprecated_option "without-check" => "without-test"

  depends_on "pkg-config" => :build
  depends_on "texinfo" => :build
  depends_on "gsl"
  depends_on "glib"
  depends_on "gettext"
  depends_on "readline"
  depends_on "libxml2"
  depends_on "cairo"
  depends_on "pango"
  depends_on "postgresql" => :optional

  # GUI builds additionally pull in the GTK+ stack.
  if build.with? "gui"
    depends_on "gtk+"
    depends_on "gtksourceview3"
    depends_on "freetype"
    depends_on "fontconfig"
  end

  def install
    args = ["--disable-rpath"]
    args << "--without-libpq" if build.without? "postgresql"
    args << "--without-gui" if build.without? "gui"
    args << "--without-perl-module" # not built by default but tests run for it
    system "./configure", "--prefix=#{prefix}", *args
    system "make"
    # the upstream test suite is slow; skippable via --without-test
    system "make", "check" if build.with? "test"
    system "make", "install"
  end

  test do
    system "#{bin}/pspp", "--version"
  end
end
|
# Chef role applied to the host "bowser": networking, PostgreSQL tuning,
# kernel shared-memory limits and the tile-rendering stack.
name "bowser"
description "Master role applied to bowser"
default_attributes(
  # External connectivity over a two-port bonded interface.
  :networking => {
    :interfaces => {
      :external_ipv4 => {
        :interface => "bond0",
        :role => :external,
        :family => :inet,
        :address => "138.44.68.106",
        :prefix => "30",
        :gateway => "138.44.68.105",
        :bond => {
          :slaves => %w[ens14f0 ens14f1]
        }
      }
    }
  },
  # PostgreSQL 10 with memory settings tuned for this host.
  :postgresql => {
    :versions => ["10"],
    :settings => {
      :defaults => {
        :shared_buffers => "8GB",
        :maintenance_work_mem => "7144MB",
        :effective_cache_size => "16GB"
      }
    }
  },
  # Raise kernel shared-memory limits to cover the postgres settings
  # above (shmmax in bytes; shmall divided down to 4 KiB pages).
  :sysctl => {
    :postgres => {
      :comment => "Increase shared memory for postgres",
      :parameters => {
        "kernel.shmmax" => 9 * 1024 * 1024 * 1024,
        "kernel.shmall" => 9 * 1024 * 1024 * 1024 / 4096
      }
    }
  },
  # Tile rendering: database cluster, PostGIS version and the on-disk
  # tile cache for the default style.
  :tile => {
    :database => {
      :cluster => "10/main",
      :postgis => "2.4"
    },
    :styles => {
      :default => {
        :tile_directories => [
          { :name => "/store/tiles/default", :min_zoom => 0, :max_zoom => 19 }
        ]
      }
    }
  }
)
run_list(
  "role[aarnet]",
  "role[tile]"
)
Update bowser to postgres 12, postgis 3 and carto 5.0.0
# Chef role applied to the host "bowser": networking, PostgreSQL tuning,
# kernel shared-memory limits and the tile-rendering stack.
name "bowser"
description "Master role applied to bowser"
default_attributes(
  # External connectivity over a two-port bonded interface.
  :networking => {
    :interfaces => {
      :external_ipv4 => {
        :interface => "bond0",
        :role => :external,
        :family => :inet,
        :address => "138.44.68.106",
        :prefix => "30",
        :gateway => "138.44.68.105",
        :bond => {
          :slaves => %w[ens14f0 ens14f1]
        }
      }
    }
  },
  # PostgreSQL 12 with memory settings tuned for this host.
  :postgresql => {
    :versions => ["12"],
    :settings => {
      :defaults => {
        :shared_buffers => "8GB",
        :maintenance_work_mem => "7144MB",
        :effective_cache_size => "16GB"
      }
    }
  },
  # Raise kernel shared-memory limits to cover the postgres settings
  # above (shmmax in bytes; shmall divided down to 4 KiB pages).
  :sysctl => {
    :postgres => {
      :comment => "Increase shared memory for postgres",
      :parameters => {
        "kernel.shmmax" => 9 * 1024 * 1024 * 1024,
        "kernel.shmall" => 9 * 1024 * 1024 * 1024 / 4096
      }
    }
  },
  # Tile rendering: database cluster, PostGIS version and the pinned
  # stylesheet revision for the default style.
  :tile => {
    :database => {
      :cluster => "12/main",
      :postgis => "3"
    },
    :styles => {
      :default => {
        :revision => "v5.0.0",
        :tile_directories => [
          { :name => "/store/tiles/default", :min_zoom => 0, :max_zoom => 19 }
        ]
      }
    }
  }
)
run_list(
  "role[aarnet]",
  "role[tile]"
)
|
# Chef role applied to the host "ridley" (UCL gateway machine; see the
# gateway/ucl roles in the run list).
name "ridley"
description "Master role applied to ridley"
default_attributes(
  # DHCP address pool handed out by this host.
  :dhcpd => {
    :first_address => "10.0.15.1",
    :last_address => "10.0.15.254"
  },
  # Munin graph summing the load of the three APC PDUs.
  :munin => {
    :graphs => {
      :apcpdu_ucl => {
        :title => "Current for UCL",
        :vlabel => "Amps",
        :category => "Ups",
        :values => {
          :load => {
            :sum => ["apcpdu_apc1.load", "apcpdu_apc2.load", "apcpdu_apc3.load"],
            :label => "Load"
          }
        }
      }
    }
  },
  # External and internal addresses on tagged VLAN sub-interfaces.
  :networking => {
    :interfaces => {
      :external_ipv4 => {
        :interface => "eth0.2800",
        :role => :external,
        :family => :inet,
        :address => "193.60.236.19"
      },
      :internal_ipv4 => {
        :interface => "eth0.2801",
        :role => :internal,
        :family => :inet,
        :address => "10.0.0.3"
      }
    }
  },
  # OpenVPN client tunnels to the other site gateways.
  :openvpn => {
    :address => "10.0.16.1",
    :tunnels => {
      :ic2ucl => {
        :port => "1194",
        :mode => "client",
        :peer => {
          :host => "ironbelly.openstreetmap.org",
          :port => "1194"
        }
      },
      :shenron2ucl => {
        :port => "1195",
        :mode => "client",
        :peer => {
          :host => "shenron.openstreetmap.org",
          :port => "1194"
        }
      },
      :ucl2bm => {
        :port => "1196",
        :mode => "client",
        :peer => {
          :host => "grisu.openstreetmap.org",
          :port => "1196"
        }
      },
      :firefishy => {
        :port => "1197",
        :mode => "client",
        :peer => {
          :host => "home.firefishy.com",
          :port => "1194",
          :address => "10.0.16.201"
        }
      }
    }
  }
)
run_list(
  "role[ucl]",
  "role[hp-dl360-g6]",
  "role[gateway]",
  "role[foundation]",
  "role[stateofthemap]",
  "role[switch2osm]",
  "role[blog]",
  "role[otrs]",
  "role[donate]",
  "recipe[dhcpd]",
  "recipe[openvpn]"
)
Upgrade ridley to chef 13
# Chef role applied to the host "ridley" (UCL gateway machine; see the
# gateway/ucl roles in the run list).
name "ridley"
description "Master role applied to ridley"
default_attributes(
  # Pin the chef client version on this host.
  :chef => {
    :client => {
      :version => "13.6.4"
    }
  },
  # DHCP address pool handed out by this host.
  :dhcpd => {
    :first_address => "10.0.15.1",
    :last_address => "10.0.15.254"
  },
  # Munin graph summing the load of the three APC PDUs.
  :munin => {
    :graphs => {
      :apcpdu_ucl => {
        :title => "Current for UCL",
        :vlabel => "Amps",
        :category => "Ups",
        :values => {
          :load => {
            :sum => ["apcpdu_apc1.load", "apcpdu_apc2.load", "apcpdu_apc3.load"],
            :label => "Load"
          }
        }
      }
    }
  },
  # External and internal addresses on tagged VLAN sub-interfaces.
  :networking => {
    :interfaces => {
      :external_ipv4 => {
        :interface => "eth0.2800",
        :role => :external,
        :family => :inet,
        :address => "193.60.236.19"
      },
      :internal_ipv4 => {
        :interface => "eth0.2801",
        :role => :internal,
        :family => :inet,
        :address => "10.0.0.3"
      }
    }
  },
  # OpenVPN client tunnels to the other site gateways.
  :openvpn => {
    :address => "10.0.16.1",
    :tunnels => {
      :ic2ucl => {
        :port => "1194",
        :mode => "client",
        :peer => {
          :host => "ironbelly.openstreetmap.org",
          :port => "1194"
        }
      },
      :shenron2ucl => {
        :port => "1195",
        :mode => "client",
        :peer => {
          :host => "shenron.openstreetmap.org",
          :port => "1194"
        }
      },
      :ucl2bm => {
        :port => "1196",
        :mode => "client",
        :peer => {
          :host => "grisu.openstreetmap.org",
          :port => "1196"
        }
      },
      :firefishy => {
        :port => "1197",
        :mode => "client",
        :peer => {
          :host => "home.firefishy.com",
          :port => "1194",
          :address => "10.0.16.201"
        }
      }
    }
  }
)
run_list(
  "role[ucl]",
  "role[hp-dl360-g6]",
  "role[gateway]",
  "role[foundation]",
  "role[stateofthemap]",
  "role[switch2osm]",
  "role[blog]",
  "role[otrs]",
  "role[donate]",
  "recipe[dhcpd]",
  "recipe[openvpn]"
)
|
# CocoaPods spec for FlyImage, an image download/cache/render library.
Pod::Spec.new do |s|
  s.name = "FlyImage"
  s.version = "1.0"
  s.summary = "Download, cache, render small images with UIImageView category"
  # Fixed typos in the published description:
  # "library.Features" -> "library. Features", "performace" -> "performance".
  s.description = 'FlyImage takes the advantages of SDWebImage, FastImageCache and AFNetworking, ' \
    'is a simple and high performance image library. Features: ' \
    'High Performance, reduce memory operations while rendering, avoid Memory warning caused by image; ' \
    'Store and retrieve different size of small images in one memory file, smooth scrolling; ' \
    'Simple, support UIImageView, CALayer category; ' \
    'An asynchronous image downloader; ' \
    'Support WebP format; ' \
    'Support mmap to improve I/O performance;'
  s.homepage = "https://github.com/northwind/FlyImage"
  s.license = "MIT"
  s.author = { "norristong" => "norristong_x@qq.com" }
  s.platform = :ios, "8.0"
  s.source = { :git => 'https://github.com/northwind/FlyImage.git', :tag => s.version.to_s }
  s.source_files = "FlyImage", "FlyImage/**/*.{h,m}"
  s.frameworks = "ImageIO", 'UIKit'
  s.requires_arc = true
  s.dependency 'AFNetworking', '~> 3.1'

  # Core is the default subspec; WebP adds libwebp support behind the
  # FLYIMAGE_WEBP preprocessor flag.
  s.default_subspec = 'Core'
  s.subspec 'Core' do |core|
    core.source_files = "FlyImage", 'FlyImage/**/*.{h,m}'
  end
  s.subspec 'WebP' do |webp|
    webp.xcconfig = {
      'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) FLYIMAGE_WEBP=1',
      'USER_HEADER_SEARCH_PATHS' => '$(inherited) $(SRCROOT)/libwebp/src'
    }
    webp.watchos.xcconfig = {
      'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) FLYIMAGE_WEBP=1',
      'USER_HEADER_SEARCH_PATHS' => '$(inherited) $(SRCROOT)/libwebp/src'
    }
    webp.dependency 'FlyImage/Core'
    webp.dependency 'libwebp'
  end
end
[MOD] Bump version to 1.1
# CocoaPods spec for FlyImage, an image download/cache/render library.
Pod::Spec.new do |s|
  s.name = "FlyImage"
  s.version = "1.1"
  s.summary = "Download, cache, render small images with UIImageView category"
  # Fixed typos in the published description:
  # "library.Features" -> "library. Features", "performace" -> "performance".
  s.description = 'FlyImage takes the advantages of SDWebImage, FastImageCache and AFNetworking, ' \
    'is a simple and high performance image library. Features: ' \
    'High Performance, reduce memory operations while rendering, avoid Memory warning caused by image; ' \
    'Store and retrieve different size of small images in one memory file, smooth scrolling; ' \
    'Simple, support UIImageView, CALayer category; ' \
    'An asynchronous image downloader; ' \
    'Support WebP format; ' \
    'Support mmap to improve I/O performance;'
  s.homepage = "https://github.com/northwind/FlyImage"
  s.license = "MIT"
  s.author = { "norristong" => "norristong_x@qq.com" }
  s.platform = :ios, "8.0"
  s.source = { :git => 'https://github.com/northwind/FlyImage.git', :tag => s.version.to_s }
  s.source_files = "FlyImage", "FlyImage/**/*.{h,m}"
  s.frameworks = "ImageIO", 'UIKit'
  s.requires_arc = true
  s.dependency 'AFNetworking', '~> 3.1'

  # Core is the default subspec; WebP adds libwebp support behind the
  # FLYIMAGE_WEBP preprocessor flag.
  s.default_subspec = 'Core'
  s.subspec 'Core' do |core|
    core.source_files = "FlyImage", 'FlyImage/**/*.{h,m}'
  end
  s.subspec 'WebP' do |webp|
    webp.xcconfig = {
      'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) FLYIMAGE_WEBP=1',
      'USER_HEADER_SEARCH_PATHS' => '$(inherited) $(SRCROOT)/libwebp/src'
    }
    webp.watchos.xcconfig = {
      'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) FLYIMAGE_WEBP=1',
      'USER_HEADER_SEARCH_PATHS' => '$(inherited) $(SRCROOT)/libwebp/src'
    }
    webp.dependency 'FlyImage/Core'
    webp.dependency 'libwebp'
  end
end
|
# Homebrew formula for ABySS 2.2.1, a de novo genome sequence assembler.
class Abyss < Formula
  desc "Genome sequence assembler for short reads"
  homepage "http://www.bcgsc.ca/platform/bioinfo/software/abyss"
  url "https://github.com/bcgsc/abyss/releases/download/2.2.1/abyss-2.2.1.tar.gz"
  sha256 "838c478b0fb5092e508f0253e213a820cd3faaa45546236f43b87a7194aa2cdf"

  # Pre-built binary bottles.
  bottle do
    cellar :any
    sha256 "b36e77523181b9af7ad7453e32d409e070d2dc9ba40671d37da0d89ccb4f7948" => :catalina
    sha256 "c97c5da6397f990889bc108183aeba752d7dfa0d096ee1362ae5b66352ce08d6" => :mojave
    sha256 "64db8abd2422f7c484a94e3912e9fc607027d47c3491fe533bf4d77a0f30ef3a" => :high_sierra
    sha256 "fe96208a98f962f62ccd572846d817742eef39b20ee8432dd950587b5a919429" => :sierra
  end

  # HEAD builds come from git and need the autotools toolchain.
  head do
    url "https://github.com/bcgsc/abyss.git"
    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "multimarkdown" => :build
  end

  depends_on "boost" => :build
  depends_on "google-sparsehash" => :build
  depends_on "gcc"
  depends_on "open-mpi"

  # build with GCC instead of Apple clang
  fails_with :clang # no OpenMP support

  # Small FASTQ dataset used by the test block below.
  resource("testdata") do
    url "http://www.bcgsc.ca/platform/bioinfo/software/abyss/releases/1.3.4/test-data.tar.gz"
    sha256 "28f8592203daf2d7c3b90887f9344ea54fda39451464a306ef0226224e5f4f0e"
  end

  def install
    system "./autogen.sh" if build.head?
    system "./configure", "--enable-maxk=128",
      "--prefix=#{prefix}",
      "--with-boost=#{Formula["boost"].include}",
      "--with-mpi=#{Formula["open-mpi"].prefix}",
      "--with-sparsehash=#{Formula["google-sparsehash"].prefix}",
      "--disable-dependency-tracking",
      "--disable-silent-rules"
    system "make", "install"
  end

  test do
    # run a tiny assembly end-to-end on the bundled test reads
    testpath.install resource("testdata")
    system "#{bin}/abyss-pe", "k=25", "name=ts", "in=reads1.fastq reads2.fastq"
    system "#{bin}/abyss-fac", "ts-unitigs.fa"
  end
end
abyss 2.2.3
abyss.rb: fix test data link
abyss.rb: fix abyss homepage hyperlink
abyss.rb: delete HOMEBREW_SDKROOT if macOS >= mojave
abyss.rb: use gsc software hyperlink
Closes #47076.
Signed-off-by: Rui Chen <5fd29470147430022ff146db88de16ee91dea376@gmail.com>
# Homebrew formula for ABySS 2.2.3, a de novo genome sequence assembler.
class Abyss < Formula
  desc "Genome sequence assembler for short reads"
  homepage "https://www.bcgsc.ca/resources/gsc-software-centre"
  url "https://github.com/bcgsc/abyss/releases/download/2.2.3/abyss-2.2.3.tar.gz"
  sha256 "ac7679ececbdd89cc050998eae31fa5f8bf7cdab6a0c05eb5eb1e3867c7e75cb"

  # Pre-built binary bottles.
  bottle do
    cellar :any
    sha256 "b36e77523181b9af7ad7453e32d409e070d2dc9ba40671d37da0d89ccb4f7948" => :catalina
    sha256 "c97c5da6397f990889bc108183aeba752d7dfa0d096ee1362ae5b66352ce08d6" => :mojave
    sha256 "64db8abd2422f7c484a94e3912e9fc607027d47c3491fe533bf4d77a0f30ef3a" => :high_sierra
    sha256 "fe96208a98f962f62ccd572846d817742eef39b20ee8432dd950587b5a919429" => :sierra
  end

  # HEAD builds come from git and need the autotools toolchain.
  head do
    url "https://github.com/bcgsc/abyss.git"
    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "multimarkdown" => :build
  end

  depends_on "boost" => :build
  depends_on "google-sparsehash" => :build
  depends_on "gcc"
  depends_on "open-mpi"

  # build with GCC instead of Apple clang
  fails_with :clang # no OpenMP support

  # Small FASTQ dataset used by the test block below.
  resource("testdata") do
    url "https://www.bcgsc.ca/sites/default/files/bioinformatics/software/abyss/releases/1.3.4/test-data.tar.gz"
    sha256 "28f8592203daf2d7c3b90887f9344ea54fda39451464a306ef0226224e5f4f0e"
  end

  def install
    # keep the CLT SDK out of the GCC build on Mojave and later
    # (presumably avoids header conflicts -- TODO confirm)
    ENV.delete("HOMEBREW_SDKROOT") if MacOS.version >= :mojave && MacOS::CLT.installed?
    system "./autogen.sh" if build.head?
    system "./configure", "--enable-maxk=128",
      "--prefix=#{prefix}",
      "--with-boost=#{Formula["boost"].include}",
      "--with-mpi=#{Formula["open-mpi"].prefix}",
      "--with-sparsehash=#{Formula["google-sparsehash"].prefix}",
      "--disable-dependency-tracking",
      "--disable-silent-rules"
    system "make", "install"
  end

  test do
    # run a tiny assembly end-to-end on the bundled test reads
    testpath.install resource("testdata")
    system "#{bin}/abyss-pe", "k=25", "name=ts", "in=reads1.fastq reads2.fastq"
    system "#{bin}/abyss-fac", "ts-unitigs.fa"
  end
end
|
# Homebrew formula for ahcpd, the Ad-Hoc Configuration Protocol daemon.
class Ahcpd < Formula
  desc "Autoconfiguration protocol for IPv6 and IPv6/IPv4 networks"
  homepage "https://www.irif.univ-paris-diderot.fr/~jch/software/ahcp/"
  url "https://www.irif.univ-paris-diderot.fr/~jch/software/files/ahcpd-0.53.tar.gz"
  sha256 "a4622e817d2b2a9b878653f085585bd57f3838cc546cca6028d3b73ffcac0d52"

  bottle do
    cellar :any_skip_relocation
    sha256 "ab3221a9f28ded916f8d2ef4b8377a2a793fa2fee5f891b9a97e3dede0d294ae" => :high_sierra
    sha256 "d3a8a4efb712e2c6a8a055276e5d93d3275a638df4231a4dfe8d428a2606d776" => :sierra
    sha256 "b37143ee365a4a3afd9623d5f49eab0bc4bdf9ac3662d22db9671cffa1078224" => :el_capitan
    sha256 "36907bc1aadc9d9d874ebd74624d8c2c2e8b4057181df1e964720a41f72ccae8" => :yosemite
    sha256 "8518f82187d2b8d2bc24648bd072f19073e159abb2bdaf5418ad31e3ab966d0b" => :mavericks
  end

  # The inline diff after __END__ relocates the man page install path from
  # man/ to share/man/, where Homebrew expects manuals to live.
  patch :DATA

  def install
    # Build with an empty LDLIBS, overriding the Makefile's default link libraries.
    system "make", "LDLIBS=''"
    # PREFIX is emptied and TARGET pointed at the keg so the Makefile's
    # $(TARGET)$(PREFIX) paths resolve inside the Cellar.
    system "make", "install", "PREFIX=", "TARGET=#{prefix}"
  end
end
__END__
diff --git a/Makefile b/Makefile
index e52eeb7..28e1043 100644
--- a/Makefile
+++ b/Makefile
@@ -40,8 +40,8 @@ install.minimal: all
chmod +x $(TARGET)/etc/ahcp/ahcp-config.sh
install: all install.minimal
- mkdir -p $(TARGET)$(PREFIX)/man/man8/
- cp -f ahcpd.man $(TARGET)$(PREFIX)/man/man8/ahcpd.8
+ mkdir -p $(TARGET)$(PREFIX)/share/man/man8/
+ cp -f ahcpd.man $(TARGET)$(PREFIX)/share/man/man8/ahcpd.8
.PHONY: uninstall
@@ -49,7 +49,7 @@ uninstall:
-rm -f $(TARGET)$(PREFIX)/bin/ahcpd
-rm -f $(TARGET)$(PREFIX)/bin/ahcp-config.sh
-rm -f $(TARGET)$(PREFIX)/bin/ahcp-dummy-config.sh
- -rm -f $(TARGET)$(PREFIX)/man/man8/ahcpd.8
+ -rm -f $(TARGET)$(PREFIX)/share/man/man8/ahcpd.8
.PHONY: clean
ahcpd: update 0.53 bottle.
# Homebrew formula for ahcpd, the Ad-Hoc Configuration Protocol daemon.
class Ahcpd < Formula
  desc "Autoconfiguration protocol for IPv6 and IPv6/IPv4 networks"
  homepage "https://www.irif.univ-paris-diderot.fr/~jch/software/ahcp/"
  url "https://www.irif.univ-paris-diderot.fr/~jch/software/files/ahcpd-0.53.tar.gz"
  sha256 "a4622e817d2b2a9b878653f085585bd57f3838cc546cca6028d3b73ffcac0d52"

  bottle do
    cellar :any_skip_relocation
    sha256 "8852e7e5e11d6ea413657d012e4d49ca0d9ac406e56da6bf7c0daa6d4d788a16" => :mojave
    sha256 "ab3221a9f28ded916f8d2ef4b8377a2a793fa2fee5f891b9a97e3dede0d294ae" => :high_sierra
    sha256 "d3a8a4efb712e2c6a8a055276e5d93d3275a638df4231a4dfe8d428a2606d776" => :sierra
    sha256 "b37143ee365a4a3afd9623d5f49eab0bc4bdf9ac3662d22db9671cffa1078224" => :el_capitan
    sha256 "36907bc1aadc9d9d874ebd74624d8c2c2e8b4057181df1e964720a41f72ccae8" => :yosemite
    sha256 "8518f82187d2b8d2bc24648bd072f19073e159abb2bdaf5418ad31e3ab966d0b" => :mavericks
  end

  # The inline diff after __END__ relocates the man page install path from
  # man/ to share/man/, where Homebrew expects manuals to live.
  patch :DATA

  def install
    # Build with an empty LDLIBS, overriding the Makefile's default link libraries.
    system "make", "LDLIBS=''"
    # PREFIX is emptied and TARGET pointed at the keg so the Makefile's
    # $(TARGET)$(PREFIX) paths resolve inside the Cellar.
    system "make", "install", "PREFIX=", "TARGET=#{prefix}"
  end
end
__END__
diff --git a/Makefile b/Makefile
index e52eeb7..28e1043 100644
--- a/Makefile
+++ b/Makefile
@@ -40,8 +40,8 @@ install.minimal: all
chmod +x $(TARGET)/etc/ahcp/ahcp-config.sh
install: all install.minimal
- mkdir -p $(TARGET)$(PREFIX)/man/man8/
- cp -f ahcpd.man $(TARGET)$(PREFIX)/man/man8/ahcpd.8
+ mkdir -p $(TARGET)$(PREFIX)/share/man/man8/
+ cp -f ahcpd.man $(TARGET)$(PREFIX)/share/man/man8/ahcpd.8
.PHONY: uninstall
@@ -49,7 +49,7 @@ uninstall:
-rm -f $(TARGET)$(PREFIX)/bin/ahcpd
-rm -f $(TARGET)$(PREFIX)/bin/ahcp-config.sh
-rm -f $(TARGET)$(PREFIX)/bin/ahcp-dummy-config.sh
- -rm -f $(TARGET)$(PREFIX)/man/man8/ahcpd.8
+ -rm -f $(TARGET)$(PREFIX)/share/man/man8/ahcpd.8
.PHONY: clean
|
require "formula"
require "language/go"
# Homebrew formula for aptly 0.9.1, a Debian repository management tool
# written in Go. Uses legacy sha1 checksums and pre-Go-modules vendoring
# (go_resource blocks, each pinned to an exact revision).
class Aptly < Formula
  homepage "https://www.aptly.info/"
  url "https://github.com/smira/aptly/archive/v0.9.1.tar.gz"
  sha1 "d38a20f04ba70c67a86a3e04b2cd2641674371d2"
  head "https://github.com/smira/aptly.git"

  bottle do
    cellar :any
    sha1 "0c8c7a948f123d1a40bc2259d8445021094887b0" => :yosemite
    sha1 "e9fbdfb93bd116385478176835ca5b848b8c24d2" => :mavericks
    sha1 "73ee380d7e60ce73dfd37c91fcbdafea446f8910" => :mountain_lion
  end

  # Mercurial is needed to fetch the code.google.com resources below.
  depends_on :hg => :build
  depends_on "go" => :build

  # Vendored Go dependencies, staged into GOPATH at install time.
  go_resource "github.com/mattn/gom" do
    url "https://github.com/mattn/gom.git", :revision => "2ed6c170e43a3fea036789a1e60a25c0a3bde149"
  end
  go_resource "code.google.com/p/go-uuid" do
    url "https://code.google.com/p/go-uuid/", :revision => "5fac954758f5", :using => :hg
  end
  go_resource "code.google.com/p/go.crypto" do
    url "https://code.google.com/p/go.crypto/", :revision => "7aa593ce8cea", :using => :hg
  end
  go_resource "code.google.com/p/gographviz" do
    url "https://code.google.com/p/gographviz/", :revision => "454bc64fdfa2", :using => :git
  end
  go_resource "code.google.com/p/mxk" do
    url "https://code.google.com/p/mxk/", :revision => "5ff2502e2556", :using => :hg
  end
  go_resource "code.google.com/p/snappy-go" do
    url "https://code.google.com/p/snappy-go/", :revision => "12e4b4183793", :using => :hg
  end
  go_resource "github.com/AlekSi/pointer" do
    url "https://github.com/AlekSi/pointer.git", :revision => "5f6d527dae3d678b46fbb20331ddf44e2b841943"
  end
  go_resource "github.com/cheggaaa/pb" do
    url "https://github.com/cheggaaa/pb.git", :revision => "2c1b74620cc58a81ac152ee2d322e28c806d81ed"
  end
  go_resource "github.com/gin-gonic/gin" do
    url "https://github.com/gin-gonic/gin.git", :revision => "b1758d3bfa09e61ddbc1c9a627e936eec6a170de"
  end
  go_resource "github.com/jlaffaye/ftp" do
    url "https://github.com/jlaffaye/ftp.git", :revision => "fec71e62e457557fbe85cefc847a048d57815d76"
  end
  go_resource "github.com/julienschmidt/httprouter" do
    url "https://github.com/julienschmidt/httprouter.git", :revision => "46807412fe50aaceb73bb57061c2230fd26a1640"
  end
  go_resource "github.com/mattn/go-shellwords" do
    url "https://github.com/mattn/go-shellwords.git", :revision => "c7ca6f94add751566a61cf2199e1de78d4c3eee4"
  end
  go_resource "github.com/mitchellh/goamz" do
    url "https://github.com/mitchellh/goamz.git", :revision => "e7664b32019f31fd1bdf33f9e85f28722f700405"
  end
  go_resource "github.com/mkrautz/goar" do
    url "https://github.com/mkrautz/goar.git", :revision => "36eb5f3452b1283a211fa35bc00c646fd0db5c4b"
  end
  go_resource "github.com/ncw/swift" do
    url "https://github.com/ncw/swift.git", :revision => "384ef27c70645e285f8bb9d02276bf654d06027e"
  end
  go_resource "github.com/smira/commander" do
    url "https://github.com/smira/commander.git", :revision => "f408b00e68d5d6e21b9f18bd310978dafc604e47"
  end
  go_resource "github.com/smira/flag" do
    url "https://github.com/smira/flag.git", :revision => "357ed3e599ffcbd4aeaa828e1d10da2df3ea5107"
  end
  go_resource "github.com/smira/go-ftp-protocol" do
    url "https://github.com/smira/go-ftp-protocol.git", :revision => "066b75c2b70dca7ae10b1b88b47534a3c31ccfaa"
  end
  go_resource "github.com/syndtr/goleveldb" do
    url "https://github.com/syndtr/goleveldb.git", :revision => "97e257099d2ab9578151ba85e2641e2cd14d3ca8"
  end
  go_resource "github.com/syndtr/gosnappy" do
    url "https://github.com/syndtr/gosnappy.git", :revision => "ce8acff4829e0c2458a67ead32390ac0a381c862"
  end
  go_resource "github.com/ugorji/go" do
    url "https://github.com/ugorji/go.git", :revision => "71c2886f5a673a35f909803f38ece5810165097b"
  end
  go_resource "github.com/vaughan0/go-ini" do
    url "https://github.com/vaughan0/go-ini.git", :revision => "a98ad7ee00ec53921f08832bc06ecf7fd600e6a1"
  end
  go_resource "github.com/wsxiaoys/terminal" do
    url "https://github.com/wsxiaoys/terminal.git", :revision => "5668e431776a7957528361f90ce828266c69ed08"
  end
  go_resource "github.com/daviddengcn/go-colortext" do
    url "https://github.com/daviddengcn/go-colortext.git", :revision => "b5c0891944c2f150ccc9d02aecf51b76c14c2948"
  end
  go_resource "golang.org/x/crypto" do
    url "https://go.googlesource.com/crypto.git", :revision => "a7ead6ddf06233883deca151dffaef2effbf498f"
  end

  def install
    # Lay out a GOPATH workspace and expose the aptly source at its
    # canonical import path via a symlink into the build directory.
    mkdir_p "#{buildpath}/src/github.com/smira/"
    ln_s buildpath, "#{buildpath}/src/github.com/smira/aptly"
    ENV["GOPATH"] = buildpath
    ENV.append_path "PATH", "#{ENV["GOPATH"]}/bin"
    # Stage every go_resource declared above into the workspace.
    Language::Go.stage_deps resources, buildpath/"src"
    # Build the gom dependency manager first, then use it to build aptly.
    cd "#{buildpath}/src/github.com/mattn/gom" do
      system "go", "install"
    end
    system "./bin/gom", "build", "-o", "bin/aptly"
    bin.install "bin/aptly"
  end

  test do
    assert shell_output("aptly version").include?("aptly version:")
    # An empty JSON config is enough for read-only commands.
    (testpath/".aptly.conf").write("{}")
    result = shell_output("aptly -config='#{testpath}/.aptly.conf' mirror list")
    assert result.include? "No mirrors found, create one with"
  end
end
aptly: update 0.9.1 bottle.
require "formula"
require "language/go"
# Homebrew formula for aptly 0.9.1, a Debian repository management tool
# written in Go. Source checksum is legacy sha1; bottles use sha256.
# Dependencies are vendored pre-Go-modules style via go_resource blocks.
class Aptly < Formula
  homepage "https://www.aptly.info/"
  url "https://github.com/smira/aptly/archive/v0.9.1.tar.gz"
  sha1 "d38a20f04ba70c67a86a3e04b2cd2641674371d2"
  head "https://github.com/smira/aptly.git"

  bottle do
    cellar :any
    sha256 "ae41cab3740f582c5be6486c220f92387371f94b6bf014c4915f6cb5f554893d" => :yosemite
    sha256 "b6032c35e8793255acd7c38a16601e7757670d5ed1ba150829f1aeb59a3dc562" => :mavericks
    sha256 "c4fa5584fd59586544f7741e8fd1a37a8eea4e3af347fdd77a7959655f2cc569" => :mountain_lion
  end

  # Mercurial is needed to fetch the code.google.com resources below.
  depends_on :hg => :build
  depends_on "go" => :build

  # Vendored Go dependencies, staged into GOPATH at install time.
  go_resource "github.com/mattn/gom" do
    url "https://github.com/mattn/gom.git", :revision => "2ed6c170e43a3fea036789a1e60a25c0a3bde149"
  end
  go_resource "code.google.com/p/go-uuid" do
    url "https://code.google.com/p/go-uuid/", :revision => "5fac954758f5", :using => :hg
  end
  go_resource "code.google.com/p/go.crypto" do
    url "https://code.google.com/p/go.crypto/", :revision => "7aa593ce8cea", :using => :hg
  end
  go_resource "code.google.com/p/gographviz" do
    url "https://code.google.com/p/gographviz/", :revision => "454bc64fdfa2", :using => :git
  end
  go_resource "code.google.com/p/mxk" do
    url "https://code.google.com/p/mxk/", :revision => "5ff2502e2556", :using => :hg
  end
  go_resource "code.google.com/p/snappy-go" do
    url "https://code.google.com/p/snappy-go/", :revision => "12e4b4183793", :using => :hg
  end
  go_resource "github.com/AlekSi/pointer" do
    url "https://github.com/AlekSi/pointer.git", :revision => "5f6d527dae3d678b46fbb20331ddf44e2b841943"
  end
  go_resource "github.com/cheggaaa/pb" do
    url "https://github.com/cheggaaa/pb.git", :revision => "2c1b74620cc58a81ac152ee2d322e28c806d81ed"
  end
  go_resource "github.com/gin-gonic/gin" do
    url "https://github.com/gin-gonic/gin.git", :revision => "b1758d3bfa09e61ddbc1c9a627e936eec6a170de"
  end
  go_resource "github.com/jlaffaye/ftp" do
    url "https://github.com/jlaffaye/ftp.git", :revision => "fec71e62e457557fbe85cefc847a048d57815d76"
  end
  go_resource "github.com/julienschmidt/httprouter" do
    url "https://github.com/julienschmidt/httprouter.git", :revision => "46807412fe50aaceb73bb57061c2230fd26a1640"
  end
  go_resource "github.com/mattn/go-shellwords" do
    url "https://github.com/mattn/go-shellwords.git", :revision => "c7ca6f94add751566a61cf2199e1de78d4c3eee4"
  end
  go_resource "github.com/mitchellh/goamz" do
    url "https://github.com/mitchellh/goamz.git", :revision => "e7664b32019f31fd1bdf33f9e85f28722f700405"
  end
  go_resource "github.com/mkrautz/goar" do
    url "https://github.com/mkrautz/goar.git", :revision => "36eb5f3452b1283a211fa35bc00c646fd0db5c4b"
  end
  go_resource "github.com/ncw/swift" do
    url "https://github.com/ncw/swift.git", :revision => "384ef27c70645e285f8bb9d02276bf654d06027e"
  end
  go_resource "github.com/smira/commander" do
    url "https://github.com/smira/commander.git", :revision => "f408b00e68d5d6e21b9f18bd310978dafc604e47"
  end
  go_resource "github.com/smira/flag" do
    url "https://github.com/smira/flag.git", :revision => "357ed3e599ffcbd4aeaa828e1d10da2df3ea5107"
  end
  go_resource "github.com/smira/go-ftp-protocol" do
    url "https://github.com/smira/go-ftp-protocol.git", :revision => "066b75c2b70dca7ae10b1b88b47534a3c31ccfaa"
  end
  go_resource "github.com/syndtr/goleveldb" do
    url "https://github.com/syndtr/goleveldb.git", :revision => "97e257099d2ab9578151ba85e2641e2cd14d3ca8"
  end
  go_resource "github.com/syndtr/gosnappy" do
    url "https://github.com/syndtr/gosnappy.git", :revision => "ce8acff4829e0c2458a67ead32390ac0a381c862"
  end
  go_resource "github.com/ugorji/go" do
    url "https://github.com/ugorji/go.git", :revision => "71c2886f5a673a35f909803f38ece5810165097b"
  end
  go_resource "github.com/vaughan0/go-ini" do
    url "https://github.com/vaughan0/go-ini.git", :revision => "a98ad7ee00ec53921f08832bc06ecf7fd600e6a1"
  end
  go_resource "github.com/wsxiaoys/terminal" do
    url "https://github.com/wsxiaoys/terminal.git", :revision => "5668e431776a7957528361f90ce828266c69ed08"
  end
  go_resource "github.com/daviddengcn/go-colortext" do
    url "https://github.com/daviddengcn/go-colortext.git", :revision => "b5c0891944c2f150ccc9d02aecf51b76c14c2948"
  end
  go_resource "golang.org/x/crypto" do
    url "https://go.googlesource.com/crypto.git", :revision => "a7ead6ddf06233883deca151dffaef2effbf498f"
  end

  def install
    # Lay out a GOPATH workspace and expose the aptly source at its
    # canonical import path via a symlink into the build directory.
    mkdir_p "#{buildpath}/src/github.com/smira/"
    ln_s buildpath, "#{buildpath}/src/github.com/smira/aptly"
    ENV["GOPATH"] = buildpath
    ENV.append_path "PATH", "#{ENV["GOPATH"]}/bin"
    # Stage every go_resource declared above into the workspace.
    Language::Go.stage_deps resources, buildpath/"src"
    # Build the gom dependency manager first, then use it to build aptly.
    cd "#{buildpath}/src/github.com/mattn/gom" do
      system "go", "install"
    end
    system "./bin/gom", "build", "-o", "bin/aptly"
    bin.install "bin/aptly"
  end

  test do
    assert shell_output("aptly version").include?("aptly version:")
    # An empty JSON config is enough for read-only commands.
    (testpath/".aptly.conf").write("{}")
    result = shell_output("aptly -config='#{testpath}/.aptly.conf' mirror list")
    assert result.include? "No mirrors found, create one with"
  end
end
|
# Homebrew formula for aria2, a multi-protocol download utility.
class Aria2 < Formula
  desc "Download with resuming and segmented downloading"
  homepage "https://aria2.github.io/"
  url "https://github.com/aria2/aria2/releases/download/release-1.35.0/aria2-1.35.0.tar.xz"
  sha256 "1e2b7fd08d6af228856e51c07173cfcf987528f1ac97e04c5af4a47642617dfd"

  bottle do
    cellar :any
    sha256 "9cc5e04be8b0a58d1f2b60b8abfc636168edbf23e7018003c40f1dd6952aab0c" => :catalina
    sha256 "761836ac608eb0a59d4a6f6065860c0e809ce454692e0937d9d0d89ad47f3ce4" => :mojave
    sha256 "70cc7566a23c283015368f92dfeaa0d119e53cfc7c1b2276a73ff9f6167b529d" => :high_sierra
  end

  depends_on "pkg-config" => :build
  depends_on "libssh2"

  # Prefer the OS-provided libxml2 on macOS over a brewed copy.
  uses_from_macos "libxml2"

  def install
    # aria2 requires a C++11-capable compiler.
    ENV.cxx11
    # TLS comes from Apple's Security framework (--with-appletls);
    # all other crypto backends are explicitly disabled.
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
      --with-appletls
      --with-libssh2
      --without-openssl
      --without-gnutls
      --without-libgmp
      --without-libnettle
      --without-libgcrypt
    ]
    system "./configure", *args
    system "make", "install"
    bash_completion.install "doc/bash_completion/aria2c"
  end

  test do
    # Fetch a page; the test asserts it was saved as index.html in testpath.
    system "#{bin}/aria2c", "https://brew.sh/"
    assert_predicate testpath/"index.html", :exist?, "Failed to create index.html!"
  end
end
aria2: add linux-only dependencies
# Homebrew formula for aria2, a multi-protocol download utility.
class Aria2 < Formula
  desc "Download with resuming and segmented downloading"
  homepage "https://aria2.github.io/"
  url "https://github.com/aria2/aria2/releases/download/release-1.35.0/aria2-1.35.0.tar.xz"
  sha256 "1e2b7fd08d6af228856e51c07173cfcf987528f1ac97e04c5af4a47642617dfd"

  bottle do
    cellar :any
    sha256 "9cc5e04be8b0a58d1f2b60b8abfc636168edbf23e7018003c40f1dd6952aab0c" => :catalina
    sha256 "761836ac608eb0a59d4a6f6065860c0e809ce454692e0937d9d0d89ad47f3ce4" => :mojave
    sha256 "70cc7566a23c283015368f92dfeaa0d119e53cfc7c1b2276a73ff9f6167b529d" => :high_sierra
  end

  depends_on "pkg-config" => :build
  depends_on "libssh2"

  # Prefer the OS-provided copies on macOS; brewed equivalents on Linux.
  uses_from_macos "libxml2"
  uses_from_macos "zlib"

  # Linux has no AppleTLS, so OpenSSL supplies TLS there.
  on_linux do
    depends_on "openssl@1.1"
  end

  def install
    # aria2 requires a C++11-capable compiler.
    ENV.cxx11
    # TLS comes from Apple's Security framework (--with-appletls);
    # all other crypto backends are explicitly disabled.
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
      --with-appletls
      --with-libssh2
      --without-openssl
      --without-gnutls
      --without-libgmp
      --without-libnettle
      --without-libgcrypt
    ]
    system "./configure", *args
    system "make", "install"
    bash_completion.install "doc/bash_completion/aria2c"
  end

  test do
    # Fetch a page; the test asserts it was saved as index.html in testpath.
    system "#{bin}/aria2c", "https://brew.sh/"
    assert_predicate testpath/"index.html", :exist?, "Failed to create index.html!"
  end
end
|
# Homebrew formula for the Astronomer `astro` CLI, a Go command-line tool.
class Astro < Formula
  desc "To build and run Airflow DAGs locally and interact with the Astronomer API"
  homepage "https://www.astronomer.io/"
  url "https://github.com/astronomer/astro-cli/archive/refs/tags/v1.5.0.tar.gz"
  sha256 "904be5c6f2a0d50fdec8b750b5cea81efb9a8b39efa38376427baa2f32008cc6"
  license "Apache-2.0"

  # Track the latest GitHub release rather than the newest tag.
  livecheck do
    url :stable
    strategy :github_latest
  end

  bottle do
    rebuild 1
    sha256 cellar: :any_skip_relocation, arm64_monterey: "b5c2dc0d931436a7b65b437c4a2168af617c52414d179b9b1754501b56108b3e"
    sha256 cellar: :any_skip_relocation, arm64_big_sur:  "b5c2dc0d931436a7b65b437c4a2168af617c52414d179b9b1754501b56108b3e"
    sha256 cellar: :any_skip_relocation, monterey:       "b9aacce854a9456cd71b12ea59ae21bf4497b282fe0d562a0b1beb45e8cf0549"
    sha256 cellar: :any_skip_relocation, big_sur:        "b9aacce854a9456cd71b12ea59ae21bf4497b282fe0d562a0b1beb45e8cf0549"
    sha256 cellar: :any_skip_relocation, catalina:       "b9aacce854a9456cd71b12ea59ae21bf4497b282fe0d562a0b1beb45e8cf0549"
    sha256 cellar: :any_skip_relocation, x86_64_linux:   "15b1e016fb19e14ad147faf473ddbf12426289f5a4f846d18f567e6c80e54214"
  end

  depends_on "go" => :build

  def install
    # Build a CGO-free binary and bake the formula version into it.
    ENV["CGO_ENABLED"] = "0"
    system "go", "build", *std_go_args(ldflags: "-s -w -X github.com/astronomer/astro-cli/version.CurrVersion=#{version}")
    generate_completions_from_executable(bin/"astro", "completion")
  end

  test do
    version_output = shell_output("#{bin}/astro version")
    assert_match("Astro CLI Version: #{version}", version_output)
    run_output = shell_output("echo 'y' | #{bin}/astro dev init")
    # Fix: the original pattern /^Initializing Astro project*/ applied `*`
    # to the literal "t" (zero or more "t"s), not to trailing text; match
    # the intended literal prefix instead.
    assert_match(/^Initializing Astro project/, run_output)
    assert_predicate testpath/".astro/config.yaml", :exist?
    run_output = shell_output("echo 'test@invalid.io' | #{bin}/astro login astronomer.io", 1)
    # Same stray-quantifier fix (was /^Welcome to the Astro CLI*/).
    assert_match(/^Welcome to the Astro CLI/, run_output)
  end
end
astro 1.5.1
Closes #111527.
Signed-off-by: Sean Molenaar <2b250e3fea88cfef248b497ad5fc17f7dc435154@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for the Astronomer `astro` CLI, a Go command-line tool.
class Astro < Formula
  desc "To build and run Airflow DAGs locally and interact with the Astronomer API"
  homepage "https://www.astronomer.io/"
  url "https://github.com/astronomer/astro-cli/archive/refs/tags/v1.5.1.tar.gz"
  sha256 "594c8edd0e22fc51a9f0961b00b3c7c15aeb0054b8774f55914440cff7e89d75"
  license "Apache-2.0"

  # Track the latest GitHub release rather than the newest tag.
  livecheck do
    url :stable
    strategy :github_latest
  end

  bottle do
    rebuild 1
    sha256 cellar: :any_skip_relocation, arm64_monterey: "b5c2dc0d931436a7b65b437c4a2168af617c52414d179b9b1754501b56108b3e"
    sha256 cellar: :any_skip_relocation, arm64_big_sur:  "b5c2dc0d931436a7b65b437c4a2168af617c52414d179b9b1754501b56108b3e"
    sha256 cellar: :any_skip_relocation, monterey:       "b9aacce854a9456cd71b12ea59ae21bf4497b282fe0d562a0b1beb45e8cf0549"
    sha256 cellar: :any_skip_relocation, big_sur:        "b9aacce854a9456cd71b12ea59ae21bf4497b282fe0d562a0b1beb45e8cf0549"
    sha256 cellar: :any_skip_relocation, catalina:       "b9aacce854a9456cd71b12ea59ae21bf4497b282fe0d562a0b1beb45e8cf0549"
    sha256 cellar: :any_skip_relocation, x86_64_linux:   "15b1e016fb19e14ad147faf473ddbf12426289f5a4f846d18f567e6c80e54214"
  end

  depends_on "go" => :build

  def install
    # Build a CGO-free binary and bake the formula version into it.
    ENV["CGO_ENABLED"] = "0"
    system "go", "build", *std_go_args(ldflags: "-s -w -X github.com/astronomer/astro-cli/version.CurrVersion=#{version}")
    generate_completions_from_executable(bin/"astro", "completion")
  end

  test do
    version_output = shell_output("#{bin}/astro version")
    assert_match("Astro CLI Version: #{version}", version_output)
    run_output = shell_output("echo 'y' | #{bin}/astro dev init")
    # Fix: the original pattern /^Initializing Astro project*/ applied `*`
    # to the literal "t" (zero or more "t"s), not to trailing text; match
    # the intended literal prefix instead.
    assert_match(/^Initializing Astro project/, run_output)
    assert_predicate testpath/".astro/config.yaml", :exist?
    run_output = shell_output("echo 'test@invalid.io' | #{bin}/astro login astronomer.io", 1)
    # Same stray-quantifier fix (was /^Welcome to the Astro CLI*/).
    assert_match(/^Welcome to the Astro CLI/, run_output)
  end
end
|
# Rails 2.x route map used as a test fixture: installs the default
# catch-all routes plus a couple of routes that exercise specific
# routing problems.
ActionController::Routing::Routes.draw do |map|
  # Default routes are set for everything,
  # so these routes are testing for specific problems
  map.with_options :controller => 'other', :action => 'nothing' do |r|
    r.connect 'blah'
  end

  # The priority is based upon order of creation: first created -> highest priority.

  # Sample of regular route:
  #   map.connect 'products/:id', :controller => 'catalog', :action => 'view'
  # Keep in mind you can assign values other than :controller and :action

  # Sample of named route:
  #   map.purchase 'products/:id/purchase', :controller => 'catalog', :action => 'purchase'
  # This route can be invoked with purchase_url(:id => product.id)

  # Sample resource route (maps HTTP verbs to controller actions automatically):
  #   map.resources :products

  # Sample resource route with options:
  #   map.resources :products, :member => { :short => :get, :toggle => :post }, :collection => { :sold => :get }

  # Sample resource route with sub-resources:
  #   map.resources :products, :has_many => [ :comments, :sales ], :has_one => :seller

  # Sample resource route with more complex sub-resources
  #   map.resources :products do |products|
  #     products.resources :comments
  #     products.resources :sales, :collection => { :recent => :get }
  #   end

  # Sample resource route within a namespace:
  #   map.namespace :admin do |admin|
  #     # Directs /admin/products/* to Admin::ProductsController (app/controllers/admin/products_controller.rb)
  #     admin.resources :products
  #   end

  # You can have the root of your site routed with map.root -- just remember to delete public/index.html.
  # map.root :controller => "welcome"

  # See how all your routes lay out with "rake routes"

  # Install the default routes as the lowest priority.
  # Note: These default routes make all actions in every controller accessible via GET requests. You should
  # consider removing or commenting them out if you're using named routes and resources.
  map.connect ':controller/:action/:id'
  map.connect ':controller/:action/:id.:format'
end
Add test for dynamic option values in Rails 2 routes
# Rails 2.x route map used as a test fixture: installs the default
# catch-all routes plus routes that exercise specific routing problems,
# including a dynamically-computed option value.
ActionController::Routing::Routes.draw do |map|
  # Default routes are set for everything,
  # so these routes are testing for specific problems
  map.with_options :controller => 'other', :action => 'nothing' do |r|
    r.connect 'blah'
  end

  # NOTE(review): `dynamic` is not defined in this file, so evaluating this
  # line raises NameError. Presumably the surrounding test supplies it (the
  # fixture exists to test dynamic option values in routes) — confirm
  # against the test that loads this file.
  map.connect 'something', :controller => "something#{dynamic}"

  # The priority is based upon order of creation: first created -> highest priority.

  # Sample of regular route:
  #   map.connect 'products/:id', :controller => 'catalog', :action => 'view'
  # Keep in mind you can assign values other than :controller and :action

  # Sample of named route:
  #   map.purchase 'products/:id/purchase', :controller => 'catalog', :action => 'purchase'
  # This route can be invoked with purchase_url(:id => product.id)

  # Sample resource route (maps HTTP verbs to controller actions automatically):
  #   map.resources :products

  # Sample resource route with options:
  #   map.resources :products, :member => { :short => :get, :toggle => :post }, :collection => { :sold => :get }

  # Sample resource route with sub-resources:
  #   map.resources :products, :has_many => [ :comments, :sales ], :has_one => :seller

  # Sample resource route with more complex sub-resources
  #   map.resources :products do |products|
  #     products.resources :comments
  #     products.resources :sales, :collection => { :recent => :get }
  #   end

  # Sample resource route within a namespace:
  #   map.namespace :admin do |admin|
  #     # Directs /admin/products/* to Admin::ProductsController (app/controllers/admin/products_controller.rb)
  #     admin.resources :products
  #   end

  # You can have the root of your site routed with map.root -- just remember to delete public/index.html.
  # map.root :controller => "welcome"

  # See how all your routes lay out with "rake routes"

  # Install the default routes as the lowest priority.
  # Note: These default routes make all actions in every controller accessible via GET requests. You should
  # consider removing or commenting them out if you're using named routes and resources.
  map.connect ':controller/:action/:id'
  map.connect ':controller/:action/:id.:format'
end
|
# Homebrew formula for Avahi, an mDNS/DNS-SD (zeroconf) stack.
# Marked Linux-only via `depends_on :linux` below.
class Avahi < Formula
  desc "Service Discovery for Linux using mDNS/DNS-SD"
  homepage "https://avahi.org"
  url "https://github.com/lathiat/avahi/archive/v0.8.tar.gz"
  sha256 "c15e750ef7c6df595fb5f2ce10cac0fee2353649600e6919ad08ae8871e4945f"
  license "LGPL-2.1-or-later"

  # Building from a repository tarball, so the full autotools chain is required.
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "intltool" => :build
  depends_on "libtool" => :build
  depends_on "m4" => :build
  depends_on "perl" => :build
  depends_on "pkg-config" => [:build, :test]
  depends_on "xmltoman" => :build
  depends_on "dbus"
  depends_on "glib"
  depends_on "libdaemon"
  depends_on :linux

  def install
    # bootstrap.sh regenerates and runs configure; all optional language
    # bindings and GUI front-ends are disabled to keep the build minimal.
    system "./bootstrap.sh", "--disable-debug",
           "--disable-dependency-tracking",
           "--disable-silent-rules",
           "--prefix=#{prefix}",
           "--sysconfdir=#{prefix}/etc",
           "--localstatedir=#{prefix}/var",
           "--disable-mono",
           "--disable-monodoc",
           "--disable-python",
           "--disable-qt4",
           "--disable-qt5",
           "--disable-gtk",
           "--disable-gtk3",
           "--disable-libevent",
           "--with-distro=none",
           "--with-systemdsystemunitdir=no"
    system "make", "install"
  end

  test do
    # Minimal GLib-integrated Avahi client, compiled below with the flags
    # reported by pkg-config.
    (testpath/"test.c").write <<~EOS
      #include <glib.h>
      #include <avahi-client/client.h>
      #include <avahi-common/error.h>
      #include <avahi-glib/glib-watch.h>
      #include <avahi-glib/glib-malloc.h>
      static void avahi_client_callback (AVAHI_GCC_UNUSED AvahiClient *client, AvahiClientState state, void *userdata)
      {
      GMainLoop *loop = userdata;
      g_message ("Avahi Client State Change: %d", state);
      if (state == AVAHI_CLIENT_FAILURE)
      {
      g_message ("Disconnected from the Avahi Daemon: %s", avahi_strerror(avahi_client_errno(client)));
      g_main_loop_quit (loop);
      }
      }
      int main (AVAHI_GCC_UNUSED int argc, AVAHI_GCC_UNUSED char *argv[])
      {
      GMainLoop *loop = NULL;
      const AvahiPoll *poll_api;
      AvahiGLibPoll *glib_poll;
      AvahiClient *client;
      const char *version;
      int error;
      avahi_set_allocator (avahi_glib_allocator ());
      loop = g_main_loop_new (NULL, FALSE);
      glib_poll = avahi_glib_poll_new (NULL, G_PRIORITY_DEFAULT);
      poll_api = avahi_glib_poll_get (glib_poll);
      client = avahi_client_new (poll_api, 0, avahi_client_callback, loop, &error);
      if (client == NULL)
      {
      g_warning ("Error initializing Avahi: %s", avahi_strerror (error));
      }
      g_main_loop_unref (loop);
      avahi_client_free (client);
      avahi_glib_poll_free (glib_poll);
      return 0;
      }
    EOS
    pkg_config_flags = shell_output("pkg-config --cflags --libs avahi-client avahi-core avahi-glib").chomp.split
    system ENV.cc, "test.c", *pkg_config_flags, "-o", "test"
    # The run is expected to exit with status 134 (SIGABRT) — presumably
    # because no avahi-daemon/D-Bus is reachable in the sandbox; the
    # combined output must still mention "Avahi". TODO(review): confirm.
    assert_match "Avahi", shell_output("#{testpath}/test 2>&1", 134)
  end
end
avahi: add 0.8 bottle.
# Homebrew formula for Avahi, an mDNS/DNS-SD (zeroconf) stack.
# Marked Linux-only via `depends_on :linux` below; the bottle is
# accordingly built only for x86_64_linux.
class Avahi < Formula
  desc "Service Discovery for Linux using mDNS/DNS-SD"
  homepage "https://avahi.org"
  url "https://github.com/lathiat/avahi/archive/v0.8.tar.gz"
  sha256 "c15e750ef7c6df595fb5f2ce10cac0fee2353649600e6919ad08ae8871e4945f"
  license "LGPL-2.1-or-later"

  bottle do
    rebuild 1
    sha256 x86_64_linux: "bcd10354c4c18f283268b681b80f33262163d4d12ff4ccd55361c19b1b2005cf"
  end

  # Building from a repository tarball, so the full autotools chain is required.
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "intltool" => :build
  depends_on "libtool" => :build
  depends_on "m4" => :build
  depends_on "perl" => :build
  depends_on "pkg-config" => [:build, :test]
  depends_on "xmltoman" => :build
  depends_on "dbus"
  depends_on "glib"
  depends_on "libdaemon"
  depends_on :linux

  def install
    # bootstrap.sh regenerates and runs configure; all optional language
    # bindings and GUI front-ends are disabled to keep the build minimal.
    system "./bootstrap.sh", "--disable-debug",
           "--disable-dependency-tracking",
           "--disable-silent-rules",
           "--prefix=#{prefix}",
           "--sysconfdir=#{prefix}/etc",
           "--localstatedir=#{prefix}/var",
           "--disable-mono",
           "--disable-monodoc",
           "--disable-python",
           "--disable-qt4",
           "--disable-qt5",
           "--disable-gtk",
           "--disable-gtk3",
           "--disable-libevent",
           "--with-distro=none",
           "--with-systemdsystemunitdir=no"
    system "make", "install"
  end

  test do
    # Minimal GLib-integrated Avahi client, compiled below with the flags
    # reported by pkg-config.
    (testpath/"test.c").write <<~EOS
      #include <glib.h>
      #include <avahi-client/client.h>
      #include <avahi-common/error.h>
      #include <avahi-glib/glib-watch.h>
      #include <avahi-glib/glib-malloc.h>
      static void avahi_client_callback (AVAHI_GCC_UNUSED AvahiClient *client, AvahiClientState state, void *userdata)
      {
      GMainLoop *loop = userdata;
      g_message ("Avahi Client State Change: %d", state);
      if (state == AVAHI_CLIENT_FAILURE)
      {
      g_message ("Disconnected from the Avahi Daemon: %s", avahi_strerror(avahi_client_errno(client)));
      g_main_loop_quit (loop);
      }
      }
      int main (AVAHI_GCC_UNUSED int argc, AVAHI_GCC_UNUSED char *argv[])
      {
      GMainLoop *loop = NULL;
      const AvahiPoll *poll_api;
      AvahiGLibPoll *glib_poll;
      AvahiClient *client;
      const char *version;
      int error;
      avahi_set_allocator (avahi_glib_allocator ());
      loop = g_main_loop_new (NULL, FALSE);
      glib_poll = avahi_glib_poll_new (NULL, G_PRIORITY_DEFAULT);
      poll_api = avahi_glib_poll_get (glib_poll);
      client = avahi_client_new (poll_api, 0, avahi_client_callback, loop, &error);
      if (client == NULL)
      {
      g_warning ("Error initializing Avahi: %s", avahi_strerror (error));
      }
      g_main_loop_unref (loop);
      avahi_client_free (client);
      avahi_glib_poll_free (glib_poll);
      return 0;
      }
    EOS
    pkg_config_flags = shell_output("pkg-config --cflags --libs avahi-client avahi-core avahi-glib").chomp.split
    system ENV.cc, "test.c", *pkg_config_flags, "-o", "test"
    # The run is expected to exit with status 134 (SIGABRT) — presumably
    # because no avahi-daemon/D-Bus is reachable in the sandbox; the
    # combined output must still mention "Avahi". TODO(review): confirm.
    assert_match "Avahi", shell_output("#{testpath}/test 2>&1", 134)
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.